Compare commits
2288 Commits
erikj/rest
...
erikj/file
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9854f4c7ff | ||
|
|
518b3a3f89 | ||
|
|
10f4856b0c | ||
|
|
dfde67a6fe | ||
|
|
10c843fcfb | ||
|
|
58930da52b | ||
|
|
0870588c20 | ||
|
|
f90cf150e2 | ||
|
|
067596d341 | ||
|
|
70d650be2b | ||
|
|
b92e7955be | ||
|
|
c98e1479bd | ||
|
|
67f2c901ea | ||
|
|
eef7778af9 | ||
|
|
a17e7caeb7 | ||
|
|
f0c06ac65c | ||
|
|
76b18df3d9 | ||
|
|
0da24cac8b | ||
|
|
2e3c8acc68 | ||
|
|
8d9a884cee | ||
|
|
896bc6cd46 | ||
|
|
be3548f7e1 | ||
|
|
4adf93e0f7 | ||
|
|
651faee698 | ||
|
|
caf33b2d9b | ||
|
|
8f8798bc0d | ||
|
|
7335f0adda | ||
|
|
ef535178ff | ||
|
|
04dee11e97 | ||
|
|
dd2ccee27d | ||
|
|
b6b0132ac7 | ||
|
|
e34cb5e7dc | ||
|
|
252ee2d979 | ||
|
|
14362bf359 | ||
|
|
1ee2584307 | ||
|
|
507b8bb091 | ||
|
|
d44d11d864 | ||
|
|
2d21d43c34 | ||
|
|
0fb76c71ac | ||
|
|
8bdaf5f7af | ||
|
|
a67bf0b074 | ||
|
|
f18d7546c6 | ||
|
|
3de8168343 | ||
|
|
bb069079bb | ||
|
|
2e5a31f197 | ||
|
|
fc8007dbec | ||
|
|
1238203bc4 | ||
|
|
41f072fd0e | ||
|
|
5a6ef20ef6 | ||
|
|
be8be535f7 | ||
|
|
ab71589c0b | ||
|
|
f328d95cef | ||
|
|
aac546c978 | ||
|
|
f52cb4cd78 | ||
|
|
6783534a0f | ||
|
|
a70688445d | ||
|
|
314b146b2e | ||
|
|
7846ac3125 | ||
|
|
56ec5869c9 | ||
|
|
1ea358b28b | ||
|
|
db74dcda5b | ||
|
|
551fe80bed | ||
|
|
63bb8f0df9 | ||
|
|
70d820c875 | ||
|
|
3f7652c56f | ||
|
|
535b6bfacc | ||
|
|
f7fe0e5f67 | ||
|
|
2455ad8468 | ||
|
|
0b640aa56b | ||
|
|
aa3a4944d5 | ||
|
|
46b7362304 | ||
|
|
870c45913e | ||
|
|
05f1a4596a | ||
|
|
13517e2914 | ||
|
|
b5fb7458d5 | ||
|
|
f73fdb04a6 | ||
|
|
3a4120e49a | ||
|
|
9fe894402f | ||
|
|
0a32208e5d | ||
|
|
774f3a692c | ||
|
|
5cc7564c5c | ||
|
|
0fe0b0eeb6 | ||
|
|
0126a5d60a | ||
|
|
13e334506c | ||
|
|
6b40e4f52a | ||
|
|
d5fb561709 | ||
|
|
d8ec81cc31 | ||
|
|
bc72d381b2 | ||
|
|
4d362a61ea | ||
|
|
00c281f6a4 | ||
|
|
c8cd41cdd8 | ||
|
|
41e4b2efea | ||
|
|
0c13d45522 | ||
|
|
9f1800fba8 | ||
|
|
8f4a9bbc16 | ||
|
|
9ba2bf1570 | ||
|
|
120c238705 | ||
|
|
1c1f633b13 | ||
|
|
6660f37558 | ||
|
|
20e5b46b20 | ||
|
|
0113ad36ee | ||
|
|
3e41de05cc | ||
|
|
2884712ca7 | ||
|
|
ded01c3bf6 | ||
|
|
8c75040c25 | ||
|
|
f1073ad43d | ||
|
|
a352b68acf | ||
|
|
364d616792 | ||
|
|
bde13833cb | ||
|
|
80a1bc7db5 | ||
|
|
f1f70bf4b5 | ||
|
|
dbb5a39b64 | ||
|
|
885ee861f7 | ||
|
|
486b9a6a2d | ||
|
|
1f31381611 | ||
|
|
ed5f43a55a | ||
|
|
09a17f965c | ||
|
|
1e9026e484 | ||
|
|
a60169ea09 | ||
|
|
78a16d395c | ||
|
|
5436848955 | ||
|
|
0477368e9a | ||
|
|
0ef0655b83 | ||
|
|
a64dbae90b | ||
|
|
d41a1a91d3 | ||
|
|
15bf3e3376 | ||
|
|
d9f7fa2e57 | ||
|
|
b31c49d676 | ||
|
|
255c229f23 | ||
|
|
d12134ce37 | ||
|
|
36e2aade87 | ||
|
|
33546b58aa | ||
|
|
fdc015c6e9 | ||
|
|
e9892b4b26 | ||
|
|
50f69e2ef2 | ||
|
|
41b35412bf | ||
|
|
7dbb473339 | ||
|
|
16a8884233 | ||
|
|
ba0406d10d | ||
|
|
8327d5df70 | ||
|
|
a31befbcd0 | ||
|
|
5ee5b655b2 | ||
|
|
0a43219a27 | ||
|
|
eba4ff1bcb | ||
|
|
2eab219a70 | ||
|
|
95f305c35a | ||
|
|
6e7dc7c7dd | ||
|
|
b7fbc9bd95 | ||
|
|
919a2c74f6 | ||
|
|
81c07a32fd | ||
|
|
b063784b78 | ||
|
|
defa28efa1 | ||
|
|
690029d1a3 | ||
|
|
d88faf92d1 | ||
|
|
958c968d02 | ||
|
|
746b2f5657 | ||
|
|
efeabd3180 | ||
|
|
1fd6eb695d | ||
|
|
128360d4f0 | ||
|
|
17aab5827a | ||
|
|
1a815fb04f | ||
|
|
66503a69c9 | ||
|
|
bab916bccc | ||
|
|
5c73115155 | ||
|
|
e0fda29f94 | ||
|
|
02270b4b3d | ||
|
|
6babdb671b | ||
|
|
0f2165ccf4 | ||
|
|
18f0cc7d99 | ||
|
|
64935d11f7 | ||
|
|
2d1d1025fa | ||
|
|
b01e71e719 | ||
|
|
dded389ac1 | ||
|
|
c54fcd9ee4 | ||
|
|
0b2158719c | ||
|
|
188f8d63e2 | ||
|
|
48e65099b5 | ||
|
|
75331c5fca | ||
|
|
8c966fbd51 | ||
|
|
efe8126290 | ||
|
|
88625db05f | ||
|
|
84379062f9 | ||
|
|
310197bab5 | ||
|
|
b0932b34cb | ||
|
|
4a5bbb1941 | ||
|
|
87f60e7053 | ||
|
|
5ef84da4f1 | ||
|
|
216a05b3e3 | ||
|
|
9f715573aa | ||
|
|
96dc600579 | ||
|
|
377eb480ca | ||
|
|
e4134c5e13 | ||
|
|
778c1fea8b | ||
|
|
7aa778fba9 | ||
|
|
70aee0717c | ||
|
|
3210f4c385 | ||
|
|
040a560a48 | ||
|
|
cffe46408f | ||
|
|
ac9716f154 | ||
|
|
8f79084bd4 | ||
|
|
10ea3f46ba | ||
|
|
f6be734be9 | ||
|
|
05e01f21d7 | ||
|
|
81d226888f | ||
|
|
72c4d482e9 | ||
|
|
fbf608decb | ||
|
|
06d40c8b98 | ||
|
|
5f88549f4a | ||
|
|
ca457f594e | ||
|
|
c11614bcdc | ||
|
|
21961c93c7 | ||
|
|
48340e4f13 | ||
|
|
fcbc282f56 | ||
|
|
51773bcbaf | ||
|
|
da491e75b2 | ||
|
|
0b3c80a234 | ||
|
|
dd6f62ed99 | ||
|
|
c367ba5dd2 | ||
|
|
eef541a291 | ||
|
|
80aade3805 | ||
|
|
ab116bdb0c | ||
|
|
4ce84a1acd | ||
|
|
a7ff5a1770 | ||
|
|
aa6fab0cc2 | ||
|
|
8d740132f4 | ||
|
|
3b096c5f5c | ||
|
|
43b7f371f5 | ||
|
|
abb151f3c9 | ||
|
|
4982b28868 | ||
|
|
d06f2a229e | ||
|
|
58a224a651 | ||
|
|
20eccd84d4 | ||
|
|
722472b48c | ||
|
|
73c7112433 | ||
|
|
b09f348530 | ||
|
|
4c04222fa5 | ||
|
|
ccb56fc24b | ||
|
|
81cf449daa | ||
|
|
e043ede4a2 | ||
|
|
b821c839dc | ||
|
|
597013caa5 | ||
|
|
6a0afa582a | ||
|
|
3ae915b27e | ||
|
|
59f2d73522 | ||
|
|
9c26b390a2 | ||
|
|
f88d747f79 | ||
|
|
065e739d6e | ||
|
|
696d7c5937 | ||
|
|
0eae075723 | ||
|
|
79d1f072f4 | ||
|
|
6bb9aacf9d | ||
|
|
745ddb4dd0 | ||
|
|
7a5a5f2df2 | ||
|
|
1f31cc37f8 | ||
|
|
2675c1e40e | ||
|
|
c71177f285 | ||
|
|
07a5559916 | ||
|
|
56d15a0530 | ||
|
|
812b5de0fe | ||
|
|
80f34d7b57 | ||
|
|
661a540dd1 | ||
|
|
70599ce925 | ||
|
|
fb2193cc63 | ||
|
|
356f13c069 | ||
|
|
02ac463dbf | ||
|
|
c5af1b6b00 | ||
|
|
3a3fb2f6f9 | ||
|
|
4a10510cd5 | ||
|
|
f84b89f0c6 | ||
|
|
a15ad60849 | ||
|
|
07233a1ec8 | ||
|
|
e793866398 | ||
|
|
aaa70e26a2 | ||
|
|
a04a2d043c | ||
|
|
0f06b496d1 | ||
|
|
83b70c9f63 | ||
|
|
e0deeff23e | ||
|
|
991af8b0d6 | ||
|
|
00c487a8db | ||
|
|
c8285564a3 | ||
|
|
1db79d6192 | ||
|
|
d60eed0710 | ||
|
|
195254cae8 | ||
|
|
43db0d9f6a | ||
|
|
8e539f13c0 | ||
|
|
6ecb2ca4ec | ||
|
|
58ee43d020 | ||
|
|
2a449fec4d | ||
|
|
6ca4d3ae9a | ||
|
|
dea9f20f8c | ||
|
|
963e3ed282 | ||
|
|
d240796ded | ||
|
|
c8c5bf950a | ||
|
|
c9ca285d33 | ||
|
|
1d4ee854e2 | ||
|
|
cca0093fa9 | ||
|
|
4efa389299 | ||
|
|
aefd2d1cbc | ||
|
|
10de8c2631 | ||
|
|
014e0799f9 | ||
|
|
c626fc576a | ||
|
|
e5b0bbcd33 | ||
|
|
70ecb415f5 | ||
|
|
e1625d62a8 | ||
|
|
163e48c0e3 | ||
|
|
887c6e6f05 | ||
|
|
42a9ea37e4 | ||
|
|
8b5dbee47e | ||
|
|
85b992f621 | ||
|
|
cc84f7cb8e | ||
|
|
209ba4d024 | ||
|
|
b007ee4606 | ||
|
|
95b86e6dad | ||
|
|
cbf8d146ac | ||
|
|
faad233ea6 | ||
|
|
6900303997 | ||
|
|
1c5ed2a19b | ||
|
|
b08ad0389e | ||
|
|
be2c677386 | ||
|
|
79bea8ab9a | ||
|
|
84f94e4cbb | ||
|
|
d16cc52b5d | ||
|
|
137e6a4557 | ||
|
|
680f1d9387 | ||
|
|
cb8a321bdd | ||
|
|
d437882581 | ||
|
|
88ea5ab2c3 | ||
|
|
989bdc9e56 | ||
|
|
6fe04ffef2 | ||
|
|
c0c79ef444 | ||
|
|
b5605dfecc | ||
|
|
31b5395ab6 | ||
|
|
09804c9862 | ||
|
|
c2da3406fc | ||
|
|
ccffb0965d | ||
|
|
18d68bfee4 | ||
|
|
d4503e25ed | ||
|
|
149fa411e2 | ||
|
|
60ff2e7984 | ||
|
|
332d7e9b97 | ||
|
|
6fb51eaf7b | ||
|
|
e837df6adb | ||
|
|
42368ea8db | ||
|
|
ee660c6b9b | ||
|
|
0cb441fedd | ||
|
|
5adf627551 | ||
|
|
6a30a0bfd3 | ||
|
|
c4c98ce8da | ||
|
|
523d5bcd0b | ||
|
|
43e1e0489c | ||
|
|
1ed33784a6 | ||
|
|
526bf8126f | ||
|
|
425e6b4983 | ||
|
|
b153f5b150 | ||
|
|
3fd8a07fca | ||
|
|
f68eea808a | ||
|
|
53e171f345 | ||
|
|
80cb9becd8 | ||
|
|
816df9f267 | ||
|
|
821306120a | ||
|
|
1a3a2002ff | ||
|
|
e168abbcff | ||
|
|
e501e9ecb2 | ||
|
|
cbd2adc95e | ||
|
|
3b86ecfa79 | ||
|
|
647781ca56 | ||
|
|
c39f305067 | ||
|
|
678c8a7f1e | ||
|
|
c5c5a7403b | ||
|
|
2d98c960ec | ||
|
|
eb79110beb | ||
|
|
dd95eb4cb5 | ||
|
|
60d53f9e95 | ||
|
|
c0112fabc2 | ||
|
|
0e8364c7f5 | ||
|
|
782471b7e1 | ||
|
|
0466454b00 | ||
|
|
077468f6a9 | ||
|
|
b3f29dc1e5 | ||
|
|
f03ddc98ec | ||
|
|
1f71f386f6 | ||
|
|
206eb9fd94 | ||
|
|
7d6e89ed22 | ||
|
|
21018c2c13 | ||
|
|
a8affd606e | ||
|
|
b7381d5338 | ||
|
|
3abab26458 | ||
|
|
99b5a2e560 | ||
|
|
ba5c616ff4 | ||
|
|
0c11c1be88 | ||
|
|
e00e8f2166 | ||
|
|
fd8e921b6e | ||
|
|
024cda9a97 | ||
|
|
c9aff0736c | ||
|
|
40aa6e8349 | ||
|
|
9295fa30a8 | ||
|
|
5e50058473 | ||
|
|
cdda850ce1 | ||
|
|
0e792e7903 | ||
|
|
3547e66bc6 | ||
|
|
6da7f39d95 | ||
|
|
b5e646a18c | ||
|
|
048b3ece36 | ||
|
|
13d37c3c56 | ||
|
|
a458a40337 | ||
|
|
7e23476814 | ||
|
|
260b498ee5 | ||
|
|
1620578b13 | ||
|
|
108434e53d | ||
|
|
1400bb1663 | ||
|
|
a284a32d78 | ||
|
|
458a435114 | ||
|
|
30057b1e15 | ||
|
|
ae1af262f6 | ||
|
|
90afc07f39 | ||
|
|
89b5ef7c4b | ||
|
|
35b6e6d2a8 | ||
|
|
3367e65476 | ||
|
|
0c4ccdcb83 | ||
|
|
f28643cea9 | ||
|
|
5f46be19a7 | ||
|
|
d46b18a00f | ||
|
|
3b1930e8ec | ||
|
|
fe97b81c09 | ||
|
|
997db04648 | ||
|
|
c00b484eff | ||
|
|
94040b0798 | ||
|
|
e581754f09 | ||
|
|
e04b1d6b0a | ||
|
|
5599608887 | ||
|
|
6b45ffd2d1 | ||
|
|
c9eb6dfc1b | ||
|
|
3f84da139c | ||
|
|
def64d6ef3 | ||
|
|
100e2c42f6 | ||
|
|
8715731559 | ||
|
|
34b3af3363 | ||
|
|
7354667bd3 | ||
|
|
08dfa8eee2 | ||
|
|
f904b1c60c | ||
|
|
1f1dee94f6 | ||
|
|
f6ebaf4a32 | ||
|
|
4ea762c1a2 | ||
|
|
012cb5416c | ||
|
|
fcb2c3f0db | ||
|
|
fd85b167ec | ||
|
|
b6e0be701e | ||
|
|
96d9d5d388 | ||
|
|
d13459636f | ||
|
|
1d275dba69 | ||
|
|
56b5e83e36 | ||
|
|
1f590f3e9a | ||
|
|
1b45e6a9bc | ||
|
|
53ca739f1f | ||
|
|
81c2176cba | ||
|
|
573ef3f1c9 | ||
|
|
8940281d1b | ||
|
|
9c272da05f | ||
|
|
a5974f89d6 | ||
|
|
5d8a93a10e | ||
|
|
1f0f5ffa1e | ||
|
|
634efb65f1 | ||
|
|
c64d5fc66c | ||
|
|
3c39fa8902 | ||
|
|
ce81ccb063 | ||
|
|
1cf5c379cb | ||
|
|
fee1118a20 | ||
|
|
97b9141245 | ||
|
|
fcd1eb642d | ||
|
|
8e6a163f27 | ||
|
|
39d0a99972 | ||
|
|
9ef05a12c3 | ||
|
|
80be396464 | ||
|
|
5650e38e7d | ||
|
|
8a04412fa1 | ||
|
|
8cc82aad87 | ||
|
|
de22001ab5 | ||
|
|
f1026418ea | ||
|
|
17cbf773b9 | ||
|
|
984d4a2c0f | ||
|
|
e6bffa4475 | ||
|
|
92f0f3d21d | ||
|
|
a438a6d2bc | ||
|
|
7ea3b4118d | ||
|
|
183f23f10d | ||
|
|
792def4928 | ||
|
|
2df75de505 | ||
|
|
b084e4d963 | ||
|
|
35b7b8e4bc | ||
|
|
6b9b6a9169 | ||
|
|
c7c75e87dc | ||
|
|
b0a1036d93 | ||
|
|
8f99cd5996 | ||
|
|
60f44c098d | ||
|
|
50ad8005e4 | ||
|
|
83618d719a | ||
|
|
e7a76b5123 | ||
|
|
29c8cf8db8 | ||
|
|
d3da5294e8 | ||
|
|
765f2b8446 | ||
|
|
ff5e5423e5 | ||
|
|
311b5ce051 | ||
|
|
3facde2536 | ||
|
|
4364ea1272 | ||
|
|
4b0c3a3270 | ||
|
|
5048455965 | ||
|
|
6c8957be7f | ||
|
|
18ce88bd2d | ||
|
|
40d40e470d | ||
|
|
56aae0eaf5 | ||
|
|
dc2c527ce9 | ||
|
|
62b51b8452 | ||
|
|
b2c04da8dc | ||
|
|
ec9cbe847d | ||
|
|
acded821c4 | ||
|
|
69e519052b | ||
|
|
46b5547a42 | ||
|
|
36bb5c2383 | ||
|
|
e800ee2f63 | ||
|
|
cc0874cf71 | ||
|
|
68f8fc2f14 | ||
|
|
4845c7359d | ||
|
|
351b50a887 | ||
|
|
60f86fc876 | ||
|
|
937c407eef | ||
|
|
dcfc10b129 | ||
|
|
aebd0c9717 | ||
|
|
1c188cf73c | ||
|
|
1a12766e3b | ||
|
|
6037349512 | ||
|
|
ebbabc4986 | ||
|
|
8d7ad44331 | ||
|
|
5367708236 | ||
|
|
9dba1b668c | ||
|
|
424a7f48f8 | ||
|
|
4ed1e45869 | ||
|
|
21d188bf95 | ||
|
|
8a65666454 | ||
|
|
8781083960 | ||
|
|
fa12209c1b | ||
|
|
c7c03bf303 | ||
|
|
871357d539 | ||
|
|
71df327190 | ||
|
|
c9eab73f2a | ||
|
|
47571d11db | ||
|
|
b80b93ea0f | ||
|
|
6df5a6a833 | ||
|
|
5164ccc3e5 | ||
|
|
3306cf45ca | ||
|
|
c487c42492 | ||
|
|
9c417c54d4 | ||
|
|
9843f2a657 | ||
|
|
7b4715bad7 | ||
|
|
f15e9e8de4 | ||
|
|
72e2fafa20 | ||
|
|
233bf78ab4 | ||
|
|
f22f46f4f9 | ||
|
|
290f125a13 | ||
|
|
52ecbc2843 | ||
|
|
05e49ffbdf | ||
|
|
bd0f9c2065 | ||
|
|
c5b3c6e101 | ||
|
|
83bf65297a | ||
|
|
e8701e64b9 | ||
|
|
c553797c4f | ||
|
|
5905f36f05 | ||
|
|
c3f8dbf6b5 | ||
|
|
62607d5452 | ||
|
|
e57df8fa86 | ||
|
|
e856036f4c | ||
|
|
9e7aa98c22 | ||
|
|
2022ae0fb9 | ||
|
|
64ec3493c1 | ||
|
|
4063fe0283 | ||
|
|
183cacac90 | ||
|
|
19d6b6cd7a | ||
|
|
c10ed26c30 | ||
|
|
ae571810f2 | ||
|
|
3ddbb1687c | ||
|
|
8fae3d7b1e | ||
|
|
b57dcb4b51 | ||
|
|
26db18bc90 | ||
|
|
b9675ef6e6 | ||
|
|
e395eb1108 | ||
|
|
3b0fa77f50 | ||
|
|
129e403487 | ||
|
|
a3ac837599 | ||
|
|
78741cf025 | ||
|
|
51bb339ab2 | ||
|
|
b743c1237e | ||
|
|
31719ad124 | ||
|
|
565c2edb0a | ||
|
|
c877f0f034 | ||
|
|
b15266fe06 | ||
|
|
cfe1ff4bdb | ||
|
|
d4823efad9 | ||
|
|
9f53491cab | ||
|
|
02a27a6c4f | ||
|
|
a611c968cc | ||
|
|
59698906eb | ||
|
|
c0d8e0eb63 | ||
|
|
2ed0adb075 | ||
|
|
68ebb81e86 | ||
|
|
05adc6c2de | ||
|
|
f63bd4ff47 | ||
|
|
5bbc321588 | ||
|
|
4cf4320593 | ||
|
|
eab47ea1e5 | ||
|
|
f52dd35ac3 | ||
|
|
61c7edfd34 | ||
|
|
5bbd424ee0 | ||
|
|
6ac40f7b65 | ||
|
|
f505575f69 | ||
|
|
4084c58aa1 | ||
|
|
e99365f601 | ||
|
|
e2a01455af | ||
|
|
e8884e5e9c | ||
|
|
a7001c311b | ||
|
|
9181e2f4c7 | ||
|
|
fb76a81ff7 | ||
|
|
07d765209d | ||
|
|
48af68ba8e | ||
|
|
0c93df89b6 | ||
|
|
43f0941e8f | ||
|
|
481119f7d6 | ||
|
|
9f56645038 | ||
|
|
eb8619e256 | ||
|
|
4ef7a25c10 | ||
|
|
3727a15764 | ||
|
|
aaabbd3e9e | ||
|
|
84f9cac4d0 | ||
|
|
914f1eafac | ||
|
|
6fd2f685fe | ||
|
|
737aee9295 | ||
|
|
cb9c465707 | ||
|
|
3c79bdd7a0 | ||
|
|
a4c56bf67b | ||
|
|
4c1b32d7e2 | ||
|
|
f78b479118 | ||
|
|
4802f9cdb6 | ||
|
|
83776d6219 | ||
|
|
e83f8c0aa5 | ||
|
|
bd77216d06 | ||
|
|
5a578ea4c7 | ||
|
|
9ae64c9910 | ||
|
|
b42ad359e9 | ||
|
|
757e2c79b4 | ||
|
|
86e9bbc74e | ||
|
|
e40f25ebe1 | ||
|
|
ff1d333a02 | ||
|
|
2ae91a9e2f | ||
|
|
d213d69fe3 | ||
|
|
56da835eaf | ||
|
|
96bcfb29c7 | ||
|
|
7be1065b8f | ||
|
|
a2546b9082 | ||
|
|
ceeb5b909f | ||
|
|
43a89cca8e | ||
|
|
f338bf9257 | ||
|
|
767fc0b739 | ||
|
|
54d08c8868 | ||
|
|
5880bc5417 | ||
|
|
f613a3e332 | ||
|
|
bfe586843f | ||
|
|
d0633e6dbe | ||
|
|
0f2ca8cde1 | ||
|
|
c53f9d561e | ||
|
|
65141161f6 | ||
|
|
72f454b752 | ||
|
|
10ebbaea2e | ||
|
|
aa5ce4d450 | ||
|
|
d33d623f0d | ||
|
|
7984ffdc6a | ||
|
|
c1267d04c5 | ||
|
|
a04c076b7f | ||
|
|
44891b4a0a | ||
|
|
7b39bcdaae | ||
|
|
d937f342bb | ||
|
|
318cb1f207 | ||
|
|
c48465dbaa | ||
|
|
8be1a37909 | ||
|
|
d3d0be4167 | ||
|
|
762ada1e07 | ||
|
|
0d3da210f0 | ||
|
|
cccf86dd05 | ||
|
|
8a76094965 | ||
|
|
790f5848b2 | ||
|
|
82d7eea7e3 | ||
|
|
2547dffccc | ||
|
|
9bb041791c | ||
|
|
17515bae14 | ||
|
|
4bd3d25218 | ||
|
|
5ffacc5e84 | ||
|
|
83b2f83da0 | ||
|
|
b36270b5e1 | ||
|
|
6ff7a79308 | ||
|
|
af582b66bb | ||
|
|
2460d904bd | ||
|
|
1ccabe2965 | ||
|
|
fb83f6a1fc | ||
|
|
b04f81284a | ||
|
|
ec9331f851 | ||
|
|
dafef5a688 | ||
|
|
d96a070a3a | ||
|
|
ed3979df5f | ||
|
|
79fc4ff6f9 | ||
|
|
7b6d519482 | ||
|
|
52d1008661 | ||
|
|
96bd8ff57c | ||
|
|
ce3fe52498 | ||
|
|
7e2f971c08 | ||
|
|
d63b49137a | ||
|
|
b9ee5650b0 | ||
|
|
caef337587 | ||
|
|
b4a5002a6e | ||
|
|
86be915cce | ||
|
|
d9f38561c8 | ||
|
|
4836864f56 | ||
|
|
a4a31fa8dc | ||
|
|
f942980c0b | ||
|
|
3fb35cbd6f | ||
|
|
15e0f1696f | ||
|
|
da84fa3d74 | ||
|
|
d6e7333ae4 | ||
|
|
6ec02e9ecf | ||
|
|
25cd5bb697 | ||
|
|
fa129ce5b5 | ||
|
|
e1e042f2a1 | ||
|
|
ceb599e789 | ||
|
|
8c82b06904 | ||
|
|
05d044aac3 | ||
|
|
2d5c693fd3 | ||
|
|
57fa1801c3 | ||
|
|
a294b04bf0 | ||
|
|
9c99ab4572 | ||
|
|
d549fdfa22 | ||
|
|
95ac3078da | ||
|
|
92e3071623 | ||
|
|
ee5aef6c72 | ||
|
|
639cd07d6d | ||
|
|
af03ecf352 | ||
|
|
60ec9793fb | ||
|
|
674379e673 | ||
|
|
a28d066732 | ||
|
|
8495b6d365 | ||
|
|
1ef0365670 | ||
|
|
87a30890a3 | ||
|
|
ed4d18f516 | ||
|
|
9c62fcdb68 | ||
|
|
27a0c21c38 | ||
|
|
3555a659ec | ||
|
|
4c5e8adf8b | ||
|
|
875ed05bdc | ||
|
|
67f3a50e9a | ||
|
|
afff321e9a | ||
|
|
8f0e47fae8 | ||
|
|
823b8be4b7 | ||
|
|
92767dd703 | ||
|
|
7b9319b1c8 | ||
|
|
3d95405e5f | ||
|
|
8d2bca1a90 | ||
|
|
0fd1cd2400 | ||
|
|
6bfec56796 | ||
|
|
e815763b7f | ||
|
|
7e2c89a37f | ||
|
|
1e05637e37 | ||
|
|
b713934b2e | ||
|
|
75fb9ac1be | ||
|
|
8aab9d87fa | ||
|
|
7d11f825aa | ||
|
|
196ebaf662 | ||
|
|
87f2dec8d4 | ||
|
|
a1e0d316ea | ||
|
|
11860637e1 | ||
|
|
2e308a3a38 | ||
|
|
c2b429ab24 | ||
|
|
6222ae51ce | ||
|
|
b29f98377d | ||
|
|
1d4deff25a | ||
|
|
df727f2126 | ||
|
|
7a77f8b6d5 | ||
|
|
0c53d750e7 | ||
|
|
92ab45a330 | ||
|
|
3d76b7cb2b | ||
|
|
bf14883a04 | ||
|
|
9f7dc2bef7 | ||
|
|
cf51c4120e | ||
|
|
0834b152fb | ||
|
|
8b98a7e8c3 | ||
|
|
eab4d462f8 | ||
|
|
c3916462f6 | ||
|
|
110780b18b | ||
|
|
b09e29a03c | ||
|
|
7426c86eb8 | ||
|
|
d1b154a10f | ||
|
|
9377157961 | ||
|
|
2c838f6459 | ||
|
|
5037ee0d37 | ||
|
|
b26e8604f1 | ||
|
|
5fd07da764 | ||
|
|
d76d89323c | ||
|
|
aa82cb38e9 | ||
|
|
89e6839a48 | ||
|
|
c906f30661 | ||
|
|
2a37467fa1 | ||
|
|
f2b916534b | ||
|
|
9bc5b4c663 | ||
|
|
35b5c4ba1b | ||
|
|
a853cdec5b | ||
|
|
3f4eb4c924 | ||
|
|
8d73cd502b | ||
|
|
a2866e2e6a | ||
|
|
e36bfbab38 | ||
|
|
35bb465b86 | ||
|
|
c42f46ab7d | ||
|
|
7753fc6570 | ||
|
|
c60b751694 | ||
|
|
683e564815 | ||
|
|
431aa8ada9 | ||
|
|
dc4c1579d4 | ||
|
|
03e406eefc | ||
|
|
72550c3803 | ||
|
|
5d06929169 | ||
|
|
76503f95ed | ||
|
|
fe95943305 | ||
|
|
bb9a2ca87c | ||
|
|
d35780eda0 | ||
|
|
0d3d7de6fc | ||
|
|
62e395f0e3 | ||
|
|
5260db7663 | ||
|
|
2ec5426035 | ||
|
|
c9500a9c1d | ||
|
|
f9d3665c88 | ||
|
|
c27c51484a | ||
|
|
f699b8f997 | ||
|
|
a8a5dd3b44 | ||
|
|
a68c1b15aa | ||
|
|
9113316b0e | ||
|
|
7178ab7da0 | ||
|
|
1fbb094c6f | ||
|
|
98c460cecd | ||
|
|
8b8052909f | ||
|
|
61407986b4 | ||
|
|
31a9eceda5 | ||
|
|
fc66df1e60 | ||
|
|
178c9fb200 | ||
|
|
73b6bf4629 | ||
|
|
08a8514b7a | ||
|
|
d24662b88a | ||
|
|
1e25f62ee6 | ||
|
|
5fbdf2bcec | ||
|
|
e7aaa7c61e | ||
|
|
fddb6fddc1 | ||
|
|
a932acaa6b | ||
|
|
82312d4fff | ||
|
|
f5bf45a2e5 | ||
|
|
3f9948a069 | ||
|
|
ae5831d303 | ||
|
|
721b2bfa85 | ||
|
|
19038582d3 | ||
|
|
64b4aead15 | ||
|
|
dd4287ca5d | ||
|
|
e0c2490a14 | ||
|
|
ec0cf996c9 | ||
|
|
d9d48aad2d | ||
|
|
adafa24b0a | ||
|
|
3e8bb99a2b | ||
|
|
77cba688ed | ||
|
|
54a546091a | ||
|
|
191c7bef6b | ||
|
|
31e6f8636f | ||
|
|
3b554bda26 | ||
|
|
15844040c2 | ||
|
|
7a3815b372 | ||
|
|
647b041d1a | ||
|
|
8122ad7bab | ||
|
|
2f0180b09e | ||
|
|
acdfef7b14 | ||
|
|
f96526ffc2 | ||
|
|
fe9794706a | ||
|
|
75daede92f | ||
|
|
fbdeb1778d | ||
|
|
b275765545 | ||
|
|
0c1a27b787 | ||
|
|
84afeb41f3 | ||
|
|
b2802a1351 | ||
|
|
0677fc1c4e | ||
|
|
2749da542c | ||
|
|
e14baa7a3b | ||
|
|
0e7363e0b3 | ||
|
|
d87a846ebc | ||
|
|
8b0dfc9fc4 | ||
|
|
34473a9c7f | ||
|
|
b6507869cd | ||
|
|
9e2e994395 | ||
|
|
d531ebcb57 | ||
|
|
c4a8cbd15a | ||
|
|
99f929f36b | ||
|
|
d787e41b20 | ||
|
|
6cf0ba1466 | ||
|
|
76d18a5776 | ||
|
|
cd9ba1ed89 | ||
|
|
5defb25ac6 | ||
|
|
fa2f96c2e3 | ||
|
|
f93304e77f | ||
|
|
2c86187a1b | ||
|
|
d6ac752538 | ||
|
|
97785bfc0f | ||
|
|
b591277620 | ||
|
|
63137bb901 | ||
|
|
d3654694d0 | ||
|
|
5244c0b48e | ||
|
|
3e7fac0d56 | ||
|
|
58f8226c7f | ||
|
|
e4054abfdc | ||
|
|
9adf0e92bc | ||
|
|
1660145a08 | ||
|
|
7f79a6405b | ||
|
|
8595ff7842 | ||
|
|
58e207cd77 | ||
|
|
67ed8065db | ||
|
|
916227b4df | ||
|
|
3c5f25507b | ||
|
|
56aa4e7a9a | ||
|
|
384ee6eafb | ||
|
|
2ec3460967 | ||
|
|
7a38612620 | ||
|
|
2cd9260500 | ||
|
|
673c96ce97 | ||
|
|
4ebb688f4f | ||
|
|
5670205e2a | ||
|
|
f984decd66 | ||
|
|
a7daa5ae13 | ||
|
|
48b2e853a8 | ||
|
|
b58d10a875 | ||
|
|
3ee7d7dc7f | ||
|
|
3176aebf9d | ||
|
|
9671e6750c | ||
|
|
742b6c6d15 | ||
|
|
f5e90422f5 | ||
|
|
ff7d3dc3a0 | ||
|
|
99797947aa | ||
|
|
c12b9d719a | ||
|
|
add89a03a6 | ||
|
|
467c1599c9 | ||
|
|
660ae8e0f3 | ||
|
|
ba660ecde2 | ||
|
|
a877209c8b | ||
|
|
ee32d622ce | ||
|
|
6df1c79c22 | ||
|
|
12904932c4 | ||
|
|
b6e8420aee | ||
|
|
91779b49c4 | ||
|
|
e5f0e58931 | ||
|
|
9e982750ee | ||
|
|
5ca695cc12 | ||
|
|
13e29a697c | ||
|
|
6b1e9b8dfe | ||
|
|
590fbbef03 | ||
|
|
a547e2df85 | ||
|
|
e462aa97bf | ||
|
|
398cd1edfb | ||
|
|
494d0c8e02 | ||
|
|
ffb9dd02fe | ||
|
|
15122da0e2 | ||
|
|
e9c1cabac2 | ||
|
|
ae6ff09494 | ||
|
|
b13035cc91 | ||
|
|
c081228439 | ||
|
|
b5afe6bc38 | ||
|
|
2dee03aee5 | ||
|
|
af59826a2f | ||
|
|
f523177850 | ||
|
|
57c444b3ad | ||
|
|
d5fda6e3b0 | ||
|
|
58443a022d | ||
|
|
aa11db5f11 | ||
|
|
2e2be463f8 | ||
|
|
379c60b08d | ||
|
|
465605d616 | ||
|
|
703826886c | ||
|
|
9669a99d1a | ||
|
|
c22a3f37a9 | ||
|
|
1be438f2a6 | ||
|
|
40160e24ab | ||
|
|
8a88684736 | ||
|
|
af2fe6110c | ||
|
|
3ecaabc7fd | ||
|
|
1309b8ca97 | ||
|
|
07cf96ebf7 | ||
|
|
b7b899cae6 | ||
|
|
b7dbe5147a | ||
|
|
158a322e82 | ||
|
|
ce829c2aef | ||
|
|
4814e7c9b2 | ||
|
|
866d0e7cb8 | ||
|
|
1748d4b739 | ||
|
|
5f5817ab05 | ||
|
|
b117f67227 | ||
|
|
3b97797c8d | ||
|
|
edca2d9891 | ||
|
|
c00f4e48ba | ||
|
|
7076082ae6 | ||
|
|
ea72bd9600 | ||
|
|
f40131b4d9 | ||
|
|
9a3c80a348 | ||
|
|
7bcee4733a | ||
|
|
239badea9b | ||
|
|
316c00936f | ||
|
|
874fd43257 | ||
|
|
80916e6884 | ||
|
|
2ab0bf4b97 | ||
|
|
b7a3be693b | ||
|
|
beebc0a40f | ||
|
|
9848b54cac | ||
|
|
deda48068c | ||
|
|
ebcbb23226 | ||
|
|
7e9fc9b6af | ||
|
|
1a1abd8d05 | ||
|
|
125f674eae | ||
|
|
13cbd31040 | ||
|
|
0ff9aaf6c1 | ||
|
|
3110c37d02 | ||
|
|
ec7460b4f2 | ||
|
|
1b4f4a936f | ||
|
|
ed61a49169 | ||
|
|
389d558a3b | ||
|
|
44b084a75e | ||
|
|
bb0e82fff1 | ||
|
|
5fc59f009c | ||
|
|
ce82b9e48f | ||
|
|
09b1d98070 | ||
|
|
dd463e246d | ||
|
|
fa6d6bbceb | ||
|
|
a92b4ea76f | ||
|
|
361fc53917 | ||
|
|
62d808becc | ||
|
|
a85179aff3 | ||
|
|
5d6fbc1655 | ||
|
|
0b3083c75b | ||
|
|
b4022cc487 | ||
|
|
50c250b808 | ||
|
|
c037170faa | ||
|
|
7678ec3f9b | ||
|
|
fc9c7b6cbc | ||
|
|
246b8c6e4a | ||
|
|
6789b63131 | ||
|
|
91f4ac602b | ||
|
|
690596b770 | ||
|
|
5c90451ea0 | ||
|
|
3406eba4ef | ||
|
|
ddf9e7b302 | ||
|
|
95481e7ba7 | ||
|
|
79f34bdbc2 | ||
|
|
b139e51041 | ||
|
|
74cd80e530 | ||
|
|
ff8b87118d | ||
|
|
2223204eba | ||
|
|
fc1f932cc0 | ||
|
|
c0147f86a1 | ||
|
|
47c361d2f8 | ||
|
|
863d3f26b3 | ||
|
|
9ff940a0ef | ||
|
|
2a78dac60d | ||
|
|
27185de752 | ||
|
|
dda2058d90 | ||
|
|
a1cf9e3bf3 | ||
|
|
05ea111c47 | ||
|
|
8a1d3b86af | ||
|
|
a612ce6659 | ||
|
|
d50ca1b1ed | ||
|
|
60a0f81c7a | ||
|
|
f9af8962f8 | ||
|
|
54172924c8 | ||
|
|
374f9b2f07 | ||
|
|
ce2cdced61 | ||
|
|
910fc0f28f | ||
|
|
742ec37ca3 | ||
|
|
72165e5b77 | ||
|
|
ff2d7551c7 | ||
|
|
903fb34b39 | ||
|
|
9c48f1ed22 | ||
|
|
bfdcc7b9b6 | ||
|
|
4bf13a8207 | ||
|
|
de27f7fc79 | ||
|
|
413e36b17a | ||
|
|
354d3842b5 | ||
|
|
9329cd5f13 | ||
|
|
87acd8fb07 | ||
|
|
a53774721a | ||
|
|
0f0b011440 | ||
|
|
faa3d172ab | ||
|
|
15c2ac2cac | ||
|
|
fb9b5b6f4a | ||
|
|
4ecfbac85f | ||
|
|
9892d017b2 | ||
|
|
e8d34bccbd | ||
|
|
33300673b7 | ||
|
|
869580206d | ||
|
|
278d6c0527 | ||
|
|
e7ab0e0f9f | ||
|
|
6451fcd085 | ||
|
|
b5f77eb12a | ||
|
|
e3e0ac6ec7 | ||
|
|
f1dd03548f | ||
|
|
28ad246bb4 | ||
|
|
577951b032 | ||
|
|
13f86c3489 | ||
|
|
6e0209112b | ||
|
|
c77dae7a1a | ||
|
|
a7b2ce32f7 | ||
|
|
0d4b3a133d | ||
|
|
02e928cf9b | ||
|
|
56a94ccd9e | ||
|
|
baf056bae8 | ||
|
|
10d581d1cf | ||
|
|
138c405974 | ||
|
|
8fe3b450d2 | ||
|
|
210b7d8e00 | ||
|
|
1dcfb201c4 | ||
|
|
f7e3de02ef | ||
|
|
4d14655c2b | ||
|
|
5e2890bd49 | ||
|
|
5be3944730 | ||
|
|
7641a90c34 | ||
|
|
c43609e035 | ||
|
|
9e696bd6a3 | ||
|
|
60bec24083 | ||
|
|
5c79ef9396 | ||
|
|
6c5b147a39 | ||
|
|
b82d6f70a4 | ||
|
|
700487a7c7 | ||
|
|
3dbaeef58c | ||
|
|
42ac5f0c1a | ||
|
|
05aee12652 | ||
|
|
24d9f2c140 | ||
|
|
b71ca2b014 | ||
|
|
4a95eb0a12 | ||
|
|
be799453aa | ||
|
|
ea7786e8ca | ||
|
|
929cb0ed7d | ||
|
|
5f4eca3816 | ||
|
|
5614b4dafb | ||
|
|
e5ad2e5267 | ||
|
|
e12ec335a5 | ||
|
|
220231d8e3 | ||
|
|
e6c5e3f28a | ||
|
|
42109a62a4 | ||
|
|
b8cdec92c7 | ||
|
|
9c902025bf | ||
|
|
b9977ea667 | ||
|
|
48b652bcbe | ||
|
|
b4796a62ee | ||
|
|
35cda2e692 | ||
|
|
f8d21e1431 | ||
|
|
9da9826b85 | ||
|
|
fe95f2217c | ||
|
|
8351538873 | ||
|
|
112283e230 | ||
|
|
b31ec214a5 | ||
|
|
114b929f8b | ||
|
|
ddca9c56fc | ||
|
|
58371fa263 | ||
|
|
7e90fb6a57 | ||
|
|
591af2d074 | ||
|
|
c229c87398 | ||
|
|
e5999bfb1a | ||
|
|
a4e278bfe7 | ||
|
|
9e7900da1e | ||
|
|
200de16440 | ||
|
|
536f949a1a | ||
|
|
97d1b3a506 | ||
|
|
71d5d2c669 | ||
|
|
6605adf669 | ||
|
|
458782bf67 | ||
|
|
c2025c0425 | ||
|
|
a9c9868957 | ||
|
|
d1fb790818 | ||
|
|
1f403325ac | ||
|
|
04686df17a | ||
|
|
feedaa37fa | ||
|
|
a182e5d721 | ||
|
|
4bfb32f685 | ||
|
|
1a2197d7bf | ||
|
|
e560045cfd | ||
|
|
8168341e9b | ||
|
|
1bbb67c452 | ||
|
|
150fcde0dc | ||
|
|
73e616df2a | ||
|
|
f318d4f2a4 | ||
|
|
e71095801f | ||
|
|
dbeed36dec | ||
|
|
4de08a4672 | ||
|
|
d7aa103f00 | ||
|
|
cf81375b94 | ||
|
|
66f9a49ce9 | ||
|
|
58c9f20692 | ||
|
|
ec0f3836ff | ||
|
|
4d54d87c3e | ||
|
|
ee4f332ec5 | ||
|
|
dc6da63e30 | ||
|
|
763360594d | ||
|
|
7e0a1683e6 | ||
|
|
2a24f906a9 | ||
|
|
a79af259e9 | ||
|
|
ce14c7a995 | ||
|
|
88a973cde5 | ||
|
|
1a830b751d | ||
|
|
abc1b22193 | ||
|
|
0eff740523 | ||
|
|
a1b7902944 | ||
|
|
7718303e71 | ||
|
|
103b432c84 | ||
|
|
a45cc801d2 | ||
|
|
7634687057 | ||
|
|
b3ecb96e36 | ||
|
|
907c1faf1e | ||
|
|
6c3126d950 | ||
|
|
6e89e69d08 | ||
|
|
e66d0bd03a | ||
|
|
4a2ace1857 | ||
|
|
5189bfdef4 | ||
|
|
24f00a6c33 | ||
|
|
8e49892b21 | ||
|
|
e557dc80b8 | ||
|
|
4eb8f9ca8a | ||
|
|
f7ef5c1d57 | ||
|
|
00c9ad49df | ||
|
|
9777c5f49a | ||
|
|
0214745239 | ||
|
|
46a02ff15b | ||
|
|
6ad9586c84 | ||
|
|
78a5482267 | ||
|
|
7b0d846407 | ||
|
|
f28cc45183 | ||
|
|
e664e9737c | ||
|
|
13ba8d878c | ||
|
|
78d6c1b5be | ||
|
|
feb294d552 | ||
|
|
70a8608749 | ||
|
|
7e3b586c1e | ||
|
|
eff12e838c | ||
|
|
82631c5f94 | ||
|
|
b58a8b1ee0 | ||
|
|
5b75b637b8 | ||
|
|
9ac9b75bc4 | ||
|
|
ebaa999f92 | ||
|
|
6c558ee8bc | ||
|
|
31a2b892d8 | ||
|
|
9daa4e2a85 | ||
|
|
3e2fcd67b2 | ||
|
|
241b71852e | ||
|
|
97294ef2fd | ||
|
|
549698b1e0 | ||
|
|
c486b7b41c | ||
|
|
f078ecbc8f | ||
|
|
2bb5f035af | ||
|
|
cca5c06679 | ||
|
|
0897357993 | ||
|
|
2c1fbea531 | ||
|
|
13e6262659 | ||
|
|
1d19a5ec0f | ||
|
|
77c7ed0e93 | ||
|
|
b052621f67 | ||
|
|
8f1031586f | ||
|
|
79a1c0574b | ||
|
|
489f92e0e5 | ||
|
|
737c4223ef | ||
|
|
db0da033eb | ||
|
|
6a9f1209df | ||
|
|
34dda7cc7f | ||
|
|
4d36e73230 | ||
|
|
709e09e1c3 | ||
|
|
aa4af94c69 | ||
|
|
b84d59c5f0 | ||
|
|
33c71c3a4b | ||
|
|
c8e4d5de7f | ||
|
|
156cea5b45 | ||
|
|
8450114098 | ||
|
|
24277fbb97 | ||
|
|
66bb255fcd | ||
|
|
5054806ec1 | ||
|
|
430e496050 | ||
|
|
d4f72a5bfb | ||
|
|
f8aae79a72 | ||
|
|
9cd80a7b5c | ||
|
|
772b45c745 | ||
|
|
5f280837a6 | ||
|
|
6f52e90065 | ||
|
|
771528ab13 | ||
|
|
b32121a5d1 | ||
|
|
2e36689df3 | ||
|
|
2df6114bc4 | ||
|
|
a644ac6d2c | ||
|
|
de11b5b9b5 | ||
|
|
d83d004ccd | ||
|
|
43e13dbd4d | ||
|
|
8a391e33ae | ||
|
|
477b1ed6cf | ||
|
|
04ad93e6fd | ||
|
|
65e92eca49 | ||
|
|
9039904f4c | ||
|
|
69214ea671 | ||
|
|
b023995538 | ||
|
|
793369791a | ||
|
|
7a8ea7e78b | ||
|
|
854ca32f10 | ||
|
|
d7ac861d3b | ||
|
|
89b40b225c | ||
|
|
4bf448be25 | ||
|
|
fa48020a52 | ||
|
|
1ef7cae41b | ||
|
|
2d3837bec7 | ||
|
|
498c2e60fd | ||
|
|
ceb6b8680a | ||
|
|
b264b9548f | ||
|
|
d98a9f2583 | ||
|
|
226a9a5fa6 | ||
|
|
25c311eaf6 | ||
|
|
cc9c97e0dc | ||
|
|
e70165039c | ||
|
|
c1de91aca4 | ||
|
|
b55b90bfb4 | ||
|
|
8da95b6f1b | ||
|
|
b91baae09d | ||
|
|
13724569ec | ||
|
|
4a6eb5eb45 | ||
|
|
6927d0e091 | ||
|
|
b5dbced938 | ||
|
|
f2d5ff5bf2 | ||
|
|
3d60686c0c | ||
|
|
45488e0ffa | ||
|
|
f67d60496a | ||
|
|
18579534ea | ||
|
|
47374a33fc | ||
|
|
0fcafbece8 | ||
|
|
96bb4bf38a | ||
|
|
fd142c29d9 | ||
|
|
ebc5f00efe | ||
|
|
ea320d3464 | ||
|
|
5687a00e4e | ||
|
|
b18114e19e | ||
|
|
02a9c3be6c | ||
|
|
4fce59f274 | ||
|
|
fb7299800f | ||
|
|
c046630c33 | ||
|
|
a30364c1f9 | ||
|
|
766526e114 | ||
|
|
50e18938a9 | ||
|
|
f3af1840cb | ||
|
|
467c27a1f9 | ||
|
|
3f5dd18bd4 | ||
|
|
40431251cb | ||
|
|
82cf3a8043 | ||
|
|
03b2c2577c | ||
|
|
0663c5bd52 | ||
|
|
35981c8b71 | ||
|
|
8fe8951a8d | ||
|
|
fdca8ec418 | ||
|
|
45cf827c8f | ||
|
|
00cb3eb24b | ||
|
|
c23a8c7833 | ||
|
|
e1941442d4 | ||
|
|
49c328a892 | ||
|
|
0935802f1e | ||
|
|
19fd425928 | ||
|
|
167d1df699 | ||
|
|
7ed2bbeb11 | ||
|
|
9101193242 | ||
|
|
571a566399 | ||
|
|
3c6518ddbf | ||
|
|
4e7948b47a | ||
|
|
ba8931829b | ||
|
|
61eaa6ec64 | ||
|
|
c5e7c0e436 | ||
|
|
e26390ca46 | ||
|
|
a6477d5933 | ||
|
|
5cba88ea7c | ||
|
|
5fc9b17518 | ||
|
|
fa90c180ee | ||
|
|
5610880003 | ||
|
|
e7febf4fbb | ||
|
|
aca3193efb | ||
|
|
b97f6626b6 | ||
|
|
f93ecf8783 | ||
|
|
0487c9441f | ||
|
|
a955cbfa49 | ||
|
|
8c97b49886 | ||
|
|
2152b320c5 | ||
|
|
d6d60b4d6c | ||
|
|
d6c831bd3d | ||
|
|
97b364cb25 | ||
|
|
03f4569dc3 | ||
|
|
8c94833b72 | ||
|
|
9fda8b5193 | ||
|
|
e4e33c743e | ||
|
|
87f9477b10 | ||
|
|
9959d9ece8 | ||
|
|
27b9775073 | ||
|
|
766c24b2e6 | ||
|
|
7179fdd550 | ||
|
|
c887c4cbd5 | ||
|
|
e18257f0e5 | ||
|
|
8431f62ebb | ||
|
|
f091b73e69 | ||
|
|
ce6fbbea94 | ||
|
|
aea5da0ef6 | ||
|
|
3a75159832 | ||
|
|
1ebf5e3d03 | ||
|
|
dc2647cd3d | ||
|
|
86896408b0 | ||
|
|
53cb173663 | ||
|
|
d59c58bc95 | ||
|
|
ddd25def01 | ||
|
|
8c6012a4af | ||
|
|
42deca50c2 | ||
|
|
d685ae73b4 | ||
|
|
4021f95261 | ||
|
|
7dd0c1730a | ||
|
|
f92fe15897 | ||
|
|
3fe8c56736 | ||
|
|
60965bd7e5 | ||
|
|
0e0e441b33 | ||
|
|
b4a41aa542 | ||
|
|
db6e26bb8c | ||
|
|
88baa3865e | ||
|
|
74f49f99f9 | ||
|
|
7065b75bfd | ||
|
|
7959e8b764 | ||
|
|
52bdd1b834 | ||
|
|
7a3fe48ba4 | ||
|
|
7cd418d38e | ||
|
|
cd80019eec | ||
|
|
d552861346 | ||
|
|
10f76dc5da | ||
|
|
5b142788d2 | ||
|
|
eaa836e8ca | ||
|
|
42eae4634f | ||
|
|
8acc5cb60f | ||
|
|
31a051b677 | ||
|
|
8f9c74e9f1 | ||
|
|
975903ae17 | ||
|
|
4efcaa43c8 | ||
|
|
330be18ec5 | ||
|
|
f1f8122120 | ||
|
|
297eded261 | ||
|
|
0e07f2e15d | ||
|
|
82b46f556d | ||
|
|
8f66fe6392 | ||
|
|
3a00f13436 | ||
|
|
c6549117a2 | ||
|
|
ed1d189e10 | ||
|
|
dfe1273d14 | ||
|
|
91a222c66d | ||
|
|
0503bdb316 | ||
|
|
930ba003f8 | ||
|
|
d54005059c | ||
|
|
d7c85ad916 | ||
|
|
c1a3021771 | ||
|
|
d049e81b10 | ||
|
|
c43b6dcc75 | ||
|
|
367cfab4e6 | ||
|
|
69adf8c384 | ||
|
|
73ca8e5834 | ||
|
|
b088291f14 | ||
|
|
a2ae01cc0f | ||
|
|
da417aa56d | ||
|
|
d4315bbf6b | ||
|
|
3fa344c037 | ||
|
|
7cc047455e | ||
|
|
d726597737 | ||
|
|
2309450a76 | ||
|
|
ea5eea2424 | ||
|
|
746f6e0eb3 | ||
|
|
7441d8cc0c | ||
|
|
ccf9387d57 | ||
|
|
d4cefb6289 | ||
|
|
259d1ecd1d | ||
|
|
191070123d | ||
|
|
afb7b377f2 | ||
|
|
af30140621 | ||
|
|
ac2842ff1e | ||
|
|
892ee473d9 | ||
|
|
40d9765123 | ||
|
|
2818a000aa | ||
|
|
fb5d8e58ff | ||
|
|
5a7d1ecffc | ||
|
|
d056a0a3d8 | ||
|
|
7a079adc8f | ||
|
|
b8518ffe65 | ||
|
|
9654ee0848 | ||
|
|
7ecd211163 | ||
|
|
05f78b3b52 | ||
|
|
f5fc8f2928 | ||
|
|
9a8949f022 | ||
|
|
3adcc4c86a | ||
|
|
47e7963e50 | ||
|
|
88af7bb48b | ||
|
|
0d241e1114 | ||
|
|
f750a442f7 | ||
|
|
003853e702 | ||
|
|
a284ad4092 | ||
|
|
47f82e4408 | ||
|
|
5cd2126a6a | ||
|
|
29c353c553 | ||
|
|
808a8aedab | ||
|
|
74474a6d63 | ||
|
|
d16dcf642e | ||
|
|
7dd14e5d1c | ||
|
|
866fe27e78 | ||
|
|
d1f56f732e | ||
|
|
0e39dcd135 | ||
|
|
345ff2196a | ||
|
|
2c176e02ae | ||
|
|
63485b3029 | ||
|
|
f59b564507 | ||
|
|
2068678b8c | ||
|
|
cc66a9a5e3 | ||
|
|
5de1563997 | ||
|
|
ac5a4477ad | ||
|
|
c049f60d4a | ||
|
|
b5ce4f0427 | ||
|
|
ac12b6d332 | ||
|
|
5819b7a78c | ||
|
|
5bf1a3d6dc | ||
|
|
3f8db3d597 | ||
|
|
a50013fd99 | ||
|
|
2978053d16 | ||
|
|
2c760372d6 | ||
|
|
2680043bc6 | ||
|
|
8db451f652 | ||
|
|
430d3d74f6 | ||
|
|
7ee1879ed4 | ||
|
|
d14fcfd24a | ||
|
|
27927463a1 | ||
|
|
fcb6df45e5 | ||
|
|
a7927c13fd | ||
|
|
339c8f0133 | ||
|
|
bce602eb4e | ||
|
|
939cbd7057 | ||
|
|
12623c99b6 | ||
|
|
2655d61d70 | ||
|
|
fcb05b4c82 | ||
|
|
806bae1ee7 | ||
|
|
f6fcff3602 | ||
|
|
244b356a37 | ||
|
|
49f33f6438 | ||
|
|
93afb40cd4 | ||
|
|
9c1f853d58 | ||
|
|
7d09ab8915 | ||
|
|
d9db819e23 | ||
|
|
37716d55ed | ||
|
|
4399684582 | ||
|
|
44b4fc5f50 | ||
|
|
f4dad9f639 | ||
|
|
8740e4e94a | ||
|
|
c0a279e808 | ||
|
|
96e400fee5 | ||
|
|
72ba26679b | ||
|
|
ea47760bd8 | ||
|
|
70dfe4dc96 | ||
|
|
a8e9e0b916 | ||
|
|
31de2953a3 | ||
|
|
fd5c28dc52 | ||
|
|
42aa1f3f33 | ||
|
|
2110e35fd6 | ||
|
|
b5d33a656f | ||
|
|
fe56138142 | ||
|
|
c110eb92f8 | ||
|
|
8f8b884430 | ||
|
|
a8cd1eb996 | ||
|
|
29e595e5d4 | ||
|
|
c232780081 | ||
|
|
7c816de442 | ||
|
|
8677b7d698 | ||
|
|
33bef689c1 | ||
|
|
fcbe63eaad | ||
|
|
5727922106 | ||
|
|
5dc5e29b9c | ||
|
|
3deffcdb1e | ||
|
|
c9ae1d1ee5 | ||
|
|
daadcf36c0 | ||
|
|
823b679232 | ||
|
|
6c28ac260c | ||
|
|
49c34dfd36 | ||
|
|
4106477e7f | ||
|
|
7ac6ca7311 | ||
|
|
11a974da21 | ||
|
|
09dc9854cd | ||
|
|
442fcc02f7 | ||
|
|
b6a585348a | ||
|
|
c582f178b7 | ||
|
|
4cec90a260 | ||
|
|
0e48f7f245 | ||
|
|
392773ccb2 | ||
|
|
bf32922e5a | ||
|
|
797691f908 | ||
|
|
e5ea4fad78 | ||
|
|
5880de186b | ||
|
|
992928304f | ||
|
|
ae1262a241 | ||
|
|
c79f221192 | ||
|
|
8ce5679813 | ||
|
|
eb03625626 | ||
|
|
87d577e023 | ||
|
|
2ef6de928d | ||
|
|
29e131df43 | ||
|
|
cfd07aafff | ||
|
|
a178eb1bc8 | ||
|
|
8737ead008 | ||
|
|
90921981be | ||
|
|
acb19068d0 | ||
|
|
d74c4e90d4 | ||
|
|
85ca8cb90c | ||
|
|
c3ea36304b | ||
|
|
1b5642604b | ||
|
|
07c33eff43 | ||
|
|
4eb7b950c8 | ||
|
|
dc65d0ae9d | ||
|
|
b18b99eb14 | ||
|
|
5680be70ae | ||
|
|
c77e7e60fc | ||
|
|
d74c6ace24 | ||
|
|
f1b67730fa | ||
|
|
92a1e74b20 | ||
|
|
c914d67cda | ||
|
|
f35f8d06ea | ||
|
|
d2709a5389 | ||
|
|
928c575c6f | ||
|
|
3051c9d002 | ||
|
|
34c09f33da | ||
|
|
cf3282d103 | ||
|
|
32d9fd0b26 | ||
|
|
c6e79c84de | ||
|
|
8d6dde7825 | ||
|
|
d12c00bdc3 | ||
|
|
ba39d3d5d7 | ||
|
|
f3948e001f | ||
|
|
7fa71e3267 | ||
|
|
517fb9a023 | ||
|
|
9ac417fa88 | ||
|
|
d2a92c6bde | ||
|
|
d79e90f078 | ||
|
|
9b4cd0cd0f | ||
|
|
140a50f641 | ||
|
|
5645d9747b | ||
|
|
3fbb031745 | ||
|
|
4c8f6a7e42 | ||
|
|
7df276d219 | ||
|
|
0ee0138325 | ||
|
|
77f06856b6 | ||
|
|
65c451cb38 | ||
|
|
251aafccca | ||
|
|
c058625959 | ||
|
|
cdd04f7055 | ||
|
|
542ab0f886 | ||
|
|
e525b46f12 | ||
|
|
b9b4466d0d | ||
|
|
c3fff251a9 | ||
|
|
45a9e0ae0c | ||
|
|
489a4cd1cf | ||
|
|
2a2b2ef834 | ||
|
|
2e2eeb43a6 | ||
|
|
7f3148865c | ||
|
|
bb9c7f2dd9 | ||
|
|
9036d2d6a8 | ||
|
|
c061b47c57 | ||
|
|
f73f154ec2 | ||
|
|
64b6606824 | ||
|
|
42a7a09eea | ||
|
|
091c545c4f | ||
|
|
a6ba41e078 | ||
|
|
f85949bde0 | ||
|
|
2f871ad143 | ||
|
|
c8ea2d5b1f | ||
|
|
b131fb1fe2 | ||
|
|
413d0d6a24 | ||
|
|
0a2d73fd60 | ||
|
|
ce4999268a | ||
|
|
633ceb9bb1 | ||
|
|
64374bda5b | ||
|
|
772ad4f715 | ||
|
|
bdacee476d | ||
|
|
10f82b4bea | ||
|
|
8c5f252edb | ||
|
|
8b9f471d27 | ||
|
|
a64f9bbfe0 | ||
|
|
42ad49f5b7 | ||
|
|
2b0f8a9482 | ||
|
|
af4422c42a | ||
|
|
0311612ce9 | ||
|
|
5fc03449c8 | ||
|
|
4fab578b43 | ||
|
|
661b76615b | ||
|
|
dcfc70e8ed | ||
|
|
63fdd9fe0b | ||
|
|
910956b0ec | ||
|
|
d3ac8fd87d | ||
|
|
e98e00558a | ||
|
|
3ddf0b9722 | ||
|
|
2acae8300f | ||
|
|
dbe7892e03 | ||
|
|
28c5181dfe | ||
|
|
15e9885197 | ||
|
|
8505a4ddc3 | ||
|
|
6051266924 | ||
|
|
070e28e203 | ||
|
|
834924248f | ||
|
|
98dfa7d24f | ||
|
|
338c0a8a69 | ||
|
|
a874c0894a | ||
|
|
f382a3bb7e | ||
|
|
76e69cc8de | ||
|
|
fde412b240 | ||
|
|
bfc52a2342 | ||
|
|
deeebbfcb7 | ||
|
|
1ee7280c4c | ||
|
|
cde49d3d2b | ||
|
|
e738920156 | ||
|
|
0065e554e0 | ||
|
|
5a3e4e43d8 | ||
|
|
d9a5c56930 | ||
|
|
51fb590c0e | ||
|
|
5577a61090 | ||
|
|
5e909c73d7 | ||
|
|
e0c9f30efa | ||
|
|
515548a47a | ||
|
|
aa667ee396 | ||
|
|
7d6b313312 | ||
|
|
a84a693327 | ||
|
|
99afb4b750 | ||
|
|
21f135ba76 | ||
|
|
d7ee7b589f | ||
|
|
a8589d1ff3 | ||
|
|
dd9430e758 | ||
|
|
05f6cb42db | ||
|
|
5bdb93c2a6 | ||
|
|
613748804a | ||
|
|
86345a511f | ||
|
|
a24eedada7 | ||
|
|
4a728beba1 | ||
|
|
019597555f | ||
|
|
e4bfe50e8f | ||
|
|
0f826b0b0d | ||
|
|
7c2ff8c889 | ||
|
|
7a8ba4c9a0 | ||
|
|
219027f580 | ||
|
|
6a5ff5f223 | ||
|
|
f7a1cdbbc6 | ||
|
|
d547afeae0 | ||
|
|
dd108286df | ||
|
|
266df8a9b8 | ||
|
|
9c9b2829ae | ||
|
|
50e5886de1 | ||
|
|
ba1d740239 | ||
|
|
a190b2e85e | ||
|
|
3dd1630848 | ||
|
|
07d18dcab1 | ||
|
|
41905784f7 | ||
|
|
4013216fcc | ||
|
|
84f2ad5dea | ||
|
|
44b2bf91be | ||
|
|
660dee94af | ||
|
|
262a97f02b | ||
|
|
bd0fa9e2d2 | ||
|
|
d57c5cda71 | ||
|
|
99e1d6777f | ||
|
|
3c85a317d6 | ||
|
|
5231737369 | ||
|
|
d6059bdd2a | ||
|
|
48a2526d62 | ||
|
|
b29d2fd7f8 | ||
|
|
edfcb83473 | ||
|
|
478b4e3ed4 | ||
|
|
b8680b82c3 | ||
|
|
ac213c2e08 | ||
|
|
e880164c59 | ||
|
|
526bc33e02 | ||
|
|
181616deed | ||
|
|
e515b48929 | ||
|
|
8810eb8c39 | ||
|
|
748c0f5efa | ||
|
|
491f3d16dc | ||
|
|
872c134807 | ||
|
|
f721fdbf87 | ||
|
|
976cb5aaa8 | ||
|
|
b2def42bfd | ||
|
|
b9acef5301 | ||
|
|
58d0927767 | ||
|
|
7dd6e5efca | ||
|
|
5dc09e82c4 | ||
|
|
c2c70f7daf | ||
|
|
477da77b46 | ||
|
|
37b2d69bbc | ||
|
|
addb248e0b | ||
|
|
4b1281f9b7 | ||
|
|
dede14f689 | ||
|
|
5eb4d13aaa | ||
|
|
c30cdb0d68 | ||
|
|
2a0ec3b89d | ||
|
|
03b2a6a8aa | ||
|
|
9fbd504b4e | ||
|
|
9670f226e3 | ||
|
|
6863466653 | ||
|
|
3d5c5e8be5 | ||
|
|
65a9bf2dd5 | ||
|
|
ae9f8cda7e | ||
|
|
5d321e4b9a | ||
|
|
a9526831a4 | ||
|
|
ed0f79bdc5 | ||
|
|
98ee629d00 | ||
|
|
f73ea0bda2 | ||
|
|
c533f69d38 | ||
|
|
a2922bb944 | ||
|
|
95f30ecd1f | ||
|
|
f487355364 | ||
|
|
6e70979973 | ||
|
|
14d7acfad4 | ||
|
|
27c5e1b374 | ||
|
|
af96c6f4d3 | ||
|
|
d32db0bc45 | ||
|
|
7b593af7e1 | ||
|
|
3d3da2b460 | ||
|
|
31069ecf6a | ||
|
|
71578e2bf2 | ||
|
|
2430fcd462 | ||
|
|
f593a6e5f8 | ||
|
|
c91a05776f | ||
|
|
5f9a2cb337 | ||
|
|
8c902431ba | ||
|
|
a33c0748e3 | ||
|
|
306415391d | ||
|
|
d0f28b46cd | ||
|
|
da7dd58641 | ||
|
|
bde8d78b8a | ||
|
|
4dcaa42b6d | ||
|
|
76936f43ae | ||
|
|
f280726037 | ||
|
|
6cd595e438 | ||
|
|
17dd5071ef | ||
|
|
df7cf6c0eb | ||
|
|
3e573a5c6b | ||
|
|
7dfa455508 | ||
|
|
924d85a75e | ||
|
|
91695150cc | ||
|
|
3dd09a8795 | ||
|
|
d7739c4e37 | ||
|
|
c6a15f5026 | ||
|
|
2ca01ed747 | ||
|
|
1b64cb019e | ||
|
|
8c3af5bc62 | ||
|
|
0eabfa55f6 | ||
|
|
6408541075 | ||
|
|
13130c2c9f | ||
|
|
7680ae16c9 | ||
|
|
2c1bc4392f | ||
|
|
93f7bb8dd5 | ||
|
|
1d9c1d4166 | ||
|
|
3f151da314 | ||
|
|
6b95a79724 | ||
|
|
e3dae653e8 | ||
|
|
9de1f328ad | ||
|
|
506874cca9 | ||
|
|
2f2bbb4d06 | ||
|
|
95c3306798 | ||
|
|
df6824a008 | ||
|
|
dd11bf8a79 | ||
|
|
1cfda3d2d8 | ||
|
|
8b5349c7bc | ||
|
|
37de8a7f4a | ||
|
|
7a802ec0ff | ||
|
|
d2ecde2cbb | ||
|
|
248cfd5eb3 | ||
|
|
9da4c5340d | ||
|
|
f9d9bd6aa0 | ||
|
|
5fcef78c6a | ||
|
|
c104fd3494 | ||
|
|
57a76c9aee | ||
|
|
06f74068f4 | ||
|
|
e5d91b8e57 | ||
|
|
f6e092f6cc | ||
|
|
24ae0eee8e | ||
|
|
3c3fc6b268 | ||
|
|
b361440738 | ||
|
|
f0ee1d515b | ||
|
|
628ba81a77 | ||
|
|
bed7889703 | ||
|
|
03204f54ac | ||
|
|
037ce4c68f | ||
|
|
2fcd9819ac | ||
|
|
162e2c1ce5 | ||
|
|
1fe973fa5a | ||
|
|
d153f482dd | ||
|
|
1c960fbb80 | ||
|
|
915e56e1af | ||
|
|
fbb76a4d5d | ||
|
|
8bae98b314 | ||
|
|
fe51b3628e | ||
|
|
ba26eb3d5d | ||
|
|
cf844e2ad6 | ||
|
|
b697a842a5 | ||
|
|
bd3de8f39a | ||
|
|
cbf3cd6151 | ||
|
|
a9770e5d24 | ||
|
|
cf4ef5f3c7 | ||
|
|
afdfd12bdf | ||
|
|
d3861b4442 | ||
|
|
391f2aa56c | ||
|
|
bceec65913 | ||
|
|
9eff52d1a6 | ||
|
|
0186aef814 | ||
|
|
e503848990 | ||
|
|
90b3a98df7 | ||
|
|
d34990141e | ||
|
|
f20d064e05 | ||
|
|
f5e25c5f35 | ||
|
|
f4db76692f | ||
|
|
09bb5cf02f | ||
|
|
3b90df21d5 | ||
|
|
1654d3b329 | ||
|
|
aca6e5bf46 | ||
|
|
4fbe6ca401 | ||
|
|
233af7c74b | ||
|
|
641420c5e0 | ||
|
|
6fed9fd697 | ||
|
|
9c3f4f8dfd | ||
|
|
0644f0eb7d | ||
|
|
da3dd4867d | ||
|
|
e4d622aaaf | ||
|
|
fddedd51d9 | ||
|
|
5ab4b0afe8 | ||
|
|
5dea4d37d1 | ||
|
|
fc27ca9006 | ||
|
|
468a2ed4ec | ||
|
|
018b504f5b | ||
|
|
49f1758d74 | ||
|
|
c0b3554401 | ||
|
|
3de46c7755 | ||
|
|
8fd8e72cec | ||
|
|
78f6010207 | ||
|
|
06bfd0a3c0 | ||
|
|
764e79d051 | ||
|
|
0d08670f61 | ||
|
|
320408ef47 | ||
|
|
fb7e260a20 | ||
|
|
14a9d805b9 | ||
|
|
39de87869c | ||
|
|
473a239d83 | ||
|
|
0a93df5f9c | ||
|
|
8ea5dccea1 | ||
|
|
50f1afbd5b | ||
|
|
2fc81af06a | ||
|
|
6a9c4cfd0b | ||
|
|
884e601683 | ||
|
|
63b28c7816 | ||
|
|
e327327174 | ||
|
|
aa3ab6c6a0 | ||
|
|
04034d0b56 | ||
|
|
6341be45c6 | ||
|
|
e93d550b79 | ||
|
|
e21cef9bb5 | ||
|
|
e1627388d1 | ||
|
|
f15ba926cc | ||
|
|
5d098a32c9 | ||
|
|
ffdc8e5e1c | ||
|
|
940a161192 | ||
|
|
2b779af10f | ||
|
|
dd2eb49385 | ||
|
|
cf437900e0 | ||
|
|
466b4ec01d | ||
|
|
38d82edf0e | ||
|
|
90b503216c | ||
|
|
36c58b18a3 | ||
|
|
a412b9a465 | ||
|
|
82e8a2d763 | ||
|
|
2ede7aa8a1 | ||
|
|
889388f105 | ||
|
|
c7db2068c8 | ||
|
|
0d63dc3ec9 | ||
|
|
c6a01f2ed0 | ||
|
|
9107ed23b7 | ||
|
|
b1953a9627 | ||
|
|
bbe10e8be7 | ||
|
|
c4135d85e1 | ||
|
|
dd40fb68e4 | ||
|
|
767c20a869 | ||
|
|
5335bf9c34 | ||
|
|
f2c4ee41b9 | ||
|
|
0da4b11efb | ||
|
|
0b31223c7a | ||
|
|
fece2f5c77 | ||
|
|
545a7b291a | ||
|
|
f23af34729 | ||
|
|
6be1b4b113 | ||
|
|
3a02a13e38 | ||
|
|
66d36b8e41 | ||
|
|
2aa98ff3bc | ||
|
|
5ee070d21f | ||
|
|
f1dcaf3296 | ||
|
|
2cebe53545 | ||
|
|
32fc0737d6 | ||
|
|
4df491b922 | ||
|
|
1ad6222ebf | ||
|
|
5bc690408d | ||
|
|
3640ddfbf6 | ||
|
|
729ea933ea | ||
|
|
347146be29 | ||
|
|
7a5ea067e2 | ||
|
|
7301e05122 | ||
|
|
ca2f90742d | ||
|
|
414a4a71b4 | ||
|
|
7a369e8a55 | ||
|
|
45f1827fb7 | ||
|
|
05c326d445 | ||
|
|
4e62ffdb21 | ||
|
|
f522f50a08 | ||
|
|
1758187715 | ||
|
|
33b3e04049 | ||
|
|
23cfd32e64 | ||
|
|
285d056629 | ||
|
|
c452dabc3d | ||
|
|
f74f48e9e6 | ||
|
|
6a3a840b19 | ||
|
|
a3bfef35fd | ||
|
|
6797fcd9ab | ||
|
|
97d792b28f | ||
|
|
7ce264ce5f | ||
|
|
8a0407c7e6 | ||
|
|
06986e46a3 | ||
|
|
5897e773fd | ||
|
|
2657140c58 | ||
|
|
eacb068ac2 | ||
|
|
57be722c46 | ||
|
|
771ca56c88 | ||
|
|
ddd8566f41 | ||
|
|
192241cf2a | ||
|
|
3eb62873f6 | ||
|
|
0e36756383 | ||
|
|
fb46937413 | ||
|
|
79b65f3875 | ||
|
|
621e84d9a0 | ||
|
|
fdf73c6855 | ||
|
|
0f432ba551 | ||
|
|
d58edd98e9 | ||
|
|
5cf22f0596 | ||
|
|
f6e6f3d87a | ||
|
|
f40b0ed5e1 | ||
|
|
5d80dad99e | ||
|
|
e83c4b8e3e | ||
|
|
a2e5f7f3d8 | ||
|
|
2f6ad79a80 | ||
|
|
a89b86dc47 | ||
|
|
892e70ec84 | ||
|
|
56dbcd1524 | ||
|
|
234d6f9f3e | ||
|
|
5cb298c934 | ||
|
|
d0b1968a4c | ||
|
|
c79c4f9b14 | ||
|
|
f69a5c9134 | ||
|
|
a299fede9d | ||
|
|
f73de2004e | ||
|
|
cea2039b56 | ||
|
|
f7e14bb535 | ||
|
|
87961d8dcf | ||
|
|
fa1cf5ef34 | ||
|
|
3f0a57eb9b | ||
|
|
4cf633d5e9 | ||
|
|
b8e37ed944 | ||
|
|
0c36098c1f | ||
|
|
216c976399 | ||
|
|
259d10f0e4 | ||
|
|
b051781ddb | ||
|
|
53c679b59b | ||
|
|
4e05aab4f7 | ||
|
|
671ac699f1 | ||
|
|
9b6f3bc742 | ||
|
|
2980136d75 | ||
|
|
fb0fecd0b9 | ||
|
|
61547106f5 | ||
|
|
232beb3a3c | ||
|
|
ba02bba88c | ||
|
|
1fc2d11a14 | ||
|
|
b0ac0a9438 | ||
|
|
8a98f0dc5b | ||
|
|
c9c82e8f4d | ||
|
|
e60dad86ba | ||
|
|
f142898f52 | ||
|
|
3993d6ecc2 | ||
|
|
4d25bc6c92 | ||
|
|
87da71bace | ||
|
|
3ce1b8c705 | ||
|
|
5025ba959f | ||
|
|
13a6e9beaf | ||
|
|
5201c66108 | ||
|
|
e94ffd89d6 | ||
|
|
d63a0ca34b | ||
|
|
e3d75f564a | ||
|
|
8627048787 | ||
|
|
4dec901c76 | ||
|
|
5c41224a89 | ||
|
|
c8baada94a | ||
|
|
ede07434e0 | ||
|
|
44e2933bf8 | ||
|
|
7be06680ed | ||
|
|
87deec824a | ||
|
|
45cd2b0233 | ||
|
|
f510586372 | ||
|
|
137fafce4e | ||
|
|
b02a342750 | ||
|
|
51d03e65b2 | ||
|
|
3c7d6202ea | ||
|
|
9ed784098a | ||
|
|
531e3aa75e | ||
|
|
68b7fc3e2b | ||
|
|
9261ef3a15 | ||
|
|
a8795c9644 | ||
|
|
07b58a431f | ||
|
|
0aab34004b | ||
|
|
e0bf0258ee | ||
|
|
aff4d850bd | ||
|
|
ae3082dd31 | ||
|
|
243a79d291 | ||
|
|
9371a35e89 | ||
|
|
0e5239ffc3 | ||
|
|
b19b9535f6 | ||
|
|
46d39343d9 | ||
|
|
f2d698cb52 | ||
|
|
33646eb000 | ||
|
|
524b708f98 | ||
|
|
380f148db7 | ||
|
|
fc012aa8dc | ||
|
|
e5acc8a47b | ||
|
|
d4b5621e0a | ||
|
|
23ed7dc0e7 | ||
|
|
315b03b58d | ||
|
|
c225d63e9e | ||
|
|
b8dd5b1a2d | ||
|
|
366af6b73a | ||
|
|
f2f031fd57 | ||
|
|
12122bfc36 | ||
|
|
edb998ba23 | ||
|
|
5df54de801 | ||
|
|
b62da463e1 | ||
|
|
3cf9948b8d | ||
|
|
73260ad01f | ||
|
|
22a8c91448 | ||
|
|
a8945d24d1 | ||
|
|
6296590bf7 | ||
|
|
bcfb653816 | ||
|
|
e46cdc08cc | ||
|
|
8189c4e3fd | ||
|
|
6ffbcf45c6 | ||
|
|
643b5fcdc8 | ||
|
|
f38df51e8d | ||
|
|
1a934e8bfd | ||
|
|
5338220d3a | ||
|
|
a059760954 | ||
|
|
d7c70d09f0 | ||
|
|
c185c1c413 | ||
|
|
f50c43464c | ||
|
|
f45aaf0e35 | ||
|
|
8c9df8774e | ||
|
|
99c7fbfef7 | ||
|
|
1d9e109820 | ||
|
|
d25b0f65ea | ||
|
|
858634e1d0 | ||
|
|
474274583f | ||
|
|
d82c5f7b5c | ||
|
|
0c38e8637f | ||
|
|
1941eb315d | ||
|
|
9020860479 | ||
|
|
14edea1aff | ||
|
|
b68db61222 | ||
|
|
bb407cd624 | ||
|
|
32d66738b0 | ||
|
|
95e53ac535 | ||
|
|
7639c3d9e5 | ||
|
|
7ecd11accb | ||
|
|
17dffef5ec | ||
|
|
3e2a1297b5 | ||
|
|
323d3e506d | ||
|
|
ff2b66f42e | ||
|
|
8897781558 | ||
|
|
2fa9e23e04 | ||
|
|
cacf0688c6 | ||
|
|
88971fd034 | ||
|
|
7ec9be9c53 | ||
|
|
17c80c8a3d | ||
|
|
cfd39d6b55 | ||
|
|
32a453d7ba | ||
|
|
f9340ea0d5 | ||
|
|
ec398af41c | ||
|
|
54414221e4 | ||
|
|
40b6a5aad1 | ||
|
|
ab9cf73258 | ||
|
|
30c2783d2f | ||
|
|
1a40afa756 | ||
|
|
f96b480670 | ||
|
|
956509dfec | ||
|
|
586beb8318 | ||
|
|
427943907f | ||
|
|
739464fbc5 | ||
|
|
ca53ad7425 | ||
|
|
f6fde343a1 | ||
|
|
927004e349 | ||
|
|
83b464e4f7 | ||
|
|
ab7f9bb861 | ||
|
|
54cb509d64 | ||
|
|
885301486c | ||
|
|
7f8fdc9814 | ||
|
|
01a5f1991c | ||
|
|
76421c496d | ||
|
|
7845f62c22 | ||
|
|
ae72e247fa | ||
|
|
61561b9df7 | ||
|
|
782f7fb489 | ||
|
|
a80ef851f7 | ||
|
|
347aa3c225 | ||
|
|
95f7661170 | ||
|
|
a9c299c0be | ||
|
|
e52f4dc599 | ||
|
|
625e13bfde | ||
|
|
22112f8d14 | ||
|
|
c33f5c1a24 | ||
|
|
1a46daf621 | ||
|
|
987803781e | ||
|
|
0a96a9a023 | ||
|
|
af7b214476 | ||
|
|
1b9802a0d9 | ||
|
|
c15cf6ac06 | ||
|
|
c85c912562 | ||
|
|
ce19fc0f11 | ||
|
|
51ef725647 | ||
|
|
dc72021748 | ||
|
|
dfef2b41aa | ||
|
|
91482cd6a0 | ||
|
|
e3d3205cd9 | ||
|
|
7c809abe86 | ||
|
|
db6e1e1fe3 | ||
|
|
61ee72517c | ||
|
|
1cacc71050 | ||
|
|
fac990a656 | ||
|
|
fcd9ba8802 | ||
|
|
93cc60e805 | ||
|
|
d4bb28c59b | ||
|
|
ca6496c27c | ||
|
|
492beb62a8 | ||
|
|
e0b466bcfd | ||
|
|
287c81abf3 | ||
|
|
c05b5ef7b0 | ||
|
|
ddd079c8f8 | ||
|
|
b28c7da0a4 | ||
|
|
34d26d3687 | ||
|
|
471555b3a8 | ||
|
|
58e6a58eb7 | ||
|
|
8fc52bc56a | ||
|
|
49ebd472fa | ||
|
|
40017a9a11 | ||
|
|
a086b7aa00 | ||
|
|
9c311dfce5 | ||
|
|
a38d36ccd0 | ||
|
|
d5e081c7ae | ||
|
|
5879edbb09 | ||
|
|
f31014b18f | ||
|
|
5b3e9713dd | ||
|
|
b43930d4c9 | ||
|
|
bad780a197 | ||
|
|
0a4b7226fc | ||
|
|
0ec78b360c | ||
|
|
ecd0c0dfc5 | ||
|
|
83892d0d30 | ||
|
|
9d39615b7d | ||
|
|
a14665bde7 | ||
|
|
257fa1c53e | ||
|
|
afe475e9be | ||
|
|
51b2448e05 | ||
|
|
c34ffd2736 | ||
|
|
54e688277a | ||
|
|
3a01901d6c | ||
|
|
744e7d2790 | ||
|
|
a3e332af19 | ||
|
|
4678055173 | ||
|
|
d59acb8c5b | ||
|
|
49ae42bbe1 | ||
|
|
9b05ef6f39 | ||
|
|
187320b019 | ||
|
|
b345853918 | ||
|
|
a88e16152f |
19
AUTHORS.rst
19
AUTHORS.rst
@@ -29,7 +29,7 @@ Matthew Hodgson <matthew at matrix.org>
|
|||||||
|
|
||||||
Emmanuel Rohee <manu at matrix.org>
|
Emmanuel Rohee <manu at matrix.org>
|
||||||
* Supporting iOS clients (testability and fallback registration)
|
* Supporting iOS clients (testability and fallback registration)
|
||||||
|
|
||||||
Turned to Dust <dwinslow86 at gmail.com>
|
Turned to Dust <dwinslow86 at gmail.com>
|
||||||
* ArchLinux installation instructions
|
* ArchLinux installation instructions
|
||||||
|
|
||||||
@@ -44,4 +44,19 @@ Eric Myhre <hash at exultant.us>
|
|||||||
repository API.
|
repository API.
|
||||||
|
|
||||||
Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
|
Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
|
||||||
* Add SAML2 support for registration and logins.
|
* Add SAML2 support for registration and login.
|
||||||
|
|
||||||
|
Steven Hammerton <steven.hammerton at openmarket.com>
|
||||||
|
* Add CAS support for registration and login.
|
||||||
|
|
||||||
|
Mads Robin Christensen <mads at v42 dot dk>
|
||||||
|
* CentOS 7 installation instructions.
|
||||||
|
|
||||||
|
Florent Violleau <floviolleau at gmail dot com>
|
||||||
|
* Add Raspberry Pi installation instructions and general troubleshooting items
|
||||||
|
|
||||||
|
Niklas Riekenbrauck <nikriek at gmail dot.com>
|
||||||
|
* Add JWT support for registration and login
|
||||||
|
|
||||||
|
Christoph Witzany <christoph at web.crofting.com>
|
||||||
|
* Add LDAP support for authentication
|
||||||
|
|||||||
476
CHANGES.rst
476
CHANGES.rst
@@ -1,3 +1,471 @@
|
|||||||
|
Changes in synapse v0.16.1-r1 (2016-07-08)
|
||||||
|
==========================================
|
||||||
|
|
||||||
|
THIS IS A CRITICAL SECURITY UPDATE.
|
||||||
|
|
||||||
|
This fixes a bug which allowed users' accounts to be accessed by unauthorised
|
||||||
|
users.
|
||||||
|
|
||||||
|
Changes in synapse v0.16.1 (2016-06-20)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix assorted bugs in ``/preview_url`` (PR #872)
|
||||||
|
* Fix TypeError when setting unicode passwords (PR #873)
|
||||||
|
|
||||||
|
|
||||||
|
Performance improvements:
|
||||||
|
|
||||||
|
* Turn ``use_frozen_events`` off by default (PR #877)
|
||||||
|
* Disable responding with canonical json for federation (PR #878)
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.16.1-rc1 (2016-06-15)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features: None
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Log requester for ``/publicRoom`` endpoints when possible (PR #856)
|
||||||
|
* 502 on ``/thumbnail`` when can't connect to remote server (PR #862)
|
||||||
|
* Linearize fetching of gaps on incoming events (PR #871)
|
||||||
|
|
||||||
|
|
||||||
|
Bugs fixes:
|
||||||
|
|
||||||
|
* Fix bug where rooms where marked as published by default (PR #857)
|
||||||
|
* Fix bug where joining room with an event with invalid sender (PR #868)
|
||||||
|
* Fix bug where backfilled events were sent down sync streams (PR #869)
|
||||||
|
* Fix bug where outgoing connections could wedge indefinitely, causing push
|
||||||
|
notifications to be unreliable (PR #870)
|
||||||
|
|
||||||
|
|
||||||
|
Performance improvements:
|
||||||
|
|
||||||
|
* Improve ``/publicRooms`` performance(PR #859)
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.16.0 (2016-06-09)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
NB: As of v0.14 all AS config files must have an ID field.
|
||||||
|
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Don't make rooms published by default (PR #857)
|
||||||
|
|
||||||
|
Changes in synapse v0.16.0-rc2 (2016-06-08)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add configuration option for tuning GC via ``gc.set_threshold`` (PR #849)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Record metrics about GC (PR #771, #847, #852)
|
||||||
|
* Add metric counter for number of persisted events (PR #841)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix 'From' header in email notifications (PR #843)
|
||||||
|
* Fix presence where timeouts were not being fired for the first 8h after
|
||||||
|
restarts (PR #842)
|
||||||
|
* Fix bug where synapse sent malformed transactions to AS's when retrying
|
||||||
|
transactions (Commits 310197b, 8437906)
|
||||||
|
|
||||||
|
Performance improvements:
|
||||||
|
|
||||||
|
* Remove event fetching from DB threads (PR #835)
|
||||||
|
* Change the way we cache events (PR #836)
|
||||||
|
* Add events to cache when we persist them (PR #840)
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.16.0-rc1 (2016-06-03)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Version 0.15 was not released. See v0.15.0-rc1 below for additional changes.
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add email notifications for missed messages (PR #759, #786, #799, #810, #815,
|
||||||
|
#821)
|
||||||
|
* Add a ``url_preview_ip_range_whitelist`` config param (PR #760)
|
||||||
|
* Add /report endpoint (PR #762)
|
||||||
|
* Add basic ignore user API (PR #763)
|
||||||
|
* Add an openidish mechanism for proving that you own a given user_id (PR #765)
|
||||||
|
* Allow clients to specify a server_name to avoid 'No known servers' (PR #794)
|
||||||
|
* Add secondary_directory_servers option to fetch room list from other servers
|
||||||
|
(PR #808, #813)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Report per request metrics for all of the things using request_handler (PR
|
||||||
|
#756)
|
||||||
|
* Correctly handle ``NULL`` password hashes from the database (PR #775)
|
||||||
|
* Allow receipts for events we haven't seen in the db (PR #784)
|
||||||
|
* Make synctl read a cache factor from config file (PR #785)
|
||||||
|
* Increment badge count per missed convo, not per msg (PR #793)
|
||||||
|
* Special case m.room.third_party_invite event auth to match invites (PR #814)
|
||||||
|
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix typo in event_auth servlet path (PR #757)
|
||||||
|
* Fix password reset (PR #758)
|
||||||
|
|
||||||
|
|
||||||
|
Performance improvements:
|
||||||
|
|
||||||
|
* Reduce database inserts when sending transactions (PR #767)
|
||||||
|
* Queue events by room for persistence (PR #768)
|
||||||
|
* Add cache to ``get_user_by_id`` (PR #772)
|
||||||
|
* Add and use ``get_domain_from_id`` (PR #773)
|
||||||
|
* Use tree cache for ``get_linearized_receipts_for_room`` (PR #779)
|
||||||
|
* Remove unused indices (PR #782)
|
||||||
|
* Add caches to ``bulk_get_push_rules*`` (PR #804)
|
||||||
|
* Cache ``get_event_reference_hashes`` (PR #806)
|
||||||
|
* Add ``get_users_with_read_receipts_in_room`` cache (PR #809)
|
||||||
|
* Use state to calculate ``get_users_in_room`` (PR #811)
|
||||||
|
* Load push rules in storage layer so that they get cached (PR #825)
|
||||||
|
* Make ``get_joined_hosts_for_room`` use get_users_in_room (PR #828)
|
||||||
|
* Poke notifier on next reactor tick (PR #829)
|
||||||
|
* Change CacheMetrics to be quicker (PR #830)
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.15.0-rc1 (2016-04-26)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add login support for Javascript Web Tokens, thanks to Niklas Riekenbrauck
|
||||||
|
(PR #671,#687)
|
||||||
|
* Add URL previewing support (PR #688)
|
||||||
|
* Add login support for LDAP, thanks to Christoph Witzany (PR #701)
|
||||||
|
* Add GET endpoint for pushers (PR #716)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Never notify for member events (PR #667)
|
||||||
|
* Deduplicate identical ``/sync`` requests (PR #668)
|
||||||
|
* Require user to have left room to forget room (PR #673)
|
||||||
|
* Use DNS cache if within TTL (PR #677)
|
||||||
|
* Let users see their own leave events (PR #699)
|
||||||
|
* Deduplicate membership changes (PR #700)
|
||||||
|
* Increase performance of pusher code (PR #705)
|
||||||
|
* Respond with error status 504 if failed to talk to remote server (PR #731)
|
||||||
|
* Increase search performance on postgres (PR #745)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix bug where disabling all notifications still resulted in push (PR #678)
|
||||||
|
* Fix bug where users couldn't reject remote invites if remote refused (PR #691)
|
||||||
|
* Fix bug where synapse attempted to backfill from itself (PR #693)
|
||||||
|
* Fix bug where profile information was not correctly added when joining remote
|
||||||
|
rooms (PR #703)
|
||||||
|
* Fix bug where register API required incorrect key name for AS registration
|
||||||
|
(PR #727)
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.14.0 (2016-03-30)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
No changes from v0.14.0-rc2
|
||||||
|
|
||||||
|
Changes in synapse v0.14.0-rc2 (2016-03-23)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add published room list API (PR #657)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Change various caches to consume less memory (PR #656, #658, #660, #662,
|
||||||
|
#663, #665)
|
||||||
|
* Allow rooms to be published without requiring an alias (PR #664)
|
||||||
|
* Intern common strings in caches to reduce memory footprint (#666)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix reject invites over federation (PR #646)
|
||||||
|
* Fix bug where registration was not idempotent (PR #649)
|
||||||
|
* Update aliases event after deleting aliases (PR #652)
|
||||||
|
* Fix unread notification count, which was sometimes wrong (PR #661)
|
||||||
|
|
||||||
|
Changes in synapse v0.14.0-rc1 (2016-03-14)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add event_id to response to state event PUT (PR #581)
|
||||||
|
* Allow guest users access to messages in rooms they have joined (PR #587)
|
||||||
|
* Add config for what state is included in a room invite (PR #598)
|
||||||
|
* Send the inviter's member event in room invite state (PR #607)
|
||||||
|
* Add error codes for malformed/bad JSON in /login (PR #608)
|
||||||
|
* Add support for changing the actions for default rules (PR #609)
|
||||||
|
* Add environment variable SYNAPSE_CACHE_FACTOR, default it to 0.1 (PR #612)
|
||||||
|
* Add ability for alias creators to delete aliases (PR #614)
|
||||||
|
* Add profile information to invites (PR #624)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Enforce user_id exclusivity for AS registrations (PR #572)
|
||||||
|
* Make adding push rules idempotent (PR #587)
|
||||||
|
* Improve presence performance (PR #582, #586)
|
||||||
|
* Change presence semantics for ``last_active_ago`` (PR #582, #586)
|
||||||
|
* Don't allow ``m.room.create`` to be changed (PR #596)
|
||||||
|
* Add 800x600 to default list of valid thumbnail sizes (PR #616)
|
||||||
|
* Always include kicks and bans in full /sync (PR #625)
|
||||||
|
* Send history visibility on boundary changes (PR #626)
|
||||||
|
* Register endpoint now returns a refresh_token (PR #637)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix bug where we returned incorrect state in /sync (PR #573)
|
||||||
|
* Always return a JSON object from push rule API (PR #606)
|
||||||
|
* Fix bug where registering without a user id sometimes failed (PR #610)
|
||||||
|
* Report size of ExpiringCache in cache size metrics (PR #611)
|
||||||
|
* Fix rejection of invites to empty rooms (PR #615)
|
||||||
|
* Fix usage of ``bcrypt`` to not use ``checkpw`` (PR #619)
|
||||||
|
* Pin ``pysaml2`` dependency (PR #634)
|
||||||
|
* Fix bug in ``/sync`` where timeline order was incorrect for backfilled events
|
||||||
|
(PR #635)
|
||||||
|
|
||||||
|
Changes in synapse v0.13.3 (2016-02-11)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Fix bug where ``/sync`` would occasionally return events in the wrong room.
|
||||||
|
|
||||||
|
Changes in synapse v0.13.2 (2016-02-11)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Fix bug where ``/events`` would fail to skip some events if there had been
|
||||||
|
more events than the limit specified since the last request (PR #570)
|
||||||
|
|
||||||
|
Changes in synapse v0.13.1 (2016-02-10)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Bump matrix-angular-sdk (matrix web console) dependency to 0.6.8 to
|
||||||
|
pull in the fix for SYWEB-361 so that the default client can display
|
||||||
|
HTML messages again(!)
|
||||||
|
|
||||||
|
Changes in synapse v0.13.0 (2016-02-10)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
This version includes an upgrade of the schema, specifically adding an index to
|
||||||
|
the ``events`` table. This may cause synapse to pause for several minutes the
|
||||||
|
first time it is started after the upgrade.
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Improve general performance (PR #540, #543. #544, #54, #549, #567)
|
||||||
|
* Change guest user ids to be incrementing integers (PR #550)
|
||||||
|
* Improve performance of public room list API (PR #552)
|
||||||
|
* Change profile API to omit keys rather than return null (PR #557)
|
||||||
|
* Add ``/media/r0`` endpoint prefix, which is equivalent to ``/media/v1/``
|
||||||
|
(PR #595)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix bug with upgrading guest accounts where it would fail if you opened the
|
||||||
|
registration email on a different device (PR #547)
|
||||||
|
* Fix bug where unread count could be wrong (PR #568)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Changes in synapse v0.12.1-rc1 (2016-01-29)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
Features:
|
||||||
|
|
||||||
|
* Add unread notification counts in ``/sync`` (PR #456)
|
||||||
|
* Add support for inviting 3pids in ``/createRoom`` (PR #460)
|
||||||
|
* Add ability for guest accounts to upgrade (PR #462)
|
||||||
|
* Add ``/versions`` API (PR #468)
|
||||||
|
* Add ``event`` to ``/context`` API (PR #492)
|
||||||
|
* Add specific error code for invalid user names in ``/register`` (PR #499)
|
||||||
|
* Add support for push badge counts (PR #507)
|
||||||
|
* Add support for non-guest users to peek in rooms using ``/events`` (PR #510)
|
||||||
|
|
||||||
|
Changes:
|
||||||
|
|
||||||
|
* Change ``/sync`` so that guest users only get rooms they've joined (PR #469)
|
||||||
|
* Change to require unbanning before other membership changes (PR #501)
|
||||||
|
* Change default push rules to notify for all messages (PR #486)
|
||||||
|
* Change default push rules to not notify on membership changes (PR #514)
|
||||||
|
* Change default push rules in one to one rooms to only notify for events that
|
||||||
|
are messages (PR #529)
|
||||||
|
* Change ``/sync`` to reject requests with a ``from`` query param (PR #512)
|
||||||
|
* Change server manhole to use SSH rather than telnet (PR #473)
|
||||||
|
* Change server to require AS users to be registered before use (PR #487)
|
||||||
|
* Change server not to start when ASes are invalidly configured (PR #494)
|
||||||
|
* Change server to require ID and ``as_token`` to be unique for AS's (PR #496)
|
||||||
|
* Change maximum pagination limit to 1000 (PR #497)
|
||||||
|
|
||||||
|
Bug fixes:
|
||||||
|
|
||||||
|
* Fix bug where ``/sync`` didn't return when something under the leave key
|
||||||
|
changed (PR #461)
|
||||||
|
* Fix bug where we returned smaller rather than larger than requested
|
||||||
|
thumbnails when ``method=crop`` (PR #464)
|
||||||
|
* Fix thumbnails API to only return cropped thumbnails when asking for a
|
||||||
|
cropped thumbnail (PR #475)
|
||||||
|
* Fix bug where we occasionally still logged access tokens (PR #477)
|
||||||
|
* Fix bug where ``/events`` would always return immediately for guest users
|
||||||
|
(PR #480)
|
||||||
|
* Fix bug where ``/sync`` unexpectedly returned old left rooms (PR #481)
|
||||||
|
* Fix enabling and disabling push rules (PR #498)
|
||||||
|
* Fix bug where ``/register`` returned 500 when given unicode username
|
||||||
|
(PR #513)
|
||||||
|
|
||||||
|
Changes in synapse v0.12.0 (2016-01-04)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Expose ``/login`` under ``r0`` (PR #459)
|
||||||
|
|
||||||
|
Changes in synapse v0.12.0-rc3 (2015-12-23)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Allow guest accounts access to ``/sync`` (PR #455)
|
||||||
|
* Allow filters to include/exclude rooms at the room level
|
||||||
|
rather than just from the components of the sync for each
|
||||||
|
room. (PR #454)
|
||||||
|
* Include urls for room avatars in the response to ``/publicRooms`` (PR #453)
|
||||||
|
* Don't set a identicon as the avatar for a user when they register (PR #450)
|
||||||
|
* Add a ``display_name`` to third-party invites (PR #449)
|
||||||
|
* Send more information to the identity server for third-party invites so that
|
||||||
|
it can send richer messages to the invitee (PR #446)
|
||||||
|
* Cache the responses to ``/initialSync`` for 5 minutes. If a client
|
||||||
|
retries a request to ``/initialSync`` before the a response was computed
|
||||||
|
to the first request then the same response is used for both requests
|
||||||
|
(PR #457)
|
||||||
|
* Fix a bug where synapse would always request the signing keys of
|
||||||
|
remote servers even when the key was cached locally (PR #452)
|
||||||
|
* Fix 500 when pagination search results (PR #447)
|
||||||
|
* Fix a bug where synapse was leaking raw email address in third-party invites
|
||||||
|
(PR #448)
|
||||||
|
|
||||||
|
Changes in synapse v0.12.0-rc2 (2015-12-14)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Add caches for whether rooms have been forgotten by a user (PR #434)
|
||||||
|
* Remove instructions to use ``--process-dependency-link`` since all of the
|
||||||
|
dependencies of synapse are on PyPI (PR #436)
|
||||||
|
* Parallelise the processing of ``/sync`` requests (PR #437)
|
||||||
|
* Fix race updating presence in ``/events`` (PR #444)
|
||||||
|
* Fix bug back-populating search results (PR #441)
|
||||||
|
* Fix bug calculating state in ``/sync`` requests (PR #442)
|
||||||
|
|
||||||
|
Changes in synapse v0.12.0-rc1 (2015-12-10)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Host the client APIs released as r0 by
|
||||||
|
https://matrix.org/docs/spec/r0.0.0/client_server.html
|
||||||
|
on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
|
||||||
|
* Updates the client APIs to match r0 of the matrix specification.
|
||||||
|
|
||||||
|
* All APIs return events in the new event format, old APIs also include
|
||||||
|
the fields needed to parse the event using the old format for
|
||||||
|
compatibility. (PR #402)
|
||||||
|
* Search results are now given as a JSON array rather than
|
||||||
|
a JSON object (PR #405)
|
||||||
|
* Miscellaneous changes to search (PR #403, PR #406, PR #412)
|
||||||
|
* Filter JSON objects may now be passed as query parameters to ``/sync``
|
||||||
|
(PR #431)
|
||||||
|
* Fix implementation of ``/admin/whois`` (PR #418)
|
||||||
|
* Only include the rooms that user has left in ``/sync`` if the client
|
||||||
|
requests them in the filter (PR #423)
|
||||||
|
* Don't push for ``m.room.message`` by default (PR #411)
|
||||||
|
* Add API for setting per account user data (PR #392)
|
||||||
|
* Allow users to forget rooms (PR #385)
|
||||||
|
|
||||||
|
* Performance improvements and monitoring:
|
||||||
|
|
||||||
|
* Add per-request counters for CPU time spent on the main python thread.
|
||||||
|
(PR #421, PR #420)
|
||||||
|
* Add per-request counters for time spent in the database (PR #429)
|
||||||
|
* Make state updates in the C+S API idempotent (PR #416)
|
||||||
|
* Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
|
||||||
|
* Reuse a single http client, rather than creating new ones (PR #413)
|
||||||
|
|
||||||
|
* Fixed a bug upgrading from older versions of synapse on postgresql (PR #417)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.1 (2015-11-20)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Add extra options to search API (PR #394)
|
||||||
|
* Fix bug where we did not correctly cap federation retry timers. This meant it
|
||||||
|
could take several hours for servers to start talking to ressurected servers,
|
||||||
|
even when they were receiving traffic from them (PR #393)
|
||||||
|
* Don't advertise login token flow unless CAS is enabled. This caused issues
|
||||||
|
where some clients would always use the fallback API if they did not
|
||||||
|
recognize all login flows (PR #391)
|
||||||
|
* Change /v2 sync API to rename ``private_user_data`` to ``account_data``
|
||||||
|
(PR #386)
|
||||||
|
* Change /v2 sync API to remove the ``event_map`` and rename keys in ``rooms``
|
||||||
|
object (PR #389)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.0-r2 (2015-11-19)
|
||||||
|
==========================================
|
||||||
|
|
||||||
|
* Fix bug in database port script (PR #387)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.0-r1 (2015-11-18)
|
||||||
|
==========================================
|
||||||
|
|
||||||
|
* Retry and fail federation requests more aggressively for requests that block
|
||||||
|
client side requests (PR #384)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.0 (2015-11-17)
|
||||||
|
=======================================
|
||||||
|
|
||||||
|
* Change CAS login API (PR #349)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.0-rc2 (2015-11-13)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Various changes to /sync API response format (PR #373)
|
||||||
|
* Fix regression when setting display name in newly joined room over
|
||||||
|
federation (PR #368)
|
||||||
|
* Fix problem where /search was slow when using SQLite (PR #366)
|
||||||
|
|
||||||
|
Changes in synapse v0.11.0-rc1 (2015-11-11)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Add Search API (PR #307, #324, #327, #336, #350, #359)
|
||||||
|
* Add 'archived' state to v2 /sync API (PR #316)
|
||||||
|
* Add ability to reject invites (PR #317)
|
||||||
|
* Add config option to disable password login (PR #322)
|
||||||
|
* Add the login fallback API (PR #330)
|
||||||
|
* Add room context API (PR #334)
|
||||||
|
* Add room tagging support (PR #335)
|
||||||
|
* Update v2 /sync API to match spec (PR #305, #316, #321, #332, #337, #341)
|
||||||
|
* Change retry schedule for application services (PR #320)
|
||||||
|
* Change retry schedule for remote servers (PR #340)
|
||||||
|
* Fix bug where we hosted static content in the incorrect place (PR #329)
|
||||||
|
* Fix bug where we didn't increment retry interval for remote servers (PR #343)
|
||||||
|
|
||||||
|
Changes in synapse v0.10.1-rc1 (2015-10-15)
|
||||||
|
===========================================
|
||||||
|
|
||||||
|
* Add support for CAS, thanks to Steven Hammerton (PR #295, #296)
|
||||||
|
* Add support for using macaroons for ``access_token`` (PR #256, #229)
|
||||||
|
* Add support for ``m.room.canonical_alias`` (PR #287)
|
||||||
|
* Add support for viewing the history of rooms that they have left. (PR #276,
|
||||||
|
#294)
|
||||||
|
* Add support for refresh tokens (PR #240)
|
||||||
|
* Add flag on creation which disables federation of the room (PR #279)
|
||||||
|
* Add some room state to invites. (PR #275)
|
||||||
|
* Atomically persist events when joining a room over federation (PR #283)
|
||||||
|
* Change default history visibility for private rooms (PR #271)
|
||||||
|
* Allow users to redact their own sent events (PR #262)
|
||||||
|
* Use tox for tests (PR #247)
|
||||||
|
* Split up syutil into separate libraries (PR #243)
|
||||||
|
|
||||||
Changes in synapse v0.10.0-r2 (2015-09-16)
|
Changes in synapse v0.10.0-r2 (2015-09-16)
|
||||||
==========================================
|
==========================================
|
||||||
|
|
||||||
@@ -215,7 +683,7 @@ Configuration:
|
|||||||
|
|
||||||
* Add support for changing the bind host of the metrics listener via the
|
* Add support for changing the bind host of the metrics listener via the
|
||||||
``metrics_bind_host`` option.
|
``metrics_bind_host`` option.
|
||||||
|
|
||||||
|
|
||||||
Changes in synapse v0.9.0-r5 (2015-05-21)
|
Changes in synapse v0.9.0-r5 (2015-05-21)
|
||||||
=========================================
|
=========================================
|
||||||
@@ -557,7 +1025,7 @@ See UPGRADE for information about changes to the client server API, including
|
|||||||
breaking backwards compatibility with VoIP calls and registration API.
|
breaking backwards compatibility with VoIP calls and registration API.
|
||||||
|
|
||||||
Homeserver:
|
Homeserver:
|
||||||
* When a user changes their displayname or avatar the server will now update
|
* When a user changes their displayname or avatar the server will now update
|
||||||
all their join states to reflect this.
|
all their join states to reflect this.
|
||||||
* The server now adds "age" key to events to indicate how old they are. This
|
* The server now adds "age" key to events to indicate how old they are. This
|
||||||
is clock independent, so at no point does any server or webclient have to
|
is clock independent, so at no point does any server or webclient have to
|
||||||
@@ -615,7 +1083,7 @@ Changes in synapse 0.2.2 (2014-09-06)
|
|||||||
=====================================
|
=====================================
|
||||||
|
|
||||||
Homeserver:
|
Homeserver:
|
||||||
* When the server returns state events it now also includes the previous
|
* When the server returns state events it now also includes the previous
|
||||||
content.
|
content.
|
||||||
* Add support for inviting people when creating a new room.
|
* Add support for inviting people when creating a new room.
|
||||||
* Make the homeserver inform the room via `m.room.aliases` when a new alias
|
* Make the homeserver inform the room via `m.room.aliases` when a new alias
|
||||||
@@ -627,7 +1095,7 @@ Webclient:
|
|||||||
* Handle `m.room.aliases` events.
|
* Handle `m.room.aliases` events.
|
||||||
* Asynchronously send messages and show a local echo.
|
* Asynchronously send messages and show a local echo.
|
||||||
* Inform the UI when a message failed to send.
|
* Inform the UI when a message failed to send.
|
||||||
* Only autoscroll on receiving a new message if the user was already at the
|
* Only autoscroll on receiving a new message if the user was already at the
|
||||||
bottom of the screen.
|
bottom of the screen.
|
||||||
* Add support for ban/kick reasons.
|
* Add support for ban/kick reasons.
|
||||||
|
|
||||||
|
|||||||
11
MANIFEST.in
11
MANIFEST.in
@@ -11,12 +11,17 @@ recursive-include synapse/storage/schema *.sql
|
|||||||
recursive-include synapse/storage/schema *.py
|
recursive-include synapse/storage/schema *.py
|
||||||
|
|
||||||
recursive-include docs *
|
recursive-include docs *
|
||||||
|
recursive-include res *
|
||||||
recursive-include scripts *
|
recursive-include scripts *
|
||||||
recursive-include scripts-dev *
|
recursive-include scripts-dev *
|
||||||
recursive-include tests *.py
|
recursive-include tests *.py
|
||||||
|
|
||||||
recursive-include static *.css
|
recursive-include synapse/static *.css
|
||||||
recursive-include static *.html
|
recursive-include synapse/static *.gif
|
||||||
recursive-include static *.js
|
recursive-include synapse/static *.html
|
||||||
|
recursive-include synapse/static *.js
|
||||||
|
|
||||||
|
exclude jenkins.sh
|
||||||
|
exclude jenkins*.sh
|
||||||
|
|
||||||
prune demo/etc
|
prune demo/etc
|
||||||
|
|||||||
186
README.rst
186
README.rst
@@ -20,8 +20,8 @@ The overall architecture is::
|
|||||||
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
||||||
|
|
||||||
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
||||||
accessed by the web client at http://matrix.org/beta or via an IRC bridge at
|
accessed by any client from https://matrix.org/blog/try-matrix-now or via IRC
|
||||||
irc://irc.freenode.net/matrix.
|
bridge at irc://irc.freenode.net/matrix.
|
||||||
|
|
||||||
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
||||||
is sufficiently stable to be run as an internet-facing service for real usage!
|
is sufficiently stable to be run as an internet-facing service for real usage!
|
||||||
@@ -58,12 +58,13 @@ the spec in the context of a codebase and let you run your own homeserver and
|
|||||||
generally help bootstrap the ecosystem.
|
generally help bootstrap the ecosystem.
|
||||||
|
|
||||||
In Matrix, every user runs one or more Matrix clients, which connect through to
|
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||||
a Matrix homeserver which stores all their personal chat history and user
|
a Matrix homeserver. The homeserver stores all their personal chat history and
|
||||||
account information - much as a mail client connects through to an IMAP/SMTP
|
user account information - much as a mail client connects through to an
|
||||||
server. Just like email, you can either run your own Matrix homeserver and
|
IMAP/SMTP server. Just like email, you can either run your own Matrix
|
||||||
control and own your own communications and history or use one hosted by
|
homeserver and control and own your own communications and history or use one
|
||||||
someone else (e.g. matrix.org) - there is no single point of control or
|
hosted by someone else (e.g. matrix.org) - there is no single point of control
|
||||||
mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts, etc.
|
or mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts,
|
||||||
|
etc.
|
||||||
|
|
||||||
Synapse ships with two basic demo Matrix clients: webclient (a basic group chat
|
Synapse ships with two basic demo Matrix clients: webclient (a basic group chat
|
||||||
web client demo implemented in AngularJS) and cmdclient (a basic Python
|
web client demo implemented in AngularJS) and cmdclient (a basic Python
|
||||||
@@ -77,14 +78,14 @@ Meanwhile, iOS and Android SDKs and clients are available from:
|
|||||||
- https://github.com/matrix-org/matrix-android-sdk
|
- https://github.com/matrix-org/matrix-android-sdk
|
||||||
|
|
||||||
We'd like to invite you to join #matrix:matrix.org (via
|
We'd like to invite you to join #matrix:matrix.org (via
|
||||||
https://matrix.org/beta), run a homeserver, take a look at the Matrix spec at
|
https://matrix.org/blog/try-matrix-now), run a homeserver, take a look at the
|
||||||
https://matrix.org/docs/spec and API docs at https://matrix.org/docs/api,
|
Matrix spec at https://matrix.org/docs/spec and API docs at
|
||||||
experiment with the APIs and the demo clients, and report any bugs via
|
https://matrix.org/docs/api, experiment with the APIs and the demo clients, and
|
||||||
https://matrix.org/jira.
|
report any bugs via https://matrix.org/jira.
|
||||||
|
|
||||||
Thanks for using Matrix!
|
Thanks for using Matrix!
|
||||||
|
|
||||||
[1] End-to-end encryption is currently in development
|
[1] End-to-end encryption is currently in development - see https://matrix.org/git/olm
|
||||||
|
|
||||||
Synapse Installation
|
Synapse Installation
|
||||||
====================
|
====================
|
||||||
@@ -104,25 +105,41 @@ Installing prerequisites on Ubuntu or Debian::
|
|||||||
|
|
||||||
sudo apt-get install build-essential python2.7-dev libffi-dev \
|
sudo apt-get install build-essential python2.7-dev libffi-dev \
|
||||||
python-pip python-setuptools sqlite3 \
|
python-pip python-setuptools sqlite3 \
|
||||||
libssl-dev python-virtualenv libjpeg-dev
|
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
|
||||||
|
|
||||||
Installing prerequisites on ArchLinux::
|
Installing prerequisites on ArchLinux::
|
||||||
|
|
||||||
sudo pacman -S base-devel python2 python-pip \
|
sudo pacman -S base-devel python2 python-pip \
|
||||||
python-setuptools python-virtualenv sqlite3
|
python-setuptools python-virtualenv sqlite3
|
||||||
|
|
||||||
|
Installing prerequisites on CentOS 7::
|
||||||
|
|
||||||
|
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
|
lcms2-devel libwebp-devel tcl-devel tk-devel \
|
||||||
|
python-virtualenv libffi-devel openssl-devel
|
||||||
|
sudo yum groupinstall "Development Tools"
|
||||||
|
|
||||||
Installing prerequisites on Mac OS X::
|
Installing prerequisites on Mac OS X::
|
||||||
|
|
||||||
xcode-select --install
|
xcode-select --install
|
||||||
sudo easy_install pip
|
sudo easy_install pip
|
||||||
sudo pip install virtualenv
|
sudo pip install virtualenv
|
||||||
|
|
||||||
|
Installing prerequisites on Raspbian::
|
||||||
|
|
||||||
|
sudo apt-get install build-essential python2.7-dev libffi-dev \
|
||||||
|
python-pip python-setuptools sqlite3 \
|
||||||
|
libssl-dev python-virtualenv libjpeg-dev
|
||||||
|
sudo pip install --upgrade pip
|
||||||
|
sudo pip install --upgrade ndg-httpsclient
|
||||||
|
sudo pip install --upgrade virtualenv
|
||||||
|
|
||||||
To install the synapse homeserver run::
|
To install the synapse homeserver run::
|
||||||
|
|
||||||
virtualenv -p python2.7 ~/.synapse
|
virtualenv -p python2.7 ~/.synapse
|
||||||
source ~/.synapse/bin/activate
|
source ~/.synapse/bin/activate
|
||||||
pip install --upgrade setuptools
|
pip install --upgrade setuptools
|
||||||
pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
|
pip install https://github.com/matrix-org/synapse/tarball/master
|
||||||
|
|
||||||
This installs synapse, along with the libraries it uses, into a virtual
|
This installs synapse, along with the libraries it uses, into a virtual
|
||||||
environment under ``~/.synapse``. Feel free to pick a different directory
|
environment under ``~/.synapse``. Feel free to pick a different directory
|
||||||
@@ -133,15 +150,20 @@ In case of problems, please see the _Troubleshooting section below.
|
|||||||
Alternatively, Silvio Fricke has contributed a Dockerfile to automate the
|
Alternatively, Silvio Fricke has contributed a Dockerfile to automate the
|
||||||
above in Docker at https://registry.hub.docker.com/u/silviof/docker-matrix/.
|
above in Docker at https://registry.hub.docker.com/u/silviof/docker-matrix/.
|
||||||
|
|
||||||
|
Also, Martin Giess has created an auto-deployment process with vagrant/ansible,
|
||||||
|
tested with VirtualBox/AWS/DigitalOcean - see https://github.com/EMnify/matrix-synapse-auto-deploy
|
||||||
|
for details.
|
||||||
|
|
||||||
To set up your homeserver, run (in your virtualenv, as before)::
|
To set up your homeserver, run (in your virtualenv, as before)::
|
||||||
|
|
||||||
cd ~/.synapse
|
cd ~/.synapse
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--server-name machine.my.domain.name \
|
--server-name machine.my.domain.name \
|
||||||
--config-path homeserver.yaml \
|
--config-path homeserver.yaml \
|
||||||
--generate-config
|
--generate-config \
|
||||||
|
--report-stats=[yes|no]
|
||||||
|
|
||||||
Substituting your host and domain name as appropriate.
|
...substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
This will generate you a config file that you can then customise, but it will
|
This will generate you a config file that you can then customise, but it will
|
||||||
also generate a set of keys for you. These keys will allow your Home Server to
|
also generate a set of keys for you. These keys will allow your Home Server to
|
||||||
@@ -149,15 +171,15 @@ identify itself to other Home Servers, so don't lose or delete them. It would be
|
|||||||
wise to back them up somewhere safe. If, for whatever reason, you do need to
|
wise to back them up somewhere safe. If, for whatever reason, you do need to
|
||||||
change your Home Server's keys, you may find that other Home Servers have the
|
change your Home Server's keys, you may find that other Home Servers have the
|
||||||
old key cached. If you update the signing key, you should change the name of the
|
old key cached. If you update the signing key, you should change the name of the
|
||||||
key in the <server name>.signing.key file (the second word, which by default is
|
key in the <server name>.signing.key file (the second word) to something different.
|
||||||
, 'auto') to something different.
|
|
||||||
|
|
||||||
By default, registration of new users is disabled. You can either enable
|
By default, registration of new users is disabled. You can either enable
|
||||||
registration in the config by specifying ``enable_registration: true``
|
registration in the config by specifying ``enable_registration: true``
|
||||||
(it is then recommended to also set up CAPTCHA), or
|
(it is then recommended to also set up CAPTCHA - see docs/CAPTCHA_SETUP), or
|
||||||
you can use the command line to register new users::
|
you can use the command line to register new users::
|
||||||
|
|
||||||
$ source ~/.synapse/bin/activate
|
$ source ~/.synapse/bin/activate
|
||||||
|
$ synctl start # if not already running
|
||||||
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
|
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
|
||||||
New user localpart: erikj
|
New user localpart: erikj
|
||||||
Password:
|
Password:
|
||||||
@@ -167,6 +189,16 @@ you can use the command line to register new users::
|
|||||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||||
a TURN server. See docs/turn-howto.rst for details.
|
a TURN server. See docs/turn-howto.rst for details.
|
||||||
|
|
||||||
|
Running Synapse
|
||||||
|
===============
|
||||||
|
|
||||||
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
|
run (e.g. ``~/.synapse``), and::
|
||||||
|
|
||||||
|
cd ~/.synapse
|
||||||
|
source ./bin/activate
|
||||||
|
synctl start
|
||||||
|
|
||||||
Using PostgreSQL
|
Using PostgreSQL
|
||||||
================
|
================
|
||||||
|
|
||||||
@@ -189,19 +221,22 @@ may have a few regressions relative to SQLite.
|
|||||||
For information on how to install and use PostgreSQL, please see
|
For information on how to install and use PostgreSQL, please see
|
||||||
`docs/postgres.rst <docs/postgres.rst>`_.
|
`docs/postgres.rst <docs/postgres.rst>`_.
|
||||||
|
|
||||||
Running Synapse
|
|
||||||
===============
|
|
||||||
|
|
||||||
To actually run your new homeserver, pick a working directory for Synapse to
|
|
||||||
run (e.g. ``~/.synapse``), and::
|
|
||||||
|
|
||||||
cd ~/.synapse
|
|
||||||
source ./bin/activate
|
|
||||||
synctl start
|
|
||||||
|
|
||||||
Platform Specific Instructions
|
Platform Specific Instructions
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
|
Debian
|
||||||
|
------
|
||||||
|
|
||||||
|
Matrix provides official Debian packages via apt from http://matrix.org/packages/debian/.
|
||||||
|
Note that these packages do not include a client - choose one from
|
||||||
|
https://matrix.org/blog/try-matrix-now/ (or build your own with one of our SDKs :)
|
||||||
|
|
||||||
|
Fedora
|
||||||
|
------
|
||||||
|
|
||||||
|
Oleg Girko provides Fedora RPMs at
|
||||||
|
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||||
|
|
||||||
ArchLinux
|
ArchLinux
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -220,8 +255,7 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
|
|||||||
You also may need to explicitly specify python 2.7 again during the install
|
You also may need to explicitly specify python 2.7 again during the install
|
||||||
request::
|
request::
|
||||||
|
|
||||||
pip2.7 install --process-dependency-links \
|
pip2.7 install https://github.com/matrix-org/synapse/tarball/master
|
||||||
https://github.com/matrix-org/synapse/tarball/master
|
|
||||||
|
|
||||||
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
|
If you encounter an error with lib bcrypt causing an Wrong ELF Class:
|
||||||
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||||
@@ -241,6 +275,20 @@ During setup of Synapse you need to call python2.7 directly again::
|
|||||||
|
|
||||||
...substituting your host and domain name as appropriate.
|
...substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
|
FreeBSD
|
||||||
|
-------
|
||||||
|
|
||||||
|
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||||
|
|
||||||
|
- Ports: ``cd /usr/ports/net/py-matrix-synapse && make install clean``
|
||||||
|
- Packages: ``pkg install py27-matrix-synapse``
|
||||||
|
|
||||||
|
NixOS
|
||||||
|
-----
|
||||||
|
|
||||||
|
Robin Lambertz has packaged Synapse for NixOS at:
|
||||||
|
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||||
|
|
||||||
Windows Install
|
Windows Install
|
||||||
---------------
|
---------------
|
||||||
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
||||||
@@ -280,12 +328,23 @@ Troubleshooting
|
|||||||
Troubleshooting Installation
|
Troubleshooting Installation
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
Synapse requires pip 1.7 or later, so if your OS provides too old a version and
|
Synapse requires pip 1.7 or later, so if your OS provides too old a version you
|
||||||
you get errors about ``error: no such option: --process-dependency-links`` you
|
|
||||||
may need to manually upgrade it::
|
may need to manually upgrade it::
|
||||||
|
|
||||||
sudo pip install --upgrade pip
|
sudo pip install --upgrade pip
|
||||||
|
|
||||||
|
Installing may fail with ``Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)``.
|
||||||
|
You can fix this by manually upgrading pip and virtualenv::
|
||||||
|
|
||||||
|
sudo pip install --upgrade virtualenv
|
||||||
|
|
||||||
|
You can next rerun ``virtualenv -p python2.7 synapse`` to update the virtual env.
|
||||||
|
|
||||||
|
Installing may fail during installing virtualenv with ``InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.``
|
||||||
|
You can fix this by manually installing ndg-httpsclient::
|
||||||
|
|
||||||
|
pip install --upgrade ndg-httpsclient
|
||||||
|
|
||||||
Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
|
Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
|
||||||
You can fix this by upgrading setuptools::
|
You can fix this by upgrading setuptools::
|
||||||
|
|
||||||
@@ -425,6 +484,10 @@ SRV record, as that is the name other machines will expect it to have::
|
|||||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||||
|
|
||||||
|
|
||||||
|
If you've already generated the config file, you need to edit the "server_name"
|
||||||
|
in you ```homeserver.yaml``` file. If you've already started Synapse and a
|
||||||
|
database has been created, you will have to recreate the database.
|
||||||
|
|
||||||
You may additionally want to pass one or more "-v" options, in order to
|
You may additionally want to pass one or more "-v" options, in order to
|
||||||
increase the verbosity of logging output; at least for initial testing.
|
increase the verbosity of logging output; at least for initial testing.
|
||||||
|
|
||||||
@@ -476,7 +539,6 @@ Logging In To An Existing Account
|
|||||||
Just enter the ``@localpart:my.domain.here`` Matrix user ID and password into
|
Just enter the ``@localpart:my.domain.here`` Matrix user ID and password into
|
||||||
the form and click the Login button.
|
the form and click the Login button.
|
||||||
|
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
|
|
||||||
@@ -497,6 +559,43 @@ we are running a single identity server (https://matrix.org) at the current
|
|||||||
time.
|
time.
|
||||||
|
|
||||||
|
|
||||||
|
URL Previews
|
||||||
|
============
|
||||||
|
|
||||||
|
Synapse 0.15.0 introduces an experimental new API for previewing URLs at
|
||||||
|
/_matrix/media/r0/preview_url. This is disabled by default. To turn it on
|
||||||
|
you must enable the `url_preview_enabled: True` config parameter and explicitly
|
||||||
|
specify the IP ranges that Synapse is not allowed to spider for previewing in
|
||||||
|
the `url_preview_ip_range_blacklist` configuration parameter. This is critical
|
||||||
|
from a security perspective to stop arbitrary Matrix users spidering 'internal'
|
||||||
|
URLs on your network. At the very least we recommend that your loopback and
|
||||||
|
RFC1918 IP addresses are blacklisted.
|
||||||
|
|
||||||
|
This also requires the optional lxml and netaddr python dependencies to be
|
||||||
|
installed.
|
||||||
|
|
||||||
|
|
||||||
|
Password reset
|
||||||
|
==============
|
||||||
|
|
||||||
|
If a user has registered an email address to their account using an identity
|
||||||
|
server, they can request a password-reset token via clients such as Vector.
|
||||||
|
|
||||||
|
A manual password reset can be done via direct database access as follows.
|
||||||
|
|
||||||
|
First calculate the hash of the new password:
|
||||||
|
|
||||||
|
$ source ~/.synapse/bin/activate
|
||||||
|
$ ./scripts/hash_password
|
||||||
|
Password:
|
||||||
|
Confirm password:
|
||||||
|
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||||
|
|
||||||
|
Then update the `users` table in the database:
|
||||||
|
|
||||||
|
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||||
|
WHERE name='@test:test.com';
|
||||||
|
|
||||||
Where's the spec?!
|
Where's the spec?!
|
||||||
==================
|
==================
|
||||||
|
|
||||||
@@ -517,3 +616,20 @@ Building internal API documentation::
|
|||||||
|
|
||||||
python setup.py build_sphinx
|
python setup.py build_sphinx
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Help!! Synapse eats all my RAM!
|
||||||
|
===============================
|
||||||
|
|
||||||
|
Synapse's architecture is quite RAM hungry currently - we deliberately
|
||||||
|
cache a lot of recent room data and metadata in RAM in order to speed up
|
||||||
|
common requests. We'll improve this in future, but for now the easiest
|
||||||
|
way to either reduce the RAM usage (at the risk of slowing things down)
|
||||||
|
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
|
||||||
|
variable. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1.0 will max out
|
||||||
|
at around 3-4GB of resident memory - this is what we currently run the
|
||||||
|
matrix.org on. The default setting is currently 0.1, which is probably
|
||||||
|
around a ~700MB footprint. You can dial it down further to 0.02 if
|
||||||
|
desired, which targets roughly ~512MB. Conversely you can dial it up if
|
||||||
|
you need performance for lots of users and have a box with a lot of RAM.
|
||||||
|
|
||||||
|
|||||||
21
UPGRADE.rst
21
UPGRADE.rst
@@ -30,6 +30,27 @@ running:
|
|||||||
python synapse/python_dependencies.py | xargs -n1 pip install
|
python synapse/python_dependencies.py | xargs -n1 pip install
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.15.0
|
||||||
|
====================
|
||||||
|
|
||||||
|
If you want to use the new URL previewing API (/_matrix/media/r0/preview_url)
|
||||||
|
then you have to explicitly enable it in the config and update your dependencies
|
||||||
|
dependencies. See README.rst for details.
|
||||||
|
|
||||||
|
|
||||||
|
Upgrading to v0.11.0
|
||||||
|
====================
|
||||||
|
|
||||||
|
This release includes the option to send anonymous usage stats to matrix.org,
|
||||||
|
and requires that administrators explictly opt in or out by setting the
|
||||||
|
``report_stats`` option to either ``true`` or ``false``.
|
||||||
|
|
||||||
|
We would really appreciate it if you could help our project out by reporting
|
||||||
|
anonymized usage statistics from your homeserver. Only very basic aggregate
|
||||||
|
data (e.g. number of users) will be reported, but it helps us to track the
|
||||||
|
growth of the Matrix community, and helps us to make Matrix a success, as well
|
||||||
|
as to convince other networks that they should peer with us.
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v0.9.0
|
Upgrading to v0.9.0
|
||||||
===================
|
===================
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
151
contrib/graph/graph3.py
Normal file
151
contrib/graph/graph3.py
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
# Copyright 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import simplejson as json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
|
|
||||||
|
def make_graph(file_name, room_id, file_prefix, limit):
|
||||||
|
print "Reading lines"
|
||||||
|
with open(file_name) as f:
|
||||||
|
lines = f.readlines()
|
||||||
|
|
||||||
|
print "Read lines"
|
||||||
|
|
||||||
|
events = [FrozenEvent(json.loads(line)) for line in lines]
|
||||||
|
|
||||||
|
print "Loaded events."
|
||||||
|
|
||||||
|
events.sort(key=lambda e: e.depth)
|
||||||
|
|
||||||
|
print "Sorted events"
|
||||||
|
|
||||||
|
if limit:
|
||||||
|
events = events[-int(limit):]
|
||||||
|
|
||||||
|
node_map = {}
|
||||||
|
|
||||||
|
graph = pydot.Dot(graph_name="Test")
|
||||||
|
|
||||||
|
for event in events:
|
||||||
|
t = datetime.datetime.fromtimestamp(
|
||||||
|
float(event.origin_server_ts) / 1000
|
||||||
|
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||||
|
|
||||||
|
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
||||||
|
content = content.replace("\n", "<br/>\n")
|
||||||
|
|
||||||
|
print content
|
||||||
|
content = []
|
||||||
|
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||||
|
if value is None:
|
||||||
|
value = "<null>"
|
||||||
|
elif isinstance(value, basestring):
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
value = json.dumps(value)
|
||||||
|
|
||||||
|
content.append(
|
||||||
|
"<b>%s</b>: %s," % (
|
||||||
|
cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
||||||
|
cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
content = "<br/>\n".join(content)
|
||||||
|
|
||||||
|
print content
|
||||||
|
|
||||||
|
label = (
|
||||||
|
"<"
|
||||||
|
"<b>%(name)s </b><br/>"
|
||||||
|
"Type: <b>%(type)s </b><br/>"
|
||||||
|
"State key: <b>%(state_key)s </b><br/>"
|
||||||
|
"Content: <b>%(content)s </b><br/>"
|
||||||
|
"Time: <b>%(time)s </b><br/>"
|
||||||
|
"Depth: <b>%(depth)s </b><br/>"
|
||||||
|
">"
|
||||||
|
) % {
|
||||||
|
"name": event.event_id,
|
||||||
|
"type": event.type,
|
||||||
|
"state_key": event.get("state_key", None),
|
||||||
|
"content": content,
|
||||||
|
"time": t,
|
||||||
|
"depth": event.depth,
|
||||||
|
}
|
||||||
|
|
||||||
|
node = pydot.Node(
|
||||||
|
name=event.event_id,
|
||||||
|
label=label,
|
||||||
|
)
|
||||||
|
|
||||||
|
node_map[event.event_id] = node
|
||||||
|
graph.add_node(node)
|
||||||
|
|
||||||
|
print "Created Nodes"
|
||||||
|
|
||||||
|
for event in events:
|
||||||
|
for prev_id, _ in event.prev_events:
|
||||||
|
try:
|
||||||
|
end_node = node_map[prev_id]
|
||||||
|
except:
|
||||||
|
end_node = pydot.Node(
|
||||||
|
name=prev_id,
|
||||||
|
label="<<b>%s</b>>" % (prev_id,),
|
||||||
|
)
|
||||||
|
|
||||||
|
node_map[prev_id] = end_node
|
||||||
|
graph.add_node(end_node)
|
||||||
|
|
||||||
|
edge = pydot.Edge(node_map[event.event_id], end_node)
|
||||||
|
graph.add_edge(edge)
|
||||||
|
|
||||||
|
print "Created edges"
|
||||||
|
|
||||||
|
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
||||||
|
|
||||||
|
print "Created Dot"
|
||||||
|
|
||||||
|
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
||||||
|
|
||||||
|
print "Created svg"
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Generate a PDU graph for a given room by reading "
|
||||||
|
"from a file with line deliminated events. \n"
|
||||||
|
"Requires pydot."
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-p", "--prefix", dest="prefix",
|
||||||
|
help="String to prefix output files with",
|
||||||
|
default="graph_output"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-l", "--limit",
|
||||||
|
help="Only retrieve the last N events.",
|
||||||
|
)
|
||||||
|
parser.add_argument('event_file')
|
||||||
|
parser.add_argument('room')
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
make_graph(args.event_file, args.room, args.prefix, args.limit)
|
||||||
@@ -9,6 +9,7 @@ Description=Synapse Matrix homeserver
|
|||||||
Type=simple
|
Type=simple
|
||||||
User=synapse
|
User=synapse
|
||||||
Group=synapse
|
Group=synapse
|
||||||
|
EnvironmentFile=-/etc/sysconfig/synapse
|
||||||
WorkingDirectory=/var/lib/synapse
|
WorkingDirectory=/var/lib/synapse
|
||||||
ExecStart=/usr/bin/python2.7 -m synapse.app.homeserver --config-path=/etc/synapse/homeserver.yaml --log-config=/etc/synapse/log_config.yaml
|
ExecStart=/usr/bin/python2.7 -m synapse.app.homeserver --config-path=/etc/synapse/homeserver.yaml --log-config=/etc/synapse/log_config.yaml
|
||||||
|
|
||||||
|
|||||||
@@ -38,6 +38,13 @@ for port in 8080 8081 8082; do
|
|||||||
|
|
||||||
perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
|
perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
|
||||||
|
|
||||||
|
if ! grep -F "full_twisted_stacktraces" -q $DIR/etc/$port.config; then
|
||||||
|
echo "full_twisted_stacktraces: true" >> $DIR/etc/$port.config
|
||||||
|
fi
|
||||||
|
if ! grep -F "report_stats" -q $DIR/etc/$port.config ; then
|
||||||
|
echo "report_stats: false" >> $DIR/etc/$port.config
|
||||||
|
fi
|
||||||
|
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
-D \
|
-D \
|
||||||
|
|||||||
@@ -32,5 +32,4 @@ The format of the AS configuration file is as follows:
|
|||||||
|
|
||||||
See the spec_ for further details on how application services work.
|
See the spec_ for further details on how application services work.
|
||||||
|
|
||||||
.. _spec: https://github.com/matrix-org/matrix-doc/blob/master/specification/25_application_service_api.rst#application-service-api
|
.. _spec: https://matrix.org/docs/spec/application_service/unstable.html
|
||||||
|
|
||||||
|
|||||||
@@ -43,7 +43,10 @@ Basically, PEP8
|
|||||||
together, or want to deliberately extend or preserve vertical/horizontal
|
together, or want to deliberately extend or preserve vertical/horizontal
|
||||||
space)
|
space)
|
||||||
|
|
||||||
Comments should follow the google code style. This is so that we can generate
|
Comments should follow the `google code style <http://google.github.io/styleguide/pyguide.html?showone=Comments#Comments>`_.
|
||||||
documentation with sphinx (http://sphinxcontrib-napoleon.readthedocs.org/en/latest/)
|
This is so that we can generate documentation with
|
||||||
|
`sphinx <http://sphinxcontrib-napoleon.readthedocs.org/en/latest/>`_. See the
|
||||||
|
`examples <http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html>`_
|
||||||
|
in the sphinx documentation.
|
||||||
|
|
||||||
Code should pass pep8 --max-line-length=100 without any warnings.
|
Code should pass pep8 --max-line-length=100 without any warnings.
|
||||||
|
|||||||
10
docs/log_contexts.rst
Normal file
10
docs/log_contexts.rst
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
What do I do about "Unexpected logging context" debug log-lines everywhere?
|
||||||
|
|
||||||
|
<Mjark> The logging context lives in thread local storage
|
||||||
|
<Mjark> Sometimes it gets out of sync with what it should actually be, usually because something scheduled something to run on the reactor without preserving the logging context.
|
||||||
|
<Matthew> what is the impact of it getting out of sync? and how and when should we preserve log context?
|
||||||
|
<Mjark> The impact is that some of the CPU and database metrics will be under-reported, and some log lines will be mis-attributed.
|
||||||
|
<Mjark> It should happen auto-magically in all the APIs that do IO or otherwise defer to the reactor.
|
||||||
|
<Erik> Mjark: the other place is if we branch, e.g. using defer.gatherResults
|
||||||
|
|
||||||
|
Unanswered: how and when should we preserve log context?
|
||||||
@@ -18,8 +18,8 @@ encoding use, e.g.::
|
|||||||
This would create an appropriate database named ``synapse`` owned by the
|
This would create an appropriate database named ``synapse`` owned by the
|
||||||
``synapse_user`` user (which must already exist).
|
``synapse_user`` user (which must already exist).
|
||||||
|
|
||||||
Set up client
|
Set up client in Debian/Ubuntu
|
||||||
=============
|
===========================
|
||||||
|
|
||||||
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
||||||
virtual env::
|
virtual env::
|
||||||
@@ -27,6 +27,19 @@ virtual env::
|
|||||||
sudo apt-get install libpq-dev
|
sudo apt-get install libpq-dev
|
||||||
pip install psycopg2
|
pip install psycopg2
|
||||||
|
|
||||||
|
Set up client in RHEL/CentOs 7
|
||||||
|
==============================
|
||||||
|
|
||||||
|
Make sure you have the appropriate version of postgres-devel installed. For a
|
||||||
|
postgres 9.4, use the postgres 9.4 packages from
|
||||||
|
[here](https://wiki.postgresql.org/wiki/YUM_Installation).
|
||||||
|
|
||||||
|
As with Debian/Ubuntu, postgres support depends on the postgres python connector
|
||||||
|
``psycopg2``. In the virtual env::
|
||||||
|
|
||||||
|
sudo yum install postgresql-devel libpqxx-devel.x86_64
|
||||||
|
export PATH=/usr/pgsql-9.4/bin/:$PATH
|
||||||
|
pip install psycopg2
|
||||||
|
|
||||||
Synapse config
|
Synapse config
|
||||||
==============
|
==============
|
||||||
|
|||||||
58
docs/replication.rst
Normal file
58
docs/replication.rst
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
Replication Architecture
|
||||||
|
========================
|
||||||
|
|
||||||
|
Motivation
|
||||||
|
----------
|
||||||
|
|
||||||
|
We'd like to be able to split some of the work that synapse does into multiple
|
||||||
|
python processes. In theory multiple synapse processes could share a single
|
||||||
|
postgresql database and we'd scale up by running more synapse processes.
|
||||||
|
However much of synapse assumes that only one process is interacting with the
|
||||||
|
database, both for assigning unique identifiers when inserting into tables,
|
||||||
|
notifying components about new updates, and for invalidating its caches.
|
||||||
|
|
||||||
|
So running multiple copies of the current code isn't an option. One way to
|
||||||
|
run multiple processes would be to have a single writer process and multiple
|
||||||
|
reader processes connected to the same database. In order to do this we'd need
|
||||||
|
a way for the reader process to invalidate its in-memory caches when an update
|
||||||
|
happens on the writer. One way to do this is for the writer to present an
|
||||||
|
append-only log of updates which the readers can consume to invalidate their
|
||||||
|
caches and to push updates to listening clients or pushers.
|
||||||
|
|
||||||
|
Synapse already stores much of its data as an append-only log so that it can
|
||||||
|
correctly respond to /sync requests so the amount of code changes needed to
|
||||||
|
expose the append-only log to the readers should be fairly minimal.
|
||||||
|
|
||||||
|
Architecture
|
||||||
|
------------
|
||||||
|
|
||||||
|
The Replication API
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Synapse will optionally expose a long poll HTTP API for extracting updates. The
|
||||||
|
API will have a similar shape to /sync in that clients provide tokens
|
||||||
|
indicating where in the log they have reached and a timeout. The synapse server
|
||||||
|
then either responds with updates immediately if it already has updates or it
|
||||||
|
waits until the timeout for more updates. If the timeout expires and nothing
|
||||||
|
happened then the server returns an empty response.
|
||||||
|
|
||||||
|
However unlike the /sync API this replication API is returning synapse specific
|
||||||
|
data rather than trying to implement a matrix specification. The replication
|
||||||
|
results are returned as arrays of rows where the rows are mostly lifted
|
||||||
|
directly from the database. This avoids unnecessary JSON parsing on the server
|
||||||
|
and hopefully avoids an impedance mismatch between the data returned and the
|
||||||
|
required updates to the datastore.
|
||||||
|
|
||||||
|
This does not replicate all the database tables as many of the database tables
|
||||||
|
are indexes that can be recovered from the contents of other tables.
|
||||||
|
|
||||||
|
The format and parameters for the api are documented in
|
||||||
|
``synapse/replication/resource.py``.
|
||||||
|
|
||||||
|
|
||||||
|
The Slaved DataStore
|
||||||
|
~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
There are read-only version of the synapse storage layer in
|
||||||
|
``synapse/replication/slave/storage`` that use the response of the replication
|
||||||
|
API to invalidate their caches.
|
||||||
@@ -9,31 +9,35 @@ the Home Server to generate credentials that are valid for use on the TURN
|
|||||||
server through the use of a secret shared between the Home Server and the
|
server through the use of a secret shared between the Home Server and the
|
||||||
TURN server.
|
TURN server.
|
||||||
|
|
||||||
This document described how to install coturn
|
This document describes how to install coturn
|
||||||
(https://code.google.com/p/coturn/) which also supports the TURN REST API,
|
(https://github.com/coturn/coturn) which also supports the TURN REST API,
|
||||||
and integrate it with synapse.
|
and integrate it with synapse.
|
||||||
|
|
||||||
coturn Setup
|
coturn Setup
|
||||||
============
|
============
|
||||||
|
|
||||||
|
You may be able to setup coturn via your package manager, or set it up manually using the usual ``configure, make, make install`` process.
|
||||||
|
|
||||||
1. Check out coturn::
|
1. Check out coturn::
|
||||||
svn checkout http://coturn.googlecode.com/svn/trunk/ coturn
|
|
||||||
|
git clone https://github.com/coturn/coturn.git coturn
|
||||||
cd coturn
|
cd coturn
|
||||||
|
|
||||||
2. Configure it::
|
2. Configure it::
|
||||||
|
|
||||||
./configure
|
./configure
|
||||||
|
|
||||||
You may need to install libevent2: if so, you should do so
|
You may need to install ``libevent2``: if so, you should do so
|
||||||
in the way recommended by your operating system.
|
in the way recommended by your operating system.
|
||||||
You can ignore warnings about lack of database support: a
|
You can ignore warnings about lack of database support: a
|
||||||
database is unnecessary for this purpose.
|
database is unnecessary for this purpose.
|
||||||
|
|
||||||
3. Build and install it::
|
3. Build and install it::
|
||||||
|
|
||||||
make
|
make
|
||||||
make install
|
make install
|
||||||
|
|
||||||
4. Make a config file in /etc/turnserver.conf. You can customise
|
4. Create or edit the config file in ``/etc/turnserver.conf``. The relevant
|
||||||
a config file from turnserver.conf.default. The relevant
|
|
||||||
lines, with example values, are::
|
lines, with example values, are::
|
||||||
|
|
||||||
lt-cred-mech
|
lt-cred-mech
|
||||||
@@ -41,7 +45,7 @@ coturn Setup
|
|||||||
static-auth-secret=[your secret key here]
|
static-auth-secret=[your secret key here]
|
||||||
realm=turn.myserver.org
|
realm=turn.myserver.org
|
||||||
|
|
||||||
See turnserver.conf.default for explanations of the options.
|
See turnserver.conf for explanations of the options.
|
||||||
One way to generate the static-auth-secret is with pwgen::
|
One way to generate the static-auth-secret is with pwgen::
|
||||||
|
|
||||||
pwgen -s 64 1
|
pwgen -s 64 1
|
||||||
@@ -54,6 +58,7 @@ coturn Setup
|
|||||||
import your private key and certificate.
|
import your private key and certificate.
|
||||||
|
|
||||||
7. Start the turn server::
|
7. Start the turn server::
|
||||||
|
|
||||||
bin/turnserver -o
|
bin/turnserver -o
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
74
docs/url_previews.rst
Normal file
74
docs/url_previews.rst
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
URL Previews
|
||||||
|
============
|
||||||
|
|
||||||
|
Design notes on a URL previewing service for Matrix:
|
||||||
|
|
||||||
|
Options are:
|
||||||
|
|
||||||
|
1. Have an AS which listens for URLs, downloads them, and inserts an event that describes their metadata.
|
||||||
|
* Pros:
|
||||||
|
* Decouples the implementation entirely from Synapse.
|
||||||
|
* Uses existing Matrix events & content repo to store the metadata.
|
||||||
|
* Cons:
|
||||||
|
* Which AS should provide this service for a room, and why should you trust it?
|
||||||
|
* Doesn't work well with E2E; you'd have to cut the AS into every room
|
||||||
|
* the AS would end up subscribing to every room anyway.
|
||||||
|
|
||||||
|
2. Have a generic preview API (nothing to do with Matrix) that provides a previewing service:
|
||||||
|
* Pros:
|
||||||
|
* Simple and flexible; can be used by any clients at any point
|
||||||
|
* Cons:
|
||||||
|
* If each HS provides one of these independently, all the HSes in a room may needlessly DoS the target URI
|
||||||
|
* We need somewhere to store the URL metadata rather than just using Matrix itself
|
||||||
|
* We can't piggyback on matrix to distribute the metadata between HSes.
|
||||||
|
|
||||||
|
3. Make the synapse of the sending user responsible for spidering the URL and inserting an event asynchronously which describes the metadata.
|
||||||
|
* Pros:
|
||||||
|
* Works transparently for all clients
|
||||||
|
* Piggy-backs nicely on using Matrix for distributing the metadata.
|
||||||
|
* No confusion as to which AS
|
||||||
|
* Cons:
|
||||||
|
* Doesn't work with E2E
|
||||||
|
* We might want to decouple the implementation of the spider from the HS, given spider behaviour can be quite complicated and evolve much more rapidly than the HS. It's more like a bot than a core part of the server.
|
||||||
|
|
||||||
|
4. Make the sending client use the preview API and insert the event itself when successful.
|
||||||
|
* Pros:
|
||||||
|
* Works well with E2E
|
||||||
|
* No custom server functionality
|
||||||
|
* Lets the client customise the preview that they send (like on FB)
|
||||||
|
* Cons:
|
||||||
|
* Entirely specific to the sending client, whereas it'd be nice if /any/ URL was correctly previewed if clients support it.
|
||||||
|
|
||||||
|
5. Have the option of specifying a shared (centralised) previewing service used by a room, to avoid all the different HSes in the room DoSing the target.
|
||||||
|
|
||||||
|
Best solution is probably a combination of both 2 and 4.
|
||||||
|
* Sending clients do their best to create and send a preview at the point of sending the message, perhaps delaying the message until the preview is computed? (This also lets the user validate the preview before sending)
|
||||||
|
* Receiving clients have the option of going and creating their own preview if one doesn't arrive soon enough (or if the original sender didn't create one)
|
||||||
|
|
||||||
|
This is a bit magical though in that the preview could come from two entirely different sources - the sending HS or your local one. However, this can always be exposed to users: "Generate your own URL previews if none are available?"
|
||||||
|
|
||||||
|
This is tantamount also to senders calculating their own thumbnails for sending in advance of the main content - we are trusting the sender not to lie about the content in the thumbnail. Whereas currently thumbnails are calculated by the receiving homeserver to avoid this attack.
|
||||||
|
|
||||||
|
However, this kind of phishing attack does exist whether we let senders pick their thumbnails or not, in that a malicious sender can send normal text messages around the attachment claiming it to be legitimate. We could rely on (future) reputation/abuse management to punish users who phish (be it with bogus metadata or bogus descriptions). Bogus metadata is particularly bad though, especially if it's avoidable.
|
||||||
|
|
||||||
|
As a first cut, let's do #2 and have the receiver hit the API to calculate its own previews (as it does currently for image thumbnails). We can then extend/optimise this to option 4 as a special extra if needed.
|
||||||
|
|
||||||
|
API
|
||||||
|
---
|
||||||
|
|
||||||
|
GET /_matrix/media/r0/preview_url?url=http://wherever.com
|
||||||
|
200 OK
|
||||||
|
{
|
||||||
|
"og:type" : "article"
|
||||||
|
"og:url" : "https://twitter.com/matrixdotorg/status/684074366691356672"
|
||||||
|
"og:title" : "Matrix on Twitter"
|
||||||
|
"og:image" : "https://pbs.twimg.com/profile_images/500400952029888512/yI0qtFi7_400x400.png"
|
||||||
|
"og:description" : "“Synapse 0.12 is out! Lots of polishing, performance &amp; bugfixes: /sync API, /r0 prefix, fulltext search, 3PID invites https://t.co/5alhXLLEGP”"
|
||||||
|
"og:site_name" : "Twitter"
|
||||||
|
}
|
||||||
|
|
||||||
|
* Downloads the URL
|
||||||
|
* If HTML, just stores it in RAM and parses it for OG meta tags
|
||||||
|
* Download any media OG meta tags to the media repo, and refer to them in the OG via mxc:// URIs.
|
||||||
|
* If a media filetype we know we can thumbnail: store it on disk, and hand it to the thumbnailer. Generate OG meta tags from the thumbnailer contents.
|
||||||
|
* Otherwise, don't bother downloading further.
|
||||||
87
jenkins-dendron-postgres.sh
Executable file
87
jenkins-dendron-postgres.sh
Executable file
@@ -0,0 +1,87 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
: ${WORKSPACE:="$(pwd)"}
|
||||||
|
|
||||||
|
export PYTHONDONTWRITEBYTECODE=yep
|
||||||
|
export SYNAPSE_CACHE_FACTOR=1
|
||||||
|
|
||||||
|
# Output test results as junit xml
|
||||||
|
export TRIAL_FLAGS="--reporter=subunit"
|
||||||
|
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
||||||
|
# Write coverage reports to a separate file for each process
|
||||||
|
export COVERAGE_OPTS="-p"
|
||||||
|
export DUMP_COVERAGE_COMMAND="coverage help"
|
||||||
|
|
||||||
|
# Output flake8 violations to violations.flake8.log
|
||||||
|
# Don't exit with non-0 status code on Jenkins,
|
||||||
|
# so that the build steps continue and a later step can decided whether to
|
||||||
|
# UNSTABLE or FAILURE this build.
|
||||||
|
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
||||||
|
|
||||||
|
rm .coverage* || echo "No coverage files to remove"
|
||||||
|
|
||||||
|
tox --notest -e py27
|
||||||
|
|
||||||
|
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
||||||
|
python synapse/python_dependencies.py | xargs -n1 $TOX_BIN/pip install
|
||||||
|
$TOX_BIN/pip install psycopg2
|
||||||
|
$TOX_BIN/pip install lxml
|
||||||
|
|
||||||
|
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
||||||
|
|
||||||
|
if [[ ! -e .dendron-base ]]; then
|
||||||
|
git clone https://github.com/matrix-org/dendron.git .dendron-base --mirror
|
||||||
|
else
|
||||||
|
(cd .dendron-base; git fetch -p)
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf dendron
|
||||||
|
git clone .dendron-base dendron --shared
|
||||||
|
cd dendron
|
||||||
|
|
||||||
|
: ${GOPATH:=${WORKSPACE}/.gopath}
|
||||||
|
if [[ "${GOPATH}" != *:* ]]; then
|
||||||
|
mkdir -p "${GOPATH}"
|
||||||
|
export PATH="${GOPATH}/bin:${PATH}"
|
||||||
|
fi
|
||||||
|
export GOPATH
|
||||||
|
|
||||||
|
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
||||||
|
|
||||||
|
go get github.com/constabulary/gb/...
|
||||||
|
gb generate
|
||||||
|
gb build
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
|
||||||
|
if [[ ! -e .sytest-base ]]; then
|
||||||
|
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
||||||
|
else
|
||||||
|
(cd .sytest-base; git fetch -p)
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf sytest
|
||||||
|
git clone .sytest-base sytest --shared
|
||||||
|
cd sytest
|
||||||
|
|
||||||
|
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
||||||
|
|
||||||
|
: ${PORT_BASE:=8000}
|
||||||
|
: ${PORT_COUNT=20}
|
||||||
|
|
||||||
|
./jenkins/prep_sytest_for_postgres.sh
|
||||||
|
|
||||||
|
mkdir -p var
|
||||||
|
|
||||||
|
echo >&2 "Running sytest with PostgreSQL";
|
||||||
|
./jenkins/install_and_run.sh --python $TOX_BIN/python \
|
||||||
|
--synapse-directory $WORKSPACE \
|
||||||
|
--dendron $WORKSPACE/dendron/bin/dendron \
|
||||||
|
--pusher \
|
||||||
|
--synchrotron \
|
||||||
|
--port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1))
|
||||||
|
|
||||||
|
cd ..
|
||||||
22
jenkins-flake8.sh
Executable file
22
jenkins-flake8.sh
Executable file
@@ -0,0 +1,22 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
: ${WORKSPACE:="$(pwd)"}
|
||||||
|
|
||||||
|
export PYTHONDONTWRITEBYTECODE=yep
|
||||||
|
export SYNAPSE_CACHE_FACTOR=1
|
||||||
|
|
||||||
|
# Output test results as junit xml
|
||||||
|
export TRIAL_FLAGS="--reporter=subunit"
|
||||||
|
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
||||||
|
# Write coverage reports to a separate file for each process
|
||||||
|
export COVERAGE_OPTS="-p"
|
||||||
|
export DUMP_COVERAGE_COMMAND="coverage help"
|
||||||
|
|
||||||
|
# Output flake8 violations to violations.flake8.log
|
||||||
|
export PEP8SUFFIX="--output-file=violations.flake8.log"
|
||||||
|
|
||||||
|
rm .coverage* || echo "No coverage files to remove"
|
||||||
|
|
||||||
|
tox -e packaging -e pep8
|
||||||
64
jenkins-postgres.sh
Executable file
64
jenkins-postgres.sh
Executable file
@@ -0,0 +1,64 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
: ${WORKSPACE:="$(pwd)"}
|
||||||
|
|
||||||
|
export PYTHONDONTWRITEBYTECODE=yep
|
||||||
|
export SYNAPSE_CACHE_FACTOR=1
|
||||||
|
|
||||||
|
# Output test results as junit xml
|
||||||
|
export TRIAL_FLAGS="--reporter=subunit"
|
||||||
|
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
||||||
|
# Write coverage reports to a separate file for each process
|
||||||
|
export COVERAGE_OPTS="-p"
|
||||||
|
export DUMP_COVERAGE_COMMAND="coverage help"
|
||||||
|
|
||||||
|
# Output flake8 violations to violations.flake8.log
|
||||||
|
# Don't exit with non-0 status code on Jenkins,
|
||||||
|
# so that the build steps continue and a later step can decided whether to
|
||||||
|
# UNSTABLE or FAILURE this build.
|
||||||
|
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
||||||
|
|
||||||
|
rm .coverage* || echo "No coverage files to remove"
|
||||||
|
|
||||||
|
tox --notest -e py27
|
||||||
|
|
||||||
|
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
||||||
|
python synapse/python_dependencies.py | xargs -n1 $TOX_BIN/pip install
|
||||||
|
$TOX_BIN/pip install psycopg2
|
||||||
|
$TOX_BIN/pip install lxml
|
||||||
|
|
||||||
|
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
||||||
|
|
||||||
|
if [[ ! -e .sytest-base ]]; then
|
||||||
|
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
||||||
|
else
|
||||||
|
(cd .sytest-base; git fetch -p)
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf sytest
|
||||||
|
git clone .sytest-base sytest --shared
|
||||||
|
cd sytest
|
||||||
|
|
||||||
|
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
||||||
|
|
||||||
|
: ${PORT_BASE:=8000}
|
||||||
|
: ${PORT_COUNT=20}
|
||||||
|
|
||||||
|
./jenkins/prep_sytest_for_postgres.sh
|
||||||
|
|
||||||
|
echo >&2 "Running sytest with PostgreSQL";
|
||||||
|
./jenkins/install_and_run.sh --coverage \
|
||||||
|
--python $TOX_BIN/python \
|
||||||
|
--synapse-directory $WORKSPACE \
|
||||||
|
--port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1)) \
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
cp sytest/.coverage.* .
|
||||||
|
|
||||||
|
# Combine the coverage reports
|
||||||
|
echo "Combining:" .coverage.*
|
||||||
|
$TOX_BIN/python -m coverage combine
|
||||||
|
# Output coverage to coverage.xml
|
||||||
|
$TOX_BIN/coverage xml -o coverage.xml
|
||||||
58
jenkins-sqlite.sh
Executable file
58
jenkins-sqlite.sh
Executable file
@@ -0,0 +1,58 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
: ${WORKSPACE:="$(pwd)"}
|
||||||
|
|
||||||
|
export PYTHONDONTWRITEBYTECODE=yep
|
||||||
|
export SYNAPSE_CACHE_FACTOR=1
|
||||||
|
|
||||||
|
# Output test results as junit xml
|
||||||
|
export TRIAL_FLAGS="--reporter=subunit"
|
||||||
|
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
||||||
|
# Write coverage reports to a separate file for each process
|
||||||
|
export COVERAGE_OPTS="-p"
|
||||||
|
export DUMP_COVERAGE_COMMAND="coverage help"
|
||||||
|
|
||||||
|
# Output flake8 violations to violations.flake8.log
|
||||||
|
# Don't exit with non-0 status code on Jenkins,
|
||||||
|
# so that the build steps continue and a later step can decided whether to
|
||||||
|
# UNSTABLE or FAILURE this build.
|
||||||
|
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
||||||
|
|
||||||
|
rm .coverage* || echo "No coverage files to remove"
|
||||||
|
|
||||||
|
tox --notest -e py27
|
||||||
|
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
||||||
|
python synapse/python_dependencies.py | xargs -n1 $TOX_BIN/pip install
|
||||||
|
$TOX_BIN/pip install lxml
|
||||||
|
|
||||||
|
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
||||||
|
|
||||||
|
if [[ ! -e .sytest-base ]]; then
|
||||||
|
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
||||||
|
else
|
||||||
|
(cd .sytest-base; git fetch -p)
|
||||||
|
fi
|
||||||
|
|
||||||
|
rm -rf sytest
|
||||||
|
git clone .sytest-base sytest --shared
|
||||||
|
cd sytest
|
||||||
|
|
||||||
|
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
||||||
|
|
||||||
|
: ${PORT_COUNT=20}
|
||||||
|
: ${PORT_BASE:=8000}
|
||||||
|
./jenkins/install_and_run.sh --coverage \
|
||||||
|
--python $TOX_BIN/python \
|
||||||
|
--synapse-directory $WORKSPACE \
|
||||||
|
--port-range ${PORT_BASE}:$((PORT_BASE+PORT_COUNT-1)) \
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
cp sytest/.coverage.* .
|
||||||
|
|
||||||
|
# Combine the coverage reports
|
||||||
|
echo "Combining:" .coverage.*
|
||||||
|
$TOX_BIN/python -m coverage combine
|
||||||
|
# Output coverage to coverage.xml
|
||||||
|
$TOX_BIN/coverage xml -o coverage.xml
|
||||||
25
jenkins-unittests.sh
Executable file
25
jenkins-unittests.sh
Executable file
@@ -0,0 +1,25 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -eux
|
||||||
|
|
||||||
|
: ${WORKSPACE:="$(pwd)"}
|
||||||
|
|
||||||
|
export PYTHONDONTWRITEBYTECODE=yep
|
||||||
|
export SYNAPSE_CACHE_FACTOR=1
|
||||||
|
|
||||||
|
# Output test results as junit xml
|
||||||
|
export TRIAL_FLAGS="--reporter=subunit"
|
||||||
|
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
||||||
|
# Write coverage reports to a separate file for each process
|
||||||
|
export COVERAGE_OPTS="-p"
|
||||||
|
export DUMP_COVERAGE_COMMAND="coverage help"
|
||||||
|
|
||||||
|
# Output flake8 violations to violations.flake8.log
|
||||||
|
# Don't exit with non-0 status code on Jenkins,
|
||||||
|
# so that the build steps continue and a later step can decided whether to
|
||||||
|
# UNSTABLE or FAILURE this build.
|
||||||
|
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
||||||
|
|
||||||
|
rm .coverage* || echo "No coverage files to remove"
|
||||||
|
|
||||||
|
tox -e py27
|
||||||
7
res/templates/mail-Vector.css
Normal file
7
res/templates/mail-Vector.css
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
.header {
|
||||||
|
border-bottom: 4px solid #e4f7ed ! important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.notif_link a, .footer a {
|
||||||
|
color: #76CFA6 ! important;
|
||||||
|
}
|
||||||
156
res/templates/mail.css
Normal file
156
res/templates/mail.css
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
body {
|
||||||
|
margin: 0px;
|
||||||
|
}
|
||||||
|
|
||||||
|
pre, code {
|
||||||
|
word-break: break-word;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
#page {
|
||||||
|
font-family: 'Open Sans', Helvetica, Arial, Sans-Serif;
|
||||||
|
font-color: #454545;
|
||||||
|
font-size: 12pt;
|
||||||
|
width: 100%;
|
||||||
|
padding: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#inner {
|
||||||
|
width: 640px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header {
|
||||||
|
width: 100%;
|
||||||
|
height: 87px;
|
||||||
|
color: #454545;
|
||||||
|
border-bottom: 4px solid #e5e5e5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.logo {
|
||||||
|
text-align: right;
|
||||||
|
margin-left: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.salutation {
|
||||||
|
padding-top: 10px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.summarytext {
|
||||||
|
}
|
||||||
|
|
||||||
|
.room {
|
||||||
|
width: 100%;
|
||||||
|
color: #454545;
|
||||||
|
border-bottom: 1px solid #e5e5e5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.room_header td {
|
||||||
|
padding-top: 38px;
|
||||||
|
padding-bottom: 10px;
|
||||||
|
border-bottom: 1px solid #e5e5e5;
|
||||||
|
}
|
||||||
|
|
||||||
|
.room_name {
|
||||||
|
vertical-align: middle;
|
||||||
|
font-size: 18px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.room_header h2 {
|
||||||
|
margin-top: 0px;
|
||||||
|
margin-left: 75px;
|
||||||
|
font-size: 20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.room_avatar {
|
||||||
|
width: 56px;
|
||||||
|
line-height: 0px;
|
||||||
|
text-align: center;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
.room_avatar img {
|
||||||
|
width: 48px;
|
||||||
|
height: 48px;
|
||||||
|
object-fit: cover;
|
||||||
|
border-radius: 24px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.notif {
|
||||||
|
border-bottom: 1px solid #e5e5e5;
|
||||||
|
margin-top: 16px;
|
||||||
|
padding-bottom: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.historical_message .sender_avatar {
|
||||||
|
opacity: 0.3;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* spell out opacity and historical_message class names for Outlook aka Word */
|
||||||
|
.historical_message .sender_name {
|
||||||
|
color: #e3e3e3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.historical_message .message_time {
|
||||||
|
color: #e3e3e3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.historical_message .message_body {
|
||||||
|
color: #c7c7c7;
|
||||||
|
}
|
||||||
|
|
||||||
|
.historical_message td,
|
||||||
|
.message td {
|
||||||
|
padding-top: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sender_avatar {
|
||||||
|
width: 56px;
|
||||||
|
text-align: center;
|
||||||
|
vertical-align: top;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sender_avatar img {
|
||||||
|
margin-top: -2px;
|
||||||
|
width: 32px;
|
||||||
|
height: 32px;
|
||||||
|
border-radius: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.sender_name {
|
||||||
|
display: inline;
|
||||||
|
font-size: 13px;
|
||||||
|
color: #a2a2a2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message_time {
|
||||||
|
text-align: right;
|
||||||
|
width: 100px;
|
||||||
|
font-size: 11px;
|
||||||
|
color: #a2a2a2;
|
||||||
|
}
|
||||||
|
|
||||||
|
.message_body {
|
||||||
|
}
|
||||||
|
|
||||||
|
.notif_link td {
|
||||||
|
padding-top: 10px;
|
||||||
|
padding-bottom: 10px;
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.notif_link a, .footer a {
|
||||||
|
color: #454545;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.debug {
|
||||||
|
font-size: 10px;
|
||||||
|
color: #888;
|
||||||
|
}
|
||||||
|
|
||||||
|
.footer {
|
||||||
|
margin-top: 20px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
45
res/templates/notif.html
Normal file
45
res/templates/notif.html
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
{% for message in notif.messages %}
|
||||||
|
<tr class="{{ "historical_message" if message.is_historical else "message" }}">
|
||||||
|
<td class="sender_avatar">
|
||||||
|
{% if loop.index0 == 0 or notif.messages[loop.index0 - 1].sender_name != notif.messages[loop.index0].sender_name %}
|
||||||
|
{% if message.sender_avatar_url %}
|
||||||
|
<img alt="" class="sender_avatar" src="{{ message.sender_avatar_url|mxc_to_http(32,32) }}" />
|
||||||
|
{% else %}
|
||||||
|
{% if message.sender_hash % 3 == 0 %}
|
||||||
|
<img class="sender_avatar" src="https://vector.im/beta/img/76cfa6.png" />
|
||||||
|
{% elif message.sender_hash % 3 == 1 %}
|
||||||
|
<img class="sender_avatar" src="https://vector.im/beta/img/50e2c2.png" />
|
||||||
|
{% else %}
|
||||||
|
<img class="sender_avatar" src="https://vector.im/beta/img/f4c371.png" />
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
<td class="message_contents">
|
||||||
|
{% if loop.index0 == 0 or notif.messages[loop.index0 - 1].sender_name != notif.messages[loop.index0].sender_name %}
|
||||||
|
<div class="sender_name">{% if message.msgtype == "m.emote" %}*{% endif %} {{ message.sender_name }}</div>
|
||||||
|
{% endif %}
|
||||||
|
<div class="message_body">
|
||||||
|
{% if message.msgtype == "m.text" %}
|
||||||
|
{{ message.body_text_html }}
|
||||||
|
{% elif message.msgtype == "m.emote" %}
|
||||||
|
{{ message.body_text_html }}
|
||||||
|
{% elif message.msgtype == "m.notice" %}
|
||||||
|
{{ message.body_text_html }}
|
||||||
|
{% elif message.msgtype == "m.image" %}
|
||||||
|
<img src="{{ message.image_url|mxc_to_http(640, 480, scale) }}" />
|
||||||
|
{% elif message.msgtype == "m.file" %}
|
||||||
|
<span class="filename">{{ message.body_text_plain }}</span>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td class="message_time">{{ message.ts|format_ts("%H:%M") }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
<tr class="notif_link">
|
||||||
|
<td></td>
|
||||||
|
<td>
|
||||||
|
<a href="{{ notif.link }}">View {{ room.title }}</a>
|
||||||
|
</td>
|
||||||
|
<td></td>
|
||||||
|
</tr>
|
||||||
16
res/templates/notif.txt
Normal file
16
res/templates/notif.txt
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{% for message in notif.messages %}
|
||||||
|
{% if message.msgtype == "m.emote" %}* {% endif %}{{ message.sender_name }} ({{ message.ts|format_ts("%H:%M") }})
|
||||||
|
{% if message.msgtype == "m.text" %}
|
||||||
|
{{ message.body_text_plain }}
|
||||||
|
{% elif message.msgtype == "m.emote" %}
|
||||||
|
{{ message.body_text_plain }}
|
||||||
|
{% elif message.msgtype == "m.notice" %}
|
||||||
|
{{ message.body_text_plain }}
|
||||||
|
{% elif message.msgtype == "m.image" %}
|
||||||
|
{{ message.body_text_plain }}
|
||||||
|
{% elif message.msgtype == "m.file" %}
|
||||||
|
{{ message.body_text_plain }}
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
View {{ room.title }} at {{ notif.link }}
|
||||||
53
res/templates/notif_mail.html
Normal file
53
res/templates/notif_mail.html
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
<!doctype html>
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<style type="text/css">
|
||||||
|
{% include 'mail.css' without context %}
|
||||||
|
{% include "mail-%s.css" % app_name ignore missing without context %}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<table id="page">
|
||||||
|
<tr>
|
||||||
|
<td> </td>
|
||||||
|
<td id="inner">
|
||||||
|
<table class="header">
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
<div class="salutation">Hi {{ user_display_name }},</div>
|
||||||
|
<div class="summarytext">{{ summary_text }}</div>
|
||||||
|
</td>
|
||||||
|
<td class="logo">
|
||||||
|
{% if app_name == "Vector" %}
|
||||||
|
<img src="http://matrix.org/img/vector-logo-email.png" width="64" height="83" alt="[Vector]"/>
|
||||||
|
{% else %}
|
||||||
|
<img src="http://matrix.org/img/matrix-120x51.png" width="120" height="51" alt="[matrix]"/>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
{% for room in rooms %}
|
||||||
|
{% include 'room.html' with context %}
|
||||||
|
{% endfor %}
|
||||||
|
<div class="footer">
|
||||||
|
<a href="{{ unsubscribe_link }}">Unsubscribe</a>
|
||||||
|
<br/>
|
||||||
|
<br/>
|
||||||
|
<div class="debug">
|
||||||
|
Sending email at {{ reason.now|format_ts("%c") }} due to activity in room {{ reason.room_name }} because
|
||||||
|
an event was received at {{ reason.received_at|format_ts("%c") }}
|
||||||
|
which is more than {{ "%.1f"|format(reason.delay_before_mail_ms / (60*1000)) }} ({{ reason.delay_before_mail_ms }}) mins ago,
|
||||||
|
{% if reason.last_sent_ts %}
|
||||||
|
and the last time we sent a mail for this room was {{ reason.last_sent_ts|format_ts("%c") }},
|
||||||
|
which is more than {{ "%.1f"|format(reason.throttle_ms / (60*1000)) }} (current throttle_ms) mins ago.
|
||||||
|
{% else %}
|
||||||
|
and we don't have a last time we sent a mail for this room.
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
<td> </td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
10
res/templates/notif_mail.txt
Normal file
10
res/templates/notif_mail.txt
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
Hi {{ user_display_name }},
|
||||||
|
|
||||||
|
{{ summary_text }}
|
||||||
|
|
||||||
|
{% for room in rooms %}
|
||||||
|
{% include 'room.txt' with context %}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
You can disable these notifications at {{ unsubscribe_link }}
|
||||||
|
|
||||||
33
res/templates/room.html
Normal file
33
res/templates/room.html
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
<table class="room">
|
||||||
|
<tr class="room_header">
|
||||||
|
<td class="room_avatar">
|
||||||
|
{% if room.avatar_url %}
|
||||||
|
<img alt="" src="{{ room.avatar_url|mxc_to_http(48,48) }}" />
|
||||||
|
{% else %}
|
||||||
|
{% if room.hash % 3 == 0 %}
|
||||||
|
<img alt="" src="https://vector.im/beta/img/76cfa6.png" />
|
||||||
|
{% elif room.hash % 3 == 1 %}
|
||||||
|
<img alt="" src="https://vector.im/beta/img/50e2c2.png" />
|
||||||
|
{% else %}
|
||||||
|
<img alt="" src="https://vector.im/beta/img/f4c371.png" />
|
||||||
|
{% endif %}
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
<td class="room_name" colspan="2">
|
||||||
|
{{ room.title }}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% if room.invite %}
|
||||||
|
<tr>
|
||||||
|
<td></td>
|
||||||
|
<td>
|
||||||
|
<a href="{{ room.link }}">Join the conversation.</a>
|
||||||
|
</td>
|
||||||
|
<td></td>
|
||||||
|
</tr>
|
||||||
|
{% else %}
|
||||||
|
{% for notif in room.notifs %}
|
||||||
|
{% include 'notif.html' with context %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endif %}
|
||||||
|
</table>
|
||||||
9
res/templates/room.txt
Normal file
9
res/templates/room.txt
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{{ room.title }}
|
||||||
|
|
||||||
|
{% if room.invite %}
|
||||||
|
You've been invited, join at {{ room.link }}
|
||||||
|
{% else %}
|
||||||
|
{% for notif in room.notifs %}
|
||||||
|
{% include 'notif.txt' with context %}
|
||||||
|
{% endfor %}
|
||||||
|
{% endif %}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/perl -pi
|
#!/usr/bin/perl -pi
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
$copyright = <<EOT;
|
$copyright = <<EOT;
|
||||||
/* Copyright 2015 OpenMarket Ltd
|
/* Copyright 2016 OpenMarket Ltd
|
||||||
*
|
*
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
* you may not use this file except in compliance with the License.
|
* you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/perl -pi
|
#!/usr/bin/perl -pi
|
||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
$copyright = <<EOT;
|
$copyright = <<EOT;
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -79,16 +79,19 @@ def defined_names(prefix, defs, names):
|
|||||||
defined_names(prefix + name + ".", funcs, names)
|
defined_names(prefix + name + ".", funcs, names)
|
||||||
|
|
||||||
|
|
||||||
def used_names(prefix, defs, names):
|
def used_names(prefix, item, defs, names):
|
||||||
for name, funcs in defs.get('def', {}).items():
|
for name, funcs in defs.get('def', {}).items():
|
||||||
used_names(prefix + name + ".", funcs, names)
|
used_names(prefix + name + ".", name, funcs, names)
|
||||||
|
|
||||||
for name, funcs in defs.get('class', {}).items():
|
for name, funcs in defs.get('class', {}).items():
|
||||||
used_names(prefix + name + ".", funcs, names)
|
used_names(prefix + name + ".", name, funcs, names)
|
||||||
|
|
||||||
|
path = prefix.rstrip('.')
|
||||||
for used in defs.get('uses', ()):
|
for used in defs.get('uses', ()):
|
||||||
if used in names:
|
if used in names:
|
||||||
names[used].setdefault('used', []).append(prefix.rstrip('.'))
|
if item:
|
||||||
|
names[item].setdefault('uses', []).append(used)
|
||||||
|
names[used].setdefault('used', {}).setdefault(item, []).append(path)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
@@ -109,6 +112,18 @@ if __name__ == '__main__':
|
|||||||
"directories", nargs='+', metavar="DIR",
|
"directories", nargs='+', metavar="DIR",
|
||||||
help="Directories to search for definitions"
|
help="Directories to search for definitions"
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--referrers", default=0, type=int,
|
||||||
|
help="Include referrers up to the given depth"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--referred", default=0, type=int,
|
||||||
|
help="Include referred down to the given depth"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--format", default="yaml",
|
||||||
|
help="Output format, one of 'yaml' or 'dot'"
|
||||||
|
)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
definitions = {}
|
definitions = {}
|
||||||
@@ -124,7 +139,7 @@ if __name__ == '__main__':
|
|||||||
defined_names(filepath + ":", defs, names)
|
defined_names(filepath + ":", defs, names)
|
||||||
|
|
||||||
for filepath, defs in definitions.items():
|
for filepath, defs in definitions.items():
|
||||||
used_names(filepath + ":", defs, names)
|
used_names(filepath + ":", None, defs, names)
|
||||||
|
|
||||||
patterns = [re.compile(pattern) for pattern in args.pattern or ()]
|
patterns = [re.compile(pattern) for pattern in args.pattern or ()]
|
||||||
ignore = [re.compile(pattern) for pattern in args.ignore or ()]
|
ignore = [re.compile(pattern) for pattern in args.ignore or ()]
|
||||||
@@ -139,4 +154,43 @@ if __name__ == '__main__':
|
|||||||
continue
|
continue
|
||||||
result[name] = definition
|
result[name] = definition
|
||||||
|
|
||||||
yaml.dump(result, sys.stdout, default_flow_style=False)
|
referrer_depth = args.referrers
|
||||||
|
referrers = set()
|
||||||
|
while referrer_depth:
|
||||||
|
referrer_depth -= 1
|
||||||
|
for entry in result.values():
|
||||||
|
for used_by in entry.get("used", ()):
|
||||||
|
referrers.add(used_by)
|
||||||
|
for name, definition in names.items():
|
||||||
|
if not name in referrers:
|
||||||
|
continue
|
||||||
|
if ignore and any(pattern.match(name) for pattern in ignore):
|
||||||
|
continue
|
||||||
|
result[name] = definition
|
||||||
|
|
||||||
|
referred_depth = args.referred
|
||||||
|
referred = set()
|
||||||
|
while referred_depth:
|
||||||
|
referred_depth -= 1
|
||||||
|
for entry in result.values():
|
||||||
|
for uses in entry.get("uses", ()):
|
||||||
|
referred.add(uses)
|
||||||
|
for name, definition in names.items():
|
||||||
|
if not name in referred:
|
||||||
|
continue
|
||||||
|
if ignore and any(pattern.match(name) for pattern in ignore):
|
||||||
|
continue
|
||||||
|
result[name] = definition
|
||||||
|
|
||||||
|
if args.format == 'yaml':
|
||||||
|
yaml.dump(result, sys.stdout, default_flow_style=False)
|
||||||
|
elif args.format == 'dot':
|
||||||
|
print "digraph {"
|
||||||
|
for name, entry in result.items():
|
||||||
|
print name
|
||||||
|
for used_by in entry.get("used", ()):
|
||||||
|
if used_by in result:
|
||||||
|
print used_by, "->", name
|
||||||
|
print "}"
|
||||||
|
else:
|
||||||
|
raise ValueError("Unknown format %r" % (args.format))
|
||||||
|
|||||||
24
scripts-dev/dump_macaroon.py
Executable file
24
scripts-dev/dump_macaroon.py
Executable file
@@ -0,0 +1,24 @@
|
|||||||
|
#!/usr/bin/env python2
|
||||||
|
|
||||||
|
import pymacaroons
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if len(sys.argv) == 1:
|
||||||
|
sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
macaroon_string = sys.argv[1]
|
||||||
|
key = sys.argv[2] if len(sys.argv) > 2 else None
|
||||||
|
|
||||||
|
macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
|
||||||
|
print macaroon.inspect()
|
||||||
|
|
||||||
|
print ""
|
||||||
|
|
||||||
|
verifier = pymacaroons.Verifier()
|
||||||
|
verifier.satisfy_general(lambda c: True)
|
||||||
|
try:
|
||||||
|
verifier.verify(macaroon, key)
|
||||||
|
print "Signature is correct"
|
||||||
|
except Exception as e:
|
||||||
|
print e.message
|
||||||
62
scripts-dev/list_url_patterns.py
Executable file
62
scripts-dev/list_url_patterns.py
Executable file
@@ -0,0 +1,62 @@
|
|||||||
|
#! /usr/bin/python
|
||||||
|
|
||||||
|
import ast
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
PATTERNS_V1 = []
|
||||||
|
PATTERNS_V2 = []
|
||||||
|
|
||||||
|
RESULT = {
|
||||||
|
"v1": PATTERNS_V1,
|
||||||
|
"v2": PATTERNS_V2,
|
||||||
|
}
|
||||||
|
|
||||||
|
class CallVisitor(ast.NodeVisitor):
|
||||||
|
def visit_Call(self, node):
|
||||||
|
if isinstance(node.func, ast.Name):
|
||||||
|
name = node.func.id
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
if name == "client_path_patterns":
|
||||||
|
PATTERNS_V1.append(node.args[0].s)
|
||||||
|
elif name == "client_v2_patterns":
|
||||||
|
PATTERNS_V2.append(node.args[0].s)
|
||||||
|
|
||||||
|
|
||||||
|
def find_patterns_in_code(input_code):
|
||||||
|
input_ast = ast.parse(input_code)
|
||||||
|
visitor = CallVisitor()
|
||||||
|
visitor.visit(input_ast)
|
||||||
|
|
||||||
|
|
||||||
|
def find_patterns_in_file(filepath):
|
||||||
|
with open(filepath) as f:
|
||||||
|
find_patterns_in_code(f.read())
|
||||||
|
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(description='Find url patterns.')
|
||||||
|
|
||||||
|
parser.add_argument(
|
||||||
|
"directories", nargs='+', metavar="DIR",
|
||||||
|
help="Directories to search for definitions"
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
for directory in args.directories:
|
||||||
|
for root, dirs, files in os.walk(directory):
|
||||||
|
for filename in files:
|
||||||
|
if filename.endswith(".py"):
|
||||||
|
filepath = os.path.join(root, filename)
|
||||||
|
find_patterns_in_file(filepath)
|
||||||
|
|
||||||
|
PATTERNS_V1.sort()
|
||||||
|
PATTERNS_V2.sort()
|
||||||
|
|
||||||
|
yaml.dump(RESULT, sys.stdout, default_flow_style=False)
|
||||||
67
scripts-dev/tail-synapse.py
Normal file
67
scripts-dev/tail-synapse.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
import requests
|
||||||
|
import collections
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
|
||||||
|
Entry = collections.namedtuple("Entry", "name position rows")
|
||||||
|
|
||||||
|
ROW_TYPES = {}
|
||||||
|
|
||||||
|
|
||||||
|
def row_type_for_columns(name, column_names):
|
||||||
|
column_names = tuple(column_names)
|
||||||
|
row_type = ROW_TYPES.get((name, column_names))
|
||||||
|
if row_type is None:
|
||||||
|
row_type = collections.namedtuple(name, column_names)
|
||||||
|
ROW_TYPES[(name, column_names)] = row_type
|
||||||
|
return row_type
|
||||||
|
|
||||||
|
|
||||||
|
def parse_response(content):
|
||||||
|
streams = json.loads(content)
|
||||||
|
result = {}
|
||||||
|
for name, value in streams.items():
|
||||||
|
row_type = row_type_for_columns(name, value["field_names"])
|
||||||
|
position = value["position"]
|
||||||
|
rows = [row_type(*row) for row in value["rows"]]
|
||||||
|
result[name] = Entry(name, position, rows)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def replicate(server, streams):
|
||||||
|
return parse_response(requests.get(
|
||||||
|
server + "/_synapse/replication",
|
||||||
|
verify=False,
|
||||||
|
params=streams
|
||||||
|
).content)
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
server = sys.argv[1]
|
||||||
|
|
||||||
|
streams = None
|
||||||
|
while not streams:
|
||||||
|
try:
|
||||||
|
streams = {
|
||||||
|
row.name: row.position
|
||||||
|
for row in replicate(server, {"streams":"-1"})["streams"].rows
|
||||||
|
}
|
||||||
|
except requests.exceptions.ConnectionError as e:
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
results = replicate(server, streams)
|
||||||
|
except:
|
||||||
|
sys.stdout.write("connection_lost("+ repr(streams) + ")\n")
|
||||||
|
break
|
||||||
|
for update in results.values():
|
||||||
|
for row in update.rows:
|
||||||
|
sys.stdout.write(repr(row) + "\n")
|
||||||
|
streams[update.name] = update.position
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
if __name__=='__main__':
|
||||||
|
main()
|
||||||
55
scripts/hash_password
Executable file
55
scripts/hash_password
Executable file
@@ -0,0 +1,55 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import bcrypt
|
||||||
|
import getpass
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
bcrypt_rounds=12
|
||||||
|
password_pepper = ""
|
||||||
|
|
||||||
|
def prompt_for_pass():
|
||||||
|
password = getpass.getpass("Password: ")
|
||||||
|
|
||||||
|
if not password:
|
||||||
|
raise Exception("Password cannot be blank.")
|
||||||
|
|
||||||
|
confirm_password = getpass.getpass("Confirm password: ")
|
||||||
|
|
||||||
|
if password != confirm_password:
|
||||||
|
raise Exception("Passwords do not match.")
|
||||||
|
|
||||||
|
return password
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Calculate the hash of a new password, so that passwords"
|
||||||
|
" can be reset")
|
||||||
|
parser.add_argument(
|
||||||
|
"-p", "--password",
|
||||||
|
default=None,
|
||||||
|
help="New password for user. Will prompt if omitted.",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-c", "--config",
|
||||||
|
type=argparse.FileType('r'),
|
||||||
|
help="Path to server config file. Used to read in bcrypt_rounds and password_pepper.",
|
||||||
|
)
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
if "config" in args and args.config:
|
||||||
|
config = yaml.safe_load(args.config)
|
||||||
|
bcrypt_rounds = config.get("bcrypt_rounds", bcrypt_rounds)
|
||||||
|
password_config = config.get("password_config", {})
|
||||||
|
password_pepper = password_config.get("pepper", password_pepper)
|
||||||
|
password = args.password
|
||||||
|
|
||||||
|
if not password:
|
||||||
|
password = prompt_for_pass()
|
||||||
|
|
||||||
|
print bcrypt.hashpw(password + password_pepper, bcrypt.gensalt(bcrypt_rounds))
|
||||||
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -25,18 +25,26 @@ import urllib2
|
|||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
|
|
||||||
def request_registration(user, password, server_location, shared_secret):
|
def request_registration(user, password, server_location, shared_secret, admin=False):
|
||||||
mac = hmac.new(
|
mac = hmac.new(
|
||||||
key=shared_secret,
|
key=shared_secret,
|
||||||
msg=user,
|
|
||||||
digestmod=hashlib.sha1,
|
digestmod=hashlib.sha1,
|
||||||
).hexdigest()
|
)
|
||||||
|
|
||||||
|
mac.update(user)
|
||||||
|
mac.update("\x00")
|
||||||
|
mac.update(password)
|
||||||
|
mac.update("\x00")
|
||||||
|
mac.update("admin" if admin else "notadmin")
|
||||||
|
|
||||||
|
mac = mac.hexdigest()
|
||||||
|
|
||||||
data = {
|
data = {
|
||||||
"user": user,
|
"user": user,
|
||||||
"password": password,
|
"password": password,
|
||||||
"mac": mac,
|
"mac": mac,
|
||||||
"type": "org.matrix.login.shared_secret",
|
"type": "org.matrix.login.shared_secret",
|
||||||
|
"admin": admin,
|
||||||
}
|
}
|
||||||
|
|
||||||
server_location = server_location.rstrip("/")
|
server_location = server_location.rstrip("/")
|
||||||
@@ -68,7 +76,7 @@ def request_registration(user, password, server_location, shared_secret):
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
def register_new_user(user, password, server_location, shared_secret):
|
def register_new_user(user, password, server_location, shared_secret, admin):
|
||||||
if not user:
|
if not user:
|
||||||
try:
|
try:
|
||||||
default_user = getpass.getuser()
|
default_user = getpass.getuser()
|
||||||
@@ -99,7 +107,14 @@ def register_new_user(user, password, server_location, shared_secret):
|
|||||||
print "Passwords do not match"
|
print "Passwords do not match"
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
request_registration(user, password, server_location, shared_secret)
|
if not admin:
|
||||||
|
admin = raw_input("Make admin [no]: ")
|
||||||
|
if admin in ("y", "yes", "true"):
|
||||||
|
admin = True
|
||||||
|
else:
|
||||||
|
admin = False
|
||||||
|
|
||||||
|
request_registration(user, password, server_location, shared_secret, bool(admin))
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
@@ -119,6 +134,11 @@ if __name__ == "__main__":
|
|||||||
default=None,
|
default=None,
|
||||||
help="New password for user. Will prompt if omitted.",
|
help="New password for user. Will prompt if omitted.",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-a", "--admin",
|
||||||
|
action="store_true",
|
||||||
|
help="Register new user as an admin. Will prompt if omitted.",
|
||||||
|
)
|
||||||
|
|
||||||
group = parser.add_mutually_exclusive_group(required=True)
|
group = parser.add_mutually_exclusive_group(required=True)
|
||||||
group.add_argument(
|
group.add_argument(
|
||||||
@@ -151,4 +171,4 @@ if __name__ == "__main__":
|
|||||||
else:
|
else:
|
||||||
secret = args.shared_secret
|
secret = args.shared_secret
|
||||||
|
|
||||||
register_new_user(args.user, args.password, args.server_url, secret)
|
register_new_user(args.user, args.password, args.server_url, secret, args.admin)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -19,6 +19,7 @@ from twisted.enterprise import adbapi
|
|||||||
|
|
||||||
from synapse.storage._base import LoggingTransaction, SQLBaseStore
|
from synapse.storage._base import LoggingTransaction, SQLBaseStore
|
||||||
from synapse.storage.engines import create_engine
|
from synapse.storage.engines import create_engine
|
||||||
|
from synapse.storage.prepare_database import prepare_database
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import curses
|
import curses
|
||||||
@@ -37,6 +38,7 @@ BOOLEAN_COLUMNS = {
|
|||||||
"rooms": ["is_public"],
|
"rooms": ["is_public"],
|
||||||
"event_edges": ["is_state"],
|
"event_edges": ["is_state"],
|
||||||
"presence_list": ["accepted"],
|
"presence_list": ["accepted"],
|
||||||
|
"presence_stream": ["currently_active"],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -68,6 +70,7 @@ APPEND_ONLY_TABLES = [
|
|||||||
"state_groups_state",
|
"state_groups_state",
|
||||||
"event_to_state_groups",
|
"event_to_state_groups",
|
||||||
"rejections",
|
"rejections",
|
||||||
|
"event_search",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -211,6 +214,10 @@ class Porter(object):
|
|||||||
|
|
||||||
self.progress.add_table(table, postgres_size, table_size)
|
self.progress.add_table(table, postgres_size, table_size)
|
||||||
|
|
||||||
|
if table == "event_search":
|
||||||
|
yield self.handle_search_table(postgres_size, table_size, next_chunk)
|
||||||
|
return
|
||||||
|
|
||||||
select = (
|
select = (
|
||||||
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
|
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
|
||||||
% (table,)
|
% (table,)
|
||||||
@@ -251,6 +258,73 @@ class Porter(object):
|
|||||||
else:
|
else:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def handle_search_table(self, postgres_size, table_size, next_chunk):
|
||||||
|
select = (
|
||||||
|
"SELECT es.rowid, es.*, e.origin_server_ts, e.stream_ordering"
|
||||||
|
" FROM event_search as es"
|
||||||
|
" INNER JOIN events AS e USING (event_id, room_id)"
|
||||||
|
" WHERE es.rowid >= ?"
|
||||||
|
" ORDER BY es.rowid LIMIT ?"
|
||||||
|
)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
def r(txn):
|
||||||
|
txn.execute(select, (next_chunk, self.batch_size,))
|
||||||
|
rows = txn.fetchall()
|
||||||
|
headers = [column[0] for column in txn.description]
|
||||||
|
|
||||||
|
return headers, rows
|
||||||
|
|
||||||
|
headers, rows = yield self.sqlite_store.runInteraction("select", r)
|
||||||
|
|
||||||
|
if rows:
|
||||||
|
next_chunk = rows[-1][0] + 1
|
||||||
|
|
||||||
|
# We have to treat event_search differently since it has a
|
||||||
|
# different structure in the two different databases.
|
||||||
|
def insert(txn):
|
||||||
|
sql = (
|
||||||
|
"INSERT INTO event_search (event_id, room_id, key,"
|
||||||
|
" sender, vector, origin_server_ts, stream_ordering)"
|
||||||
|
" VALUES (?,?,?,?,to_tsvector('english', ?),?,?)"
|
||||||
|
)
|
||||||
|
|
||||||
|
rows_dict = [
|
||||||
|
dict(zip(headers, row))
|
||||||
|
for row in rows
|
||||||
|
]
|
||||||
|
|
||||||
|
txn.executemany(sql, [
|
||||||
|
(
|
||||||
|
row["event_id"],
|
||||||
|
row["room_id"],
|
||||||
|
row["key"],
|
||||||
|
row["sender"],
|
||||||
|
row["value"],
|
||||||
|
row["origin_server_ts"],
|
||||||
|
row["stream_ordering"],
|
||||||
|
)
|
||||||
|
for row in rows_dict
|
||||||
|
])
|
||||||
|
|
||||||
|
self.postgres_store._simple_update_one_txn(
|
||||||
|
txn,
|
||||||
|
table="port_from_sqlite3",
|
||||||
|
keyvalues={"table_name": "event_search"},
|
||||||
|
updatevalues={"rowid": next_chunk},
|
||||||
|
)
|
||||||
|
|
||||||
|
yield self.postgres_store.execute(insert)
|
||||||
|
|
||||||
|
postgres_size += len(rows)
|
||||||
|
|
||||||
|
self.progress.update("event_search", postgres_size)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
def setup_db(self, db_config, database_engine):
|
def setup_db(self, db_config, database_engine):
|
||||||
db_conn = database_engine.module.connect(
|
db_conn = database_engine.module.connect(
|
||||||
**{
|
**{
|
||||||
@@ -259,7 +333,7 @@ class Porter(object):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
database_engine.prepare_database(db_conn)
|
prepare_database(db_conn, database_engine, config=None)
|
||||||
|
|
||||||
db_conn.commit()
|
db_conn.commit()
|
||||||
|
|
||||||
@@ -276,8 +350,8 @@ class Porter(object):
|
|||||||
**self.postgres_config["args"]
|
**self.postgres_config["args"]
|
||||||
)
|
)
|
||||||
|
|
||||||
sqlite_engine = create_engine("sqlite3")
|
sqlite_engine = create_engine(sqlite_config)
|
||||||
postgres_engine = create_engine("psycopg2")
|
postgres_engine = create_engine(postgres_config)
|
||||||
|
|
||||||
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
|
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
|
||||||
self.postgres_store = Store(postgres_db_pool, postgres_engine)
|
self.postgres_store = Store(postgres_db_pool, postgres_engine)
|
||||||
|
|||||||
@@ -16,3 +16,7 @@ ignore =
|
|||||||
|
|
||||||
[flake8]
|
[flake8]
|
||||||
max-line-length = 90
|
max-line-length = 90
|
||||||
|
ignore = W503 ; W503 requires that binary operators be at the end, not start, of lines. Erik doesn't like it.
|
||||||
|
|
||||||
|
[pep8]
|
||||||
|
max-line-length = 90
|
||||||
|
|||||||
2
setup.py
2
setup.py
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# Copyright 2014 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -16,4 +16,4 @@
|
|||||||
""" This is a reference implementation of a Matrix home server.
|
""" This is a reference implementation of a Matrix home server.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__version__ = "0.10.0-r2"
|
__version__ = "0.16.1-r1"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014 - 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -13,14 +13,19 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
"""This module contains classes for authenticating the user."""
|
from canonicaljson import encode_canonical_json
|
||||||
|
from signedjson.key import decode_verify_key_bytes
|
||||||
|
from signedjson.sign import verify_signed_json, SignatureVerifyException
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes, Membership, JoinRules
|
from synapse.api.constants import EventTypes, Membership, JoinRules
|
||||||
from synapse.api.errors import AuthError, Codes, SynapseError
|
from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError
|
||||||
|
from synapse.types import Requester, UserID, get_domain_from_id
|
||||||
from synapse.util.logutils import log_function
|
from synapse.util.logutils import log_function
|
||||||
from synapse.types import UserID, EventID
|
from synapse.util.logcontext import preserve_context_over_fn
|
||||||
|
from synapse.util.metrics import Measure
|
||||||
|
from unpaddedbase64 import decode_base64
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import pymacaroons
|
import pymacaroons
|
||||||
@@ -31,18 +36,28 @@ logger = logging.getLogger(__name__)
|
|||||||
AuthEventTypes = (
|
AuthEventTypes = (
|
||||||
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
|
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
|
||||||
EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
|
EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
|
||||||
|
EventTypes.ThirdPartyInvite,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class Auth(object):
|
class Auth(object):
|
||||||
|
"""
|
||||||
|
FIXME: This class contains a mix of functions for authenticating users
|
||||||
|
of our client-server API and authenticating events added to room graphs.
|
||||||
|
"""
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
self.hs = hs
|
self.hs = hs
|
||||||
|
self.clock = hs.get_clock()
|
||||||
self.store = hs.get_datastore()
|
self.store = hs.get_datastore()
|
||||||
self.state = hs.get_state_handler()
|
self.state = hs.get_state_handler()
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
||||||
|
# Docs for these currently lives at
|
||||||
|
# https://github.com/matrix-org/matrix-doc/blob/master/drafts/macaroons_caveats.rst
|
||||||
|
# In addition, we have type == delete_pusher which grants access only to
|
||||||
|
# delete pushers.
|
||||||
self._KNOWN_CAVEAT_PREFIXES = set([
|
self._KNOWN_CAVEAT_PREFIXES = set([
|
||||||
"gen = ",
|
"gen = ",
|
||||||
|
"guest = ",
|
||||||
"type = ",
|
"type = ",
|
||||||
"time < ",
|
"time < ",
|
||||||
"user_id = ",
|
"user_id = ",
|
||||||
@@ -59,7 +74,9 @@ class Auth(object):
|
|||||||
Returns:
|
Returns:
|
||||||
True if the auth checks pass.
|
True if the auth checks pass.
|
||||||
"""
|
"""
|
||||||
try:
|
with Measure(self.clock, "auth.check"):
|
||||||
|
self.check_size_limits(event)
|
||||||
|
|
||||||
if not hasattr(event, "room_id"):
|
if not hasattr(event, "room_id"):
|
||||||
raise AuthError(500, "Event has no room_id: %s" % event)
|
raise AuthError(500, "Event has no room_id: %s" % event)
|
||||||
if auth_events is None:
|
if auth_events is None:
|
||||||
@@ -80,6 +97,15 @@ class Auth(object):
|
|||||||
"Room %r does not exist" % (event.room_id,)
|
"Room %r does not exist" % (event.room_id,)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
creating_domain = get_domain_from_id(event.room_id)
|
||||||
|
originating_domain = get_domain_from_id(event.sender)
|
||||||
|
if creating_domain != originating_domain:
|
||||||
|
if not self.can_federate(event, auth_events):
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"This room has been marked as unfederatable."
|
||||||
|
)
|
||||||
|
|
||||||
# FIXME: Temp hack
|
# FIXME: Temp hack
|
||||||
if event.type == EventTypes.Aliases:
|
if event.type == EventTypes.Aliases:
|
||||||
return True
|
return True
|
||||||
@@ -100,6 +126,24 @@ class Auth(object):
|
|||||||
return allowed
|
return allowed
|
||||||
|
|
||||||
self.check_event_sender_in_room(event, auth_events)
|
self.check_event_sender_in_room(event, auth_events)
|
||||||
|
|
||||||
|
# Special case to allow m.room.third_party_invite events wherever
|
||||||
|
# a user is allowed to issue invites. Fixes
|
||||||
|
# https://github.com/vector-im/vector-web/issues/1208 hopefully
|
||||||
|
if event.type == EventTypes.ThirdPartyInvite:
|
||||||
|
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||||
|
invite_level = self._get_named_level(auth_events, "invite", 0)
|
||||||
|
|
||||||
|
if user_level < invite_level:
|
||||||
|
raise AuthError(
|
||||||
|
403, (
|
||||||
|
"You cannot issue a third party invite for %s." %
|
||||||
|
(event.content.display_name,)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
self._can_send_event(event, auth_events)
|
self._can_send_event(event, auth_events)
|
||||||
|
|
||||||
if event.type == EventTypes.PowerLevels:
|
if event.type == EventTypes.PowerLevels:
|
||||||
@@ -109,13 +153,23 @@ class Auth(object):
|
|||||||
self.check_redaction(event, auth_events)
|
self.check_redaction(event, auth_events)
|
||||||
|
|
||||||
logger.debug("Allowing! %s", event)
|
logger.debug("Allowing! %s", event)
|
||||||
except AuthError as e:
|
|
||||||
logger.info(
|
def check_size_limits(self, event):
|
||||||
"Event auth check failed on event %s with msg: %s",
|
def too_big(field):
|
||||||
event, e.msg
|
raise EventSizeError("%s too large" % (field,))
|
||||||
)
|
|
||||||
logger.info("Denying! %s", event)
|
if len(event.user_id) > 255:
|
||||||
raise
|
too_big("user_id")
|
||||||
|
if len(event.room_id) > 255:
|
||||||
|
too_big("room_id")
|
||||||
|
if event.is_state() and len(event.state_key) > 255:
|
||||||
|
too_big("state_key")
|
||||||
|
if len(event.type) > 255:
|
||||||
|
too_big("type")
|
||||||
|
if len(event.event_id) > 255:
|
||||||
|
too_big("event_id")
|
||||||
|
if len(encode_canonical_json(event.get_pdu_json())) > 65536:
|
||||||
|
too_big("event")
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_joined_room(self, room_id, user_id, current_state=None):
|
def check_joined_room(self, room_id, user_id, current_state=None):
|
||||||
@@ -149,15 +203,11 @@ class Auth(object):
|
|||||||
defer.returnValue(member)
|
defer.returnValue(member)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_user_was_in_room(self, room_id, user_id, current_state=None):
|
def check_user_was_in_room(self, room_id, user_id):
|
||||||
"""Check if the user was in the room at some point.
|
"""Check if the user was in the room at some point.
|
||||||
Args:
|
Args:
|
||||||
room_id(str): The room to check.
|
room_id(str): The room to check.
|
||||||
user_id(str): The user to check.
|
user_id(str): The user to check.
|
||||||
current_state(dict): Optional map of the current state of the room.
|
|
||||||
If provided then that map is used to check whether they are a
|
|
||||||
member of the room. Otherwise the current membership is
|
|
||||||
loaded from the database.
|
|
||||||
Raises:
|
Raises:
|
||||||
AuthError if the user was never in the room.
|
AuthError if the user was never in the room.
|
||||||
Returns:
|
Returns:
|
||||||
@@ -165,17 +215,11 @@ class Auth(object):
|
|||||||
room. This will be the join event if they are currently joined to
|
room. This will be the join event if they are currently joined to
|
||||||
the room. This will be the leave event if they have left the room.
|
the room. This will be the leave event if they have left the room.
|
||||||
"""
|
"""
|
||||||
if current_state:
|
member = yield self.state.get_current_state(
|
||||||
member = current_state.get(
|
room_id=room_id,
|
||||||
(EventTypes.Member, user_id),
|
event_type=EventTypes.Member,
|
||||||
None
|
state_key=user_id
|
||||||
)
|
)
|
||||||
else:
|
|
||||||
member = yield self.state.get_current_state(
|
|
||||||
room_id=room_id,
|
|
||||||
event_type=EventTypes.Member,
|
|
||||||
state_key=user_id
|
|
||||||
)
|
|
||||||
membership = member.membership if member else None
|
membership = member.membership if member else None
|
||||||
|
|
||||||
if membership not in (Membership.JOIN, Membership.LEAVE):
|
if membership not in (Membership.JOIN, Membership.LEAVE):
|
||||||
@@ -183,6 +227,13 @@ class Auth(object):
|
|||||||
user_id, room_id
|
user_id, room_id
|
||||||
))
|
))
|
||||||
|
|
||||||
|
if membership == Membership.LEAVE:
|
||||||
|
forgot = yield self.store.did_forget(user_id, room_id)
|
||||||
|
if forgot:
|
||||||
|
raise AuthError(403, "User %s not in room %s" % (
|
||||||
|
user_id, room_id
|
||||||
|
))
|
||||||
|
|
||||||
defer.returnValue(member)
|
defer.returnValue(member)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@@ -192,7 +243,7 @@ class Auth(object):
|
|||||||
for event in curr_state.values():
|
for event in curr_state.values():
|
||||||
if event.type == EventTypes.Member:
|
if event.type == EventTypes.Member:
|
||||||
try:
|
try:
|
||||||
if UserID.from_string(event.state_key).domain != host:
|
if get_domain_from_id(event.state_key) != host:
|
||||||
continue
|
continue
|
||||||
except:
|
except:
|
||||||
logger.warn("state_key not user_id: %s", event.state_key)
|
logger.warn("state_key not user_id: %s", event.state_key)
|
||||||
@@ -219,6 +270,11 @@ class Auth(object):
|
|||||||
user_id, room_id, repr(member)
|
user_id, room_id, repr(member)
|
||||||
))
|
))
|
||||||
|
|
||||||
|
def can_federate(self, event, auth_events):
|
||||||
|
creation_event = auth_events.get((EventTypes.Create, ""))
|
||||||
|
|
||||||
|
return creation_event.content.get("m.federate", True) is True
|
||||||
|
|
||||||
@log_function
|
@log_function
|
||||||
def is_membership_change_allowed(self, event, auth_events):
|
def is_membership_change_allowed(self, event, auth_events):
|
||||||
membership = event.content["membership"]
|
membership = event.content["membership"]
|
||||||
@@ -234,6 +290,15 @@ class Auth(object):
|
|||||||
|
|
||||||
target_user_id = event.state_key
|
target_user_id = event.state_key
|
||||||
|
|
||||||
|
creating_domain = get_domain_from_id(event.room_id)
|
||||||
|
target_domain = get_domain_from_id(target_user_id)
|
||||||
|
if creating_domain != target_domain:
|
||||||
|
if not self.can_federate(event, auth_events):
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"This room has been marked as unfederatable."
|
||||||
|
)
|
||||||
|
|
||||||
# get info about the caller
|
# get info about the caller
|
||||||
key = (EventTypes.Member, event.user_id, )
|
key = (EventTypes.Member, event.user_id, )
|
||||||
caller = auth_events.get(key)
|
caller = auth_events.get(key)
|
||||||
@@ -279,8 +344,17 @@ class Auth(object):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if Membership.INVITE == membership and "third_party_invite" in event.content:
|
||||||
|
if not self._verify_third_party_invite(event, auth_events):
|
||||||
|
raise AuthError(403, "You are not invited to this room.")
|
||||||
|
return True
|
||||||
|
|
||||||
if Membership.JOIN != membership:
|
if Membership.JOIN != membership:
|
||||||
# JOIN is the only action you can perform if you're not in the room
|
if (caller_invited
|
||||||
|
and Membership.LEAVE == membership
|
||||||
|
and target_user_id == event.user_id):
|
||||||
|
return True
|
||||||
|
|
||||||
if not caller_in_room: # caller isn't joined
|
if not caller_in_room: # caller isn't joined
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
403,
|
403,
|
||||||
@@ -344,6 +418,81 @@ class Auth(object):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def _verify_third_party_invite(self, event, auth_events):
|
||||||
|
"""
|
||||||
|
Validates that the invite event is authorized by a previous third-party invite.
|
||||||
|
|
||||||
|
Checks that the public key, and keyserver, match those in the third party invite,
|
||||||
|
and that the invite event has a signature issued using that public key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event: The m.room.member join event being validated.
|
||||||
|
auth_events: All relevant previous context events which may be used
|
||||||
|
for authorization decisions.
|
||||||
|
|
||||||
|
Return:
|
||||||
|
True if the event fulfills the expectations of a previous third party
|
||||||
|
invite event.
|
||||||
|
"""
|
||||||
|
if "third_party_invite" not in event.content:
|
||||||
|
return False
|
||||||
|
if "signed" not in event.content["third_party_invite"]:
|
||||||
|
return False
|
||||||
|
signed = event.content["third_party_invite"]["signed"]
|
||||||
|
for key in {"mxid", "token"}:
|
||||||
|
if key not in signed:
|
||||||
|
return False
|
||||||
|
|
||||||
|
token = signed["token"]
|
||||||
|
|
||||||
|
invite_event = auth_events.get(
|
||||||
|
(EventTypes.ThirdPartyInvite, token,)
|
||||||
|
)
|
||||||
|
if not invite_event:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if event.user_id != invite_event.user_id:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if signed["mxid"] != event.state_key:
|
||||||
|
return False
|
||||||
|
if signed["token"] != token:
|
||||||
|
return False
|
||||||
|
|
||||||
|
for public_key_object in self.get_public_keys(invite_event):
|
||||||
|
public_key = public_key_object["public_key"]
|
||||||
|
try:
|
||||||
|
for server, signature_block in signed["signatures"].items():
|
||||||
|
for key_name, encoded_signature in signature_block.items():
|
||||||
|
if not key_name.startswith("ed25519:"):
|
||||||
|
continue
|
||||||
|
verify_key = decode_verify_key_bytes(
|
||||||
|
key_name,
|
||||||
|
decode_base64(public_key)
|
||||||
|
)
|
||||||
|
verify_signed_json(signed, server, verify_key)
|
||||||
|
|
||||||
|
# We got the public key from the invite, so we know that the
|
||||||
|
# correct server signed the signed bundle.
|
||||||
|
# The caller is responsible for checking that the signing
|
||||||
|
# server has not revoked that public key.
|
||||||
|
return True
|
||||||
|
except (KeyError, SignatureVerifyException,):
|
||||||
|
continue
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_public_keys(self, invite_event):
|
||||||
|
public_keys = []
|
||||||
|
if "public_key" in invite_event.content:
|
||||||
|
o = {
|
||||||
|
"public_key": invite_event.content["public_key"],
|
||||||
|
}
|
||||||
|
if "key_validity_url" in invite_event.content:
|
||||||
|
o["key_validity_url"] = invite_event.content["key_validity_url"]
|
||||||
|
public_keys.append(o)
|
||||||
|
public_keys.extend(invite_event.content.get("public_keys", []))
|
||||||
|
return public_keys
|
||||||
|
|
||||||
def _get_power_level_event(self, auth_events):
|
def _get_power_level_event(self, auth_events):
|
||||||
key = (EventTypes.PowerLevels, "", )
|
key = (EventTypes.PowerLevels, "", )
|
||||||
return auth_events.get(key)
|
return auth_events.get(key)
|
||||||
@@ -382,7 +531,7 @@ class Auth(object):
|
|||||||
return default
|
return default
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_user_by_req(self, request):
|
def get_user_by_req(self, request, allow_guest=False, rights="access"):
|
||||||
""" Get a registered user's ID.
|
""" Get a registered user's ID.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -396,38 +545,18 @@ class Auth(object):
|
|||||||
"""
|
"""
|
||||||
# Can optionally look elsewhere in the request (e.g. headers)
|
# Can optionally look elsewhere in the request (e.g. headers)
|
||||||
try:
|
try:
|
||||||
access_token = request.args["access_token"][0]
|
user_id = yield self._get_appservice_user_id(request.args)
|
||||||
|
if user_id:
|
||||||
# Check for application service tokens with a user_id override
|
|
||||||
try:
|
|
||||||
app_service = yield self.store.get_app_service_by_token(
|
|
||||||
access_token
|
|
||||||
)
|
|
||||||
if not app_service:
|
|
||||||
raise KeyError
|
|
||||||
|
|
||||||
user_id = app_service.sender
|
|
||||||
if "user_id" in request.args:
|
|
||||||
user_id = request.args["user_id"][0]
|
|
||||||
if not app_service.is_interested_in_user(user_id):
|
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"Application service cannot masquerade as this user."
|
|
||||||
)
|
|
||||||
|
|
||||||
if not user_id:
|
|
||||||
raise KeyError
|
|
||||||
|
|
||||||
request.authenticated_entity = user_id
|
request.authenticated_entity = user_id
|
||||||
|
defer.returnValue(
|
||||||
|
Requester(UserID.from_string(user_id), "", False)
|
||||||
|
)
|
||||||
|
|
||||||
defer.returnValue((UserID.from_string(user_id), ""))
|
access_token = request.args["access_token"][0]
|
||||||
return
|
user_info = yield self.get_user_by_access_token(access_token, rights)
|
||||||
except KeyError:
|
|
||||||
pass # normal users won't have the user_id query parameter set.
|
|
||||||
|
|
||||||
user_info = yield self._get_user_by_access_token(access_token)
|
|
||||||
user = user_info["user"]
|
user = user_info["user"]
|
||||||
token_id = user_info["token_id"]
|
token_id = user_info["token_id"]
|
||||||
|
is_guest = user_info["is_guest"]
|
||||||
|
|
||||||
ip_addr = self.hs.get_ip_from_request(request)
|
ip_addr = self.hs.get_ip_from_request(request)
|
||||||
user_agent = request.requestHeaders.getRawHeaders(
|
user_agent = request.requestHeaders.getRawHeaders(
|
||||||
@@ -435,16 +564,22 @@ class Auth(object):
|
|||||||
default=[""]
|
default=[""]
|
||||||
)[0]
|
)[0]
|
||||||
if user and access_token and ip_addr:
|
if user and access_token and ip_addr:
|
||||||
self.store.insert_client_ip(
|
preserve_context_over_fn(
|
||||||
|
self.store.insert_client_ip,
|
||||||
user=user,
|
user=user,
|
||||||
access_token=access_token,
|
access_token=access_token,
|
||||||
ip=ip_addr,
|
ip=ip_addr,
|
||||||
user_agent=user_agent
|
user_agent=user_agent
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if is_guest and not allow_guest:
|
||||||
|
raise AuthError(
|
||||||
|
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
||||||
|
)
|
||||||
|
|
||||||
request.authenticated_entity = user.to_string()
|
request.authenticated_entity = user.to_string()
|
||||||
|
|
||||||
defer.returnValue((user, token_id,))
|
defer.returnValue(Requester(user, token_id, is_guest))
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
||||||
@@ -452,7 +587,34 @@ class Auth(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _get_user_by_access_token(self, token):
|
def _get_appservice_user_id(self, request_args):
|
||||||
|
app_service = yield self.store.get_app_service_by_token(
|
||||||
|
request_args["access_token"][0]
|
||||||
|
)
|
||||||
|
if app_service is None:
|
||||||
|
defer.returnValue(None)
|
||||||
|
|
||||||
|
if "user_id" not in request_args:
|
||||||
|
defer.returnValue(app_service.sender)
|
||||||
|
|
||||||
|
user_id = request_args["user_id"][0]
|
||||||
|
if app_service.sender == user_id:
|
||||||
|
defer.returnValue(app_service.sender)
|
||||||
|
|
||||||
|
if not app_service.is_interested_in_user(user_id):
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"Application service cannot masquerade as this user."
|
||||||
|
)
|
||||||
|
if not (yield self.store.get_user_by_id(user_id)):
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"Application service has not registered this user"
|
||||||
|
)
|
||||||
|
defer.returnValue(user_id)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def get_user_by_access_token(self, token, rights="access"):
|
||||||
""" Get a registered user's ID.
|
""" Get a registered user's ID.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -463,7 +625,7 @@ class Auth(object):
|
|||||||
AuthError if no user by that token exists or the token is invalid.
|
AuthError if no user by that token exists or the token is invalid.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
ret = yield self._get_user_from_macaroon(token)
|
ret = yield self.get_user_from_macaroon(token, rights)
|
||||||
except AuthError:
|
except AuthError:
|
||||||
# TODO(daniel): Remove this fallback when all existing access tokens
|
# TODO(daniel): Remove this fallback when all existing access tokens
|
||||||
# have been re-issued as macaroons.
|
# have been re-issued as macaroons.
|
||||||
@@ -471,49 +633,92 @@ class Auth(object):
|
|||||||
defer.returnValue(ret)
|
defer.returnValue(ret)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _get_user_from_macaroon(self, macaroon_str):
|
def get_user_from_macaroon(self, macaroon_str, rights="access"):
|
||||||
try:
|
try:
|
||||||
macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
|
macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
|
||||||
self._validate_macaroon(macaroon)
|
|
||||||
|
|
||||||
user_prefix = "user_id = "
|
user_prefix = "user_id = "
|
||||||
|
user = None
|
||||||
|
user_id = None
|
||||||
|
guest = False
|
||||||
for caveat in macaroon.caveats:
|
for caveat in macaroon.caveats:
|
||||||
if caveat.caveat_id.startswith(user_prefix):
|
if caveat.caveat_id.startswith(user_prefix):
|
||||||
user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
|
user_id = caveat.caveat_id[len(user_prefix):]
|
||||||
# This codepath exists so that we can actually return a
|
user = UserID.from_string(user_id)
|
||||||
# token ID, because we use token IDs in place of device
|
elif caveat.caveat_id == "guest = true":
|
||||||
# identifiers throughout the codebase.
|
guest = True
|
||||||
# TODO(daniel): Remove this fallback when device IDs are
|
|
||||||
# properly implemented.
|
self.validate_macaroon(
|
||||||
ret = yield self._look_up_user_by_access_token(macaroon_str)
|
macaroon, rights, self.hs.config.expire_access_token,
|
||||||
if ret["user"] != user:
|
user_id=user_id,
|
||||||
logger.error(
|
|
||||||
"Macaroon user (%s) != DB user (%s)",
|
|
||||||
user,
|
|
||||||
ret["user"]
|
|
||||||
)
|
|
||||||
raise AuthError(
|
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
|
||||||
"User mismatch in macaroon",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
|
||||||
defer.returnValue(ret)
|
|
||||||
raise AuthError(
|
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if user is None:
|
||||||
|
raise AuthError(
|
||||||
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
|
||||||
|
errcode=Codes.UNKNOWN_TOKEN
|
||||||
|
)
|
||||||
|
|
||||||
|
if guest:
|
||||||
|
ret = {
|
||||||
|
"user": user,
|
||||||
|
"is_guest": True,
|
||||||
|
"token_id": None,
|
||||||
|
}
|
||||||
|
elif rights == "delete_pusher":
|
||||||
|
# We don't store these tokens in the database
|
||||||
|
ret = {
|
||||||
|
"user": user,
|
||||||
|
"is_guest": False,
|
||||||
|
"token_id": None,
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
# This codepath exists so that we can actually return a
|
||||||
|
# token ID, because we use token IDs in place of device
|
||||||
|
# identifiers throughout the codebase.
|
||||||
|
# TODO(daniel): Remove this fallback when device IDs are
|
||||||
|
# properly implemented.
|
||||||
|
ret = yield self._look_up_user_by_access_token(macaroon_str)
|
||||||
|
if ret["user"] != user:
|
||||||
|
logger.error(
|
||||||
|
"Macaroon user (%s) != DB user (%s)",
|
||||||
|
user,
|
||||||
|
ret["user"]
|
||||||
|
)
|
||||||
|
raise AuthError(
|
||||||
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
|
"User mismatch in macaroon",
|
||||||
|
errcode=Codes.UNKNOWN_TOKEN
|
||||||
|
)
|
||||||
|
defer.returnValue(ret)
|
||||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN
|
||||||
)
|
)
|
||||||
|
|
||||||
def _validate_macaroon(self, macaroon):
|
def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id):
|
||||||
|
"""
|
||||||
|
validate that a Macaroon is understood by and was signed by this server.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
macaroon(pymacaroons.Macaroon): The macaroon to validate
|
||||||
|
type_string(str): The kind of token required (e.g. "access", "refresh",
|
||||||
|
"delete_pusher")
|
||||||
|
verify_expiry(bool): Whether to verify whether the macaroon has expired.
|
||||||
|
This should really always be True, but no clients currently implement
|
||||||
|
token refresh, so we can't enforce expiry yet.
|
||||||
|
"""
|
||||||
v = pymacaroons.Verifier()
|
v = pymacaroons.Verifier()
|
||||||
v.satisfy_exact("gen = 1")
|
v.satisfy_exact("gen = 1")
|
||||||
v.satisfy_exact("type = access")
|
v.satisfy_exact("type = " + type_string)
|
||||||
v.satisfy_general(lambda c: c.startswith("user_id = "))
|
v.satisfy_exact("user_id = %s" % user_id)
|
||||||
v.satisfy_general(self._verify_expiry)
|
v.satisfy_exact("guest = true")
|
||||||
|
if verify_expiry:
|
||||||
|
v.satisfy_general(self._verify_expiry)
|
||||||
|
else:
|
||||||
|
v.satisfy_general(lambda c: c.startswith("time < "))
|
||||||
|
|
||||||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
||||||
|
|
||||||
v = pymacaroons.Verifier()
|
v = pymacaroons.Verifier()
|
||||||
@@ -524,9 +729,6 @@ class Auth(object):
|
|||||||
prefix = "time < "
|
prefix = "time < "
|
||||||
if not caveat.startswith(prefix):
|
if not caveat.startswith(prefix):
|
||||||
return False
|
return False
|
||||||
# TODO(daniel): Enable expiry check when clients actually know how to
|
|
||||||
# refresh tokens. (And remember to enable the tests)
|
|
||||||
return True
|
|
||||||
expiry = int(caveat[len(prefix):])
|
expiry = int(caveat[len(prefix):])
|
||||||
now = self.hs.get_clock().time_msec()
|
now = self.hs.get_clock().time_msec()
|
||||||
return now < expiry
|
return now < expiry
|
||||||
@@ -544,6 +746,7 @@ class Auth(object):
|
|||||||
def _look_up_user_by_access_token(self, token):
|
def _look_up_user_by_access_token(self, token):
|
||||||
ret = yield self.store.get_user_by_access_token(token)
|
ret = yield self.store.get_user_by_access_token(token)
|
||||||
if not ret:
|
if not ret:
|
||||||
|
logger.warn("Unrecognised access token - not in store: %s" % (token,))
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN
|
||||||
@@ -551,6 +754,7 @@ class Auth(object):
|
|||||||
user_info = {
|
user_info = {
|
||||||
"user": UserID.from_string(ret.get("name")),
|
"user": UserID.from_string(ret.get("name")),
|
||||||
"token_id": ret.get("token_id", None),
|
"token_id": ret.get("token_id", None),
|
||||||
|
"is_guest": False,
|
||||||
}
|
}
|
||||||
defer.returnValue(user_info)
|
defer.returnValue(user_info)
|
||||||
|
|
||||||
@@ -560,6 +764,7 @@ class Auth(object):
|
|||||||
token = request.args["access_token"][0]
|
token = request.args["access_token"][0]
|
||||||
service = yield self.store.get_app_service_by_token(token)
|
service = yield self.store.get_app_service_by_token(token)
|
||||||
if not service:
|
if not service:
|
||||||
|
logger.warn("Unrecognised appservice access token: %s" % (token,))
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
"Unrecognised access token.",
|
"Unrecognised access token.",
|
||||||
@@ -626,23 +831,32 @@ class Auth(object):
|
|||||||
else:
|
else:
|
||||||
if member_event:
|
if member_event:
|
||||||
auth_ids.append(member_event.event_id)
|
auth_ids.append(member_event.event_id)
|
||||||
|
|
||||||
|
if e_type == Membership.INVITE:
|
||||||
|
if "third_party_invite" in event.content:
|
||||||
|
key = (
|
||||||
|
EventTypes.ThirdPartyInvite,
|
||||||
|
event.content["third_party_invite"]["signed"]["token"]
|
||||||
|
)
|
||||||
|
third_party_invite = current_state.get(key)
|
||||||
|
if third_party_invite:
|
||||||
|
auth_ids.append(third_party_invite.event_id)
|
||||||
elif member_event:
|
elif member_event:
|
||||||
if member_event.content["membership"] == Membership.JOIN:
|
if member_event.content["membership"] == Membership.JOIN:
|
||||||
auth_ids.append(member_event.event_id)
|
auth_ids.append(member_event.event_id)
|
||||||
|
|
||||||
return auth_ids
|
return auth_ids
|
||||||
|
|
||||||
@log_function
|
def _get_send_level(self, etype, state_key, auth_events):
|
||||||
def _can_send_event(self, event, auth_events):
|
|
||||||
key = (EventTypes.PowerLevels, "", )
|
key = (EventTypes.PowerLevels, "", )
|
||||||
send_level_event = auth_events.get(key)
|
send_level_event = auth_events.get(key)
|
||||||
send_level = None
|
send_level = None
|
||||||
if send_level_event:
|
if send_level_event:
|
||||||
send_level = send_level_event.content.get("events", {}).get(
|
send_level = send_level_event.content.get("events", {}).get(
|
||||||
event.type
|
etype
|
||||||
)
|
)
|
||||||
if send_level is None:
|
if send_level is None:
|
||||||
if hasattr(event, "state_key"):
|
if state_key is not None:
|
||||||
send_level = send_level_event.content.get(
|
send_level = send_level_event.content.get(
|
||||||
"state_default", 50
|
"state_default", 50
|
||||||
)
|
)
|
||||||
@@ -656,6 +870,13 @@ class Auth(object):
|
|||||||
else:
|
else:
|
||||||
send_level = 0
|
send_level = 0
|
||||||
|
|
||||||
|
return send_level
|
||||||
|
|
||||||
|
@log_function
|
||||||
|
def _can_send_event(self, event, auth_events):
|
||||||
|
send_level = self._get_send_level(
|
||||||
|
event.type, event.get("state_key", None), auth_events
|
||||||
|
)
|
||||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||||
|
|
||||||
if user_level < send_level:
|
if user_level < send_level:
|
||||||
@@ -703,11 +924,11 @@ class Auth(object):
|
|||||||
|
|
||||||
redact_level = self._get_named_level(auth_events, "redact", 50)
|
redact_level = self._get_named_level(auth_events, "redact", 50)
|
||||||
|
|
||||||
if user_level > redact_level:
|
if user_level >= redact_level:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
redacter_domain = EventID.from_string(event.event_id).domain
|
redacter_domain = get_domain_from_id(event.event_id)
|
||||||
redactee_domain = EventID.from_string(event.redacts).domain
|
redactee_domain = get_domain_from_id(event.redacts)
|
||||||
if redacter_domain == redactee_domain:
|
if redacter_domain == redactee_domain:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -800,3 +1021,43 @@ class Auth(object):
|
|||||||
"You don't have permission to add ops level greater "
|
"You don't have permission to add ops level greater "
|
||||||
"than your own"
|
"than your own"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def check_can_change_room_list(self, room_id, user):
|
||||||
|
"""Check if the user is allowed to edit the room's entry in the
|
||||||
|
published room list.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
room_id (str)
|
||||||
|
user (UserID)
|
||||||
|
"""
|
||||||
|
|
||||||
|
is_admin = yield self.is_server_admin(user)
|
||||||
|
if is_admin:
|
||||||
|
defer.returnValue(True)
|
||||||
|
|
||||||
|
user_id = user.to_string()
|
||||||
|
yield self.check_joined_room(room_id, user_id)
|
||||||
|
|
||||||
|
# We currently require the user is a "moderator" in the room. We do this
|
||||||
|
# by checking if they would (theoretically) be able to change the
|
||||||
|
# m.room.aliases events
|
||||||
|
power_level_event = yield self.state.get_current_state(
|
||||||
|
room_id, EventTypes.PowerLevels, ""
|
||||||
|
)
|
||||||
|
|
||||||
|
auth_events = {}
|
||||||
|
if power_level_event:
|
||||||
|
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
||||||
|
|
||||||
|
send_level = self._get_send_level(
|
||||||
|
EventTypes.Aliases, "", auth_events
|
||||||
|
)
|
||||||
|
user_level = self._get_user_power_level(user_id, auth_events)
|
||||||
|
|
||||||
|
if user_level < send_level:
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"This server requires you to be a moderator in the room to"
|
||||||
|
" edit its room list entry"
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -32,7 +32,6 @@ class PresenceState(object):
|
|||||||
OFFLINE = u"offline"
|
OFFLINE = u"offline"
|
||||||
UNAVAILABLE = u"unavailable"
|
UNAVAILABLE = u"unavailable"
|
||||||
ONLINE = u"online"
|
ONLINE = u"online"
|
||||||
FREE_FOR_CHAT = u"free_for_chat"
|
|
||||||
|
|
||||||
|
|
||||||
class JoinRules(object):
|
class JoinRules(object):
|
||||||
@@ -63,10 +62,12 @@ class EventTypes(object):
|
|||||||
PowerLevels = "m.room.power_levels"
|
PowerLevels = "m.room.power_levels"
|
||||||
Aliases = "m.room.aliases"
|
Aliases = "m.room.aliases"
|
||||||
Redaction = "m.room.redaction"
|
Redaction = "m.room.redaction"
|
||||||
|
ThirdPartyInvite = "m.room.third_party_invite"
|
||||||
|
|
||||||
RoomHistoryVisibility = "m.room.history_visibility"
|
RoomHistoryVisibility = "m.room.history_visibility"
|
||||||
CanonicalAlias = "m.room.canonical_alias"
|
CanonicalAlias = "m.room.canonical_alias"
|
||||||
RoomAvatar = "m.room.avatar"
|
RoomAvatar = "m.room.avatar"
|
||||||
|
GuestAccess = "m.room.guest_access"
|
||||||
|
|
||||||
# These are used for validation
|
# These are used for validation
|
||||||
Message = "m.room.message"
|
Message = "m.room.message"
|
||||||
@@ -83,3 +84,4 @@ class RejectedReason(object):
|
|||||||
class RoomCreationPreset(object):
|
class RoomCreationPreset(object):
|
||||||
PRIVATE_CHAT = "private_chat"
|
PRIVATE_CHAT = "private_chat"
|
||||||
PUBLIC_CHAT = "public_chat"
|
PUBLIC_CHAT = "public_chat"
|
||||||
|
TRUSTED_PRIVATE_CHAT = "trusted_private_chat"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -29,10 +29,12 @@ class Codes(object):
|
|||||||
USER_IN_USE = "M_USER_IN_USE"
|
USER_IN_USE = "M_USER_IN_USE"
|
||||||
ROOM_IN_USE = "M_ROOM_IN_USE"
|
ROOM_IN_USE = "M_ROOM_IN_USE"
|
||||||
BAD_PAGINATION = "M_BAD_PAGINATION"
|
BAD_PAGINATION = "M_BAD_PAGINATION"
|
||||||
|
BAD_STATE = "M_BAD_STATE"
|
||||||
UNKNOWN = "M_UNKNOWN"
|
UNKNOWN = "M_UNKNOWN"
|
||||||
NOT_FOUND = "M_NOT_FOUND"
|
NOT_FOUND = "M_NOT_FOUND"
|
||||||
MISSING_TOKEN = "M_MISSING_TOKEN"
|
MISSING_TOKEN = "M_MISSING_TOKEN"
|
||||||
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
|
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
|
||||||
|
GUEST_ACCESS_FORBIDDEN = "M_GUEST_ACCESS_FORBIDDEN"
|
||||||
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
|
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
|
||||||
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
|
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
|
||||||
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
|
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
|
||||||
@@ -40,14 +42,15 @@ class Codes(object):
|
|||||||
TOO_LARGE = "M_TOO_LARGE"
|
TOO_LARGE = "M_TOO_LARGE"
|
||||||
EXCLUSIVE = "M_EXCLUSIVE"
|
EXCLUSIVE = "M_EXCLUSIVE"
|
||||||
THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
|
THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
|
||||||
THREEPID_IN_USE = "THREEPID_IN_USE"
|
THREEPID_IN_USE = "M_THREEPID_IN_USE"
|
||||||
|
INVALID_USERNAME = "M_INVALID_USERNAME"
|
||||||
|
SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED"
|
||||||
|
|
||||||
|
|
||||||
class CodeMessageException(RuntimeError):
|
class CodeMessageException(RuntimeError):
|
||||||
"""An exception with integer code and message string attributes."""
|
"""An exception with integer code and message string attributes."""
|
||||||
|
|
||||||
def __init__(self, code, msg):
|
def __init__(self, code, msg):
|
||||||
logger.info("%s: %s, %s", type(self).__name__, code, msg)
|
|
||||||
super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
|
super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
|
||||||
self.code = code
|
self.code = code
|
||||||
self.msg = msg
|
self.msg = msg
|
||||||
@@ -120,6 +123,15 @@ class AuthError(SynapseError):
|
|||||||
super(AuthError, self).__init__(*args, **kwargs)
|
super(AuthError, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class EventSizeError(SynapseError):
|
||||||
|
"""An error raised when an event is too big."""
|
||||||
|
|
||||||
|
def __init__(self, *args, **kwargs):
|
||||||
|
if "errcode" not in kwargs:
|
||||||
|
kwargs["errcode"] = Codes.TOO_LARGE
|
||||||
|
super(EventSizeError, self).__init__(413, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class EventStreamError(SynapseError):
|
class EventStreamError(SynapseError):
|
||||||
"""An error raised when there a problem with the event stream."""
|
"""An error raised when there a problem with the event stream."""
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -15,6 +15,10 @@
|
|||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.types import UserID, RoomID
|
from synapse.types import UserID, RoomID
|
||||||
|
|
||||||
|
from twisted.internet import defer
|
||||||
|
|
||||||
|
import ujson as json
|
||||||
|
|
||||||
|
|
||||||
class Filtering(object):
|
class Filtering(object):
|
||||||
|
|
||||||
@@ -22,20 +26,20 @@ class Filtering(object):
|
|||||||
super(Filtering, self).__init__()
|
super(Filtering, self).__init__()
|
||||||
self.store = hs.get_datastore()
|
self.store = hs.get_datastore()
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
def get_user_filter(self, user_localpart, filter_id):
|
def get_user_filter(self, user_localpart, filter_id):
|
||||||
result = self.store.get_user_filter(user_localpart, filter_id)
|
result = yield self.store.get_user_filter(user_localpart, filter_id)
|
||||||
result.addCallback(Filter)
|
defer.returnValue(FilterCollection(result))
|
||||||
return result
|
|
||||||
|
|
||||||
def add_user_filter(self, user_localpart, user_filter):
|
def add_user_filter(self, user_localpart, user_filter):
|
||||||
self._check_valid_filter(user_filter)
|
self.check_valid_filter(user_filter)
|
||||||
return self.store.add_user_filter(user_localpart, user_filter)
|
return self.store.add_user_filter(user_localpart, user_filter)
|
||||||
|
|
||||||
# TODO(paul): surely we should probably add a delete_user_filter or
|
# TODO(paul): surely we should probably add a delete_user_filter or
|
||||||
# replace_user_filter at some point? There's no REST API specified for
|
# replace_user_filter at some point? There's no REST API specified for
|
||||||
# them however
|
# them however
|
||||||
|
|
||||||
def _check_valid_filter(self, user_filter_json):
|
def check_valid_filter(self, user_filter_json):
|
||||||
"""Check if the provided filter is valid.
|
"""Check if the provided filter is valid.
|
||||||
|
|
||||||
This inspects all definitions contained within the filter.
|
This inspects all definitions contained within the filter.
|
||||||
@@ -50,11 +54,11 @@ class Filtering(object):
|
|||||||
# many definitions.
|
# many definitions.
|
||||||
|
|
||||||
top_level_definitions = [
|
top_level_definitions = [
|
||||||
"public_user_data", "private_user_data", "server_data"
|
"presence", "account_data"
|
||||||
]
|
]
|
||||||
|
|
||||||
room_level_definitions = [
|
room_level_definitions = [
|
||||||
"state", "events", "ephemeral"
|
"state", "timeline", "ephemeral", "account_data"
|
||||||
]
|
]
|
||||||
|
|
||||||
for key in top_level_definitions:
|
for key in top_level_definitions:
|
||||||
@@ -62,10 +66,29 @@ class Filtering(object):
|
|||||||
self._check_definition(user_filter_json[key])
|
self._check_definition(user_filter_json[key])
|
||||||
|
|
||||||
if "room" in user_filter_json:
|
if "room" in user_filter_json:
|
||||||
|
self._check_definition_room_lists(user_filter_json["room"])
|
||||||
for key in room_level_definitions:
|
for key in room_level_definitions:
|
||||||
if key in user_filter_json["room"]:
|
if key in user_filter_json["room"]:
|
||||||
self._check_definition(user_filter_json["room"][key])
|
self._check_definition(user_filter_json["room"][key])
|
||||||
|
|
||||||
|
def _check_definition_room_lists(self, definition):
|
||||||
|
"""Check that "rooms" and "not_rooms" are lists of room ids if they
|
||||||
|
are present
|
||||||
|
|
||||||
|
Args:
|
||||||
|
definition(dict): The filter definition
|
||||||
|
Raises:
|
||||||
|
SynapseError: If there was a problem with this definition.
|
||||||
|
"""
|
||||||
|
# check rooms are valid room IDs
|
||||||
|
room_id_keys = ["rooms", "not_rooms"]
|
||||||
|
for key in room_id_keys:
|
||||||
|
if key in definition:
|
||||||
|
if type(definition[key]) != list:
|
||||||
|
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||||
|
for room_id in definition[key]:
|
||||||
|
RoomID.from_string(room_id)
|
||||||
|
|
||||||
def _check_definition(self, definition):
|
def _check_definition(self, definition):
|
||||||
"""Check if the provided definition is valid.
|
"""Check if the provided definition is valid.
|
||||||
|
|
||||||
@@ -85,14 +108,7 @@ class Filtering(object):
|
|||||||
400, "Expected JSON object, not %s" % (definition,)
|
400, "Expected JSON object, not %s" % (definition,)
|
||||||
)
|
)
|
||||||
|
|
||||||
# check rooms are valid room IDs
|
self._check_definition_room_lists(definition)
|
||||||
room_id_keys = ["rooms", "not_rooms"]
|
|
||||||
for key in room_id_keys:
|
|
||||||
if key in definition:
|
|
||||||
if type(definition[key]) != list:
|
|
||||||
raise SynapseError(400, "Expected %s to be a list." % key)
|
|
||||||
for room_id in definition[key]:
|
|
||||||
RoomID.from_string(room_id)
|
|
||||||
|
|
||||||
# check senders are valid user IDs
|
# check senders are valid user IDs
|
||||||
user_id_keys = ["senders", "not_senders"]
|
user_id_keys = ["senders", "not_senders"]
|
||||||
@@ -114,116 +130,145 @@ class Filtering(object):
|
|||||||
if not isinstance(event_type, basestring):
|
if not isinstance(event_type, basestring):
|
||||||
raise SynapseError(400, "Event type should be a string")
|
raise SynapseError(400, "Event type should be a string")
|
||||||
|
|
||||||
if "format" in definition:
|
|
||||||
event_format = definition["format"]
|
|
||||||
if event_format not in ["federation", "events"]:
|
|
||||||
raise SynapseError(400, "Invalid format: %s" % (event_format,))
|
|
||||||
|
|
||||||
if "select" in definition:
|
class FilterCollection(object):
|
||||||
event_select_list = definition["select"]
|
def __init__(self, filter_json):
|
||||||
for select_key in event_select_list:
|
self._filter_json = filter_json
|
||||||
if select_key not in ["event_id", "origin_server_ts",
|
|
||||||
"thread_id", "content", "content.body"]:
|
|
||||||
raise SynapseError(400, "Bad select: %s" % (select_key,))
|
|
||||||
|
|
||||||
if ("bundle_updates" in definition and
|
room_filter_json = self._filter_json.get("room", {})
|
||||||
type(definition["bundle_updates"]) != bool):
|
|
||||||
raise SynapseError(400, "Bad bundle_updates: expected bool.")
|
self._room_filter = Filter({
|
||||||
|
k: v for k, v in room_filter_json.items()
|
||||||
|
if k in ("rooms", "not_rooms")
|
||||||
|
})
|
||||||
|
|
||||||
|
self._room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
|
||||||
|
self._room_state_filter = Filter(room_filter_json.get("state", {}))
|
||||||
|
self._room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {}))
|
||||||
|
self._room_account_data = Filter(room_filter_json.get("account_data", {}))
|
||||||
|
self._presence_filter = Filter(filter_json.get("presence", {}))
|
||||||
|
self._account_data = Filter(filter_json.get("account_data", {}))
|
||||||
|
|
||||||
|
self.include_leave = filter_json.get("room", {}).get(
|
||||||
|
"include_leave", False
|
||||||
|
)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
|
||||||
|
|
||||||
|
def get_filter_json(self):
|
||||||
|
return self._filter_json
|
||||||
|
|
||||||
|
def timeline_limit(self):
|
||||||
|
return self._room_timeline_filter.limit()
|
||||||
|
|
||||||
|
def presence_limit(self):
|
||||||
|
return self._presence_filter.limit()
|
||||||
|
|
||||||
|
def ephemeral_limit(self):
|
||||||
|
return self._room_ephemeral_filter.limit()
|
||||||
|
|
||||||
|
def filter_presence(self, events):
|
||||||
|
return self._presence_filter.filter(events)
|
||||||
|
|
||||||
|
def filter_account_data(self, events):
|
||||||
|
return self._account_data.filter(events)
|
||||||
|
|
||||||
|
def filter_room_state(self, events):
|
||||||
|
return self._room_state_filter.filter(self._room_filter.filter(events))
|
||||||
|
|
||||||
|
def filter_room_timeline(self, events):
|
||||||
|
return self._room_timeline_filter.filter(self._room_filter.filter(events))
|
||||||
|
|
||||||
|
def filter_room_ephemeral(self, events):
|
||||||
|
return self._room_ephemeral_filter.filter(self._room_filter.filter(events))
|
||||||
|
|
||||||
|
def filter_room_account_data(self, events):
|
||||||
|
return self._room_account_data.filter(self._room_filter.filter(events))
|
||||||
|
|
||||||
|
|
||||||
class Filter(object):
|
class Filter(object):
|
||||||
def __init__(self, filter_json):
|
def __init__(self, filter_json):
|
||||||
self.filter_json = filter_json
|
self.filter_json = filter_json
|
||||||
|
|
||||||
def filter_public_user_data(self, events):
|
def check(self, event):
|
||||||
return self._filter_on_key(events, ["public_user_data"])
|
"""Checks whether the filter matches the given event.
|
||||||
|
|
||||||
def filter_private_user_data(self, events):
|
|
||||||
return self._filter_on_key(events, ["private_user_data"])
|
|
||||||
|
|
||||||
def filter_room_state(self, events):
|
|
||||||
return self._filter_on_key(events, ["room", "state"])
|
|
||||||
|
|
||||||
def filter_room_events(self, events):
|
|
||||||
return self._filter_on_key(events, ["room", "events"])
|
|
||||||
|
|
||||||
def filter_room_ephemeral(self, events):
|
|
||||||
return self._filter_on_key(events, ["room", "ephemeral"])
|
|
||||||
|
|
||||||
def _filter_on_key(self, events, keys):
|
|
||||||
filter_json = self.filter_json
|
|
||||||
if not filter_json:
|
|
||||||
return events
|
|
||||||
|
|
||||||
try:
|
|
||||||
# extract the right definition from the filter
|
|
||||||
definition = filter_json
|
|
||||||
for key in keys:
|
|
||||||
definition = definition[key]
|
|
||||||
return self._filter_with_definition(events, definition)
|
|
||||||
except KeyError:
|
|
||||||
# return all events if definition isn't specified.
|
|
||||||
return events
|
|
||||||
|
|
||||||
def _filter_with_definition(self, events, definition):
|
|
||||||
return [e for e in events if self._passes_definition(definition, e)]
|
|
||||||
|
|
||||||
def _passes_definition(self, definition, event):
|
|
||||||
"""Check if the event passes through the given definition.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
definition(dict): The definition to check against.
|
|
||||||
event(Event): The event to check.
|
|
||||||
Returns:
|
Returns:
|
||||||
True if the event passes through the filter.
|
bool: True if the event matches
|
||||||
"""
|
"""
|
||||||
# Algorithm notes:
|
sender = event.get("sender", None)
|
||||||
# For each key in the definition, check the event meets the criteria:
|
if not sender:
|
||||||
# * For types: Literal match or prefix match (if ends with wildcard)
|
# Presence events have their 'sender' in content.user_id
|
||||||
# * For senders/rooms: Literal match only
|
content = event.get("content")
|
||||||
# * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
|
# account_data has been allowed to have non-dict content, so check type first
|
||||||
# and 'not_types' then it is treated as only being in 'not_types')
|
if isinstance(content, dict):
|
||||||
|
sender = content.get("user_id")
|
||||||
|
|
||||||
# room checks
|
return self.check_fields(
|
||||||
if hasattr(event, "room_id"):
|
event.get("room_id", None),
|
||||||
room_id = event.room_id
|
sender,
|
||||||
allow_rooms = definition.get("rooms", None)
|
event.get("type", None),
|
||||||
reject_rooms = definition.get("not_rooms", None)
|
)
|
||||||
if reject_rooms and room_id in reject_rooms:
|
|
||||||
return False
|
def check_fields(self, room_id, sender, event_type):
|
||||||
if allow_rooms and room_id not in allow_rooms:
|
"""Checks whether the filter matches the given event fields.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the event fields match
|
||||||
|
"""
|
||||||
|
literal_keys = {
|
||||||
|
"rooms": lambda v: room_id == v,
|
||||||
|
"senders": lambda v: sender == v,
|
||||||
|
"types": lambda v: _matches_wildcard(event_type, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
for name, match_func in literal_keys.items():
|
||||||
|
not_name = "not_%s" % (name,)
|
||||||
|
disallowed_values = self.filter_json.get(not_name, [])
|
||||||
|
if any(map(match_func, disallowed_values)):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# sender checks
|
allowed_values = self.filter_json.get(name, None)
|
||||||
if hasattr(event, "sender"):
|
if allowed_values is not None:
|
||||||
# Should we be including event.state_key for some event types?
|
if not any(map(match_func, allowed_values)):
|
||||||
sender = event.sender
|
|
||||||
allow_senders = definition.get("senders", None)
|
|
||||||
reject_senders = definition.get("not_senders", None)
|
|
||||||
if reject_senders and sender in reject_senders:
|
|
||||||
return False
|
|
||||||
if allow_senders and sender not in allow_senders:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# type checks
|
|
||||||
if "not_types" in definition:
|
|
||||||
for def_type in definition["not_types"]:
|
|
||||||
if self._event_matches_type(event, def_type):
|
|
||||||
return False
|
return False
|
||||||
if "types" in definition:
|
|
||||||
included = False
|
|
||||||
for def_type in definition["types"]:
|
|
||||||
if self._event_matches_type(event, def_type):
|
|
||||||
included = True
|
|
||||||
break
|
|
||||||
if not included:
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _event_matches_type(self, event, def_type):
|
def filter_rooms(self, room_ids):
|
||||||
if def_type.endswith("*"):
|
"""Apply the 'rooms' filter to a given list of rooms.
|
||||||
type_prefix = def_type[:-1]
|
|
||||||
return event.type.startswith(type_prefix)
|
Args:
|
||||||
else:
|
room_ids (list): A list of room_ids.
|
||||||
return event.type == def_type
|
|
||||||
|
Returns:
|
||||||
|
list: A list of room_ids that match the filter
|
||||||
|
"""
|
||||||
|
room_ids = set(room_ids)
|
||||||
|
|
||||||
|
disallowed_rooms = set(self.filter_json.get("not_rooms", []))
|
||||||
|
room_ids -= disallowed_rooms
|
||||||
|
|
||||||
|
allowed_rooms = self.filter_json.get("rooms", None)
|
||||||
|
if allowed_rooms is not None:
|
||||||
|
room_ids &= set(allowed_rooms)
|
||||||
|
|
||||||
|
return room_ids
|
||||||
|
|
||||||
|
def filter(self, events):
|
||||||
|
return filter(self.check, events)
|
||||||
|
|
||||||
|
def limit(self):
|
||||||
|
return self.filter_json.get("limit", 10)
|
||||||
|
|
||||||
|
|
||||||
|
def _matches_wildcard(actual_value, filter_value):
|
||||||
|
if filter_value.endswith("*"):
|
||||||
|
type_prefix = filter_value[:-1]
|
||||||
|
return actual_value.startswith(type_prefix)
|
||||||
|
else:
|
||||||
|
return actual_value == filter_value
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_FILTER_COLLECTION = FilterCollection({})
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,5 +23,6 @@ WEB_CLIENT_PREFIX = "/_matrix/client"
|
|||||||
CONTENT_REPO_PREFIX = "/_matrix/content"
|
CONTENT_REPO_PREFIX = "/_matrix/content"
|
||||||
SERVER_KEY_PREFIX = "/_matrix/key/v1"
|
SERVER_KEY_PREFIX = "/_matrix/key/v1"
|
||||||
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
|
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
|
||||||
MEDIA_PREFIX = "/_matrix/media/v1"
|
MEDIA_PREFIX = "/_matrix/media/r0"
|
||||||
|
LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"
|
||||||
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
|
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,3 +12,22 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
sys.dont_write_bytecode = True
|
||||||
|
|
||||||
|
from synapse.python_dependencies import (
|
||||||
|
check_requirements, MissingRequirementError
|
||||||
|
) # NOQA
|
||||||
|
|
||||||
|
try:
|
||||||
|
check_requirements()
|
||||||
|
except MissingRequirementError as e:
|
||||||
|
message = "\n".join([
|
||||||
|
"Missing Requirement: %s" % (e.message,),
|
||||||
|
"To install run:",
|
||||||
|
" pip install --upgrade --force \"%s\"" % (e.dependency,),
|
||||||
|
"",
|
||||||
|
])
|
||||||
|
sys.stderr.writelines(message)
|
||||||
|
sys.exit(1)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,73 +14,57 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import synapse
|
||||||
|
|
||||||
|
import gc
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
sys.dont_write_bytecode = True
|
from synapse.config._base import ConfigError
|
||||||
|
|
||||||
from synapse.python_dependencies import (
|
from synapse.python_dependencies import (
|
||||||
check_requirements, DEPENDENCY_LINKS, MissingRequirementError
|
check_requirements, DEPENDENCY_LINKS
|
||||||
)
|
)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
from synapse.rest import ClientRestResource
|
||||||
try:
|
|
||||||
check_requirements()
|
|
||||||
except MissingRequirementError as e:
|
|
||||||
message = "\n".join([
|
|
||||||
"Missing Requirement: %s" % (e.message,),
|
|
||||||
"To install run:",
|
|
||||||
" pip install --upgrade --force \"%s\"" % (e.dependency,),
|
|
||||||
"",
|
|
||||||
])
|
|
||||||
sys.stderr.writelines(message)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
||||||
from synapse.storage import (
|
from synapse.storage import are_all_users_on_domain
|
||||||
are_all_users_on_domain, UpgradeDatabaseException,
|
from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
|
||||||
)
|
|
||||||
|
|
||||||
from synapse.server import HomeServer
|
from synapse.server import HomeServer
|
||||||
|
|
||||||
|
|
||||||
from twisted.internet import reactor, task, defer
|
from twisted.internet import reactor, task, defer
|
||||||
from twisted.application import service
|
from twisted.application import service
|
||||||
from twisted.enterprise import adbapi
|
|
||||||
from twisted.web.resource import Resource, EncodingResourceWrapper
|
from twisted.web.resource import Resource, EncodingResourceWrapper
|
||||||
from twisted.web.static import File
|
from twisted.web.static import File
|
||||||
from twisted.web.server import Site, GzipEncoderFactory, Request
|
from twisted.web.server import GzipEncoderFactory
|
||||||
from synapse.http.server import JsonResource, RootRedirect
|
from synapse.http.server import RootRedirect
|
||||||
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
||||||
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
||||||
from synapse.rest.key.v1.server_key_resource import LocalKey
|
from synapse.rest.key.v1.server_key_resource import LocalKey
|
||||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||||
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
|
||||||
from synapse.api.urls import (
|
from synapse.api.urls import (
|
||||||
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
||||||
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
|
SERVER_KEY_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
|
||||||
SERVER_KEY_V2_PREFIX,
|
SERVER_KEY_V2_PREFIX,
|
||||||
)
|
)
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
from synapse.crypto import context_factory
|
from synapse.crypto import context_factory
|
||||||
from synapse.util.logcontext import LoggingContext
|
from synapse.util.logcontext import LoggingContext
|
||||||
from synapse.rest.client.v1 import ClientV1RestResource
|
|
||||||
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
|
||||||
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
||||||
|
from synapse.replication.resource import ReplicationResource, REPLICATION_PREFIX
|
||||||
|
from synapse.federation.transport.server import TransportLayerServer
|
||||||
|
|
||||||
|
from synapse.util.rlimit import change_resource_limit
|
||||||
|
from synapse.util.versionstring import get_version_string
|
||||||
|
from synapse.util.httpresourcetree import create_resource_tree
|
||||||
|
from synapse.util.manhole import manhole
|
||||||
|
|
||||||
|
from synapse.http.site import SynapseSite
|
||||||
|
|
||||||
from synapse import events
|
from synapse import events
|
||||||
|
|
||||||
from daemonize import Daemonize
|
from daemonize import Daemonize
|
||||||
import twisted.manhole.telnet
|
|
||||||
|
|
||||||
import synapse
|
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import resource
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("synapse.app.homeserver")
|
logger = logging.getLogger("synapse.app.homeserver")
|
||||||
|
|
||||||
@@ -89,81 +73,37 @@ def gz_wrap(r):
|
|||||||
return EncodingResourceWrapper(r, [GzipEncoderFactory()])
|
return EncodingResourceWrapper(r, [GzipEncoderFactory()])
|
||||||
|
|
||||||
|
|
||||||
|
def build_resource_for_web_client(hs):
|
||||||
|
webclient_path = hs.get_config().web_client_location
|
||||||
|
if not webclient_path:
|
||||||
|
try:
|
||||||
|
import syweb
|
||||||
|
except ImportError:
|
||||||
|
quit_with_error(
|
||||||
|
"Could not find a webclient.\n\n"
|
||||||
|
"Please either install the matrix-angular-sdk or configure\n"
|
||||||
|
"the location of the source to serve via the configuration\n"
|
||||||
|
"option `web_client_location`\n\n"
|
||||||
|
"To install the `matrix-angular-sdk` via pip, run:\n\n"
|
||||||
|
" pip install '%(dep)s'\n"
|
||||||
|
"\n"
|
||||||
|
"You can also disable hosting of the webclient via the\n"
|
||||||
|
"configuration option `web_client`\n"
|
||||||
|
% {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
|
||||||
|
)
|
||||||
|
syweb_path = os.path.dirname(syweb.__file__)
|
||||||
|
webclient_path = os.path.join(syweb_path, "webclient")
|
||||||
|
# GZip is disabled here due to
|
||||||
|
# https://twistedmatrix.com/trac/ticket/7678
|
||||||
|
# (It can stay enabled for the API resources: they call
|
||||||
|
# write() with the whole body and then finish() straight
|
||||||
|
# after and so do not trigger the bug.
|
||||||
|
# GzipFile was removed in commit 184ba09
|
||||||
|
# return GzipFile(webclient_path) # TODO configurable?
|
||||||
|
return File(webclient_path) # TODO configurable?
|
||||||
|
|
||||||
|
|
||||||
class SynapseHomeServer(HomeServer):
|
class SynapseHomeServer(HomeServer):
|
||||||
|
|
||||||
def build_http_client(self):
|
|
||||||
return MatrixFederationHttpClient(self)
|
|
||||||
|
|
||||||
def build_resource_for_client(self):
|
|
||||||
return ClientV1RestResource(self)
|
|
||||||
|
|
||||||
def build_resource_for_client_v2_alpha(self):
|
|
||||||
return ClientV2AlphaRestResource(self)
|
|
||||||
|
|
||||||
def build_resource_for_federation(self):
|
|
||||||
return JsonResource(self)
|
|
||||||
|
|
||||||
def build_resource_for_web_client(self):
|
|
||||||
webclient_path = self.get_config().web_client_location
|
|
||||||
if not webclient_path:
|
|
||||||
try:
|
|
||||||
import syweb
|
|
||||||
except ImportError:
|
|
||||||
quit_with_error(
|
|
||||||
"Could not find a webclient.\n\n"
|
|
||||||
"Please either install the matrix-angular-sdk or configure\n"
|
|
||||||
"the location of the source to serve via the configuration\n"
|
|
||||||
"option `web_client_location`\n\n"
|
|
||||||
"To install the `matrix-angular-sdk` via pip, run:\n\n"
|
|
||||||
" pip install '%(dep)s'\n"
|
|
||||||
"\n"
|
|
||||||
"You can also disable hosting of the webclient via the\n"
|
|
||||||
"configuration option `web_client`\n"
|
|
||||||
% {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
|
|
||||||
)
|
|
||||||
syweb_path = os.path.dirname(syweb.__file__)
|
|
||||||
webclient_path = os.path.join(syweb_path, "webclient")
|
|
||||||
# GZip is disabled here due to
|
|
||||||
# https://twistedmatrix.com/trac/ticket/7678
|
|
||||||
# (It can stay enabled for the API resources: they call
|
|
||||||
# write() with the whole body and then finish() straight
|
|
||||||
# after and so do not trigger the bug.
|
|
||||||
# GzipFile was removed in commit 184ba09
|
|
||||||
# return GzipFile(webclient_path) # TODO configurable?
|
|
||||||
return File(webclient_path) # TODO configurable?
|
|
||||||
|
|
||||||
def build_resource_for_static_content(self):
|
|
||||||
# This is old and should go away: not going to bother adding gzip
|
|
||||||
return File("static")
|
|
||||||
|
|
||||||
def build_resource_for_content_repo(self):
|
|
||||||
return ContentRepoResource(
|
|
||||||
self, self.config.uploads_path, self.auth, self.content_addr
|
|
||||||
)
|
|
||||||
|
|
||||||
def build_resource_for_media_repository(self):
|
|
||||||
return MediaRepositoryResource(self)
|
|
||||||
|
|
||||||
def build_resource_for_server_key(self):
|
|
||||||
return LocalKey(self)
|
|
||||||
|
|
||||||
def build_resource_for_server_key_v2(self):
|
|
||||||
return KeyApiV2Resource(self)
|
|
||||||
|
|
||||||
def build_resource_for_metrics(self):
|
|
||||||
if self.get_config().enable_metrics:
|
|
||||||
return MetricsResource(self)
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def build_db_pool(self):
|
|
||||||
name = self.db_config["name"]
|
|
||||||
|
|
||||||
return adbapi.ConnectionPool(
|
|
||||||
name,
|
|
||||||
**self.db_config.get("args", {})
|
|
||||||
)
|
|
||||||
|
|
||||||
def _listener_http(self, config, listener_config):
|
def _listener_http(self, config, listener_config):
|
||||||
port = listener_config["port"]
|
port = listener_config["port"]
|
||||||
bind_address = listener_config.get("bind_address", "")
|
bind_address = listener_config.get("bind_address", "")
|
||||||
@@ -173,53 +113,65 @@ class SynapseHomeServer(HomeServer):
|
|||||||
if tls and config.no_tls:
|
if tls and config.no_tls:
|
||||||
return
|
return
|
||||||
|
|
||||||
metrics_resource = self.get_resource_for_metrics()
|
|
||||||
|
|
||||||
resources = {}
|
resources = {}
|
||||||
for res in listener_config["resources"]:
|
for res in listener_config["resources"]:
|
||||||
for name in res["names"]:
|
for name in res["names"]:
|
||||||
if name == "client":
|
if name == "client":
|
||||||
|
client_resource = ClientRestResource(self)
|
||||||
if res["compress"]:
|
if res["compress"]:
|
||||||
client_v1 = gz_wrap(self.get_resource_for_client())
|
client_resource = gz_wrap(client_resource)
|
||||||
client_v2 = gz_wrap(self.get_resource_for_client_v2_alpha())
|
|
||||||
else:
|
|
||||||
client_v1 = self.get_resource_for_client()
|
|
||||||
client_v2 = self.get_resource_for_client_v2_alpha()
|
|
||||||
|
|
||||||
resources.update({
|
resources.update({
|
||||||
CLIENT_PREFIX: client_v1,
|
"/_matrix/client/api/v1": client_resource,
|
||||||
CLIENT_V2_ALPHA_PREFIX: client_v2,
|
"/_matrix/client/r0": client_resource,
|
||||||
|
"/_matrix/client/unstable": client_resource,
|
||||||
|
"/_matrix/client/v2_alpha": client_resource,
|
||||||
|
"/_matrix/client/versions": client_resource,
|
||||||
})
|
})
|
||||||
|
|
||||||
if name == "federation":
|
if name == "federation":
|
||||||
resources.update({
|
resources.update({
|
||||||
FEDERATION_PREFIX: self.get_resource_for_federation(),
|
FEDERATION_PREFIX: TransportLayerServer(self),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["static", "client"]:
|
if name in ["static", "client"]:
|
||||||
resources.update({
|
resources.update({
|
||||||
STATIC_PREFIX: self.get_resource_for_static_content(),
|
STATIC_PREFIX: File(
|
||||||
|
os.path.join(os.path.dirname(synapse.__file__), "static")
|
||||||
|
),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["media", "federation", "client"]:
|
if name in ["media", "federation", "client"]:
|
||||||
|
media_repo = MediaRepositoryResource(self)
|
||||||
resources.update({
|
resources.update({
|
||||||
MEDIA_PREFIX: self.get_resource_for_media_repository(),
|
MEDIA_PREFIX: media_repo,
|
||||||
CONTENT_REPO_PREFIX: self.get_resource_for_content_repo(),
|
LEGACY_MEDIA_PREFIX: media_repo,
|
||||||
|
CONTENT_REPO_PREFIX: ContentRepoResource(
|
||||||
|
self, self.config.uploads_path
|
||||||
|
),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["keys", "federation"]:
|
if name in ["keys", "federation"]:
|
||||||
resources.update({
|
resources.update({
|
||||||
SERVER_KEY_PREFIX: self.get_resource_for_server_key(),
|
SERVER_KEY_PREFIX: LocalKey(self),
|
||||||
SERVER_KEY_V2_PREFIX: self.get_resource_for_server_key_v2(),
|
SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name == "webclient":
|
if name == "webclient":
|
||||||
resources[WEB_CLIENT_PREFIX] = self.get_resource_for_web_client()
|
resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)
|
||||||
|
|
||||||
if name == "metrics" and metrics_resource:
|
if name == "metrics" and self.get_config().enable_metrics:
|
||||||
resources[METRICS_PREFIX] = metrics_resource
|
resources[METRICS_PREFIX] = MetricsResource(self)
|
||||||
|
|
||||||
root_resource = create_resource_tree(resources)
|
if name == "replication":
|
||||||
|
resources[REPLICATION_PREFIX] = ReplicationResource(self)
|
||||||
|
|
||||||
|
if WEB_CLIENT_PREFIX in resources:
|
||||||
|
root_resource = RootRedirect(WEB_CLIENT_PREFIX)
|
||||||
|
else:
|
||||||
|
root_resource = Resource()
|
||||||
|
|
||||||
|
root_resource = create_resource_tree(resources, root_resource)
|
||||||
if tls:
|
if tls:
|
||||||
reactor.listenSSL(
|
reactor.listenSSL(
|
||||||
port,
|
port,
|
||||||
@@ -252,13 +204,13 @@ class SynapseHomeServer(HomeServer):
|
|||||||
if listener["type"] == "http":
|
if listener["type"] == "http":
|
||||||
self._listener_http(config, listener)
|
self._listener_http(config, listener)
|
||||||
elif listener["type"] == "manhole":
|
elif listener["type"] == "manhole":
|
||||||
f = twisted.manhole.telnet.ShellFactory()
|
|
||||||
f.username = "matrix"
|
|
||||||
f.password = "rabbithole"
|
|
||||||
f.namespace['hs'] = self
|
|
||||||
reactor.listenTCP(
|
reactor.listenTCP(
|
||||||
listener["port"],
|
listener["port"],
|
||||||
f,
|
manhole(
|
||||||
|
username="matrix",
|
||||||
|
password="rabbithole",
|
||||||
|
globals={"hs": self},
|
||||||
|
),
|
||||||
interface=listener.get("bind_address", '127.0.0.1')
|
interface=listener.get("bind_address", '127.0.0.1')
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
@@ -280,6 +232,19 @@ class SynapseHomeServer(HomeServer):
|
|||||||
except IncorrectDatabaseSetup as e:
|
except IncorrectDatabaseSetup as e:
|
||||||
quit_with_error(e.message)
|
quit_with_error(e.message)
|
||||||
|
|
||||||
|
def get_db_conn(self, run_new_connection=True):
|
||||||
|
# Any param beginning with cp_ is a parameter for adbapi, and should
|
||||||
|
# not be passed to the database engine.
|
||||||
|
db_params = {
|
||||||
|
k: v for k, v in self.db_config.get("args", {}).items()
|
||||||
|
if not k.startswith("cp_")
|
||||||
|
}
|
||||||
|
db_conn = self.database_engine.module.connect(**db_params)
|
||||||
|
|
||||||
|
if run_new_connection:
|
||||||
|
self.database_engine.on_new_connection(db_conn)
|
||||||
|
return db_conn
|
||||||
|
|
||||||
|
|
||||||
def quit_with_error(error_string):
|
def quit_with_error(error_string):
|
||||||
message_lines = error_string.split("\n")
|
message_lines = error_string.split("\n")
|
||||||
@@ -291,83 +256,6 @@ def quit_with_error(error_string):
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
def get_version_string():
|
|
||||||
try:
|
|
||||||
null = open(os.devnull, 'w')
|
|
||||||
cwd = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
try:
|
|
||||||
git_branch = subprocess.check_output(
|
|
||||||
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
|
|
||||||
stderr=null,
|
|
||||||
cwd=cwd,
|
|
||||||
).strip()
|
|
||||||
git_branch = "b=" + git_branch
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
git_branch = ""
|
|
||||||
|
|
||||||
try:
|
|
||||||
git_tag = subprocess.check_output(
|
|
||||||
['git', 'describe', '--exact-match'],
|
|
||||||
stderr=null,
|
|
||||||
cwd=cwd,
|
|
||||||
).strip()
|
|
||||||
git_tag = "t=" + git_tag
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
git_tag = ""
|
|
||||||
|
|
||||||
try:
|
|
||||||
git_commit = subprocess.check_output(
|
|
||||||
['git', 'rev-parse', '--short', 'HEAD'],
|
|
||||||
stderr=null,
|
|
||||||
cwd=cwd,
|
|
||||||
).strip()
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
git_commit = ""
|
|
||||||
|
|
||||||
try:
|
|
||||||
dirty_string = "-this_is_a_dirty_checkout"
|
|
||||||
is_dirty = subprocess.check_output(
|
|
||||||
['git', 'describe', '--dirty=' + dirty_string],
|
|
||||||
stderr=null,
|
|
||||||
cwd=cwd,
|
|
||||||
).strip().endswith(dirty_string)
|
|
||||||
|
|
||||||
git_dirty = "dirty" if is_dirty else ""
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
git_dirty = ""
|
|
||||||
|
|
||||||
if git_branch or git_tag or git_commit or git_dirty:
|
|
||||||
git_version = ",".join(
|
|
||||||
s for s in
|
|
||||||
(git_branch, git_tag, git_commit, git_dirty,)
|
|
||||||
if s
|
|
||||||
)
|
|
||||||
|
|
||||||
return (
|
|
||||||
"Synapse/%s (%s)" % (
|
|
||||||
synapse.__version__, git_version,
|
|
||||||
)
|
|
||||||
).encode("ascii")
|
|
||||||
except Exception as e:
|
|
||||||
logger.info("Failed to check for git repository: %s", e)
|
|
||||||
|
|
||||||
return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
|
||||||
|
|
||||||
|
|
||||||
def change_resource_limit(soft_file_no):
|
|
||||||
try:
|
|
||||||
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
|
|
||||||
|
|
||||||
if not soft_file_no:
|
|
||||||
soft_file_no = hard
|
|
||||||
|
|
||||||
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
|
|
||||||
|
|
||||||
logger.info("Set file limit to: %d", soft_file_no)
|
|
||||||
except (ValueError, resource.error) as e:
|
|
||||||
logger.warn("Failed to set file limit: %s", e)
|
|
||||||
|
|
||||||
|
|
||||||
def setup(config_options):
|
def setup(config_options):
|
||||||
"""
|
"""
|
||||||
Args:
|
Args:
|
||||||
@@ -377,18 +265,26 @@ def setup(config_options):
|
|||||||
Returns:
|
Returns:
|
||||||
HomeServer
|
HomeServer
|
||||||
"""
|
"""
|
||||||
config = HomeServerConfig.load_config(
|
try:
|
||||||
"Synapse Homeserver",
|
config = HomeServerConfig.load_or_generate_config(
|
||||||
config_options,
|
"Synapse Homeserver",
|
||||||
generate_section="Homeserver"
|
config_options,
|
||||||
)
|
)
|
||||||
|
except ConfigError as e:
|
||||||
|
sys.stderr.write("\n" + e.message + "\n")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not config:
|
||||||
|
# If a config isn't returned, and an exception isn't raised, we're just
|
||||||
|
# generating config files and shouldn't try to continue.
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
config.setup_logging()
|
config.setup_logging()
|
||||||
|
|
||||||
# check any extra requirements we have now we have a config
|
# check any extra requirements we have now we have a config
|
||||||
check_requirements(config)
|
check_requirements(config)
|
||||||
|
|
||||||
version_string = get_version_string()
|
version_string = get_version_string("Synapse", synapse)
|
||||||
|
|
||||||
logger.info("Server hostname: %s", config.server_name)
|
logger.info("Server hostname: %s", config.server_name)
|
||||||
logger.info("Server version: %s", version_string)
|
logger.info("Server version: %s", version_string)
|
||||||
@@ -397,7 +293,7 @@ def setup(config_options):
|
|||||||
|
|
||||||
tls_server_context_factory = context_factory.ServerContextFactory(config)
|
tls_server_context_factory = context_factory.ServerContextFactory(config)
|
||||||
|
|
||||||
database_engine = create_engine(config.database_config["name"])
|
database_engine = create_engine(config.database_config)
|
||||||
config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
|
config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
|
||||||
|
|
||||||
hs = SynapseHomeServer(
|
hs = SynapseHomeServer(
|
||||||
@@ -405,7 +301,6 @@ def setup(config_options):
|
|||||||
db_config=config.database_config,
|
db_config=config.database_config,
|
||||||
tls_server_context_factory=tls_server_context_factory,
|
tls_server_context_factory=tls_server_context_factory,
|
||||||
config=config,
|
config=config,
|
||||||
content_addr=config.content_addr,
|
|
||||||
version_string=version_string,
|
version_string=version_string,
|
||||||
database_engine=database_engine,
|
database_engine=database_engine,
|
||||||
)
|
)
|
||||||
@@ -413,14 +308,10 @@ def setup(config_options):
|
|||||||
logger.info("Preparing database: %s...", config.database_config['name'])
|
logger.info("Preparing database: %s...", config.database_config['name'])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
db_conn = database_engine.module.connect(
|
db_conn = hs.get_db_conn(run_new_connection=False)
|
||||||
**{
|
prepare_database(db_conn, database_engine, config=config)
|
||||||
k: v for k, v in config.database_config.get("args", {}).items()
|
database_engine.on_new_connection(db_conn)
|
||||||
if not k.startswith("cp_")
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
database_engine.prepare_database(db_conn)
|
|
||||||
hs.run_startup_checks(db_conn, database_engine)
|
hs.run_startup_checks(db_conn, database_engine)
|
||||||
|
|
||||||
db_conn.commit()
|
db_conn.commit()
|
||||||
@@ -434,12 +325,17 @@ def setup(config_options):
|
|||||||
|
|
||||||
logger.info("Database prepared in %s.", config.database_config['name'])
|
logger.info("Database prepared in %s.", config.database_config['name'])
|
||||||
|
|
||||||
|
hs.setup()
|
||||||
hs.start_listening()
|
hs.start_listening()
|
||||||
|
|
||||||
hs.get_pusherpool().start()
|
def start():
|
||||||
hs.get_state_handler().start_caching()
|
hs.get_pusherpool().start()
|
||||||
hs.get_datastore().start_profiling()
|
hs.get_state_handler().start_caching()
|
||||||
hs.get_replication_layer().start_get_pdu_cache()
|
hs.get_datastore().start_profiling()
|
||||||
|
hs.get_datastore().start_doing_background_updates()
|
||||||
|
hs.get_replication_layer().start_get_pdu_cache()
|
||||||
|
|
||||||
|
reactor.callWhenRunning(start)
|
||||||
|
|
||||||
return hs
|
return hs
|
||||||
|
|
||||||
@@ -454,201 +350,13 @@ class SynapseService(service.Service):
|
|||||||
def startService(self):
|
def startService(self):
|
||||||
hs = setup(self.config)
|
hs = setup(self.config)
|
||||||
change_resource_limit(hs.config.soft_file_limit)
|
change_resource_limit(hs.config.soft_file_limit)
|
||||||
|
if hs.config.gc_thresholds:
|
||||||
|
gc.set_threshold(*hs.config.gc_thresholds)
|
||||||
|
|
||||||
def stopService(self):
|
def stopService(self):
|
||||||
return self._port.stopListening()
|
return self._port.stopListening()
|
||||||
|
|
||||||
|
|
||||||
class SynapseRequest(Request):
|
|
||||||
def __init__(self, site, *args, **kw):
|
|
||||||
Request.__init__(self, *args, **kw)
|
|
||||||
self.site = site
|
|
||||||
self.authenticated_entity = None
|
|
||||||
self.start_time = 0
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
# We overwrite this so that we don't log ``access_token``
|
|
||||||
return '<%s at 0x%x method=%s uri=%s clientproto=%s site=%s>' % (
|
|
||||||
self.__class__.__name__,
|
|
||||||
id(self),
|
|
||||||
self.method,
|
|
||||||
self.get_redacted_uri(),
|
|
||||||
self.clientproto,
|
|
||||||
self.site.site_tag,
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_redacted_uri(self):
|
|
||||||
return re.sub(
|
|
||||||
r'(\?.*access_token=)[^&]*(.*)$',
|
|
||||||
r'\1<redacted>\2',
|
|
||||||
self.uri
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_user_agent(self):
|
|
||||||
return self.requestHeaders.getRawHeaders("User-Agent", [None])[-1]
|
|
||||||
|
|
||||||
def started_processing(self):
|
|
||||||
self.site.access_logger.info(
|
|
||||||
"%s - %s - Received request: %s %s",
|
|
||||||
self.getClientIP(),
|
|
||||||
self.site.site_tag,
|
|
||||||
self.method,
|
|
||||||
self.get_redacted_uri()
|
|
||||||
)
|
|
||||||
self.start_time = int(time.time() * 1000)
|
|
||||||
|
|
||||||
def finished_processing(self):
|
|
||||||
self.site.access_logger.info(
|
|
||||||
"%s - %s - {%s}"
|
|
||||||
" Processed request: %dms %sB %s \"%s %s %s\" \"%s\"",
|
|
||||||
self.getClientIP(),
|
|
||||||
self.site.site_tag,
|
|
||||||
self.authenticated_entity,
|
|
||||||
int(time.time() * 1000) - self.start_time,
|
|
||||||
self.sentLength,
|
|
||||||
self.code,
|
|
||||||
self.method,
|
|
||||||
self.get_redacted_uri(),
|
|
||||||
self.clientproto,
|
|
||||||
self.get_user_agent(),
|
|
||||||
)
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def processing(self):
|
|
||||||
self.started_processing()
|
|
||||||
yield
|
|
||||||
self.finished_processing()
|
|
||||||
|
|
||||||
|
|
||||||
class XForwardedForRequest(SynapseRequest):
|
|
||||||
def __init__(self, *args, **kw):
|
|
||||||
SynapseRequest.__init__(self, *args, **kw)
|
|
||||||
|
|
||||||
"""
|
|
||||||
Add a layer on top of another request that only uses the value of an
|
|
||||||
X-Forwarded-For header as the result of C{getClientIP}.
|
|
||||||
"""
|
|
||||||
def getClientIP(self):
|
|
||||||
"""
|
|
||||||
@return: The client address (the first address) in the value of the
|
|
||||||
I{X-Forwarded-For header}. If the header is not present, return
|
|
||||||
C{b"-"}.
|
|
||||||
"""
|
|
||||||
return self.requestHeaders.getRawHeaders(
|
|
||||||
b"x-forwarded-for", [b"-"])[0].split(b",")[0].strip()
|
|
||||||
|
|
||||||
|
|
||||||
class SynapseRequestFactory(object):
|
|
||||||
def __init__(self, site, x_forwarded_for):
|
|
||||||
self.site = site
|
|
||||||
self.x_forwarded_for = x_forwarded_for
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
|
||||||
if self.x_forwarded_for:
|
|
||||||
return XForwardedForRequest(self.site, *args, **kwargs)
|
|
||||||
else:
|
|
||||||
return SynapseRequest(self.site, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class SynapseSite(Site):
|
|
||||||
"""
|
|
||||||
Subclass of a twisted http Site that does access logging with python's
|
|
||||||
standard logging
|
|
||||||
"""
|
|
||||||
def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs):
|
|
||||||
Site.__init__(self, resource, *args, **kwargs)
|
|
||||||
|
|
||||||
self.site_tag = site_tag
|
|
||||||
|
|
||||||
proxied = config.get("x_forwarded", False)
|
|
||||||
self.requestFactory = SynapseRequestFactory(self, proxied)
|
|
||||||
self.access_logger = logging.getLogger(logger_name)
|
|
||||||
|
|
||||||
def log(self, request):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def create_resource_tree(desired_tree, redirect_root_to_web_client=True):
|
|
||||||
"""Create the resource tree for this Home Server.
|
|
||||||
|
|
||||||
This in unduly complicated because Twisted does not support putting
|
|
||||||
child resources more than 1 level deep at a time.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
web_client (bool): True to enable the web client.
|
|
||||||
redirect_root_to_web_client (bool): True to redirect '/' to the
|
|
||||||
location of the web client. This does nothing if web_client is not
|
|
||||||
True.
|
|
||||||
"""
|
|
||||||
if redirect_root_to_web_client and WEB_CLIENT_PREFIX in desired_tree:
|
|
||||||
root_resource = RootRedirect(WEB_CLIENT_PREFIX)
|
|
||||||
else:
|
|
||||||
root_resource = Resource()
|
|
||||||
|
|
||||||
# ideally we'd just use getChild and putChild but getChild doesn't work
|
|
||||||
# unless you give it a Request object IN ADDITION to the name :/ So
|
|
||||||
# instead, we'll store a copy of this mapping so we can actually add
|
|
||||||
# extra resources to existing nodes. See self._resource_id for the key.
|
|
||||||
resource_mappings = {}
|
|
||||||
for full_path, res in desired_tree.items():
|
|
||||||
logger.info("Attaching %s to path %s", res, full_path)
|
|
||||||
last_resource = root_resource
|
|
||||||
for path_seg in full_path.split('/')[1:-1]:
|
|
||||||
if path_seg not in last_resource.listNames():
|
|
||||||
# resource doesn't exist, so make a "dummy resource"
|
|
||||||
child_resource = Resource()
|
|
||||||
last_resource.putChild(path_seg, child_resource)
|
|
||||||
res_id = _resource_id(last_resource, path_seg)
|
|
||||||
resource_mappings[res_id] = child_resource
|
|
||||||
last_resource = child_resource
|
|
||||||
else:
|
|
||||||
# we have an existing Resource, use that instead.
|
|
||||||
res_id = _resource_id(last_resource, path_seg)
|
|
||||||
last_resource = resource_mappings[res_id]
|
|
||||||
|
|
||||||
# ===========================
|
|
||||||
# now attach the actual desired resource
|
|
||||||
last_path_seg = full_path.split('/')[-1]
|
|
||||||
|
|
||||||
# if there is already a resource here, thieve its children and
|
|
||||||
# replace it
|
|
||||||
res_id = _resource_id(last_resource, last_path_seg)
|
|
||||||
if res_id in resource_mappings:
|
|
||||||
# there is a dummy resource at this path already, which needs
|
|
||||||
# to be replaced with the desired resource.
|
|
||||||
existing_dummy_resource = resource_mappings[res_id]
|
|
||||||
for child_name in existing_dummy_resource.listNames():
|
|
||||||
child_res_id = _resource_id(
|
|
||||||
existing_dummy_resource, child_name
|
|
||||||
)
|
|
||||||
child_resource = resource_mappings[child_res_id]
|
|
||||||
# steal the children
|
|
||||||
res.putChild(child_name, child_resource)
|
|
||||||
|
|
||||||
# finally, insert the desired resource in the right place
|
|
||||||
last_resource.putChild(last_path_seg, res)
|
|
||||||
res_id = _resource_id(last_resource, last_path_seg)
|
|
||||||
resource_mappings[res_id] = res
|
|
||||||
|
|
||||||
return root_resource
|
|
||||||
|
|
||||||
|
|
||||||
def _resource_id(resource, path_seg):
|
|
||||||
"""Construct an arbitrary resource ID so you can retrieve the mapping
|
|
||||||
later.
|
|
||||||
|
|
||||||
If you want to represent resource A putChild resource B with path C,
|
|
||||||
the mapping should looks like _resource_id(A,C) = B.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
resource (Resource): The *parent* Resource
|
|
||||||
path_seg (str): The name of the child Resource to be attached.
|
|
||||||
Returns:
|
|
||||||
str: A unique string which can be a key to the child Resource.
|
|
||||||
"""
|
|
||||||
return "%s-%s" % (resource, path_seg)
|
|
||||||
|
|
||||||
|
|
||||||
def run(hs):
|
def run(hs):
|
||||||
PROFILE_SYNAPSE = False
|
PROFILE_SYNAPSE = False
|
||||||
if PROFILE_SYNAPSE:
|
if PROFILE_SYNAPSE:
|
||||||
@@ -676,6 +384,7 @@ def run(hs):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def phone_stats_home():
|
def phone_stats_home():
|
||||||
|
logger.info("Gathering stats for reporting")
|
||||||
now = int(hs.get_clock().time())
|
now = int(hs.get_clock().time())
|
||||||
uptime = int(now - start_time)
|
uptime = int(now - start_time)
|
||||||
if uptime < 0:
|
if uptime < 0:
|
||||||
@@ -687,8 +396,8 @@ def run(hs):
|
|||||||
stats["uptime_seconds"] = uptime
|
stats["uptime_seconds"] = uptime
|
||||||
stats["total_users"] = yield hs.get_datastore().count_all_users()
|
stats["total_users"] = yield hs.get_datastore().count_all_users()
|
||||||
|
|
||||||
all_rooms = yield hs.get_datastore().get_rooms(False)
|
room_count = yield hs.get_datastore().get_room_count()
|
||||||
stats["total_room_count"] = len(all_rooms)
|
stats["total_room_count"] = room_count
|
||||||
|
|
||||||
stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
|
stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
|
||||||
daily_messages = yield hs.get_datastore().count_daily_messages()
|
daily_messages = yield hs.get_datastore().count_daily_messages()
|
||||||
@@ -706,17 +415,22 @@ def run(hs):
|
|||||||
|
|
||||||
if hs.config.report_stats:
|
if hs.config.report_stats:
|
||||||
phone_home_task = task.LoopingCall(phone_stats_home)
|
phone_home_task = task.LoopingCall(phone_stats_home)
|
||||||
|
logger.info("Scheduling stats reporting for 24 hour intervals")
|
||||||
phone_home_task.start(60 * 60 * 24, now=False)
|
phone_home_task.start(60 * 60 * 24, now=False)
|
||||||
|
|
||||||
def in_thread():
|
def in_thread():
|
||||||
|
# Uncomment to enable tracing of log context changes.
|
||||||
|
# sys.settrace(logcontext_tracer)
|
||||||
with LoggingContext("run"):
|
with LoggingContext("run"):
|
||||||
change_resource_limit(hs.config.soft_file_limit)
|
change_resource_limit(hs.config.soft_file_limit)
|
||||||
|
if hs.config.gc_thresholds:
|
||||||
|
gc.set_threshold(*hs.config.gc_thresholds)
|
||||||
reactor.run()
|
reactor.run()
|
||||||
|
|
||||||
if hs.config.daemonize:
|
if hs.config.daemonize:
|
||||||
|
|
||||||
if hs.config.print_pidfile:
|
if hs.config.print_pidfile:
|
||||||
print hs.config.pid_file
|
print (hs.config.pid_file)
|
||||||
|
|
||||||
daemon = Daemonize(
|
daemon = Daemonize(
|
||||||
app="synapse-homeserver",
|
app="synapse-homeserver",
|
||||||
|
|||||||
314
synapse/app/pusher.py
Normal file
314
synapse/app/pusher.py
Normal file
@@ -0,0 +1,314 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import synapse
|
||||||
|
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
from synapse.config._base import ConfigError
|
||||||
|
from synapse.config.logger import setup_logging
|
||||||
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
|
from synapse.http.site import SynapseSite
|
||||||
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
||||||
|
from synapse.storage.roommember import RoomMemberStore
|
||||||
|
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||||
|
from synapse.replication.slave.storage.pushers import SlavedPusherStore
|
||||||
|
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||||
|
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||||
|
from synapse.storage.engines import create_engine
|
||||||
|
from synapse.storage import DataStore
|
||||||
|
from synapse.util.async import sleep
|
||||||
|
from synapse.util.httpresourcetree import create_resource_tree
|
||||||
|
from synapse.util.logcontext import LoggingContext, preserve_fn
|
||||||
|
from synapse.util.manhole import manhole
|
||||||
|
from synapse.util.rlimit import change_resource_limit
|
||||||
|
from synapse.util.versionstring import get_version_string
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer
|
||||||
|
from twisted.web.resource import Resource
|
||||||
|
|
||||||
|
from daemonize import Daemonize
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import gc
|
||||||
|
|
||||||
|
logger = logging.getLogger("synapse.app.pusher")
|
||||||
|
|
||||||
|
|
||||||
|
class PusherSlaveStore(
|
||||||
|
SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore,
|
||||||
|
SlavedAccountDataStore
|
||||||
|
):
|
||||||
|
update_pusher_last_stream_ordering_and_success = (
|
||||||
|
DataStore.update_pusher_last_stream_ordering_and_success.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
update_pusher_failing_since = (
|
||||||
|
DataStore.update_pusher_failing_since.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
update_pusher_last_stream_ordering = (
|
||||||
|
DataStore.update_pusher_last_stream_ordering.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
get_throttle_params_by_room = (
|
||||||
|
DataStore.get_throttle_params_by_room.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
set_throttle_params = (
|
||||||
|
DataStore.set_throttle_params.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
get_time_of_last_push_action_before = (
|
||||||
|
DataStore.get_time_of_last_push_action_before.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
get_profile_displayname = (
|
||||||
|
DataStore.get_profile_displayname.__func__
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX: This is a bit broken because we don't persist forgotten rooms
|
||||||
|
# in a way that they can be streamed. This means that we don't have a
|
||||||
|
# way to invalidate the forgotten rooms cache correctly.
|
||||||
|
# For now we expire the cache every 10 minutes.
|
||||||
|
BROKEN_CACHE_EXPIRY_MS = 60 * 60 * 1000
|
||||||
|
who_forgot_in_room = (
|
||||||
|
RoomMemberStore.__dict__["who_forgot_in_room"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PusherServer(HomeServer):
|
||||||
|
|
||||||
|
def get_db_conn(self, run_new_connection=True):
|
||||||
|
# Any param beginning with cp_ is a parameter for adbapi, and should
|
||||||
|
# not be passed to the database engine.
|
||||||
|
db_params = {
|
||||||
|
k: v for k, v in self.db_config.get("args", {}).items()
|
||||||
|
if not k.startswith("cp_")
|
||||||
|
}
|
||||||
|
db_conn = self.database_engine.module.connect(**db_params)
|
||||||
|
|
||||||
|
if run_new_connection:
|
||||||
|
self.database_engine.on_new_connection(db_conn)
|
||||||
|
return db_conn
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
logger.info("Setting up.")
|
||||||
|
self.datastore = PusherSlaveStore(self.get_db_conn(), self)
|
||||||
|
logger.info("Finished setting up.")
|
||||||
|
|
||||||
|
def remove_pusher(self, app_id, push_key, user_id):
|
||||||
|
http_client = self.get_simple_http_client()
|
||||||
|
replication_url = self.config.worker_replication_url
|
||||||
|
url = replication_url + "/remove_pushers"
|
||||||
|
return http_client.post_json_get_json(url, {
|
||||||
|
"remove": [{
|
||||||
|
"app_id": app_id,
|
||||||
|
"push_key": push_key,
|
||||||
|
"user_id": user_id,
|
||||||
|
}]
|
||||||
|
})
|
||||||
|
|
||||||
|
def _listen_http(self, listener_config):
|
||||||
|
port = listener_config["port"]
|
||||||
|
bind_address = listener_config.get("bind_address", "")
|
||||||
|
site_tag = listener_config.get("tag", port)
|
||||||
|
resources = {}
|
||||||
|
for res in listener_config["resources"]:
|
||||||
|
for name in res["names"]:
|
||||||
|
if name == "metrics":
|
||||||
|
resources[METRICS_PREFIX] = MetricsResource(self)
|
||||||
|
|
||||||
|
root_resource = create_resource_tree(resources, Resource())
|
||||||
|
reactor.listenTCP(
|
||||||
|
port,
|
||||||
|
SynapseSite(
|
||||||
|
"synapse.access.http.%s" % (site_tag,),
|
||||||
|
site_tag,
|
||||||
|
listener_config,
|
||||||
|
root_resource,
|
||||||
|
),
|
||||||
|
interface=bind_address
|
||||||
|
)
|
||||||
|
logger.info("Synapse pusher now listening on port %d", port)
|
||||||
|
|
||||||
|
def start_listening(self, listeners):
|
||||||
|
for listener in listeners:
|
||||||
|
if listener["type"] == "http":
|
||||||
|
self._listen_http(listener)
|
||||||
|
elif listener["type"] == "manhole":
|
||||||
|
reactor.listenTCP(
|
||||||
|
listener["port"],
|
||||||
|
manhole(
|
||||||
|
username="matrix",
|
||||||
|
password="rabbithole",
|
||||||
|
globals={"hs": self},
|
||||||
|
),
|
||||||
|
interface=listener.get("bind_address", '127.0.0.1')
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warn("Unrecognized listener type: %s", listener["type"])
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def replicate(self):
|
||||||
|
http_client = self.get_simple_http_client()
|
||||||
|
store = self.get_datastore()
|
||||||
|
replication_url = self.config.worker_replication_url
|
||||||
|
pusher_pool = self.get_pusherpool()
|
||||||
|
clock = self.get_clock()
|
||||||
|
|
||||||
|
def stop_pusher(user_id, app_id, pushkey):
|
||||||
|
key = "%s:%s" % (app_id, pushkey)
|
||||||
|
pushers_for_user = pusher_pool.pushers.get(user_id, {})
|
||||||
|
pusher = pushers_for_user.pop(key, None)
|
||||||
|
if pusher is None:
|
||||||
|
return
|
||||||
|
logger.info("Stopping pusher %r / %r", user_id, key)
|
||||||
|
pusher.on_stop()
|
||||||
|
|
||||||
|
def start_pusher(user_id, app_id, pushkey):
|
||||||
|
key = "%s:%s" % (app_id, pushkey)
|
||||||
|
logger.info("Starting pusher %r / %r", user_id, key)
|
||||||
|
return pusher_pool._refresh_pusher(app_id, pushkey, user_id)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def poke_pushers(results):
|
||||||
|
pushers_rows = set(
|
||||||
|
map(tuple, results.get("pushers", {}).get("rows", []))
|
||||||
|
)
|
||||||
|
deleted_pushers_rows = set(
|
||||||
|
map(tuple, results.get("deleted_pushers", {}).get("rows", []))
|
||||||
|
)
|
||||||
|
for row in sorted(pushers_rows | deleted_pushers_rows):
|
||||||
|
if row in deleted_pushers_rows:
|
||||||
|
user_id, app_id, pushkey = row[1:4]
|
||||||
|
stop_pusher(user_id, app_id, pushkey)
|
||||||
|
elif row in pushers_rows:
|
||||||
|
user_id = row[1]
|
||||||
|
app_id = row[5]
|
||||||
|
pushkey = row[8]
|
||||||
|
yield start_pusher(user_id, app_id, pushkey)
|
||||||
|
|
||||||
|
stream = results.get("events")
|
||||||
|
if stream:
|
||||||
|
min_stream_id = stream["rows"][0][0]
|
||||||
|
max_stream_id = stream["position"]
|
||||||
|
preserve_fn(pusher_pool.on_new_notifications)(
|
||||||
|
min_stream_id, max_stream_id
|
||||||
|
)
|
||||||
|
|
||||||
|
stream = results.get("receipts")
|
||||||
|
if stream:
|
||||||
|
rows = stream["rows"]
|
||||||
|
affected_room_ids = set(row[1] for row in rows)
|
||||||
|
min_stream_id = rows[0][0]
|
||||||
|
max_stream_id = stream["position"]
|
||||||
|
preserve_fn(pusher_pool.on_new_receipts)(
|
||||||
|
min_stream_id, max_stream_id, affected_room_ids
|
||||||
|
)
|
||||||
|
|
||||||
|
def expire_broken_caches():
|
||||||
|
store.who_forgot_in_room.invalidate_all()
|
||||||
|
|
||||||
|
next_expire_broken_caches_ms = 0
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
args = store.stream_positions()
|
||||||
|
args["timeout"] = 30000
|
||||||
|
result = yield http_client.get_json(replication_url, args=args)
|
||||||
|
now_ms = clock.time_msec()
|
||||||
|
if now_ms > next_expire_broken_caches_ms:
|
||||||
|
expire_broken_caches()
|
||||||
|
next_expire_broken_caches_ms = (
|
||||||
|
now_ms + store.BROKEN_CACHE_EXPIRY_MS
|
||||||
|
)
|
||||||
|
yield store.process_replication(result)
|
||||||
|
poke_pushers(result)
|
||||||
|
except:
|
||||||
|
logger.exception("Error replicating from %r", replication_url)
|
||||||
|
yield sleep(30)
|
||||||
|
|
||||||
|
|
||||||
|
def start(config_options):
|
||||||
|
try:
|
||||||
|
config = HomeServerConfig.load_config(
|
||||||
|
"Synapse pusher", config_options
|
||||||
|
)
|
||||||
|
except ConfigError as e:
|
||||||
|
sys.stderr.write("\n" + e.message + "\n")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
assert config.worker_app == "synapse.app.pusher"
|
||||||
|
|
||||||
|
setup_logging(config.worker_log_config, config.worker_log_file)
|
||||||
|
|
||||||
|
if config.start_pushers:
|
||||||
|
sys.stderr.write(
|
||||||
|
"\nThe pushers must be disabled in the main synapse process"
|
||||||
|
"\nbefore they can be run in a separate worker."
|
||||||
|
"\nPlease add ``start_pushers: false`` to the main config"
|
||||||
|
"\n"
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Force the pushers to start since they will be disabled in the main config
|
||||||
|
config.start_pushers = True
|
||||||
|
|
||||||
|
database_engine = create_engine(config.database_config)
|
||||||
|
|
||||||
|
ps = PusherServer(
|
||||||
|
config.server_name,
|
||||||
|
db_config=config.database_config,
|
||||||
|
config=config,
|
||||||
|
version_string=get_version_string("Synapse", synapse),
|
||||||
|
database_engine=database_engine,
|
||||||
|
)
|
||||||
|
|
||||||
|
ps.setup()
|
||||||
|
ps.start_listening(config.worker_listeners)
|
||||||
|
|
||||||
|
def run():
|
||||||
|
with LoggingContext("run"):
|
||||||
|
logger.info("Running")
|
||||||
|
change_resource_limit(config.soft_file_limit)
|
||||||
|
if config.gc_thresholds:
|
||||||
|
gc.set_threshold(*config.gc_thresholds)
|
||||||
|
reactor.run()
|
||||||
|
|
||||||
|
def start():
|
||||||
|
ps.replicate()
|
||||||
|
ps.get_pusherpool().start()
|
||||||
|
ps.get_datastore().start_profiling()
|
||||||
|
|
||||||
|
reactor.callWhenRunning(start)
|
||||||
|
|
||||||
|
if config.worker_daemonize:
|
||||||
|
daemon = Daemonize(
|
||||||
|
app="synapse-pusher",
|
||||||
|
pid=config.worker_pid_file,
|
||||||
|
action=run,
|
||||||
|
auto_close_fds=False,
|
||||||
|
verbose=True,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
daemon.start()
|
||||||
|
else:
|
||||||
|
run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
with LoggingContext("main"):
|
||||||
|
ps = start(sys.argv[1:])
|
||||||
465
synapse/app/synchrotron.py
Normal file
465
synapse/app/synchrotron.py
Normal file
@@ -0,0 +1,465 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import synapse
|
||||||
|
|
||||||
|
from synapse.api.constants import EventTypes, PresenceState
|
||||||
|
from synapse.config._base import ConfigError
|
||||||
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
|
from synapse.config.logger import setup_logging
|
||||||
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.handlers.presence import PresenceHandler
|
||||||
|
from synapse.http.site import SynapseSite
|
||||||
|
from synapse.http.server import JsonResource
|
||||||
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
||||||
|
from synapse.rest.client.v2_alpha import sync
|
||||||
|
from synapse.replication.slave.storage._base import BaseSlavedStore
|
||||||
|
from synapse.replication.slave.storage.events import SlavedEventStore
|
||||||
|
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
|
||||||
|
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
|
||||||
|
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
|
||||||
|
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
|
||||||
|
from synapse.replication.slave.storage.filtering import SlavedFilteringStore
|
||||||
|
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
|
||||||
|
from synapse.replication.slave.storage.presence import SlavedPresenceStore
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
from synapse.storage.client_ips import ClientIpStore
|
||||||
|
from synapse.storage.engines import create_engine
|
||||||
|
from synapse.storage.presence import PresenceStore, UserPresenceState
|
||||||
|
from synapse.storage.roommember import RoomMemberStore
|
||||||
|
from synapse.util.async import sleep
|
||||||
|
from synapse.util.httpresourcetree import create_resource_tree
|
||||||
|
from synapse.util.logcontext import LoggingContext, preserve_fn
|
||||||
|
from synapse.util.manhole import manhole
|
||||||
|
from synapse.util.rlimit import change_resource_limit
|
||||||
|
from synapse.util.stringutils import random_string
|
||||||
|
from synapse.util.versionstring import get_version_string
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer
|
||||||
|
from twisted.web.resource import Resource
|
||||||
|
|
||||||
|
from daemonize import Daemonize
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import contextlib
|
||||||
|
import gc
|
||||||
|
import ujson as json
|
||||||
|
|
||||||
|
logger = logging.getLogger("synapse.app.synchrotron")
|
||||||
|
|
||||||
|
|
||||||
|
class SynchrotronSlavedStore(
|
||||||
|
SlavedPushRuleStore,
|
||||||
|
SlavedEventStore,
|
||||||
|
SlavedReceiptsStore,
|
||||||
|
SlavedAccountDataStore,
|
||||||
|
SlavedApplicationServiceStore,
|
||||||
|
SlavedRegistrationStore,
|
||||||
|
SlavedFilteringStore,
|
||||||
|
SlavedPresenceStore,
|
||||||
|
BaseSlavedStore,
|
||||||
|
ClientIpStore, # After BaseSlavedStore because the constructor is different
|
||||||
|
):
|
||||||
|
# XXX: This is a bit broken because we don't persist forgotten rooms
|
||||||
|
# in a way that they can be streamed. This means that we don't have a
|
||||||
|
# way to invalidate the forgotten rooms cache correctly.
|
||||||
|
# For now we expire the cache every 10 minutes.
|
||||||
|
BROKEN_CACHE_EXPIRY_MS = 60 * 60 * 1000
|
||||||
|
who_forgot_in_room = (
|
||||||
|
RoomMemberStore.__dict__["who_forgot_in_room"]
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX: This is a bit broken because we don't persist the accepted list in a
|
||||||
|
# way that can be replicated. This means that we don't have a way to
|
||||||
|
# invalidate the cache correctly.
|
||||||
|
get_presence_list_accepted = PresenceStore.__dict__[
|
||||||
|
"get_presence_list_accepted"
|
||||||
|
]
|
||||||
|
|
||||||
|
UPDATE_SYNCING_USERS_MS = 10 * 1000
|
||||||
|
|
||||||
|
|
||||||
|
class SynchrotronPresence(object):
|
||||||
|
def __init__(self, hs):
|
||||||
|
self.http_client = hs.get_simple_http_client()
|
||||||
|
self.store = hs.get_datastore()
|
||||||
|
self.user_to_num_current_syncs = {}
|
||||||
|
self.syncing_users_url = hs.config.worker_replication_url + "/syncing_users"
|
||||||
|
self.clock = hs.get_clock()
|
||||||
|
|
||||||
|
active_presence = self.store.take_presence_startup_info()
|
||||||
|
self.user_to_current_state = {
|
||||||
|
state.user_id: state
|
||||||
|
for state in active_presence
|
||||||
|
}
|
||||||
|
|
||||||
|
self.process_id = random_string(16)
|
||||||
|
logger.info("Presence process_id is %r", self.process_id)
|
||||||
|
|
||||||
|
self._sending_sync = False
|
||||||
|
self._need_to_send_sync = False
|
||||||
|
self.clock.looping_call(
|
||||||
|
self._send_syncing_users_regularly,
|
||||||
|
UPDATE_SYNCING_USERS_MS,
|
||||||
|
)
|
||||||
|
|
||||||
|
reactor.addSystemEventTrigger("before", "shutdown", self._on_shutdown)
|
||||||
|
|
||||||
|
def set_state(self, user, state):
|
||||||
|
# TODO Hows this supposed to work?
|
||||||
|
pass
|
||||||
|
|
||||||
|
get_states = PresenceHandler.get_states.__func__
|
||||||
|
current_state_for_users = PresenceHandler.current_state_for_users.__func__
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def user_syncing(self, user_id, affect_presence):
|
||||||
|
if affect_presence:
|
||||||
|
curr_sync = self.user_to_num_current_syncs.get(user_id, 0)
|
||||||
|
self.user_to_num_current_syncs[user_id] = curr_sync + 1
|
||||||
|
prev_states = yield self.current_state_for_users([user_id])
|
||||||
|
if prev_states[user_id].state == PresenceState.OFFLINE:
|
||||||
|
# TODO: Don't block the sync request on this HTTP hit.
|
||||||
|
yield self._send_syncing_users_now()
|
||||||
|
|
||||||
|
def _end():
|
||||||
|
# We check that the user_id is in user_to_num_current_syncs because
|
||||||
|
# user_to_num_current_syncs may have been cleared if we are
|
||||||
|
# shutting down.
|
||||||
|
if affect_presence and user_id in self.user_to_num_current_syncs:
|
||||||
|
self.user_to_num_current_syncs[user_id] -= 1
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def _user_syncing():
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
_end()
|
||||||
|
|
||||||
|
defer.returnValue(_user_syncing())
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _on_shutdown(self):
|
||||||
|
# When the synchrotron is shutdown tell the master to clear the in
|
||||||
|
# progress syncs for this process
|
||||||
|
self.user_to_num_current_syncs.clear()
|
||||||
|
yield self._send_syncing_users_now()
|
||||||
|
|
||||||
|
def _send_syncing_users_regularly(self):
|
||||||
|
# Only send an update if we aren't in the middle of sending one.
|
||||||
|
if not self._sending_sync:
|
||||||
|
preserve_fn(self._send_syncing_users_now)()
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _send_syncing_users_now(self):
|
||||||
|
if self._sending_sync:
|
||||||
|
# We don't want to race with sending another update.
|
||||||
|
# Instead we wait for that update to finish and send another
|
||||||
|
# update afterwards.
|
||||||
|
self._need_to_send_sync = True
|
||||||
|
return
|
||||||
|
|
||||||
|
# Flag that we are sending an update.
|
||||||
|
self._sending_sync = True
|
||||||
|
|
||||||
|
yield self.http_client.post_json_get_json(self.syncing_users_url, {
|
||||||
|
"process_id": self.process_id,
|
||||||
|
"syncing_users": [
|
||||||
|
user_id for user_id, count in self.user_to_num_current_syncs.items()
|
||||||
|
if count > 0
|
||||||
|
],
|
||||||
|
})
|
||||||
|
|
||||||
|
# Unset the flag as we are no longer sending an update.
|
||||||
|
self._sending_sync = False
|
||||||
|
if self._need_to_send_sync:
|
||||||
|
# If something happened while we were sending the update then
|
||||||
|
# we might need to send another update.
|
||||||
|
# TODO: Check if the update that was sent matches the current state
|
||||||
|
# as we only need to send an update if they are different.
|
||||||
|
self._need_to_send_sync = False
|
||||||
|
yield self._send_syncing_users_now()
|
||||||
|
|
||||||
|
def process_replication(self, result):
|
||||||
|
stream = result.get("presence", {"rows": []})
|
||||||
|
for row in stream["rows"]:
|
||||||
|
(
|
||||||
|
position, user_id, state, last_active_ts,
|
||||||
|
last_federation_update_ts, last_user_sync_ts, status_msg,
|
||||||
|
currently_active
|
||||||
|
) = row
|
||||||
|
self.user_to_current_state[user_id] = UserPresenceState(
|
||||||
|
user_id, state, last_active_ts,
|
||||||
|
last_federation_update_ts, last_user_sync_ts, status_msg,
|
||||||
|
currently_active
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class SynchrotronTyping(object):
|
||||||
|
def __init__(self, hs):
|
||||||
|
self._latest_room_serial = 0
|
||||||
|
self._room_serials = {}
|
||||||
|
self._room_typing = {}
|
||||||
|
|
||||||
|
def stream_positions(self):
|
||||||
|
return {"typing": self._latest_room_serial}
|
||||||
|
|
||||||
|
def process_replication(self, result):
|
||||||
|
stream = result.get("typing")
|
||||||
|
if stream:
|
||||||
|
self._latest_room_serial = int(stream["position"])
|
||||||
|
|
||||||
|
for row in stream["rows"]:
|
||||||
|
position, room_id, typing_json = row
|
||||||
|
typing = json.loads(typing_json)
|
||||||
|
self._room_serials[room_id] = position
|
||||||
|
self._room_typing[room_id] = typing
|
||||||
|
|
||||||
|
|
||||||
|
class SynchrotronApplicationService(object):
|
||||||
|
def notify_interested_services(self, event):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SynchrotronServer(HomeServer):
|
||||||
|
def get_db_conn(self, run_new_connection=True):
|
||||||
|
# Any param beginning with cp_ is a parameter for adbapi, and should
|
||||||
|
# not be passed to the database engine.
|
||||||
|
db_params = {
|
||||||
|
k: v for k, v in self.db_config.get("args", {}).items()
|
||||||
|
if not k.startswith("cp_")
|
||||||
|
}
|
||||||
|
db_conn = self.database_engine.module.connect(**db_params)
|
||||||
|
|
||||||
|
if run_new_connection:
|
||||||
|
self.database_engine.on_new_connection(db_conn)
|
||||||
|
return db_conn
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
logger.info("Setting up.")
|
||||||
|
self.datastore = SynchrotronSlavedStore(self.get_db_conn(), self)
|
||||||
|
logger.info("Finished setting up.")
|
||||||
|
|
||||||
|
def _listen_http(self, listener_config):
|
||||||
|
port = listener_config["port"]
|
||||||
|
bind_address = listener_config.get("bind_address", "")
|
||||||
|
site_tag = listener_config.get("tag", port)
|
||||||
|
resources = {}
|
||||||
|
for res in listener_config["resources"]:
|
||||||
|
for name in res["names"]:
|
||||||
|
if name == "metrics":
|
||||||
|
resources[METRICS_PREFIX] = MetricsResource(self)
|
||||||
|
elif name == "client":
|
||||||
|
resource = JsonResource(self, canonical_json=False)
|
||||||
|
sync.register_servlets(self, resource)
|
||||||
|
resources.update({
|
||||||
|
"/_matrix/client/r0": resource,
|
||||||
|
"/_matrix/client/unstable": resource,
|
||||||
|
"/_matrix/client/v2_alpha": resource,
|
||||||
|
})
|
||||||
|
|
||||||
|
root_resource = create_resource_tree(resources, Resource())
|
||||||
|
reactor.listenTCP(
|
||||||
|
port,
|
||||||
|
SynapseSite(
|
||||||
|
"synapse.access.http.%s" % (site_tag,),
|
||||||
|
site_tag,
|
||||||
|
listener_config,
|
||||||
|
root_resource,
|
||||||
|
),
|
||||||
|
interface=bind_address
|
||||||
|
)
|
||||||
|
logger.info("Synapse synchrotron now listening on port %d", port)
|
||||||
|
|
||||||
|
def start_listening(self, listeners):
|
||||||
|
for listener in listeners:
|
||||||
|
if listener["type"] == "http":
|
||||||
|
self._listen_http(listener)
|
||||||
|
elif listener["type"] == "manhole":
|
||||||
|
reactor.listenTCP(
|
||||||
|
listener["port"],
|
||||||
|
manhole(
|
||||||
|
username="matrix",
|
||||||
|
password="rabbithole",
|
||||||
|
globals={"hs": self},
|
||||||
|
),
|
||||||
|
interface=listener.get("bind_address", '127.0.0.1')
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warn("Unrecognized listener type: %s", listener["type"])
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def replicate(self):
|
||||||
|
http_client = self.get_simple_http_client()
|
||||||
|
store = self.get_datastore()
|
||||||
|
replication_url = self.config.worker_replication_url
|
||||||
|
clock = self.get_clock()
|
||||||
|
notifier = self.get_notifier()
|
||||||
|
presence_handler = self.get_presence_handler()
|
||||||
|
typing_handler = self.get_typing_handler()
|
||||||
|
|
||||||
|
def expire_broken_caches():
|
||||||
|
store.who_forgot_in_room.invalidate_all()
|
||||||
|
store.get_presence_list_accepted.invalidate_all()
|
||||||
|
|
||||||
|
def notify_from_stream(
|
||||||
|
result, stream_name, stream_key, room=None, user=None
|
||||||
|
):
|
||||||
|
stream = result.get(stream_name)
|
||||||
|
if stream:
|
||||||
|
position_index = stream["field_names"].index("position")
|
||||||
|
if room:
|
||||||
|
room_index = stream["field_names"].index(room)
|
||||||
|
if user:
|
||||||
|
user_index = stream["field_names"].index(user)
|
||||||
|
|
||||||
|
users = ()
|
||||||
|
rooms = ()
|
||||||
|
for row in stream["rows"]:
|
||||||
|
position = row[position_index]
|
||||||
|
|
||||||
|
if user:
|
||||||
|
users = (row[user_index],)
|
||||||
|
|
||||||
|
if room:
|
||||||
|
rooms = (row[room_index],)
|
||||||
|
|
||||||
|
notifier.on_new_event(
|
||||||
|
stream_key, position, users=users, rooms=rooms
|
||||||
|
)
|
||||||
|
|
||||||
|
def notify(result):
|
||||||
|
stream = result.get("events")
|
||||||
|
if stream:
|
||||||
|
max_position = stream["position"]
|
||||||
|
for row in stream["rows"]:
|
||||||
|
position = row[0]
|
||||||
|
internal = json.loads(row[1])
|
||||||
|
event_json = json.loads(row[2])
|
||||||
|
event = FrozenEvent(event_json, internal_metadata_dict=internal)
|
||||||
|
extra_users = ()
|
||||||
|
if event.type == EventTypes.Member:
|
||||||
|
extra_users = (event.state_key,)
|
||||||
|
notifier.on_new_room_event(
|
||||||
|
event, position, max_position, extra_users
|
||||||
|
)
|
||||||
|
|
||||||
|
notify_from_stream(
|
||||||
|
result, "push_rules", "push_rules_key", user="user_id"
|
||||||
|
)
|
||||||
|
notify_from_stream(
|
||||||
|
result, "user_account_data", "account_data_key", user="user_id"
|
||||||
|
)
|
||||||
|
notify_from_stream(
|
||||||
|
result, "room_account_data", "account_data_key", user="user_id"
|
||||||
|
)
|
||||||
|
notify_from_stream(
|
||||||
|
result, "tag_account_data", "account_data_key", user="user_id"
|
||||||
|
)
|
||||||
|
notify_from_stream(
|
||||||
|
result, "receipts", "receipt_key", room="room_id"
|
||||||
|
)
|
||||||
|
notify_from_stream(
|
||||||
|
result, "typing", "typing_key", room="room_id"
|
||||||
|
)
|
||||||
|
|
||||||
|
next_expire_broken_caches_ms = 0
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
args = store.stream_positions()
|
||||||
|
args.update(typing_handler.stream_positions())
|
||||||
|
args["timeout"] = 30000
|
||||||
|
result = yield http_client.get_json(replication_url, args=args)
|
||||||
|
now_ms = clock.time_msec()
|
||||||
|
if now_ms > next_expire_broken_caches_ms:
|
||||||
|
expire_broken_caches()
|
||||||
|
next_expire_broken_caches_ms = (
|
||||||
|
now_ms + store.BROKEN_CACHE_EXPIRY_MS
|
||||||
|
)
|
||||||
|
yield store.process_replication(result)
|
||||||
|
typing_handler.process_replication(result)
|
||||||
|
presence_handler.process_replication(result)
|
||||||
|
notify(result)
|
||||||
|
except:
|
||||||
|
logger.exception("Error replicating from %r", replication_url)
|
||||||
|
yield sleep(5)
|
||||||
|
|
||||||
|
def build_presence_handler(self):
|
||||||
|
return SynchrotronPresence(self)
|
||||||
|
|
||||||
|
def build_typing_handler(self):
|
||||||
|
return SynchrotronTyping(self)
|
||||||
|
|
||||||
|
|
||||||
|
def start(config_options):
|
||||||
|
try:
|
||||||
|
config = HomeServerConfig.load_config(
|
||||||
|
"Synapse synchrotron", config_options
|
||||||
|
)
|
||||||
|
except ConfigError as e:
|
||||||
|
sys.stderr.write("\n" + e.message + "\n")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
assert config.worker_app == "synapse.app.synchrotron"
|
||||||
|
|
||||||
|
setup_logging(config.worker_log_config, config.worker_log_file)
|
||||||
|
|
||||||
|
database_engine = create_engine(config.database_config)
|
||||||
|
|
||||||
|
ss = SynchrotronServer(
|
||||||
|
config.server_name,
|
||||||
|
db_config=config.database_config,
|
||||||
|
config=config,
|
||||||
|
version_string=get_version_string("Synapse", synapse),
|
||||||
|
database_engine=database_engine,
|
||||||
|
application_service_handler=SynchrotronApplicationService(),
|
||||||
|
)
|
||||||
|
|
||||||
|
ss.setup()
|
||||||
|
ss.start_listening(config.worker_listeners)
|
||||||
|
|
||||||
|
def run():
|
||||||
|
with LoggingContext("run"):
|
||||||
|
logger.info("Running")
|
||||||
|
change_resource_limit(config.soft_file_limit)
|
||||||
|
if config.gc_thresholds:
|
||||||
|
gc.set_threshold(*config.gc_thresholds)
|
||||||
|
reactor.run()
|
||||||
|
|
||||||
|
def start():
|
||||||
|
ss.get_datastore().start_profiling()
|
||||||
|
ss.replicate()
|
||||||
|
|
||||||
|
reactor.callWhenRunning(start)
|
||||||
|
|
||||||
|
if config.worker_daemonize:
|
||||||
|
daemon = Daemonize(
|
||||||
|
app="synapse-synchrotron",
|
||||||
|
pid=config.worker_pid_file,
|
||||||
|
action=run,
|
||||||
|
auto_close_fds=False,
|
||||||
|
verbose=True,
|
||||||
|
logger=logger,
|
||||||
|
)
|
||||||
|
daemon.start()
|
||||||
|
else:
|
||||||
|
run()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
with LoggingContext("main"):
|
||||||
|
start(sys.argv[1:])
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,11 +14,14 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import sys
|
import argparse
|
||||||
|
import collections
|
||||||
|
import glob
|
||||||
import os
|
import os
|
||||||
import os.path
|
import os.path
|
||||||
import subprocess
|
|
||||||
import signal
|
import signal
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
|
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
|
||||||
@@ -28,57 +31,182 @@ RED = "\x1b[1;31m"
|
|||||||
NORMAL = "\x1b[m"
|
NORMAL = "\x1b[m"
|
||||||
|
|
||||||
|
|
||||||
|
def write(message, colour=NORMAL, stream=sys.stdout):
|
||||||
|
if colour == NORMAL:
|
||||||
|
stream.write(message + "\n")
|
||||||
|
else:
|
||||||
|
stream.write(colour + message + NORMAL + "\n")
|
||||||
|
|
||||||
|
|
||||||
def start(configfile):
|
def start(configfile):
|
||||||
print "Starting ...",
|
write("Starting ...")
|
||||||
args = SYNAPSE
|
args = SYNAPSE
|
||||||
args.extend(["--daemonize", "-c", configfile])
|
args.extend(["--daemonize", "-c", configfile])
|
||||||
cwd = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
try:
|
try:
|
||||||
subprocess.check_call(args, cwd=cwd)
|
subprocess.check_call(args)
|
||||||
print GREEN + "started" + NORMAL
|
write("started synapse.app.homeserver(%r)" % (configfile,), colour=GREEN)
|
||||||
except subprocess.CalledProcessError as e:
|
except subprocess.CalledProcessError as e:
|
||||||
print (
|
write(
|
||||||
RED +
|
"error starting (exit code: %d); see above for logs" % e.returncode,
|
||||||
"error starting (exit code: %d); see above for logs" % e.returncode +
|
colour=RED,
|
||||||
NORMAL
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def stop(pidfile):
|
def start_worker(app, configfile, worker_configfile):
|
||||||
|
args = [
|
||||||
|
"python", "-B",
|
||||||
|
"-m", app,
|
||||||
|
"-c", configfile,
|
||||||
|
"-c", worker_configfile
|
||||||
|
]
|
||||||
|
|
||||||
|
try:
|
||||||
|
subprocess.check_call(args)
|
||||||
|
write("started %s(%r)" % (app, worker_configfile), colour=GREEN)
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
write(
|
||||||
|
"error starting %s(%r) (exit code: %d); see above for logs" % (
|
||||||
|
app, worker_configfile, e.returncode,
|
||||||
|
),
|
||||||
|
colour=RED,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def stop(pidfile, app):
|
||||||
if os.path.exists(pidfile):
|
if os.path.exists(pidfile):
|
||||||
pid = int(open(pidfile).read())
|
pid = int(open(pidfile).read())
|
||||||
os.kill(pid, signal.SIGTERM)
|
os.kill(pid, signal.SIGTERM)
|
||||||
print GREEN + "stopped" + NORMAL
|
write("stopped %s" % (app,), colour=GREEN)
|
||||||
|
|
||||||
|
|
||||||
|
Worker = collections.namedtuple("Worker", [
|
||||||
|
"app", "configfile", "pidfile", "cache_factor"
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
configfile = sys.argv[2] if len(sys.argv) == 3 else "homeserver.yaml"
|
|
||||||
|
|
||||||
if not os.path.exists(configfile):
|
parser = argparse.ArgumentParser()
|
||||||
sys.stderr.write(
|
|
||||||
"No config file found\n"
|
parser.add_argument(
|
||||||
"To generate a config file, run '%s -c %s --generate-config"
|
"action",
|
||||||
" --server-name=<server name>'\n" % (
|
choices=["start", "stop", "restart"],
|
||||||
" ".join(SYNAPSE), configfile
|
help="whether to start, stop or restart the synapse",
|
||||||
)
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"configfile",
|
||||||
|
nargs="?",
|
||||||
|
default="homeserver.yaml",
|
||||||
|
help="the homeserver config file, defaults to homserver.yaml",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-w", "--worker",
|
||||||
|
metavar="WORKERCONFIG",
|
||||||
|
help="start or stop a single worker",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"-a", "--all-processes",
|
||||||
|
metavar="WORKERCONFIGDIR",
|
||||||
|
help="start or stop all the workers in the given directory"
|
||||||
|
" and the main synapse process",
|
||||||
|
)
|
||||||
|
|
||||||
|
options = parser.parse_args()
|
||||||
|
|
||||||
|
if options.worker and options.all_processes:
|
||||||
|
write(
|
||||||
|
'Cannot use "--worker" with "--all-processes"',
|
||||||
|
stream=sys.stderr
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
config = yaml.load(open(configfile))
|
configfile = options.configfile
|
||||||
pidfile = config["pid_file"]
|
|
||||||
|
|
||||||
action = sys.argv[1] if sys.argv[1:] else "usage"
|
if not os.path.exists(configfile):
|
||||||
if action == "start":
|
write(
|
||||||
start(configfile)
|
"No config file found\n"
|
||||||
elif action == "stop":
|
"To generate a config file, run '%s -c %s --generate-config"
|
||||||
stop(pidfile)
|
" --server-name=<server name>'\n" % (
|
||||||
elif action == "restart":
|
" ".join(SYNAPSE), options.configfile
|
||||||
stop(pidfile)
|
),
|
||||||
start(configfile)
|
stream=sys.stderr,
|
||||||
else:
|
)
|
||||||
sys.stderr.write("Usage: %s [start|stop|restart] [configfile]\n" % (sys.argv[0],))
|
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
with open(configfile) as stream:
|
||||||
|
config = yaml.load(stream)
|
||||||
|
|
||||||
|
pidfile = config["pid_file"]
|
||||||
|
cache_factor = config.get("synctl_cache_factor")
|
||||||
|
start_stop_synapse = True
|
||||||
|
|
||||||
|
if cache_factor:
|
||||||
|
os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
|
||||||
|
|
||||||
|
worker_configfiles = []
|
||||||
|
if options.worker:
|
||||||
|
start_stop_synapse = False
|
||||||
|
worker_configfile = options.worker
|
||||||
|
if not os.path.exists(worker_configfile):
|
||||||
|
write(
|
||||||
|
"No worker config found at %r" % (worker_configfile,),
|
||||||
|
stream=sys.stderr,
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
worker_configfiles.append(worker_configfile)
|
||||||
|
|
||||||
|
if options.all_processes:
|
||||||
|
worker_configdir = options.all_processes
|
||||||
|
if not os.path.isdir(worker_configdir):
|
||||||
|
write(
|
||||||
|
"No worker config directory found at %r" % (worker_configdir,),
|
||||||
|
stream=sys.stderr,
|
||||||
|
)
|
||||||
|
sys.exit(1)
|
||||||
|
worker_configfiles.extend(sorted(glob.glob(
|
||||||
|
os.path.join(worker_configdir, "*.yaml")
|
||||||
|
)))
|
||||||
|
|
||||||
|
workers = []
|
||||||
|
for worker_configfile in worker_configfiles:
|
||||||
|
with open(worker_configfile) as stream:
|
||||||
|
worker_config = yaml.load(stream)
|
||||||
|
worker_app = worker_config["worker_app"]
|
||||||
|
worker_pidfile = worker_config["worker_pid_file"]
|
||||||
|
worker_daemonize = worker_config["worker_daemonize"]
|
||||||
|
assert worker_daemonize # TODO print something more user friendly
|
||||||
|
worker_cache_factor = worker_config.get("synctl_cache_factor")
|
||||||
|
workers.append(Worker(
|
||||||
|
worker_app, worker_configfile, worker_pidfile, worker_cache_factor,
|
||||||
|
))
|
||||||
|
|
||||||
|
action = options.action
|
||||||
|
|
||||||
|
if action == "stop" or action == "restart":
|
||||||
|
for worker in workers:
|
||||||
|
stop(worker.pidfile, worker.app)
|
||||||
|
|
||||||
|
if start_stop_synapse:
|
||||||
|
stop(pidfile, "synapse.app.homeserver")
|
||||||
|
|
||||||
|
# TODO: Wait for synapse to actually shutdown before starting it again
|
||||||
|
|
||||||
|
if action == "start" or action == "restart":
|
||||||
|
if start_stop_synapse:
|
||||||
|
start(configfile)
|
||||||
|
|
||||||
|
for worker in workers:
|
||||||
|
if worker.cache_factor:
|
||||||
|
os.environ["SYNAPSE_CACHE_FACTOR"] = str(worker.cache_factor)
|
||||||
|
|
||||||
|
start_worker(worker.app, configfile, worker.configfile)
|
||||||
|
|
||||||
|
if cache_factor:
|
||||||
|
os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
|
||||||
|
else:
|
||||||
|
os.environ.pop("SYNAPSE_CACHE_FACTOR", None)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -29,7 +29,7 @@ class ApplicationServiceApi(SimpleHttpClient):
|
|||||||
pushing.
|
pushing.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(ApplicationServiceApi, self).__init__(hs)
|
super(ApplicationServiceApi, self).__init__(hs)
|
||||||
self.clock = hs.get_clock()
|
self.clock = hs.get_clock()
|
||||||
|
|
||||||
@@ -100,11 +100,6 @@ class ApplicationServiceApi(SimpleHttpClient):
|
|||||||
logger.warning("push_bulk to %s threw exception %s", uri, ex)
|
logger.warning("push_bulk to %s threw exception %s", uri, ex)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def push(self, service, event, txn_id=None):
|
|
||||||
response = yield self.push_bulk(service, [event], txn_id)
|
|
||||||
defer.returnValue(response)
|
|
||||||
|
|
||||||
def _serialize(self, events):
|
def _serialize(self, events):
|
||||||
time_now = self.clock.time_msec()
|
time_now = self.clock.time_msec()
|
||||||
return [
|
return [
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -56,22 +56,22 @@ import logging
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class AppServiceScheduler(object):
|
class ApplicationServiceScheduler(object):
|
||||||
""" Public facing API for this module. Does the required DI to tie the
|
""" Public facing API for this module. Does the required DI to tie the
|
||||||
components together. This also serves as the "event_pool", which in this
|
components together. This also serves as the "event_pool", which in this
|
||||||
case is a simple array.
|
case is a simple array.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, clock, store, as_api):
|
def __init__(self, hs):
|
||||||
self.clock = clock
|
self.clock = hs.get_clock()
|
||||||
self.store = store
|
self.store = hs.get_datastore()
|
||||||
self.as_api = as_api
|
self.as_api = hs.get_application_service_api()
|
||||||
|
|
||||||
def create_recoverer(service, callback):
|
def create_recoverer(service, callback):
|
||||||
return _Recoverer(clock, store, as_api, service, callback)
|
return _Recoverer(self.clock, self.store, self.as_api, service, callback)
|
||||||
|
|
||||||
self.txn_ctrl = _TransactionController(
|
self.txn_ctrl = _TransactionController(
|
||||||
clock, store, as_api, create_recoverer
|
self.clock, self.store, self.as_api, create_recoverer
|
||||||
)
|
)
|
||||||
self.queuer = _ServiceQueuer(self.txn_ctrl)
|
self.queuer = _ServiceQueuer(self.txn_ctrl)
|
||||||
|
|
||||||
@@ -224,8 +224,8 @@ class _Recoverer(object):
|
|||||||
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
||||||
|
|
||||||
def _backoff(self):
|
def _backoff(self):
|
||||||
# cap the backoff to be around 18h => (2^16) = 65536 secs
|
# cap the backoff to be around 8.5min => (2^9) = 512 secs
|
||||||
if self.backoff_counter < 16:
|
if self.backoff_counter < 9:
|
||||||
self.backoff_counter += 1
|
self.backoff_counter += 1
|
||||||
self.recover()
|
self.recover()
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,6 +12,7 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
from synapse.config._base import ConfigError
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
import sys
|
import sys
|
||||||
@@ -21,9 +22,13 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
if action == "read":
|
if action == "read":
|
||||||
key = sys.argv[2]
|
key = sys.argv[2]
|
||||||
config = HomeServerConfig.load_config("", sys.argv[3:])
|
try:
|
||||||
|
config = HomeServerConfig.load_config("", sys.argv[3:])
|
||||||
|
except ConfigError as e:
|
||||||
|
sys.stderr.write("\n" + e.message + "\n")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
print getattr(config, key)
|
print (getattr(config, key))
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("Unknown command %r\n" % (action,))
|
sys.stderr.write("Unknown command %r\n" % (action,))
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,9 +14,9 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import errno
|
||||||
import os
|
import os
|
||||||
import yaml
|
import yaml
|
||||||
import sys
|
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
||||||
@@ -24,18 +24,29 @@ class ConfigError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
# We split these messages out to allow packages to override with package
|
||||||
|
# specific instructions.
|
||||||
|
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
|
||||||
|
Please opt in or out of reporting anonymized homeserver usage statistics, by
|
||||||
|
setting the `report_stats` key in your config file to either True or False.
|
||||||
|
"""
|
||||||
|
|
||||||
|
MISSING_REPORT_STATS_SPIEL = """\
|
||||||
|
We would really appreciate it if you could help our project out by reporting
|
||||||
|
anonymized usage statistics from your homeserver. Only very basic aggregate
|
||||||
|
data (e.g. number of users) will be reported, but it helps us to track the
|
||||||
|
growth of the Matrix community, and helps us to make Matrix a success, as well
|
||||||
|
as to convince other networks that they should peer with us.
|
||||||
|
|
||||||
|
Thank you.
|
||||||
|
"""
|
||||||
|
|
||||||
|
MISSING_SERVER_NAME = """\
|
||||||
|
Missing mandatory `server_name` config option.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
class Config(object):
|
class Config(object):
|
||||||
|
|
||||||
stats_reporting_begging_spiel = (
|
|
||||||
"We would really appreciate it if you could help our project out by"
|
|
||||||
" reporting anonymized usage statistics from your homeserver. Only very"
|
|
||||||
" basic aggregate data (e.g. number of users) will be reported, but it"
|
|
||||||
" helps us to track the growth of the Matrix community, and helps us to"
|
|
||||||
" make Matrix a success, as well as to convince other networks that they"
|
|
||||||
" should peer with us."
|
|
||||||
"\nThank you."
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_size(value):
|
def parse_size(value):
|
||||||
if isinstance(value, int) or isinstance(value, long):
|
if isinstance(value, int) or isinstance(value, long):
|
||||||
@@ -91,8 +102,11 @@ class Config(object):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def ensure_directory(cls, dir_path):
|
def ensure_directory(cls, dir_path):
|
||||||
dir_path = cls.abspath(dir_path)
|
dir_path = cls.abspath(dir_path)
|
||||||
if not os.path.exists(dir_path):
|
try:
|
||||||
os.makedirs(dir_path)
|
os.makedirs(dir_path)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno != errno.EEXIST:
|
||||||
|
raise
|
||||||
if not os.path.isdir(dir_path):
|
if not os.path.isdir(dir_path):
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
"%s is not a directory" % (dir_path,)
|
"%s is not a directory" % (dir_path,)
|
||||||
@@ -121,13 +135,20 @@ class Config(object):
|
|||||||
results.append(getattr(cls, name)(self, *args, **kargs))
|
results.append(getattr(cls, name)(self, *args, **kargs))
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def generate_config(self, config_dir_path, server_name, report_stats=None):
|
def generate_config(
|
||||||
|
self,
|
||||||
|
config_dir_path,
|
||||||
|
server_name,
|
||||||
|
is_generating_file,
|
||||||
|
report_stats=None,
|
||||||
|
):
|
||||||
default_config = "# vim:ft=yaml\n"
|
default_config = "# vim:ft=yaml\n"
|
||||||
|
|
||||||
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
|
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
|
||||||
"default_config",
|
"default_config",
|
||||||
config_dir_path=config_dir_path,
|
config_dir_path=config_dir_path,
|
||||||
server_name=server_name,
|
server_name=server_name,
|
||||||
|
is_generating_file=is_generating_file,
|
||||||
report_stats=report_stats,
|
report_stats=report_stats,
|
||||||
))
|
))
|
||||||
|
|
||||||
@@ -136,9 +157,40 @@ class Config(object):
|
|||||||
return default_config, config
|
return default_config, config
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load_config(cls, description, argv, generate_section=None):
|
def load_config(cls, description, argv):
|
||||||
obj = cls()
|
config_parser = argparse.ArgumentParser(
|
||||||
|
description=description,
|
||||||
|
)
|
||||||
|
config_parser.add_argument(
|
||||||
|
"-c", "--config-path",
|
||||||
|
action="append",
|
||||||
|
metavar="CONFIG_FILE",
|
||||||
|
help="Specify config file. Can be given multiple times and"
|
||||||
|
" may specify directories containing *.yaml files."
|
||||||
|
)
|
||||||
|
|
||||||
|
config_parser.add_argument(
|
||||||
|
"--keys-directory",
|
||||||
|
metavar="DIRECTORY",
|
||||||
|
help="Where files such as certs and signing keys are stored when"
|
||||||
|
" their location is given explicitly in the config."
|
||||||
|
" Defaults to the directory containing the last config file",
|
||||||
|
)
|
||||||
|
|
||||||
|
config_args = config_parser.parse_args(argv)
|
||||||
|
|
||||||
|
config_files = find_config_files(search_paths=config_args.config_path)
|
||||||
|
|
||||||
|
obj = cls()
|
||||||
|
obj.read_config_files(
|
||||||
|
config_files,
|
||||||
|
keys_directory=config_args.keys_directory,
|
||||||
|
generate_keys=False,
|
||||||
|
)
|
||||||
|
return obj
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def load_or_generate_config(cls, description, argv):
|
||||||
config_parser = argparse.ArgumentParser(add_help=False)
|
config_parser = argparse.ArgumentParser(add_help=False)
|
||||||
config_parser.add_argument(
|
config_parser.add_argument(
|
||||||
"-c", "--config-path",
|
"-c", "--config-path",
|
||||||
@@ -155,7 +207,7 @@ class Config(object):
|
|||||||
config_parser.add_argument(
|
config_parser.add_argument(
|
||||||
"--report-stats",
|
"--report-stats",
|
||||||
action="store",
|
action="store",
|
||||||
help="Stuff",
|
help="Whether the generated config reports anonymized usage statistics",
|
||||||
choices=["yes", "no"]
|
choices=["yes", "no"]
|
||||||
)
|
)
|
||||||
config_parser.add_argument(
|
config_parser.add_argument(
|
||||||
@@ -176,42 +228,17 @@ class Config(object):
|
|||||||
)
|
)
|
||||||
config_args, remaining_args = config_parser.parse_known_args(argv)
|
config_args, remaining_args = config_parser.parse_known_args(argv)
|
||||||
|
|
||||||
|
config_files = find_config_files(search_paths=config_args.config_path)
|
||||||
|
|
||||||
generate_keys = config_args.generate_keys
|
generate_keys = config_args.generate_keys
|
||||||
|
|
||||||
config_files = []
|
obj = cls()
|
||||||
if config_args.config_path:
|
|
||||||
for config_path in config_args.config_path:
|
|
||||||
if os.path.isdir(config_path):
|
|
||||||
# We accept specifying directories as config paths, we search
|
|
||||||
# inside that directory for all files matching *.yaml, and then
|
|
||||||
# we apply them in *sorted* order.
|
|
||||||
files = []
|
|
||||||
for entry in os.listdir(config_path):
|
|
||||||
entry_path = os.path.join(config_path, entry)
|
|
||||||
if not os.path.isfile(entry_path):
|
|
||||||
print (
|
|
||||||
"Found subdirectory in config directory: %r. IGNORING."
|
|
||||||
) % (entry_path, )
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not entry.endswith(".yaml"):
|
|
||||||
print (
|
|
||||||
"Found file in config directory that does not"
|
|
||||||
" end in '.yaml': %r. IGNORING."
|
|
||||||
) % (entry_path, )
|
|
||||||
continue
|
|
||||||
|
|
||||||
files.append(entry_path)
|
|
||||||
|
|
||||||
config_files.extend(sorted(files))
|
|
||||||
else:
|
|
||||||
config_files.append(config_path)
|
|
||||||
|
|
||||||
if config_args.generate_config:
|
if config_args.generate_config:
|
||||||
if config_args.report_stats is None:
|
if config_args.report_stats is None:
|
||||||
config_parser.error(
|
config_parser.error(
|
||||||
"Please specify either --report-stats=yes or --report-stats=no\n\n" +
|
"Please specify either --report-stats=yes or --report-stats=no\n\n" +
|
||||||
cls.stats_reporting_begging_spiel
|
MISSING_REPORT_STATS_SPIEL
|
||||||
)
|
)
|
||||||
if not config_files:
|
if not config_files:
|
||||||
config_parser.error(
|
config_parser.error(
|
||||||
@@ -229,8 +256,10 @@ class Config(object):
|
|||||||
|
|
||||||
server_name = config_args.server_name
|
server_name = config_args.server_name
|
||||||
if not server_name:
|
if not server_name:
|
||||||
print "Must specify a server_name to a generate config for."
|
raise ConfigError(
|
||||||
sys.exit(1)
|
"Must specify a server_name to a generate config for."
|
||||||
|
" Pass -H server.name."
|
||||||
|
)
|
||||||
if not os.path.exists(config_dir_path):
|
if not os.path.exists(config_dir_path):
|
||||||
os.makedirs(config_dir_path)
|
os.makedirs(config_dir_path)
|
||||||
with open(config_path, "wb") as config_file:
|
with open(config_path, "wb") as config_file:
|
||||||
@@ -238,6 +267,7 @@ class Config(object):
|
|||||||
config_dir_path=config_dir_path,
|
config_dir_path=config_dir_path,
|
||||||
server_name=server_name,
|
server_name=server_name,
|
||||||
report_stats=(config_args.report_stats == "yes"),
|
report_stats=(config_args.report_stats == "yes"),
|
||||||
|
is_generating_file=True
|
||||||
)
|
)
|
||||||
obj.invoke_all("generate_files", config)
|
obj.invoke_all("generate_files", config)
|
||||||
config_file.write(config_bytes)
|
config_file.write(config_bytes)
|
||||||
@@ -251,7 +281,7 @@ class Config(object):
|
|||||||
"If this server name is incorrect, you will need to"
|
"If this server name is incorrect, you will need to"
|
||||||
" regenerate the SSL certificates"
|
" regenerate the SSL certificates"
|
||||||
)
|
)
|
||||||
sys.exit(0)
|
return
|
||||||
else:
|
else:
|
||||||
print (
|
print (
|
||||||
"Config file %r already exists. Generating any missing key"
|
"Config file %r already exists. Generating any missing key"
|
||||||
@@ -275,39 +305,95 @@ class Config(object):
|
|||||||
" -c CONFIG-FILE\""
|
" -c CONFIG-FILE\""
|
||||||
)
|
)
|
||||||
|
|
||||||
if config_args.keys_directory:
|
obj.read_config_files(
|
||||||
config_dir_path = config_args.keys_directory
|
config_files,
|
||||||
else:
|
keys_directory=config_args.keys_directory,
|
||||||
config_dir_path = os.path.dirname(config_args.config_path[-1])
|
generate_keys=generate_keys,
|
||||||
config_dir_path = os.path.abspath(config_dir_path)
|
|
||||||
|
|
||||||
specified_config = {}
|
|
||||||
for config_file in config_files:
|
|
||||||
yaml_config = cls.read_config_file(config_file)
|
|
||||||
specified_config.update(yaml_config)
|
|
||||||
|
|
||||||
server_name = specified_config["server_name"]
|
|
||||||
_, config = obj.generate_config(
|
|
||||||
config_dir_path=config_dir_path,
|
|
||||||
server_name=server_name
|
|
||||||
)
|
)
|
||||||
config.pop("log_config")
|
|
||||||
config.update(specified_config)
|
|
||||||
if "report_stats" not in config:
|
|
||||||
sys.stderr.write(
|
|
||||||
"Please opt in or out of reporting anonymized homeserver usage "
|
|
||||||
"statistics, by setting the report_stats key in your config file "
|
|
||||||
" ( " + config_path + " ) " +
|
|
||||||
"to either True or False.\n\n" +
|
|
||||||
Config.stats_reporting_begging_spiel + "\n")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if generate_keys:
|
if generate_keys:
|
||||||
obj.invoke_all("generate_files", config)
|
return None
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
obj.invoke_all("read_config", config)
|
|
||||||
|
|
||||||
obj.invoke_all("read_arguments", args)
|
obj.invoke_all("read_arguments", args)
|
||||||
|
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
def read_config_files(self, config_files, keys_directory=None,
|
||||||
|
generate_keys=False):
|
||||||
|
if not keys_directory:
|
||||||
|
keys_directory = os.path.dirname(config_files[-1])
|
||||||
|
|
||||||
|
config_dir_path = os.path.abspath(keys_directory)
|
||||||
|
|
||||||
|
specified_config = {}
|
||||||
|
for config_file in config_files:
|
||||||
|
yaml_config = self.read_config_file(config_file)
|
||||||
|
specified_config.update(yaml_config)
|
||||||
|
|
||||||
|
if "server_name" not in specified_config:
|
||||||
|
raise ConfigError(MISSING_SERVER_NAME)
|
||||||
|
|
||||||
|
server_name = specified_config["server_name"]
|
||||||
|
_, config = self.generate_config(
|
||||||
|
config_dir_path=config_dir_path,
|
||||||
|
server_name=server_name,
|
||||||
|
is_generating_file=False,
|
||||||
|
)
|
||||||
|
config.pop("log_config")
|
||||||
|
config.update(specified_config)
|
||||||
|
|
||||||
|
if "report_stats" not in config:
|
||||||
|
raise ConfigError(
|
||||||
|
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" +
|
||||||
|
MISSING_REPORT_STATS_SPIEL
|
||||||
|
)
|
||||||
|
|
||||||
|
if generate_keys:
|
||||||
|
self.invoke_all("generate_files", config)
|
||||||
|
return
|
||||||
|
|
||||||
|
self.invoke_all("read_config", config)
|
||||||
|
|
||||||
|
|
||||||
|
def find_config_files(search_paths):
|
||||||
|
"""Finds config files using a list of search paths. If a path is a file
|
||||||
|
then that file path is added to the list. If a search path is a directory
|
||||||
|
then all the "*.yaml" files in that directory are added to the list in
|
||||||
|
sorted order.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
search_paths(list(str)): A list of paths to search.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list(str): A list of file paths.
|
||||||
|
"""
|
||||||
|
|
||||||
|
config_files = []
|
||||||
|
if search_paths:
|
||||||
|
for config_path in search_paths:
|
||||||
|
if os.path.isdir(config_path):
|
||||||
|
# We accept specifying directories as config paths, we search
|
||||||
|
# inside that directory for all files matching *.yaml, and then
|
||||||
|
# we apply them in *sorted* order.
|
||||||
|
files = []
|
||||||
|
for entry in os.listdir(config_path):
|
||||||
|
entry_path = os.path.join(config_path, entry)
|
||||||
|
if not os.path.isfile(entry_path):
|
||||||
|
print (
|
||||||
|
"Found subdirectory in config directory: %r. IGNORING."
|
||||||
|
) % (entry_path, )
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not entry.endswith(".yaml"):
|
||||||
|
print (
|
||||||
|
"Found file in config directory that does not"
|
||||||
|
" end in '.yaml': %r. IGNORING."
|
||||||
|
) % (entry_path, )
|
||||||
|
continue
|
||||||
|
|
||||||
|
files.append(entry_path)
|
||||||
|
|
||||||
|
config_files.extend(sorted(files))
|
||||||
|
else:
|
||||||
|
config_files.append(config_path)
|
||||||
|
return config_files
|
||||||
|
|||||||
40
synapse/config/api.py
Normal file
40
synapse/config/api.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config
|
||||||
|
|
||||||
|
from synapse.api.constants import EventTypes
|
||||||
|
|
||||||
|
|
||||||
|
class ApiConfig(Config):
    """Configuration for the client/server API.

    Controls which state event types are copied into the
    ``invite_room_state`` sent along with outgoing invites.
    """

    def read_config(self, config):
        # Fall back to a sensible default set of state events when the
        # admin has not configured room_invite_state_types explicitly.
        default_types = [
            EventTypes.JoinRules,
            EventTypes.CanonicalAlias,
            EventTypes.RoomAvatar,
            EventTypes.Name,
        ]
        self.room_invite_state_types = config.get(
            "room_invite_state_types", default_types
        )

    def default_config(cls, **kwargs):
        return """\
        ## API Configuration ##

        # A list of event types that will be included in the room_invite_state
        room_invite_state_types:
            - "{JoinRules}"
            - "{CanonicalAlias}"
            - "{RoomAvatar}"
            - "{Name}"
        """.format(**vars(EventTypes))
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,7 +12,16 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import Config
|
from ._base import Config, ConfigError
|
||||||
|
|
||||||
|
from synapse.appservice import ApplicationService
|
||||||
|
from synapse.types import UserID
|
||||||
|
|
||||||
|
import urllib
|
||||||
|
import yaml
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class AppServiceConfig(Config):
|
class AppServiceConfig(Config):
|
||||||
@@ -25,3 +34,99 @@ class AppServiceConfig(Config):
|
|||||||
# A list of application service config file to use
|
# A list of application service config file to use
|
||||||
app_service_config_files: []
|
app_service_config_files: []
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def load_appservices(hostname, config_files):
    """Returns a list of Application Services from the config files.

    Args:
        hostname (str): The local server name, used to construct the sender
            user ID for each application service.
        config_files (list[str]): Paths of the AS config files to load.

    Returns:
        list[ApplicationService]: the successfully parsed services.

    Raises:
        ConfigError: if two files reuse the same ``id`` or ``as_token``.
    """
    if not isinstance(config_files, list):
        logger.warning(
            "Expected %s to be a list of AS config files.", config_files
        )
        return []

    # Dicts of value -> filename, used to detect duplicates across files.
    seen_as_tokens = {}
    seen_ids = {}

    appservices = []

    for config_file in config_files:
        try:
            with open(config_file, 'r') as f:
                # safe_load refuses arbitrary python object tags that a
                # crafted config file could smuggle in via plain yaml.load.
                appservice = _load_appservice(
                    hostname, yaml.safe_load(f), config_file
                )
            if appservice.id in seen_ids:
                raise ConfigError(
                    "Cannot reuse ID across application services: "
                    "%s (files: %s, %s)" % (
                        appservice.id, config_file, seen_ids[appservice.id],
                    )
                )
            seen_ids[appservice.id] = config_file
            if appservice.token in seen_as_tokens:
                raise ConfigError(
                    "Cannot reuse as_token across application services: "
                    "%s (files: %s, %s)" % (
                        appservice.token,
                        config_file,
                        seen_as_tokens[appservice.token],
                    )
                )
            seen_as_tokens[appservice.token] = config_file
            logger.info("Loaded application service: %s", appservice)
            appservices.append(appservice)
        except Exception as e:
            # Log which file was at fault before propagating, since the
            # traceback alone doesn't identify the config file.
            logger.error("Failed to load appservice from '%s'", config_file)
            logger.exception(e)
            raise
    return appservices
|
||||||
|
|
||||||
|
|
||||||
|
def _load_appservice(hostname, as_info, config_filename):
    """Parse and validate a single application service config dict.

    Args:
        hostname (str): The local server name.
        as_info (dict): The parsed yaml of the AS config file.
        config_filename (str): Path of the source file, for error messages.

    Returns:
        ApplicationService

    Raises:
        KeyError: if a required field is missing or of the wrong type.
        ValueError: if a field has an invalid value.
    """
    required_string_fields = [
        "id", "url", "as_token", "hs_token", "sender_localpart"
    ]
    for field in required_string_fields:
        if not isinstance(as_info.get(field), basestring):
            raise KeyError("Required string field: '%s' (%s)" % (
                field, config_filename,
            ))

    # The sender_localpart is used verbatim inside a user ID, so it must
    # not contain any characters that would need URL encoding.
    localpart = as_info["sender_localpart"]
    if urllib.quote(localpart) != localpart:
        raise ValueError(
            "sender_localpart needs characters which are not URL encoded."
        )
    user = UserID(localpart, hostname)
    user_id = user.to_string()

    # namespace checks
    if not isinstance(as_info.get("namespaces"), dict):
        raise KeyError("Requires 'namespaces' object.")
    for ns in ApplicationService.NS_LIST:
        # specific namespaces are optional
        if ns in as_info["namespaces"]:
            # expect a list of dicts with exclusive and regex keys
            for regex_obj in as_info["namespaces"][ns]:
                if not isinstance(regex_obj, dict):
                    # NB: these messages were previously passed as extra
                    # exception args and never %-interpolated; format them
                    # explicitly so errors actually name the bad value.
                    raise ValueError(
                        "Expected namespace entry in %s to be an object,"
                        " but got %s" % (ns, regex_obj)
                    )
                if not isinstance(regex_obj.get("regex"), basestring):
                    raise ValueError(
                        "Missing/bad type 'regex' key in %s" % (regex_obj,)
                    )
                if not isinstance(regex_obj.get("exclusive"), bool):
                    raise ValueError(
                        "Missing/bad type 'exclusive' key in %s" % (regex_obj,)
                    )
    return ApplicationService(
        token=as_info["as_token"],
        url=as_info["url"],
        namespaces=as_info["namespaces"],
        hs_token=as_info["hs_token"],
        sender=user_id,
        id=as_info["id"],
    )
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -27,12 +27,13 @@ class CaptchaConfig(Config):
|
|||||||
def default_config(self, **kwargs):
|
def default_config(self, **kwargs):
|
||||||
return """\
|
return """\
|
||||||
## Captcha ##
|
## Captcha ##
|
||||||
|
# See docs/CAPTCHA_SETUP for full details of configuring this.
|
||||||
|
|
||||||
# This Home Server's ReCAPTCHA public key.
|
# This Home Server's ReCAPTCHA public key.
|
||||||
recaptcha_private_key: "YOUR_PRIVATE_KEY"
|
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
||||||
|
|
||||||
# This Home Server's ReCAPTCHA private key.
|
# This Home Server's ReCAPTCHA private key.
|
||||||
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
recaptcha_private_key: "YOUR_PRIVATE_KEY"
|
||||||
|
|
||||||
# Enables ReCaptcha checks when registering, preventing signup
|
# Enables ReCaptcha checks when registering, preventing signup
|
||||||
# unless a captcha is answered. Requires a valid ReCaptcha
|
# unless a captcha is answered. Requires a valid ReCaptcha
|
||||||
|
|||||||
47
synapse/config/cas.py
Normal file
47
synapse/config/cas.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
|
class CasConfig(Config):
    """Cas Configuration

    cas_server_url: URL of CAS server
    """

    def read_config(self, config):
        cas_config = config.get("cas_config", None)
        if not cas_config:
            # CAS is off: blank out every setting so callers can read the
            # attributes unconditionally.
            self.cas_enabled = False
            self.cas_server_url = None
            self.cas_service_url = None
            self.cas_required_attributes = {}
            return

        self.cas_enabled = cas_config.get("enabled", True)
        self.cas_server_url = cas_config["server_url"]
        self.cas_service_url = cas_config["service_url"]
        self.cas_required_attributes = cas_config.get("required_attributes", {})

    def default_config(self, config_dir_path, server_name, **kwargs):
        return """
        # Enable CAS for registration and login.
        #cas_config:
        #   enabled: true
        #   server_url: "https://cas-server.com"
        #   service_url: "https://homesever.domain.com:8448"
        #   #required_attributes:
        #   #    name: value
        """
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
98
synapse/config/emailconfig.py
Normal file
98
synapse/config/emailconfig.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# This file can't be called email.py because if it is, we cannot:
|
||||||
|
import email.utils
|
||||||
|
|
||||||
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
|
class EmailConfig(Config):
    """Configuration for sending email notifications.

    Validates the ``email`` section of the config and exposes its values
    as ``email_*`` attributes. Notifications stay disabled unless
    ``email.enable_notifs`` is True and all required keys are present.
    """

    def read_config(self, config):
        email_config = config.get("email", {})
        self.email_enable_notifs = email_config.get("enable_notifs", False)

        if self.email_enable_notifs:
            # make sure we can import the required deps
            import jinja2
            import bleach
            # prevent unused warnings
            jinja2
            bleach

            required = [
                "smtp_host",
                "smtp_port",
                "notif_from",
                "template_dir",
                "notif_template_html",
                "notif_template_text",
            ]

            missing = [k for k in required if k not in email_config]

            if missing:
                raise RuntimeError(
                    "email.enable_notifs is True but required keys are missing: %s" %
                    (", ".join(["email." + k for k in missing]),)
                )

            # Notification emails embed links back to this server, so a
            # public base URL is mandatory when notifications are on.
            if config.get("public_baseurl") is None:
                raise RuntimeError(
                    "email.enable_notifs is True but no public_baseurl is set"
                )

            self.email_smtp_host = email_config["smtp_host"]
            self.email_smtp_port = email_config["smtp_port"]
            self.email_notif_from = email_config["notif_from"]
            self.email_template_dir = email_config["template_dir"]
            self.email_notif_template_html = email_config["notif_template_html"]
            self.email_notif_template_text = email_config["notif_template_text"]
            self.email_notif_for_new_users = email_config.get(
                "notif_for_new_users", True
            )
            self.email_app_name = email_config.get("app_name", "Matrix")

            # make sure the From address is actually parseable
            parsed = email.utils.parseaddr(self.email_notif_from)
            if parsed[1] == '':
                raise RuntimeError("Invalid notif_from address")
        else:
            self.email_enable_notifs = False
            # Not much point setting defaults for the rest: it would be an
            # error for them to be used.

    def default_config(self, config_dir_path, server_name, **kwargs):
        return """
        # Enable sending emails for notification events
        #email:
        #   enable_notifs: false
        #   smtp_host: "localhost"
        #   smtp_port: 25
        #   notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
        #   app_name: Matrix
        #   template_dir: res/templates
        #   notif_template_html: notif_mail.html
        #   notif_template_text: notif_mail.txt
        #   notif_for_new_users: True
        """
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,15 +23,24 @@ from .captcha import CaptchaConfig
|
|||||||
from .voip import VoipConfig
|
from .voip import VoipConfig
|
||||||
from .registration import RegistrationConfig
|
from .registration import RegistrationConfig
|
||||||
from .metrics import MetricsConfig
|
from .metrics import MetricsConfig
|
||||||
|
from .api import ApiConfig
|
||||||
from .appservice import AppServiceConfig
|
from .appservice import AppServiceConfig
|
||||||
from .key import KeyConfig
|
from .key import KeyConfig
|
||||||
from .saml2 import SAML2Config
|
from .saml2 import SAML2Config
|
||||||
|
from .cas import CasConfig
|
||||||
|
from .password import PasswordConfig
|
||||||
|
from .jwt import JWTConfig
|
||||||
|
from .ldap import LDAPConfig
|
||||||
|
from .emailconfig import EmailConfig
|
||||||
|
from .workers import WorkerConfig
|
||||||
|
|
||||||
|
|
||||||
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
|
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
|
||||||
RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
|
RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
|
||||||
VoipConfig, RegistrationConfig, MetricsConfig,
|
VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig,
|
||||||
AppServiceConfig, KeyConfig, SAML2Config, ):
|
AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
|
||||||
|
JWTConfig, LDAPConfig, PasswordConfig, EmailConfig,
|
||||||
|
WorkerConfig,):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
54
synapse/config/jwt.py
Normal file
54
synapse/config/jwt.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 Niklas Riekenbrauck
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config, ConfigError
|
||||||
|
|
||||||
|
|
||||||
|
MISSING_JWT = (
|
||||||
|
"""Missing jwt library. This is required for jwt login.
|
||||||
|
|
||||||
|
Install by running:
|
||||||
|
pip install pyjwt
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class JWTConfig(Config):
    """Configuration for JSON Web Token (JWT) based login."""

    def read_config(self, config):
        jwt_config = config.get("jwt_config", None)
        if not jwt_config:
            # No jwt_config section: JWT login is disabled entirely.
            self.jwt_enabled = False
            self.jwt_secret = None
            self.jwt_algorithm = None
            return

        self.jwt_enabled = jwt_config.get("enabled", False)
        self.jwt_secret = jwt_config["secret"]
        self.jwt_algorithm = jwt_config["algorithm"]

        # Fail at startup, with install instructions, rather than at the
        # first login attempt.
        try:
            import jwt
            jwt  # To stop unused lint.
        except ImportError:
            raise ConfigError(MISSING_JWT)

    def default_config(self, **kwargs):
        return """\
        # The JWT needs to contain a globally unique "sub" (subject) claim.
        #
        # jwt_config:
        #    enabled: true
        #    secret: "a secret"
        #    algorithm: "HS256"
        """
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -22,8 +22,14 @@ from signedjson.key import (
|
|||||||
read_signing_keys, write_signing_keys, NACL_ED25519
|
read_signing_keys, write_signing_keys, NACL_ED25519
|
||||||
)
|
)
|
||||||
from unpaddedbase64 import decode_base64
|
from unpaddedbase64 import decode_base64
|
||||||
|
from synapse.util.stringutils import random_string_with_symbols
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import hashlib
|
||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class KeyConfig(Config):
|
class KeyConfig(Config):
|
||||||
@@ -40,9 +46,34 @@ class KeyConfig(Config):
|
|||||||
config["perspectives"]
|
config["perspectives"]
|
||||||
)
|
)
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
self.macaroon_secret_key = config.get(
|
||||||
|
"macaroon_secret_key", self.registration_shared_secret
|
||||||
|
)
|
||||||
|
|
||||||
|
if not self.macaroon_secret_key:
|
||||||
|
# Unfortunately, there are people out there that don't have this
|
||||||
|
# set. Lets just be "nice" and derive one from their secret key.
|
||||||
|
logger.warn("Config is missing missing macaroon_secret_key")
|
||||||
|
seed = self.signing_key[0].seed
|
||||||
|
self.macaroon_secret_key = hashlib.sha256(seed)
|
||||||
|
|
||||||
|
self.expire_access_token = config.get("expire_access_token", False)
|
||||||
|
|
||||||
|
def default_config(self, config_dir_path, server_name, is_generating_file=False,
|
||||||
|
**kwargs):
|
||||||
base_key_name = os.path.join(config_dir_path, server_name)
|
base_key_name = os.path.join(config_dir_path, server_name)
|
||||||
|
|
||||||
|
if is_generating_file:
|
||||||
|
macaroon_secret_key = random_string_with_symbols(50)
|
||||||
|
else:
|
||||||
|
macaroon_secret_key = None
|
||||||
|
|
||||||
return """\
|
return """\
|
||||||
|
macaroon_secret_key: "%(macaroon_secret_key)s"
|
||||||
|
|
||||||
|
# Used to enable access token expiration.
|
||||||
|
expire_access_token: False
|
||||||
|
|
||||||
## Signing Keys ##
|
## Signing Keys ##
|
||||||
|
|
||||||
# Path to the signing key to sign messages with
|
# Path to the signing key to sign messages with
|
||||||
|
|||||||
100
synapse/config/ldap.py
Normal file
100
synapse/config/ldap.py
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015 Niklas Riekenbrauck
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config, ConfigError
|
||||||
|
|
||||||
|
|
||||||
|
MISSING_LDAP3 = (
|
||||||
|
"Missing ldap3 library. This is required for LDAP Authentication."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class LDAPMode(object):
    """Enumeration of the supported LDAP authentication modes."""

    # NB: these previously carried trailing commas, which accidentally made
    # each constant a 1-tuple ("simple",) instead of a plain string.
    SIMPLE = "simple"
    SEARCH = "search"

    # All known modes, for validation.
    LIST = (SIMPLE, SEARCH)
|
||||||
|
|
||||||
|
|
||||||
|
class LDAPConfig(Config):
    """Configuration for LDAP-backed password authentication."""

    def read_config(self, config):
        ldap_config = config.get("ldap_config", {})

        self.ldap_enabled = ldap_config.get("enabled", False)

        if not self.ldap_enabled:
            return

        # verify dependencies are available
        try:
            import ldap3
            ldap3  # to stop unused lint
        except ImportError:
            raise ConfigError(MISSING_LDAP3)

        self.ldap_mode = LDAPMode.SIMPLE

        # verify config sanity
        self.require_keys(ldap_config, [
            "uri",
            "base",
            "attributes",
        ])

        self.ldap_uri = ldap_config["uri"]
        self.ldap_start_tls = ldap_config.get("start_tls", False)
        self.ldap_base = ldap_config["base"]
        self.ldap_attributes = ldap_config["attributes"]

        # Presence of a bind_dn switches us into search mode, which also
        # requires a bind password.
        if "bind_dn" in ldap_config:
            self.ldap_mode = LDAPMode.SEARCH
            self.require_keys(ldap_config, [
                "bind_dn",
                "bind_password",
            ])

            self.ldap_bind_dn = ldap_config["bind_dn"]
            self.ldap_bind_password = ldap_config["bind_password"]
            self.ldap_filter = ldap_config.get("filter", None)

        # verify attribute lookup
        self.require_keys(ldap_config['attributes'], [
            "uid",
            "name",
            "mail",
        ])

    def require_keys(self, config, required):
        # Raise one error naming every missing key at once, rather than
        # failing key-by-key.
        absent = [key for key in required if key not in config]
        if absent:
            raise ConfigError(
                "LDAP enabled but missing required config values: {}".format(
                    ", ".join(absent)
                )
            )

    def default_config(self, **kwargs):
        return """\
        # ldap_config:
        #   enabled: true
        #   uri: "ldap://ldap.example.com:389"
        #   start_tls: true
        #   base: "ou=users,dc=example,dc=com"
        #   attributes:
        #      uid: "cn"
        #      mail: "email"
        #      name: "givenName"
        #   #bind_dn:
        #   #bind_password:
        #   #filter: "(objectClass=posixAccount)"
        """
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -22,6 +22,7 @@ import yaml
|
|||||||
from string import Template
|
from string import Template
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
|
from synapse.util.debug import debug_deferreds
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_LOG_CONFIG = Template("""
|
DEFAULT_LOG_CONFIG = Template("""
|
||||||
@@ -69,6 +70,8 @@ class LoggingConfig(Config):
|
|||||||
self.verbosity = config.get("verbose", 0)
|
self.verbosity = config.get("verbose", 0)
|
||||||
self.log_config = self.abspath(config.get("log_config"))
|
self.log_config = self.abspath(config.get("log_config"))
|
||||||
self.log_file = self.abspath(config.get("log_file"))
|
self.log_file = self.abspath(config.get("log_file"))
|
||||||
|
if config.get("full_twisted_stacktraces"):
|
||||||
|
debug_deferreds()
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
def default_config(self, config_dir_path, server_name, **kwargs):
|
||||||
log_file = self.abspath("homeserver.log")
|
log_file = self.abspath("homeserver.log")
|
||||||
@@ -84,6 +87,11 @@ class LoggingConfig(Config):
|
|||||||
|
|
||||||
# A yaml python logging config file
|
# A yaml python logging config file
|
||||||
log_config: "%(log_config)s"
|
log_config: "%(log_config)s"
|
||||||
|
|
||||||
|
# Stop twisted from discarding the stack traces of exceptions in
|
||||||
|
# deferreds by waiting a reactor tick before running a deferred's
|
||||||
|
# callbacks.
|
||||||
|
# full_twisted_stacktraces: true
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|
||||||
def read_arguments(self, args):
|
def read_arguments(self, args):
|
||||||
@@ -118,54 +126,58 @@ class LoggingConfig(Config):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def setup_logging(self):
|
def setup_logging(self):
|
||||||
log_format = (
|
setup_logging(self.log_config, self.log_file, self.verbosity)
|
||||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
|
|
||||||
" - %(message)s"
|
|
||||||
)
|
|
||||||
if self.log_config is None:
|
|
||||||
|
|
||||||
level = logging.INFO
|
|
||||||
level_for_storage = logging.INFO
|
|
||||||
if self.verbosity:
|
|
||||||
level = logging.DEBUG
|
|
||||||
if self.verbosity > 1:
|
|
||||||
level_for_storage = logging.DEBUG
|
|
||||||
|
|
||||||
# FIXME: we need a logging.WARN for a -q quiet option
|
def setup_logging(log_config=None, log_file=None, verbosity=None):
|
||||||
logger = logging.getLogger('')
|
log_format = (
|
||||||
logger.setLevel(level)
|
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"
|
||||||
|
" - %(message)s"
|
||||||
|
)
|
||||||
|
if log_config is None:
|
||||||
|
|
||||||
logging.getLogger('synapse.storage').setLevel(level_for_storage)
|
level = logging.INFO
|
||||||
|
level_for_storage = logging.INFO
|
||||||
|
if verbosity:
|
||||||
|
level = logging.DEBUG
|
||||||
|
if verbosity > 1:
|
||||||
|
level_for_storage = logging.DEBUG
|
||||||
|
|
||||||
formatter = logging.Formatter(log_format)
|
# FIXME: we need a logging.WARN for a -q quiet option
|
||||||
if self.log_file:
|
logger = logging.getLogger('')
|
||||||
# TODO: Customisable file size / backup count
|
logger.setLevel(level)
|
||||||
handler = logging.handlers.RotatingFileHandler(
|
|
||||||
self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
|
|
||||||
)
|
|
||||||
|
|
||||||
def sighup(signum, stack):
|
logging.getLogger('synapse.storage').setLevel(level_for_storage)
|
||||||
logger.info("Closing log file due to SIGHUP")
|
|
||||||
handler.doRollover()
|
|
||||||
logger.info("Opened new log file due to SIGHUP")
|
|
||||||
|
|
||||||
# TODO(paul): obviously this is a terrible mechanism for
|
formatter = logging.Formatter(log_format)
|
||||||
# stealing SIGHUP, because it means no other part of synapse
|
if log_file:
|
||||||
# can use it instead. If we want to catch SIGHUP anywhere
|
# TODO: Customisable file size / backup count
|
||||||
# else as well, I'd suggest we find a nicer way to broadcast
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
# it around.
|
log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
|
||||||
if getattr(signal, "SIGHUP"):
|
)
|
||||||
signal.signal(signal.SIGHUP, sighup)
|
|
||||||
else:
|
|
||||||
handler = logging.StreamHandler()
|
|
||||||
handler.setFormatter(formatter)
|
|
||||||
|
|
||||||
handler.addFilter(LoggingContextFilter(request=""))
|
def sighup(signum, stack):
|
||||||
|
logger.info("Closing log file due to SIGHUP")
|
||||||
|
handler.doRollover()
|
||||||
|
logger.info("Opened new log file due to SIGHUP")
|
||||||
|
|
||||||
logger.addHandler(handler)
|
# TODO(paul): obviously this is a terrible mechanism for
|
||||||
|
# stealing SIGHUP, because it means no other part of synapse
|
||||||
|
# can use it instead. If we want to catch SIGHUP anywhere
|
||||||
|
# else as well, I'd suggest we find a nicer way to broadcast
|
||||||
|
# it around.
|
||||||
|
if getattr(signal, "SIGHUP"):
|
||||||
|
signal.signal(signal.SIGHUP, sighup)
|
||||||
else:
|
else:
|
||||||
with open(self.log_config, 'r') as f:
|
handler = logging.StreamHandler()
|
||||||
logging.config.dictConfig(yaml.load(f))
|
handler.setFormatter(formatter)
|
||||||
|
|
||||||
observer = PythonLoggingObserver()
|
handler.addFilter(LoggingContextFilter(request=""))
|
||||||
observer.start()
|
|
||||||
|
logger.addHandler(handler)
|
||||||
|
else:
|
||||||
|
with open(log_config, 'r') as f:
|
||||||
|
logging.config.dictConfig(yaml.load(f))
|
||||||
|
|
||||||
|
observer = PythonLoggingObserver()
|
||||||
|
observer.start()
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
36
synapse/config/password.py
Normal file
36
synapse/config/password.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
|
class PasswordConfig(Config):
    """Password login configuration
    """

    def read_config(self, config):
        # Password auth defaults to enabled with an empty pepper.
        section = config.get("password_config", {})
        self.password_enabled = section.get("enabled", True)
        self.password_pepper = section.get("pepper", "")

    def default_config(self, config_dir_path, server_name, **kwargs):
        return """
        # Enable password for login.
        password_config:
           enabled: true
           # Uncomment and change to a secret random string for extra security.
           # DO NOT CHANGE THIS AFTER INITIAL SETUP!
           #pepper: ""
        """
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,20 +23,28 @@ from distutils.util import strtobool
|
|||||||
class RegistrationConfig(Config):
|
class RegistrationConfig(Config):
|
||||||
|
|
||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
self.disable_registration = not bool(
|
self.enable_registration = bool(
|
||||||
strtobool(str(config["enable_registration"]))
|
strtobool(str(config["enable_registration"]))
|
||||||
)
|
)
|
||||||
if "disable_registration" in config:
|
if "disable_registration" in config:
|
||||||
self.disable_registration = bool(
|
self.enable_registration = not bool(
|
||||||
strtobool(str(config["disable_registration"]))
|
strtobool(str(config["disable_registration"]))
|
||||||
)
|
)
|
||||||
|
|
||||||
self.registration_shared_secret = config.get("registration_shared_secret")
|
self.registration_shared_secret = config.get("registration_shared_secret")
|
||||||
self.macaroon_secret_key = config.get("macaroon_secret_key")
|
self.user_creation_max_duration = int(config["user_creation_max_duration"])
|
||||||
|
|
||||||
|
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
|
||||||
|
self.trusted_third_party_id_servers = config["trusted_third_party_id_servers"]
|
||||||
|
self.allow_guest_access = config.get("allow_guest_access", False)
|
||||||
|
|
||||||
|
self.invite_3pid_guest = (
|
||||||
|
self.allow_guest_access and config.get("invite_3pid_guest", False)
|
||||||
|
)
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, **kwargs):
|
||||||
registration_shared_secret = random_string_with_symbols(50)
|
registration_shared_secret = random_string_with_symbols(50)
|
||||||
macaroon_secret_key = random_string_with_symbols(50)
|
|
||||||
return """\
|
return """\
|
||||||
## Registration ##
|
## Registration ##
|
||||||
|
|
||||||
@@ -47,7 +55,26 @@ class RegistrationConfig(Config):
|
|||||||
# secret, even if registration is otherwise disabled.
|
# secret, even if registration is otherwise disabled.
|
||||||
registration_shared_secret: "%(registration_shared_secret)s"
|
registration_shared_secret: "%(registration_shared_secret)s"
|
||||||
|
|
||||||
macaroon_secret_key: "%(macaroon_secret_key)s"
|
# Sets the expiry for the short term user creation in
|
||||||
|
# milliseconds. For instance the bellow duration is two weeks
|
||||||
|
# in milliseconds.
|
||||||
|
user_creation_max_duration: 1209600000
|
||||||
|
|
||||||
|
# Set the number of bcrypt rounds used to generate password hash.
|
||||||
|
# Larger numbers increase the work factor needed to generate the hash.
|
||||||
|
# The default number of rounds is 12.
|
||||||
|
bcrypt_rounds: 12
|
||||||
|
|
||||||
|
# Allows users to register as guests without a password/email/etc, and
|
||||||
|
# participate in rooms hosted on this server which have been made
|
||||||
|
# accessible to anonymous users.
|
||||||
|
allow_guest_access: False
|
||||||
|
|
||||||
|
# The list of identity servers trusted to verify third party
|
||||||
|
# identifiers by this server.
|
||||||
|
trusted_third_party_id_servers:
|
||||||
|
- matrix.org
|
||||||
|
- vector.im
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
@@ -59,6 +86,6 @@ class RegistrationConfig(Config):
|
|||||||
|
|
||||||
def read_arguments(self, args):
|
def read_arguments(self, args):
|
||||||
if args.enable_registration is not None:
|
if args.enable_registration is not None:
|
||||||
self.disable_registration = not bool(
|
self.enable_registration = bool(
|
||||||
strtobool(str(args.enable_registration))
|
strtobool(str(args.enable_registration))
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -13,9 +13,25 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import Config
|
from ._base import Config, ConfigError
|
||||||
from collections import namedtuple
|
from collections import namedtuple
|
||||||
|
|
||||||
|
|
||||||
|
MISSING_NETADDR = (
|
||||||
|
"Missing netaddr library. This is required for URL preview API."
|
||||||
|
)
|
||||||
|
|
||||||
|
MISSING_LXML = (
|
||||||
|
"""Missing lxml library. This is required for URL preview API.
|
||||||
|
|
||||||
|
Install by running:
|
||||||
|
pip install lxml
|
||||||
|
|
||||||
|
Requires libxslt1-dev system package.
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
ThumbnailRequirement = namedtuple(
|
ThumbnailRequirement = namedtuple(
|
||||||
"ThumbnailRequirement", ["width", "height", "method", "media_type"]
|
"ThumbnailRequirement", ["width", "height", "method", "media_type"]
|
||||||
)
|
)
|
||||||
@@ -23,7 +39,7 @@ ThumbnailRequirement = namedtuple(
|
|||||||
|
|
||||||
def parse_thumbnail_requirements(thumbnail_sizes):
|
def parse_thumbnail_requirements(thumbnail_sizes):
|
||||||
""" Takes a list of dictionaries with "width", "height", and "method" keys
|
""" Takes a list of dictionaries with "width", "height", and "method" keys
|
||||||
and creates a map from image media types to the thumbnail size, thumnailing
|
and creates a map from image media types to the thumbnail size, thumbnailing
|
||||||
method, and thumbnail media type to precalculate
|
method, and thumbnail media type to precalculate
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -53,12 +69,44 @@ class ContentRepositoryConfig(Config):
|
|||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
self.max_upload_size = self.parse_size(config["max_upload_size"])
|
self.max_upload_size = self.parse_size(config["max_upload_size"])
|
||||||
self.max_image_pixels = self.parse_size(config["max_image_pixels"])
|
self.max_image_pixels = self.parse_size(config["max_image_pixels"])
|
||||||
|
self.max_spider_size = self.parse_size(config["max_spider_size"])
|
||||||
self.media_store_path = self.ensure_directory(config["media_store_path"])
|
self.media_store_path = self.ensure_directory(config["media_store_path"])
|
||||||
self.uploads_path = self.ensure_directory(config["uploads_path"])
|
self.uploads_path = self.ensure_directory(config["uploads_path"])
|
||||||
self.dynamic_thumbnails = config["dynamic_thumbnails"]
|
self.dynamic_thumbnails = config["dynamic_thumbnails"]
|
||||||
self.thumbnail_requirements = parse_thumbnail_requirements(
|
self.thumbnail_requirements = parse_thumbnail_requirements(
|
||||||
config["thumbnail_sizes"]
|
config["thumbnail_sizes"]
|
||||||
)
|
)
|
||||||
|
self.url_preview_enabled = config.get("url_preview_enabled", False)
|
||||||
|
if self.url_preview_enabled:
|
||||||
|
try:
|
||||||
|
import lxml
|
||||||
|
lxml # To stop unused lint.
|
||||||
|
except ImportError:
|
||||||
|
raise ConfigError(MISSING_LXML)
|
||||||
|
|
||||||
|
try:
|
||||||
|
from netaddr import IPSet
|
||||||
|
except ImportError:
|
||||||
|
raise ConfigError(MISSING_NETADDR)
|
||||||
|
|
||||||
|
if "url_preview_ip_range_blacklist" in config:
|
||||||
|
self.url_preview_ip_range_blacklist = IPSet(
|
||||||
|
config["url_preview_ip_range_blacklist"]
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ConfigError(
|
||||||
|
"For security, you must specify an explicit target IP address "
|
||||||
|
"blacklist in url_preview_ip_range_blacklist for url previewing "
|
||||||
|
"to work"
|
||||||
|
)
|
||||||
|
|
||||||
|
self.url_preview_ip_range_whitelist = IPSet(
|
||||||
|
config.get("url_preview_ip_range_whitelist", ())
|
||||||
|
)
|
||||||
|
|
||||||
|
self.url_preview_url_blacklist = config.get(
|
||||||
|
"url_preview_url_blacklist", ()
|
||||||
|
)
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, **kwargs):
|
||||||
media_store = self.default_path("media_store")
|
media_store = self.default_path("media_store")
|
||||||
@@ -80,7 +128,7 @@ class ContentRepositoryConfig(Config):
|
|||||||
# the resolution requested by the client. If true then whenever
|
# the resolution requested by the client. If true then whenever
|
||||||
# a new resolution is requested by the client the server will
|
# a new resolution is requested by the client the server will
|
||||||
# generate a new thumbnail. If false the server will pick a thumbnail
|
# generate a new thumbnail. If false the server will pick a thumbnail
|
||||||
# from a precalcualted list.
|
# from a precalculated list.
|
||||||
dynamic_thumbnails: false
|
dynamic_thumbnails: false
|
||||||
|
|
||||||
# List of thumbnail to precalculate when an image is uploaded.
|
# List of thumbnail to precalculate when an image is uploaded.
|
||||||
@@ -97,4 +145,74 @@ class ContentRepositoryConfig(Config):
|
|||||||
- width: 640
|
- width: 640
|
||||||
height: 480
|
height: 480
|
||||||
method: scale
|
method: scale
|
||||||
|
- width: 800
|
||||||
|
height: 600
|
||||||
|
method: scale
|
||||||
|
|
||||||
|
# Is the preview URL API enabled? If enabled, you *must* specify
|
||||||
|
# an explicit url_preview_ip_range_blacklist of IPs that the spider is
|
||||||
|
# denied from accessing.
|
||||||
|
url_preview_enabled: False
|
||||||
|
|
||||||
|
# List of IP address CIDR ranges that the URL preview spider is denied
|
||||||
|
# from accessing. There are no defaults: you must explicitly
|
||||||
|
# specify a list for URL previewing to work. You should specify any
|
||||||
|
# internal services in your network that you do not want synapse to try
|
||||||
|
# to connect to, otherwise anyone in any Matrix room could cause your
|
||||||
|
# synapse to issue arbitrary GET requests to your internal services,
|
||||||
|
# causing serious security issues.
|
||||||
|
#
|
||||||
|
# url_preview_ip_range_blacklist:
|
||||||
|
# - '127.0.0.0/8'
|
||||||
|
# - '10.0.0.0/8'
|
||||||
|
# - '172.16.0.0/12'
|
||||||
|
# - '192.168.0.0/16'
|
||||||
|
#
|
||||||
|
# List of IP address CIDR ranges that the URL preview spider is allowed
|
||||||
|
# to access even if they are specified in url_preview_ip_range_blacklist.
|
||||||
|
# This is useful for specifying exceptions to wide-ranging blacklisted
|
||||||
|
# target IP ranges - e.g. for enabling URL previews for a specific private
|
||||||
|
# website only visible in your network.
|
||||||
|
#
|
||||||
|
# url_preview_ip_range_whitelist:
|
||||||
|
# - '192.168.1.1'
|
||||||
|
|
||||||
|
# Optional list of URL matches that the URL preview spider is
|
||||||
|
# denied from accessing. You should use url_preview_ip_range_blacklist
|
||||||
|
# in preference to this, otherwise someone could define a public DNS
|
||||||
|
# entry that points to a private IP address and circumvent the blacklist.
|
||||||
|
# This is more useful if you know there is an entire shape of URL that
|
||||||
|
# you know that will never want synapse to try to spider.
|
||||||
|
#
|
||||||
|
# Each list entry is a dictionary of url component attributes as returned
|
||||||
|
# by urlparse.urlsplit as applied to the absolute form of the URL. See
|
||||||
|
# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
|
||||||
|
# The values of the dictionary are treated as an filename match pattern
|
||||||
|
# applied to that component of URLs, unless they start with a ^ in which
|
||||||
|
# case they are treated as a regular expression match. If all the
|
||||||
|
# specified component matches for a given list item succeed, the URL is
|
||||||
|
# blacklisted.
|
||||||
|
#
|
||||||
|
# url_preview_url_blacklist:
|
||||||
|
# # blacklist any URL with a username in its URI
|
||||||
|
# - username: '*'
|
||||||
|
#
|
||||||
|
# # blacklist all *.google.com URLs
|
||||||
|
# - netloc: 'google.com'
|
||||||
|
# - netloc: '*.google.com'
|
||||||
|
#
|
||||||
|
# # blacklist all plain HTTP URLs
|
||||||
|
# - scheme: 'http'
|
||||||
|
#
|
||||||
|
# # blacklist http(s)://www.acme.com/foo
|
||||||
|
# - netloc: 'www.acme.com'
|
||||||
|
# path: '/foo'
|
||||||
|
#
|
||||||
|
# # blacklist any URL with a literal IPv4 address
|
||||||
|
# - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
|
||||||
|
|
||||||
|
# The largest allowed URL preview spidering size in bytes
|
||||||
|
max_spider_size: "10M"
|
||||||
|
|
||||||
|
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ class SAML2Config(Config):
|
|||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
saml2_config = config.get("saml2_config", None)
|
saml2_config = config.get("saml2_config", None)
|
||||||
if saml2_config:
|
if saml2_config:
|
||||||
self.saml2_enabled = True
|
self.saml2_enabled = saml2_config.get("enabled", True)
|
||||||
self.saml2_config_path = saml2_config["config_path"]
|
self.saml2_config_path = saml2_config["config_path"]
|
||||||
self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
|
self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
|
||||||
else:
|
else:
|
||||||
@@ -49,6 +49,7 @@ class SAML2Config(Config):
|
|||||||
# the user back to /login/saml2 with proper info.
|
# the user back to /login/saml2 with proper info.
|
||||||
# See pysaml2 docs for format of config.
|
# See pysaml2 docs for format of config.
|
||||||
#saml2_config:
|
#saml2_config:
|
||||||
|
# enabled: true
|
||||||
# config_path: "%s/sp_conf.py"
|
# config_path: "%s/sp_conf.py"
|
||||||
# idp_redirect_url: "http://%s/idp"
|
# idp_redirect_url: "http://%s/idp"
|
||||||
""" % (config_dir_path, server_name)
|
""" % (config_dir_path, server_name)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from ._base import Config
|
from ._base import Config, ConfigError
|
||||||
|
|
||||||
|
|
||||||
class ServerConfig(Config):
|
class ServerConfig(Config):
|
||||||
@@ -26,10 +26,20 @@ class ServerConfig(Config):
|
|||||||
self.soft_file_limit = config["soft_file_limit"]
|
self.soft_file_limit = config["soft_file_limit"]
|
||||||
self.daemonize = config.get("daemonize")
|
self.daemonize = config.get("daemonize")
|
||||||
self.print_pidfile = config.get("print_pidfile")
|
self.print_pidfile = config.get("print_pidfile")
|
||||||
self.use_frozen_dicts = config.get("use_frozen_dicts", True)
|
self.user_agent_suffix = config.get("user_agent_suffix")
|
||||||
|
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
|
||||||
|
self.public_baseurl = config.get("public_baseurl")
|
||||||
|
self.secondary_directory_servers = config.get("secondary_directory_servers", [])
|
||||||
|
|
||||||
|
if self.public_baseurl is not None:
|
||||||
|
if self.public_baseurl[-1] != '/':
|
||||||
|
self.public_baseurl += '/'
|
||||||
|
self.start_pushers = config.get("start_pushers", True)
|
||||||
|
|
||||||
self.listeners = config.get("listeners", [])
|
self.listeners = config.get("listeners", [])
|
||||||
|
|
||||||
|
self.gc_thresholds = read_gc_thresholds(config.get("gc_thresholds", None))
|
||||||
|
|
||||||
bind_port = config.get("bind_port")
|
bind_port = config.get("bind_port")
|
||||||
if bind_port:
|
if bind_port:
|
||||||
self.listeners = []
|
self.listeners = []
|
||||||
@@ -97,26 +107,6 @@ class ServerConfig(Config):
|
|||||||
]
|
]
|
||||||
})
|
})
|
||||||
|
|
||||||
# Attempt to guess the content_addr for the v0 content repostitory
|
|
||||||
content_addr = config.get("content_addr")
|
|
||||||
if not content_addr:
|
|
||||||
for listener in self.listeners:
|
|
||||||
if listener["type"] == "http" and not listener.get("tls", False):
|
|
||||||
unsecure_port = listener["port"]
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
raise RuntimeError("Could not determine 'content_addr'")
|
|
||||||
|
|
||||||
host = self.server_name
|
|
||||||
if ':' not in host:
|
|
||||||
host = "%s:%d" % (host, unsecure_port)
|
|
||||||
else:
|
|
||||||
host = host.split(':')[0]
|
|
||||||
host = "%s:%d" % (host, unsecure_port)
|
|
||||||
content_addr = "http://%s" % (host,)
|
|
||||||
|
|
||||||
self.content_addr = content_addr
|
|
||||||
|
|
||||||
def default_config(self, server_name, **kwargs):
|
def default_config(self, server_name, **kwargs):
|
||||||
if ":" in server_name:
|
if ":" in server_name:
|
||||||
bind_port = int(server_name.split(":")[1])
|
bind_port = int(server_name.split(":")[1])
|
||||||
@@ -132,6 +122,7 @@ class ServerConfig(Config):
|
|||||||
# The domain name of the server, with optional explicit port.
|
# The domain name of the server, with optional explicit port.
|
||||||
# This is used by remote servers to connect to this server,
|
# This is used by remote servers to connect to this server,
|
||||||
# e.g. matrix.org, localhost:8080, etc.
|
# e.g. matrix.org, localhost:8080, etc.
|
||||||
|
# This is also the last part of your UserID.
|
||||||
server_name: "%(server_name)s"
|
server_name: "%(server_name)s"
|
||||||
|
|
||||||
# When running as a daemon, the file to store the pid in
|
# When running as a daemon, the file to store the pid in
|
||||||
@@ -140,11 +131,25 @@ class ServerConfig(Config):
|
|||||||
# Whether to serve a web client from the HTTP/HTTPS root resource.
|
# Whether to serve a web client from the HTTP/HTTPS root resource.
|
||||||
web_client: True
|
web_client: True
|
||||||
|
|
||||||
|
# The public-facing base URL for the client API (not including _matrix/...)
|
||||||
|
# public_baseurl: https://example.com:8448/
|
||||||
|
|
||||||
# Set the soft limit on the number of file descriptors synapse can use
|
# Set the soft limit on the number of file descriptors synapse can use
|
||||||
# Zero is used to indicate synapse should set the soft limit to the
|
# Zero is used to indicate synapse should set the soft limit to the
|
||||||
# hard limit.
|
# hard limit.
|
||||||
soft_file_limit: 0
|
soft_file_limit: 0
|
||||||
|
|
||||||
|
# The GC threshold parameters to pass to `gc.set_threshold`, if defined
|
||||||
|
# gc_thresholds: [700, 10, 10]
|
||||||
|
|
||||||
|
# A list of other Home Servers to fetch the public room directory from
|
||||||
|
# and include in the public room directory of this home server
|
||||||
|
# This is a temporary stopgap solution to populate new server with a
|
||||||
|
# list of rooms until there exists a good solution of a decentralized
|
||||||
|
# room directory.
|
||||||
|
# secondary_directory_servers:
|
||||||
|
# - matrix.org
|
||||||
|
|
||||||
# List of ports that Synapse should listen on, their purpose and their
|
# List of ports that Synapse should listen on, their purpose and their
|
||||||
# configuration.
|
# configuration.
|
||||||
listeners:
|
listeners:
|
||||||
@@ -198,7 +203,7 @@ class ServerConfig(Config):
|
|||||||
- names: [federation]
|
- names: [federation]
|
||||||
compress: false
|
compress: false
|
||||||
|
|
||||||
# Turn on the twisted telnet manhole service on localhost on the given
|
# Turn on the twisted ssh manhole service on localhost on the given
|
||||||
# port.
|
# port.
|
||||||
# - port: 9000
|
# - port: 9000
|
||||||
# bind_address: 127.0.0.1
|
# bind_address: 127.0.0.1
|
||||||
@@ -226,3 +231,20 @@ class ServerConfig(Config):
|
|||||||
type=int,
|
type=int,
|
||||||
help="Turn on the twisted telnet manhole"
|
help="Turn on the twisted telnet manhole"
|
||||||
" service on the given port.")
|
" service on the given port.")
|
||||||
|
|
||||||
|
|
||||||
|
def read_gc_thresholds(thresholds):
|
||||||
|
"""Reads the three integer thresholds for garbage collection. Ensures that
|
||||||
|
the thresholds are integers if thresholds are supplied.
|
||||||
|
"""
|
||||||
|
if thresholds is None:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
assert len(thresholds) == 3
|
||||||
|
return (
|
||||||
|
int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
raise ConfigError(
|
||||||
|
"Value of `gc_threshold` must be a list of three integers if set"
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
31
synapse/config/workers.py
Normal file
31
synapse/config/workers.py
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
# Copyright 2016 matrix.org
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from ._base import Config
|
||||||
|
|
||||||
|
|
||||||
|
class WorkerConfig(Config):
|
||||||
|
"""The workers are processes run separately to the main synapse process.
|
||||||
|
They have their own pid_file and listener configuration. They use the
|
||||||
|
replication_url to talk to the main synapse process."""
|
||||||
|
|
||||||
|
def read_config(self, config):
|
||||||
|
self.worker_app = config.get("worker_app")
|
||||||
|
self.worker_listeners = config.get("worker_listeners")
|
||||||
|
self.worker_daemonize = config.get("worker_daemonize")
|
||||||
|
self.worker_pid_file = config.get("worker_pid_file")
|
||||||
|
self.worker_log_file = config.get("worker_log_file")
|
||||||
|
self.worker_log_config = config.get("worker_log_config")
|
||||||
|
self.worker_replication_url = config.get("worker_replication_url")
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -36,6 +36,7 @@ def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1):
|
|||||||
|
|
||||||
factory = SynapseKeyClientFactory()
|
factory = SynapseKeyClientFactory()
|
||||||
factory.path = path
|
factory.path = path
|
||||||
|
factory.host = server_name
|
||||||
endpoint = matrix_federation_endpoint(
|
endpoint = matrix_federation_endpoint(
|
||||||
reactor, server_name, ssl_context_factory, timeout=30
|
reactor, server_name, ssl_context_factory, timeout=30
|
||||||
)
|
)
|
||||||
@@ -81,6 +82,8 @@ class SynapseKeyClientProtocol(HTTPClient):
|
|||||||
self.host = self.transport.getHost()
|
self.host = self.transport.getHost()
|
||||||
logger.debug("Connected to %s", self.host)
|
logger.debug("Connected to %s", self.host)
|
||||||
self.sendCommand(b"GET", self.path)
|
self.sendCommand(b"GET", self.path)
|
||||||
|
if self.host:
|
||||||
|
self.sendHeader(b"Host", self.host)
|
||||||
self.endHeaders()
|
self.endHeaders()
|
||||||
self.timer = reactor.callLater(
|
self.timer = reactor.callLater(
|
||||||
self.timeout,
|
self.timeout,
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -18,6 +18,10 @@ from synapse.api.errors import SynapseError, Codes
|
|||||||
from synapse.util.retryutils import get_retry_limiter
|
from synapse.util.retryutils import get_retry_limiter
|
||||||
from synapse.util import unwrapFirstError
|
from synapse.util import unwrapFirstError
|
||||||
from synapse.util.async import ObservableDeferred
|
from synapse.util.async import ObservableDeferred
|
||||||
|
from synapse.util.logcontext import (
|
||||||
|
preserve_context_over_deferred, preserve_context_over_fn, PreserveLoggingContext,
|
||||||
|
preserve_fn
|
||||||
|
)
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
@@ -142,40 +146,43 @@ class Keyring(object):
|
|||||||
for server_name, _ in server_and_json
|
for server_name, _ in server_and_json
|
||||||
}
|
}
|
||||||
|
|
||||||
# We want to wait for any previous lookups to complete before
|
with PreserveLoggingContext():
|
||||||
# proceeding.
|
|
||||||
wait_on_deferred = self.wait_for_previous_lookups(
|
|
||||||
[server_name for server_name, _ in server_and_json],
|
|
||||||
server_to_deferred,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Actually start fetching keys.
|
# We want to wait for any previous lookups to complete before
|
||||||
wait_on_deferred.addBoth(
|
# proceeding.
|
||||||
lambda _: self.get_server_verify_keys(group_id_to_group, deferreds)
|
wait_on_deferred = self.wait_for_previous_lookups(
|
||||||
)
|
[server_name for server_name, _ in server_and_json],
|
||||||
|
server_to_deferred,
|
||||||
|
)
|
||||||
|
|
||||||
# When we've finished fetching all the keys for a given server_name,
|
# Actually start fetching keys.
|
||||||
# resolve the deferred passed to `wait_for_previous_lookups` so that
|
wait_on_deferred.addBoth(
|
||||||
# any lookups waiting will proceed.
|
lambda _: self.get_server_verify_keys(group_id_to_group, deferreds)
|
||||||
server_to_gids = {}
|
)
|
||||||
|
|
||||||
def remove_deferreds(res, server_name, group_id):
|
# When we've finished fetching all the keys for a given server_name,
|
||||||
server_to_gids[server_name].discard(group_id)
|
# resolve the deferred passed to `wait_for_previous_lookups` so that
|
||||||
if not server_to_gids[server_name]:
|
# any lookups waiting will proceed.
|
||||||
d = server_to_deferred.pop(server_name, None)
|
server_to_gids = {}
|
||||||
if d:
|
|
||||||
d.callback(None)
|
|
||||||
return res
|
|
||||||
|
|
||||||
for g_id, deferred in deferreds.items():
|
def remove_deferreds(res, server_name, group_id):
|
||||||
server_name = group_id_to_group[g_id].server_name
|
server_to_gids[server_name].discard(group_id)
|
||||||
server_to_gids.setdefault(server_name, set()).add(g_id)
|
if not server_to_gids[server_name]:
|
||||||
deferred.addBoth(remove_deferreds, server_name, g_id)
|
d = server_to_deferred.pop(server_name, None)
|
||||||
|
if d:
|
||||||
|
d.callback(None)
|
||||||
|
return res
|
||||||
|
|
||||||
|
for g_id, deferred in deferreds.items():
|
||||||
|
server_name = group_id_to_group[g_id].server_name
|
||||||
|
server_to_gids.setdefault(server_name, set()).add(g_id)
|
||||||
|
deferred.addBoth(remove_deferreds, server_name, g_id)
|
||||||
|
|
||||||
# Pass those keys to handle_key_deferred so that the json object
|
# Pass those keys to handle_key_deferred so that the json object
|
||||||
# signatures can be verified
|
# signatures can be verified
|
||||||
return [
|
return [
|
||||||
handle_key_deferred(
|
preserve_context_over_fn(
|
||||||
|
handle_key_deferred,
|
||||||
group_id_to_group[g_id],
|
group_id_to_group[g_id],
|
||||||
deferreds[g_id],
|
deferreds[g_id],
|
||||||
)
|
)
|
||||||
@@ -198,12 +205,13 @@ class Keyring(object):
|
|||||||
if server_name in self.key_downloads
|
if server_name in self.key_downloads
|
||||||
]
|
]
|
||||||
if wait_on:
|
if wait_on:
|
||||||
yield defer.DeferredList(wait_on)
|
with PreserveLoggingContext():
|
||||||
|
yield defer.DeferredList(wait_on)
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
for server_name, deferred in server_to_deferred.items():
|
for server_name, deferred in server_to_deferred.items():
|
||||||
d = ObservableDeferred(deferred)
|
d = ObservableDeferred(preserve_context_over_deferred(deferred))
|
||||||
self.key_downloads[server_name] = d
|
self.key_downloads[server_name] = d
|
||||||
|
|
||||||
def rm(r, server_name):
|
def rm(r, server_name):
|
||||||
@@ -230,7 +238,9 @@ class Keyring(object):
|
|||||||
|
|
||||||
missing_keys = {}
|
missing_keys = {}
|
||||||
for group in group_id_to_group.values():
|
for group in group_id_to_group.values():
|
||||||
missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
|
missing_keys.setdefault(group.server_name, set()).update(
|
||||||
|
group.key_ids
|
||||||
|
)
|
||||||
|
|
||||||
for fn in key_fetch_fns:
|
for fn in key_fetch_fns:
|
||||||
results = yield fn(missing_keys.items())
|
results = yield fn(missing_keys.items())
|
||||||
@@ -242,12 +252,13 @@ class Keyring(object):
|
|||||||
for group in group_id_to_group.values():
|
for group in group_id_to_group.values():
|
||||||
for key_id in group.key_ids:
|
for key_id in group.key_ids:
|
||||||
if key_id in merged_results[group.server_name]:
|
if key_id in merged_results[group.server_name]:
|
||||||
group_id_to_deferred[group.group_id].callback((
|
with PreserveLoggingContext():
|
||||||
group.group_id,
|
group_id_to_deferred[group.group_id].callback((
|
||||||
group.server_name,
|
group.group_id,
|
||||||
key_id,
|
group.server_name,
|
||||||
merged_results[group.server_name][key_id],
|
key_id,
|
||||||
))
|
merged_results[group.server_name][key_id],
|
||||||
|
))
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
missing_groups.setdefault(
|
missing_groups.setdefault(
|
||||||
@@ -381,28 +392,24 @@ class Keyring(object):
|
|||||||
def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
|
def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
|
||||||
perspective_name,
|
perspective_name,
|
||||||
perspective_keys):
|
perspective_keys):
|
||||||
limiter = yield get_retry_limiter(
|
# TODO(mark): Set the minimum_valid_until_ts to that needed by
|
||||||
perspective_name, self.clock, self.store
|
# the events being validated or the current time if validating
|
||||||
)
|
# an incoming request.
|
||||||
|
query_response = yield self.client.post_json(
|
||||||
with limiter:
|
destination=perspective_name,
|
||||||
# TODO(mark): Set the minimum_valid_until_ts to that needed by
|
path=b"/_matrix/key/v2/query",
|
||||||
# the events being validated or the current time if validating
|
data={
|
||||||
# an incoming request.
|
u"server_keys": {
|
||||||
query_response = yield self.client.post_json(
|
server_name: {
|
||||||
destination=perspective_name,
|
key_id: {
|
||||||
path=b"/_matrix/key/v2/query",
|
u"minimum_valid_until_ts": 0
|
||||||
data={
|
} for key_id in key_ids
|
||||||
u"server_keys": {
|
|
||||||
server_name: {
|
|
||||||
key_id: {
|
|
||||||
u"minimum_valid_until_ts": 0
|
|
||||||
} for key_id in key_ids
|
|
||||||
}
|
|
||||||
for server_name, key_ids in server_names_and_key_ids
|
|
||||||
}
|
}
|
||||||
},
|
for server_name, key_ids in server_names_and_key_ids
|
||||||
)
|
}
|
||||||
|
},
|
||||||
|
long_retries=True,
|
||||||
|
)
|
||||||
|
|
||||||
keys = {}
|
keys = {}
|
||||||
|
|
||||||
@@ -506,7 +513,7 @@ class Keyring(object):
|
|||||||
|
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
self.store_keys(
|
preserve_fn(self.store_keys)(
|
||||||
server_name=key_server_name,
|
server_name=key_server_name,
|
||||||
from_server=server_name,
|
from_server=server_name,
|
||||||
verify_keys=verify_keys,
|
verify_keys=verify_keys,
|
||||||
@@ -575,7 +582,7 @@ class Keyring(object):
|
|||||||
|
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
self.store.store_server_keys_json(
|
preserve_fn(self.store.store_server_keys_json)(
|
||||||
server_name=server_name,
|
server_name=server_name,
|
||||||
key_id=key_id,
|
key_id=key_id,
|
||||||
from_server=server_name,
|
from_server=server_name,
|
||||||
@@ -677,7 +684,7 @@ class Keyring(object):
|
|||||||
# TODO(markjh): Store whether the keys have expired.
|
# TODO(markjh): Store whether the keys have expired.
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
self.store.store_server_verify_key(
|
preserve_fn(self.store.store_server_verify_key)(
|
||||||
server_name, server_name, key.time_added, key
|
server_name, server_name, key.time_added, key
|
||||||
)
|
)
|
||||||
for key_id, key in verify_keys.items()
|
for key_id, key in verify_keys.items()
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,6 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from synapse.util.frozenutils import freeze
|
from synapse.util.frozenutils import freeze
|
||||||
|
from synapse.util.caches import intern_dict
|
||||||
|
|
||||||
|
|
||||||
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
|
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
|
||||||
@@ -30,7 +31,10 @@ class _EventInternalMetadata(object):
|
|||||||
return dict(self.__dict__)
|
return dict(self.__dict__)
|
||||||
|
|
||||||
def is_outlier(self):
|
def is_outlier(self):
|
||||||
return hasattr(self, "outlier") and self.outlier
|
return getattr(self, "outlier", False)
|
||||||
|
|
||||||
|
def is_invite_from_remote(self):
|
||||||
|
return getattr(self, "invite_from_remote", False)
|
||||||
|
|
||||||
|
|
||||||
def _event_dict_property(key):
|
def _event_dict_property(key):
|
||||||
@@ -117,6 +121,15 @@ class EventBase(object):
|
|||||||
def __set__(self, instance, value):
|
def __set__(self, instance, value):
|
||||||
raise AttributeError("Unrecognized attribute %s" % (instance,))
|
raise AttributeError("Unrecognized attribute %s" % (instance,))
|
||||||
|
|
||||||
|
def __getitem__(self, field):
|
||||||
|
return self._event_dict[field]
|
||||||
|
|
||||||
|
def __contains__(self, field):
|
||||||
|
return field in self._event_dict
|
||||||
|
|
||||||
|
def items(self):
|
||||||
|
return self._event_dict.items()
|
||||||
|
|
||||||
|
|
||||||
class FrozenEvent(EventBase):
|
class FrozenEvent(EventBase):
|
||||||
def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
|
def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
|
||||||
@@ -131,6 +144,10 @@ class FrozenEvent(EventBase):
|
|||||||
|
|
||||||
unsigned = dict(event_dict.pop("unsigned", {}))
|
unsigned = dict(event_dict.pop("unsigned", {}))
|
||||||
|
|
||||||
|
# We intern these strings because they turn up a lot (especially when
|
||||||
|
# caching).
|
||||||
|
event_dict = intern_dict(event_dict)
|
||||||
|
|
||||||
if USE_FROZEN_DICTS:
|
if USE_FROZEN_DICTS:
|
||||||
frozen_dict = freeze(event_dict)
|
frozen_dict = freeze(event_dict)
|
||||||
else:
|
else:
|
||||||
@@ -159,5 +176,7 @@ class FrozenEvent(EventBase):
|
|||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
|
return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
|
||||||
self.event_id, self.type, self.get("state_key", None),
|
self.get("event_id", None),
|
||||||
|
self.get("type", None),
|
||||||
|
self.get("state_key", None),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -20,3 +20,4 @@ class EventContext(object):
|
|||||||
self.current_state = current_state
|
self.current_state = current_state
|
||||||
self.state_group = None
|
self.state_group = None
|
||||||
self.rejected = False
|
self.rejected = False
|
||||||
|
self.push_actions = []
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -66,7 +66,6 @@ def prune_event(event):
|
|||||||
"users_default",
|
"users_default",
|
||||||
"events",
|
"events",
|
||||||
"events_default",
|
"events_default",
|
||||||
"events_default",
|
|
||||||
"state_default",
|
"state_default",
|
||||||
"ban",
|
"ban",
|
||||||
"kick",
|
"kick",
|
||||||
@@ -101,19 +100,20 @@ def format_event_raw(d):
|
|||||||
|
|
||||||
|
|
||||||
def format_event_for_client_v1(d):
|
def format_event_for_client_v1(d):
|
||||||
d["user_id"] = d.pop("sender", None)
|
d = format_event_for_client_v2(d)
|
||||||
|
|
||||||
move_keys = ("age", "redacted_because", "replaces_state", "prev_content")
|
sender = d.get("sender")
|
||||||
for key in move_keys:
|
if sender is not None:
|
||||||
|
d["user_id"] = sender
|
||||||
|
|
||||||
|
copy_keys = (
|
||||||
|
"age", "redacted_because", "replaces_state", "prev_content",
|
||||||
|
"invite_room_state",
|
||||||
|
)
|
||||||
|
for key in copy_keys:
|
||||||
if key in d["unsigned"]:
|
if key in d["unsigned"]:
|
||||||
d[key] = d["unsigned"][key]
|
d[key] = d["unsigned"][key]
|
||||||
|
|
||||||
drop_keys = (
|
|
||||||
"auth_events", "prev_events", "hashes", "signatures", "depth",
|
|
||||||
"unsigned", "origin", "prev_state"
|
|
||||||
)
|
|
||||||
for key in drop_keys:
|
|
||||||
d.pop(key, None)
|
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
@@ -127,10 +127,9 @@ def format_event_for_client_v2(d):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
def format_event_for_client_v2_without_event_id(d):
|
def format_event_for_client_v2_without_room_id(d):
|
||||||
d = format_event_for_client_v2(d)
|
d = format_event_for_client_v2(d)
|
||||||
d.pop("room_id", None)
|
d.pop("room_id", None)
|
||||||
d.pop("event_id", None)
|
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
@@ -152,7 +151,8 @@ def serialize_event(e, time_now_ms, as_client_event=True,
|
|||||||
|
|
||||||
if "redacted_because" in e.unsigned:
|
if "redacted_because" in e.unsigned:
|
||||||
d["unsigned"]["redacted_because"] = serialize_event(
|
d["unsigned"]["redacted_because"] = serialize_event(
|
||||||
e.unsigned["redacted_because"], time_now_ms
|
e.unsigned["redacted_because"], time_now_ms,
|
||||||
|
event_format=event_format
|
||||||
)
|
)
|
||||||
|
|
||||||
if token_id is not None:
|
if token_id is not None:
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,15 +17,10 @@
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from .replication import ReplicationLayer
|
from .replication import ReplicationLayer
|
||||||
from .transport import TransportLayer
|
from .transport.client import TransportLayerClient
|
||||||
|
|
||||||
|
|
||||||
def initialize_http_replication(homeserver):
|
def initialize_http_replication(homeserver):
|
||||||
transport = TransportLayer(
|
transport = TransportLayerClient(homeserver)
|
||||||
homeserver,
|
|
||||||
homeserver.hostname,
|
|
||||||
server=homeserver.get_resource_for_federation(),
|
|
||||||
client=homeserver.get_http_client()
|
|
||||||
)
|
|
||||||
|
|
||||||
return ReplicationLayer(homeserver, transport)
|
return ReplicationLayer(homeserver, transport)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -31,6 +31,9 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class FederationBase(object):
|
class FederationBase(object):
|
||||||
|
def __init__(self, hs):
|
||||||
|
pass
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False,
|
def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False,
|
||||||
include_none=False):
|
include_none=False):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,12 +17,14 @@
|
|||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from .federation_base import FederationBase
|
from .federation_base import FederationBase
|
||||||
|
from synapse.api.constants import Membership
|
||||||
from .units import Edu
|
from .units import Edu
|
||||||
|
|
||||||
from synapse.api.errors import (
|
from synapse.api.errors import (
|
||||||
CodeMessageException, HttpResponseException, SynapseError,
|
CodeMessageException, HttpResponseException, SynapseError,
|
||||||
)
|
)
|
||||||
from synapse.util import unwrapFirstError
|
from synapse.util import unwrapFirstError
|
||||||
|
from synapse.util.async import concurrently_execute
|
||||||
from synapse.util.caches.expiringcache import ExpiringCache
|
from synapse.util.caches.expiringcache import ExpiringCache
|
||||||
from synapse.util.logutils import log_function
|
from synapse.util.logutils import log_function
|
||||||
from synapse.events import FrozenEvent
|
from synapse.events import FrozenEvent
|
||||||
@@ -50,13 +52,15 @@ sent_queries_counter = metrics.register_counter("sent_queries", labels=["type"])
|
|||||||
|
|
||||||
|
|
||||||
class FederationClient(FederationBase):
|
class FederationClient(FederationBase):
|
||||||
|
def __init__(self, hs):
|
||||||
|
super(FederationClient, self).__init__(hs)
|
||||||
|
|
||||||
def start_get_pdu_cache(self):
|
def start_get_pdu_cache(self):
|
||||||
self._get_pdu_cache = ExpiringCache(
|
self._get_pdu_cache = ExpiringCache(
|
||||||
cache_name="get_pdu_cache",
|
cache_name="get_pdu_cache",
|
||||||
clock=self._clock,
|
clock=self._clock,
|
||||||
max_len=1000,
|
max_len=1000,
|
||||||
expiry_ms=120*1000,
|
expiry_ms=120 * 1000,
|
||||||
reset_expiry_on_get=False,
|
reset_expiry_on_get=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -113,7 +117,7 @@ class FederationClient(FederationBase):
|
|||||||
|
|
||||||
@log_function
|
@log_function
|
||||||
def make_query(self, destination, query_type, args,
|
def make_query(self, destination, query_type, args,
|
||||||
retry_on_dns_fail=True):
|
retry_on_dns_fail=False):
|
||||||
"""Sends a federation Query to a remote homeserver of the given type
|
"""Sends a federation Query to a remote homeserver of the given type
|
||||||
and arguments.
|
and arguments.
|
||||||
|
|
||||||
@@ -356,19 +360,55 @@ class FederationClient(FederationBase):
|
|||||||
defer.returnValue(signed_auth)
|
defer.returnValue(signed_auth)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def make_join(self, destinations, room_id, user_id):
|
def make_membership_event(self, destinations, room_id, user_id, membership,
|
||||||
|
content={},):
|
||||||
|
"""
|
||||||
|
Creates an m.room.member event, with context, without participating in the room.
|
||||||
|
|
||||||
|
Does so by asking one of the already participating servers to create an
|
||||||
|
event with proper context.
|
||||||
|
|
||||||
|
Note that this does not append any events to any graphs.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
destinations (str): Candidate homeservers which are probably
|
||||||
|
participating in the room.
|
||||||
|
room_id (str): The room in which the event will happen.
|
||||||
|
user_id (str): The user whose membership is being evented.
|
||||||
|
membership (str): The "membership" property of the event. Must be
|
||||||
|
one of "join" or "leave".
|
||||||
|
content (object): Any additional data to put into the content field
|
||||||
|
of the event.
|
||||||
|
Return:
|
||||||
|
A tuple of (origin (str), event (object)) where origin is the remote
|
||||||
|
homeserver which generated the event.
|
||||||
|
"""
|
||||||
|
valid_memberships = {Membership.JOIN, Membership.LEAVE}
|
||||||
|
if membership not in valid_memberships:
|
||||||
|
raise RuntimeError(
|
||||||
|
"make_membership_event called with membership='%s', must be one of %s" %
|
||||||
|
(membership, ",".join(valid_memberships))
|
||||||
|
)
|
||||||
for destination in destinations:
|
for destination in destinations:
|
||||||
if destination == self.server_name:
|
if destination == self.server_name:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ret = yield self.transport_layer.make_join(
|
ret = yield self.transport_layer.make_membership_event(
|
||||||
destination, room_id, user_id
|
destination, room_id, user_id, membership
|
||||||
)
|
)
|
||||||
|
|
||||||
pdu_dict = ret["event"]
|
pdu_dict = ret["event"]
|
||||||
|
|
||||||
logger.debug("Got response to make_join: %s", pdu_dict)
|
logger.debug("Got response to make_%s: %s", membership, pdu_dict)
|
||||||
|
|
||||||
|
pdu_dict["content"].update(content)
|
||||||
|
|
||||||
|
# The protoevent received over the JSON wire may not have all
|
||||||
|
# the required fields. Lets just gloss over that because
|
||||||
|
# there's some we never care about
|
||||||
|
if "prev_state" not in pdu_dict:
|
||||||
|
pdu_dict["prev_state"] = []
|
||||||
|
|
||||||
defer.returnValue(
|
defer.returnValue(
|
||||||
(destination, self.event_from_pdu_json(pdu_dict))
|
(destination, self.event_from_pdu_json(pdu_dict))
|
||||||
@@ -378,9 +418,10 @@ class FederationClient(FederationBase):
|
|||||||
raise
|
raise
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
"Failed to make_join via %s: %s",
|
"Failed to make_%s via %s: %s",
|
||||||
destination, e.message
|
membership, destination, e.message
|
||||||
)
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
raise RuntimeError("Failed to send to any server.")
|
raise RuntimeError("Failed to send to any server.")
|
||||||
|
|
||||||
@@ -485,6 +526,52 @@ class FederationClient(FederationBase):
|
|||||||
|
|
||||||
defer.returnValue(pdu)
|
defer.returnValue(pdu)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def send_leave(self, destinations, pdu):
|
||||||
|
for destination in destinations:
|
||||||
|
if destination == self.server_name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
time_now = self._clock.time_msec()
|
||||||
|
_, content = yield self.transport_layer.send_leave(
|
||||||
|
destination=destination,
|
||||||
|
room_id=pdu.room_id,
|
||||||
|
event_id=pdu.event_id,
|
||||||
|
content=pdu.get_pdu_json(time_now),
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug("Got content: %s", content)
|
||||||
|
defer.returnValue(None)
|
||||||
|
except CodeMessageException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(
|
||||||
|
"Failed to send_leave via %s: %s",
|
||||||
|
destination, e.message
|
||||||
|
)
|
||||||
|
|
||||||
|
raise RuntimeError("Failed to send to any server.")
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def get_public_rooms(self, destinations):
|
||||||
|
results_by_server = {}
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def _get_result(s):
|
||||||
|
if s == self.server_name:
|
||||||
|
defer.returnValue()
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = yield self.transport_layer.get_public_rooms(s)
|
||||||
|
results_by_server[s] = result
|
||||||
|
except:
|
||||||
|
logger.exception("Error getting room list from server %r", s)
|
||||||
|
|
||||||
|
yield concurrently_execute(_get_result, destinations, 3)
|
||||||
|
|
||||||
|
defer.returnValue(results_by_server)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def query_auth(self, destination, room_id, event_id, local_auth):
|
def query_auth(self, destination, room_id, event_id, local_auth):
|
||||||
"""
|
"""
|
||||||
@@ -643,3 +730,26 @@ class FederationClient(FederationBase):
|
|||||||
event.internal_metadata.outlier = outlier
|
event.internal_metadata.outlier = outlier
|
||||||
|
|
||||||
return event
|
return event
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def forward_third_party_invite(self, destinations, room_id, event_dict):
|
||||||
|
for destination in destinations:
|
||||||
|
if destination == self.server_name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield self.transport_layer.exchange_third_party_invite(
|
||||||
|
destination=destination,
|
||||||
|
room_id=room_id,
|
||||||
|
event_dict=event_dict,
|
||||||
|
)
|
||||||
|
defer.returnValue(None)
|
||||||
|
except CodeMessageException:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception(
|
||||||
|
"Failed to send_third_party_invite via %s: %s",
|
||||||
|
destination, e.message
|
||||||
|
)
|
||||||
|
|
||||||
|
raise RuntimeError("Failed to send to any server.")
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015 OpenMarket Ltd
|
# Copyright 2015, 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -19,6 +19,7 @@ from twisted.internet import defer
|
|||||||
from .federation_base import FederationBase
|
from .federation_base import FederationBase
|
||||||
from .units import Transaction, Edu
|
from .units import Transaction, Edu
|
||||||
|
|
||||||
|
from synapse.util.async import Linearizer
|
||||||
from synapse.util.logutils import log_function
|
from synapse.util.logutils import log_function
|
||||||
from synapse.events import FrozenEvent
|
from synapse.events import FrozenEvent
|
||||||
import synapse.metrics
|
import synapse.metrics
|
||||||
@@ -44,6 +45,12 @@ received_queries_counter = metrics.register_counter("received_queries", labels=[
|
|||||||
|
|
||||||
|
|
||||||
class FederationServer(FederationBase):
|
class FederationServer(FederationBase):
|
||||||
|
def __init__(self, hs):
|
||||||
|
super(FederationServer, self).__init__(hs)
|
||||||
|
|
||||||
|
self._room_pdu_linearizer = Linearizer()
|
||||||
|
self._server_linearizer = Linearizer()
|
||||||
|
|
||||||
def set_handler(self, handler):
|
def set_handler(self, handler):
|
||||||
"""Sets the handler that the replication layer will use to communicate
|
"""Sets the handler that the replication layer will use to communicate
|
||||||
receipt of new PDUs from other home servers. The required methods are
|
receipt of new PDUs from other home servers. The required methods are
|
||||||
@@ -83,11 +90,14 @@ class FederationServer(FederationBase):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def on_backfill_request(self, origin, room_id, versions, limit):
|
def on_backfill_request(self, origin, room_id, versions, limit):
|
||||||
pdus = yield self.handler.on_backfill_request(
|
with (yield self._server_linearizer.queue((origin, room_id))):
|
||||||
origin, room_id, versions, limit
|
pdus = yield self.handler.on_backfill_request(
|
||||||
)
|
origin, room_id, versions, limit
|
||||||
|
)
|
||||||
|
|
||||||
defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
|
res = self._transaction_from_pdus(pdus).get_dict()
|
||||||
|
|
||||||
|
defer.returnValue((200, res))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
@@ -126,10 +136,8 @@ class FederationServer(FederationBase):
|
|||||||
results = []
|
results = []
|
||||||
|
|
||||||
for pdu in pdu_list:
|
for pdu in pdu_list:
|
||||||
d = self._handle_new_pdu(transaction.origin, pdu)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield d
|
yield self._handle_new_pdu(transaction.origin, pdu)
|
||||||
results.append({})
|
results.append({})
|
||||||
except FederationError as e:
|
except FederationError as e:
|
||||||
self.send_failure(e, transaction.origin)
|
self.send_failure(e, transaction.origin)
|
||||||
@@ -139,8 +147,8 @@ class FederationServer(FederationBase):
|
|||||||
logger.exception("Failed to handle PDU")
|
logger.exception("Failed to handle PDU")
|
||||||
|
|
||||||
if hasattr(transaction, "edus"):
|
if hasattr(transaction, "edus"):
|
||||||
for edu in [Edu(**x) for x in transaction.edus]:
|
for edu in (Edu(**x) for x in transaction.edus):
|
||||||
self.received_edu(
|
yield self.received_edu(
|
||||||
transaction.origin,
|
transaction.origin,
|
||||||
edu.edu_type,
|
edu.edu_type,
|
||||||
edu.content
|
edu.content
|
||||||
@@ -163,35 +171,45 @@ class FederationServer(FederationBase):
|
|||||||
)
|
)
|
||||||
defer.returnValue((200, response))
|
defer.returnValue((200, response))
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
def received_edu(self, origin, edu_type, content):
|
def received_edu(self, origin, edu_type, content):
|
||||||
received_edus_counter.inc()
|
received_edus_counter.inc()
|
||||||
|
|
||||||
if edu_type in self.edu_handlers:
|
if edu_type in self.edu_handlers:
|
||||||
self.edu_handlers[edu_type](origin, content)
|
try:
|
||||||
|
yield self.edu_handlers[edu_type](origin, content)
|
||||||
|
except SynapseError as e:
|
||||||
|
logger.info("Failed to handle edu %r: %r", edu_type, e)
|
||||||
|
except Exception as e:
|
||||||
|
logger.exception("Failed to handle edu %r", edu_type, e)
|
||||||
else:
|
else:
|
||||||
logger.warn("Received EDU of type %s with no handler", edu_type)
|
logger.warn("Received EDU of type %s with no handler", edu_type)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def on_context_state_request(self, origin, room_id, event_id):
|
def on_context_state_request(self, origin, room_id, event_id):
|
||||||
if event_id:
|
with (yield self._server_linearizer.queue((origin, room_id))):
|
||||||
pdus = yield self.handler.get_state_for_pdu(
|
if event_id:
|
||||||
origin, room_id, event_id,
|
pdus = yield self.handler.get_state_for_pdu(
|
||||||
)
|
origin, room_id, event_id,
|
||||||
auth_chain = yield self.store.get_auth_chain(
|
|
||||||
[pdu.event_id for pdu in pdus]
|
|
||||||
)
|
|
||||||
|
|
||||||
for event in auth_chain:
|
|
||||||
event.signatures.update(
|
|
||||||
compute_event_signature(
|
|
||||||
event,
|
|
||||||
self.hs.hostname,
|
|
||||||
self.hs.config.signing_key[0]
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
else:
|
auth_chain = yield self.store.get_auth_chain(
|
||||||
raise NotImplementedError("Specify an event")
|
[pdu.event_id for pdu in pdus]
|
||||||
|
)
|
||||||
|
|
||||||
|
for event in auth_chain:
|
||||||
|
# We sign these again because there was a bug where we
|
||||||
|
# incorrectly signed things the first time round
|
||||||
|
if self.hs.is_mine_id(event.event_id):
|
||||||
|
event.signatures.update(
|
||||||
|
compute_event_signature(
|
||||||
|
event,
|
||||||
|
self.hs.hostname,
|
||||||
|
self.hs.config.signing_key[0]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise NotImplementedError("Specify an event")
|
||||||
|
|
||||||
defer.returnValue((200, {
|
defer.returnValue((200, {
|
||||||
"pdus": [pdu.get_pdu_json() for pdu in pdus],
|
"pdus": [pdu.get_pdu_json() for pdu in pdus],
|
||||||
@@ -255,15 +273,31 @@ class FederationServer(FederationBase):
|
|||||||
}))
|
}))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_event_auth(self, origin, room_id, event_id):
|
def on_make_leave_request(self, room_id, user_id):
|
||||||
|
pdu = yield self.handler.on_make_leave_request(room_id, user_id)
|
||||||
time_now = self._clock.time_msec()
|
time_now = self._clock.time_msec()
|
||||||
auth_pdus = yield self.handler.on_event_auth(event_id)
|
defer.returnValue({"event": pdu.get_pdu_json(time_now)})
|
||||||
defer.returnValue((200, {
|
|
||||||
"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
|
|
||||||
}))
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_query_auth_request(self, origin, content, event_id):
|
def on_send_leave_request(self, origin, content):
|
||||||
|
logger.debug("on_send_leave_request: content: %s", content)
|
||||||
|
pdu = self.event_from_pdu_json(content)
|
||||||
|
logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
|
||||||
|
yield self.handler.on_send_leave_request(origin, pdu)
|
||||||
|
defer.returnValue((200, {}))
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def on_event_auth(self, origin, room_id, event_id):
|
||||||
|
with (yield self._server_linearizer.queue((origin, room_id))):
|
||||||
|
time_now = self._clock.time_msec()
|
||||||
|
auth_pdus = yield self.handler.on_event_auth(event_id)
|
||||||
|
res = {
|
||||||
|
"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
|
||||||
|
}
|
||||||
|
defer.returnValue((200, res))
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def on_query_auth_request(self, origin, content, room_id, event_id):
|
||||||
"""
|
"""
|
||||||
Content is a dict with keys::
|
Content is a dict with keys::
|
||||||
auth_chain (list): A list of events that give the auth chain.
|
auth_chain (list): A list of events that give the auth chain.
|
||||||
@@ -282,32 +316,33 @@ class FederationServer(FederationBase):
|
|||||||
Returns:
|
Returns:
|
||||||
Deferred: Results in `dict` with the same format as `content`
|
Deferred: Results in `dict` with the same format as `content`
|
||||||
"""
|
"""
|
||||||
auth_chain = [
|
with (yield self._server_linearizer.queue((origin, room_id))):
|
||||||
self.event_from_pdu_json(e)
|
auth_chain = [
|
||||||
for e in content["auth_chain"]
|
self.event_from_pdu_json(e)
|
||||||
]
|
for e in content["auth_chain"]
|
||||||
|
]
|
||||||
|
|
||||||
signed_auth = yield self._check_sigs_and_hash_and_fetch(
|
signed_auth = yield self._check_sigs_and_hash_and_fetch(
|
||||||
origin, auth_chain, outlier=True
|
origin, auth_chain, outlier=True
|
||||||
)
|
)
|
||||||
|
|
||||||
ret = yield self.handler.on_query_auth(
|
ret = yield self.handler.on_query_auth(
|
||||||
origin,
|
origin,
|
||||||
event_id,
|
event_id,
|
||||||
signed_auth,
|
signed_auth,
|
||||||
content.get("rejects", []),
|
content.get("rejects", []),
|
||||||
content.get("missing", []),
|
content.get("missing", []),
|
||||||
)
|
)
|
||||||
|
|
||||||
time_now = self._clock.time_msec()
|
time_now = self._clock.time_msec()
|
||||||
send_content = {
|
send_content = {
|
||||||
"auth_chain": [
|
"auth_chain": [
|
||||||
e.get_pdu_json(time_now)
|
e.get_pdu_json(time_now)
|
||||||
for e in ret["auth_chain"]
|
for e in ret["auth_chain"]
|
||||||
],
|
],
|
||||||
"rejects": ret.get("rejects", []),
|
"rejects": ret.get("rejects", []),
|
||||||
"missing": ret.get("missing", []),
|
"missing": ret.get("missing", []),
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue(
|
defer.returnValue(
|
||||||
(200, send_content)
|
(200, send_content)
|
||||||
@@ -359,16 +394,34 @@ class FederationServer(FederationBase):
|
|||||||
@log_function
|
@log_function
|
||||||
def on_get_missing_events(self, origin, room_id, earliest_events,
|
def on_get_missing_events(self, origin, room_id, earliest_events,
|
||||||
latest_events, limit, min_depth):
|
latest_events, limit, min_depth):
|
||||||
missing_events = yield self.handler.on_get_missing_events(
|
with (yield self._server_linearizer.queue((origin, room_id))):
|
||||||
origin, room_id, earliest_events, latest_events, limit, min_depth
|
logger.info(
|
||||||
)
|
"on_get_missing_events: earliest_events: %r, latest_events: %r,"
|
||||||
|
" limit: %d, min_depth: %d",
|
||||||
|
earliest_events, latest_events, limit, min_depth
|
||||||
|
)
|
||||||
|
missing_events = yield self.handler.on_get_missing_events(
|
||||||
|
origin, room_id, earliest_events, latest_events, limit, min_depth
|
||||||
|
)
|
||||||
|
|
||||||
time_now = self._clock.time_msec()
|
if len(missing_events) < 5:
|
||||||
|
logger.info(
|
||||||
|
"Returning %d events: %r", len(missing_events), missing_events
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info("Returning %d events", len(missing_events))
|
||||||
|
|
||||||
|
time_now = self._clock.time_msec()
|
||||||
|
|
||||||
defer.returnValue({
|
defer.returnValue({
|
||||||
"events": [ev.get_pdu_json(time_now) for ev in missing_events],
|
"events": [ev.get_pdu_json(time_now) for ev in missing_events],
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@log_function
|
||||||
|
def on_openid_userinfo(self, token):
|
||||||
|
ts_now_ms = self._clock.time_msec()
|
||||||
|
return self.store.get_user_id_for_open_id_token(token, ts_now_ms)
|
||||||
|
|
||||||
@log_function
|
@log_function
|
||||||
def _get_persisted_pdu(self, origin, event_id, do_auth=True):
|
def _get_persisted_pdu(self, origin, event_id, do_auth=True):
|
||||||
""" Get a PDU from the database with given origin and id.
|
""" Get a PDU from the database with given origin and id.
|
||||||
@@ -458,42 +511,59 @@ class FederationServer(FederationBase):
|
|||||||
pdu.internal_metadata.outlier = True
|
pdu.internal_metadata.outlier = True
|
||||||
elif min_depth and pdu.depth > min_depth:
|
elif min_depth and pdu.depth > min_depth:
|
||||||
if get_missing and prevs - seen:
|
if get_missing and prevs - seen:
|
||||||
latest = yield self.store.get_latest_event_ids_in_room(
|
# If we're missing stuff, ensure we only fetch stuff one
|
||||||
pdu.room_id
|
# at a time.
|
||||||
)
|
with (yield self._room_pdu_linearizer.queue(pdu.room_id)):
|
||||||
|
# We recalculate seen, since it may have changed.
|
||||||
|
have_seen = yield self.store.have_events(prevs)
|
||||||
|
seen = set(have_seen.keys())
|
||||||
|
|
||||||
# We add the prev events that we have seen to the latest
|
if prevs - seen:
|
||||||
# list to ensure the remote server doesn't give them to us
|
latest = yield self.store.get_latest_event_ids_in_room(
|
||||||
latest = set(latest)
|
pdu.room_id
|
||||||
latest |= seen
|
)
|
||||||
|
|
||||||
missing_events = yield self.get_missing_events(
|
# We add the prev events that we have seen to the latest
|
||||||
origin,
|
# list to ensure the remote server doesn't give them to us
|
||||||
pdu.room_id,
|
latest = set(latest)
|
||||||
earliest_events_ids=list(latest),
|
latest |= seen
|
||||||
latest_events=[pdu],
|
|
||||||
limit=10,
|
|
||||||
min_depth=min_depth,
|
|
||||||
)
|
|
||||||
|
|
||||||
# We want to sort these by depth so we process them and
|
logger.info(
|
||||||
# tell clients about them in order.
|
"Missing %d events for room %r: %r...",
|
||||||
missing_events.sort(key=lambda x: x.depth)
|
len(prevs - seen), pdu.room_id, list(prevs - seen)[:5]
|
||||||
|
)
|
||||||
|
|
||||||
for e in missing_events:
|
missing_events = yield self.get_missing_events(
|
||||||
yield self._handle_new_pdu(
|
origin,
|
||||||
origin,
|
pdu.room_id,
|
||||||
e,
|
earliest_events_ids=list(latest),
|
||||||
get_missing=False
|
latest_events=[pdu],
|
||||||
)
|
limit=10,
|
||||||
|
min_depth=min_depth,
|
||||||
|
)
|
||||||
|
|
||||||
have_seen = yield self.store.have_events(
|
# We want to sort these by depth so we process them and
|
||||||
[ev for ev, _ in pdu.prev_events]
|
# tell clients about them in order.
|
||||||
)
|
missing_events.sort(key=lambda x: x.depth)
|
||||||
|
|
||||||
|
for e in missing_events:
|
||||||
|
yield self._handle_new_pdu(
|
||||||
|
origin,
|
||||||
|
e,
|
||||||
|
get_missing=False
|
||||||
|
)
|
||||||
|
|
||||||
|
have_seen = yield self.store.have_events(
|
||||||
|
[ev for ev, _ in pdu.prev_events]
|
||||||
|
)
|
||||||
|
|
||||||
prevs = {e_id for e_id, _ in pdu.prev_events}
|
prevs = {e_id for e_id, _ in pdu.prev_events}
|
||||||
seen = set(have_seen.keys())
|
seen = set(have_seen.keys())
|
||||||
if prevs - seen:
|
if prevs - seen:
|
||||||
|
logger.info(
|
||||||
|
"Still missing %d events for room %r: %r...",
|
||||||
|
len(prevs - seen), pdu.room_id, list(prevs - seen)[:5]
|
||||||
|
)
|
||||||
fetch_state = True
|
fetch_state = True
|
||||||
|
|
||||||
if fetch_state:
|
if fetch_state:
|
||||||
@@ -513,7 +583,6 @@ class FederationServer(FederationBase):
|
|||||||
yield self.handler.on_receive_pdu(
|
yield self.handler.on_receive_pdu(
|
||||||
origin,
|
origin,
|
||||||
pdu,
|
pdu,
|
||||||
backfilled=False,
|
|
||||||
state=state,
|
state=state,
|
||||||
auth_chain=auth_chain,
|
auth_chain=auth_chain,
|
||||||
)
|
)
|
||||||
@@ -529,3 +598,26 @@ class FederationServer(FederationBase):
|
|||||||
event.internal_metadata.outlier = outlier
|
event.internal_metadata.outlier = outlier
|
||||||
|
|
||||||
return event
|
return event
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def exchange_third_party_invite(
|
||||||
|
self,
|
||||||
|
sender_user_id,
|
||||||
|
target_user_id,
|
||||||
|
room_id,
|
||||||
|
signed,
|
||||||
|
):
|
||||||
|
ret = yield self.handler.exchange_third_party_invite(
|
||||||
|
sender_user_id,
|
||||||
|
target_user_id,
|
||||||
|
room_id,
|
||||||
|
signed,
|
||||||
|
)
|
||||||
|
defer.returnValue(ret)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
|
||||||
|
ret = yield self.handler.on_exchange_third_party_invite_request(
|
||||||
|
origin, room_id, event_dict
|
||||||
|
)
|
||||||
|
defer.returnValue(ret)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014, 2015 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user