Compare commits
1340 Commits
| Author | SHA1 | Date |
|---|---|---|

(1,340 commit rows, running from d0ebe82871 at the top of the list down to 9b334b3f97 at its base; only the abbreviated SHA1 column is preserved — the author, date, and commit-message cells were empty.)
.circleci/config.yml (new file, 172 lines)
@@ -0,0 +1,172 @@
version: 2
jobs:
  dockerhubuploadrelease:
    machine: true
    steps:
      - checkout
      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG} .
      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 --build-arg PYTHON_VERSION=3.6 .
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
      - run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
  dockerhubuploadlatest:
    machine: true
    steps:
      - checkout
      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1} .
      - run: docker build -f docker/Dockerfile -t matrixdotorg/synapse:${CIRCLE_SHA1}-py3 --build-arg PYTHON_VERSION=3.6 .
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1} matrixdotorg/synapse:latest
      - run: docker tag matrixdotorg/synapse:${CIRCLE_SHA1}-py3 matrixdotorg/synapse:latest-py3
      - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}
      - run: docker push matrixdotorg/synapse:${CIRCLE_SHA1}-py3
      - run: docker push matrixdotorg/synapse:latest
      - run: docker push matrixdotorg/synapse:latest-py3
  sytestpy2:
    docker:
      - image: matrixdotorg/sytest-synapsepy2
    working_directory: /src
    steps:
      - checkout
      - run: /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy2postgres:
    docker:
      - image: matrixdotorg/sytest-synapsepy2
    working_directory: /src
    steps:
      - checkout
      - run: POSTGRES=1 /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy2merged:
    docker:
      - image: matrixdotorg/sytest-synapsepy2
    working_directory: /src
    steps:
      - checkout
      - run: bash .circleci/merge_base_branch.sh
      - run: /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy2postgresmerged:
    docker:
      - image: matrixdotorg/sytest-synapsepy2
    working_directory: /src
    steps:
      - checkout
      - run: bash .circleci/merge_base_branch.sh
      - run: POSTGRES=1 /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs

  sytestpy3:
    docker:
      - image: matrixdotorg/sytest-synapsepy3
    working_directory: /src
    steps:
      - checkout
      - run: /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy3postgres:
    docker:
      - image: matrixdotorg/sytest-synapsepy3
    working_directory: /src
    steps:
      - checkout
      - run: POSTGRES=1 /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy3merged:
    docker:
      - image: matrixdotorg/sytest-synapsepy3
    working_directory: /src
    steps:
      - checkout
      - run: bash .circleci/merge_base_branch.sh
      - run: /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs
  sytestpy3postgresmerged:
    docker:
      - image: matrixdotorg/sytest-synapsepy3
    working_directory: /src
    steps:
      - checkout
      - run: bash .circleci/merge_base_branch.sh
      - run: POSTGRES=1 /synapse_sytest.sh
      - store_artifacts:
          path: /logs
          destination: logs
      - store_test_results:
          path: /logs

workflows:
  version: 2
  build:
    jobs:
      - sytestpy2:
          filters:
            branches:
              only: /develop|master|release-.*/
      - sytestpy2postgres:
          filters:
            branches:
              only: /develop|master|release-.*/
      - sytestpy3:
          filters:
            branches:
              only: /develop|master|release-.*/
      - sytestpy3postgres:
          filters:
            branches:
              only: /develop|master|release-.*/
      - sytestpy2merged:
          filters:
            branches:
              ignore: /develop|master|release-.*/
      - sytestpy2postgresmerged:
          filters:
            branches:
              ignore: /develop|master|release-.*/
      - sytestpy3merged:
          filters:
            branches:
              ignore: /develop|master|release-.*/
      - sytestpy3postgresmerged:
          filters:
            branches:
              ignore: /develop|master|release-.*/
      - dockerhubuploadrelease:
          filters:
            tags:
              only: /v[0-9].[0-9]+.[0-9]+.*/
            branches:
              ignore: /.*/
      - dockerhubuploadlatest:
          filters:
            branches:
              only: master
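A config of this shape can be sanity-checked before pushing with the CircleCI command-line tool, assuming it is installed locally; for example::

    # Validate the syntax and schema of the CI config without triggering a build.
    circleci config validate .circleci/config.yml

This catches indentation and schema mistakes that would otherwise only surface on the next push.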
.circleci/merge_base_branch.sh (new executable file, 34 lines)
@@ -0,0 +1,34 @@
#!/usr/bin/env bash

set -e

# CircleCI doesn't give CIRCLE_PR_NUMBER in the environment for non-forked PRs. Wonderful.
# In this case, we just need to do some ~shell magic~ to strip it out of the PULL_REQUEST URL.
echo 'export CIRCLE_PR_NUMBER="${CIRCLE_PR_NUMBER:-${CIRCLE_PULL_REQUEST##*/}}"' >> $BASH_ENV
source $BASH_ENV

if [[ -z "${CIRCLE_PR_NUMBER}" ]]
then
    echo "Can't figure out what the PR number is! Assuming merge target is develop."

    # It probably hasn't had a PR opened yet. Since all PRs land on develop, we
    # can probably assume it's based on it and will be merged into it.
    GITBASE="develop"
else
    # Get the reference, using the GitHub API
    GITBASE=`wget -O- https://api.github.com/repos/matrix-org/synapse/pulls/${CIRCLE_PR_NUMBER} | jq -r '.base.ref'`
fi

# Show what we are before
git show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit origin/$GITBASE

# Show what we are after.
git show -s
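The "shell magic" referred to in the script above is ordinary Bash parameter expansion: ``${VAR##*/}`` deletes the longest prefix matching ``*/``, leaving only the final segment of the pull-request URL. A minimal sketch, using a hypothetical PR number::

    # Illustrative value of the kind CircleCI exports for a forked-PR build:
    CIRCLE_PULL_REQUEST="https://github.com/matrix-org/synapse/pull/1234"
    # Strip everything up to and including the final "/", recovering the PR number:
    echo "${CIRCLE_PULL_REQUEST##*/}"   # prints: 1234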
.dockerignore
@@ -3,3 +3,5 @@ Dockerfile
 .gitignore
 demo/etc
 tox.ini
+.git/*
+.tox/*
.github/ISSUE_TEMPLATE.md (5 changed lines)
@@ -27,8 +27,9 @@ Describe here the problem that you are experiencing, or the feature you are requ
 
 Describe how what happens differs from what you expected.
 
-If you can identify any relevant log snippets from _homeserver.log_, please include
-those here (please be careful to remove any personal or private data):
+<!-- If you can identify any relevant log snippets from _homeserver.log_, please include
+those (please be careful to remove any personal or private data). Please surround them with
+``` (three backticks, on a line on their own), so that they are formatted legibly. -->
 
 ### Version information
 
.gitignore (3 changed lines)
@@ -1,9 +1,11 @@
 *.pyc
 .*.swp
 *~
+*.lock
 
 .DS_Store
 _trial_temp/
+_trial_temp*/
 logs/
 dbs/
 *.egg
@@ -44,6 +46,7 @@ media_store/
 build/
 venv/
 venv*/
+*venv/
 
 localhost-800*/
 static/client/register/register_config.js
 
.travis.yml (51 changed lines)
@@ -1,12 +1,27 @@
 sudo: false
 language: python
 
 # tell travis to cache ~/.cache/pip
-cache: pip
+cache:
+  directories:
+    # we only bother to cache the wheels; parts of the http cache get
+    # invalidated every build (because they get served with a max-age of 600
+    # seconds), which means that we end up re-uploading the whole cache for
+    # every build, which is time-consuming In any case, it's not obvious that
+    # downloading the cache from S3 would be much faster than downloading the
+    # originals from pypi.
+    #
+    - $HOME/.cache/pip/wheels
 
-before_script:
-  - git remote set-branches --add origin develop
-  - git fetch origin develop
+# don't clone the whole repo history, one commit will do
+git:
+  depth: 1
+
+# only build branches we care about (PRs are built seperately)
+branches:
+  only:
+    - master
+    - develop
+    - /^release-v/
 
 matrix:
   fast_finish: true
@@ -14,17 +29,39 @@ matrix:
   - python: 2.7
     env: TOX_ENV=packaging
 
-  - python: 2.7
-    env: TOX_ENV=pep8
+  - python: 3.6
+    env: TOX_ENV="pep8,check_isort"
 
   - python: 2.7
     env: TOX_ENV=py27
 
+  - python: 2.7
+    env: TOX_ENV=py27-old
+
   - python: 2.7
     env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4"
     services:
      - postgresql
 
+  - python: 3.5
+    env: TOX_ENV=py35
+
+  - python: 3.6
+    env: TOX_ENV=py36
+
+  - python: 3.6
+    env: TOX_ENV=py36-postgres TRIAL_FLAGS="-j 4"
+    services:
+      - postgresql
+
+  - # we only need to check for the newsfragment if it's a PR build
+    if: type = pull_request
+    python: 3.6
+    env: TOX_ENV=check-newsfragment
+    script:
+      - git remote set-branches --add origin develop
+      - git fetch origin develop
+      - tox -e $TOX_ENV
 
 install:
   - pip install tox
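Any single cell of the matrix above can be reproduced on a developer machine; a sketch for the ``py36`` cell, assuming tox is available and mirroring the ``install:`` and ``script:`` phases::

    pip install tox
    # The script phase first makes the develop branch visible (needed by the newsfragment check):
    git remote set-branches --add origin develop
    git fetch origin develop
    tox -e py36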
AUTHORS.rst
@@ -62,4 +62,7 @@ Christoph Witzany <christoph at web.crofting.com>
 * Add LDAP support for authentication
 
 Pierre Jaury <pierre at jaury.eu>
-* Docker packaging
+* Docker packaging
+
+Serban Constantin <serban.constantin at gmail dot com>
+* Small bug fix
CHANGES.md (new file, 2980 lines)
    File diff suppressed because it is too large.

CHANGES.rst (2821 changed lines)
    File diff suppressed because it is too large.
CONTRIBUTING.rst
@@ -30,12 +30,28 @@ use github's pull request workflow to review the contribution, and either ask
 you to make any refinements needed or merge it and make them ourselves. The
 changes will then land on master when we next do a release.
 
-We use `Jenkins <http://matrix.org/jenkins>`_ and
-`Travis <https://travis-ci.org/matrix-org/synapse>`_ for continuous
-integration. All pull requests to synapse get automatically tested by Travis;
-the Jenkins builds require an adminstrator to start them. If your change
-breaks the build, this will be shown in github, so please keep an eye on the
-pull request for feedback.
+We use `CircleCI <https://circleci.com/gh/matrix-org>`_ and `Travis CI
+<https://travis-ci.org/matrix-org/synapse>`_ for continuous integration. All
+pull requests to synapse get automatically tested by Travis and CircleCI.
+If your change breaks the build, this will be shown in GitHub, so please
+keep an eye on the pull request for feedback.
+
+To run unit tests in a local development environment, you can use:
+
+- ``tox -e py27`` (requires tox to be installed by ``pip install tox``) for
+  SQLite-backed Synapse on Python 2.7.
+- ``tox -e py35`` for SQLite-backed Synapse on Python 3.5.
+- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
+- ``tox -e py27-postgres`` for PostgreSQL-backed Synapse on Python 2.7
+  (requires a running local PostgreSQL with access to create databases).
+- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 2.7
+  (requires Docker). Entirely self-contained, recommended if you don't want to
+  set up PostgreSQL yourself.
+
+Docker images are available for running the integration tests (SyTest) locally,
+see the `documentation in the SyTest repo
+<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
+information.
 
 Code style
 ~~~~~~~~~~
@@ -51,22 +67,23 @@ makes it horribly hard to review otherwise.
 Changelog
 ~~~~~~~~~
 
-All changes, even minor ones, need a corresponding changelog
+All changes, even minor ones, need a corresponding changelog / newsfragment
 entry. These are managed by Towncrier
 (https://github.com/hawkowl/towncrier).
 
 To create a changelog entry, make a new file in the ``changelog.d``
-file named in the format of ``issuenumberOrPR.type``. The type can be
+file named in the format of ``PRnumber.type``. The type can be
 one of ``feature``, ``bugfix``, ``removal`` (also used for
 deprecations), or ``misc`` (for internal-only changes). The content of
-the file is your changelog entry, which can contain RestructuredText
-formatting. A note of contributors is welcomed in changelogs for
-non-misc changes (the content of misc changes is not displayed).
+the file is your changelog entry, which can contain Markdown
+formatting. Adding credits to the changelog is encouraged, we value
+your contributions and would like to have you shouted out in the
+release notes!
 
-For example, a fix for a bug reported in #1234 would have its
-changelog entry in ``changelog.d/1234.bugfix``, and contain content
-like "The security levels of Florbs are now validated when
-recieved over federation. Contributed by Jane Matrix".
+For example, a fix in PR #1234 would have its changelog entry in
+``changelog.d/1234.bugfix``, and contain content like "The security levels of
+Florbs are now validated when recieved over federation. Contributed by Jane
+Matrix".
 
 Attribution
 ~~~~~~~~~~~
@@ -76,7 +93,8 @@ AUTHORS.rst file for the project in question. Please feel free to include a
 change to AUTHORS.rst in your pull request to list yourself and a short
 description of the area(s) you've worked on. Also, we sometimes have swag to
 give away to contributors - if you feel that Matrix-branded apparel is missing
-from your life, please mail us your shipping address to matrix at matrix.org and we'll try to fix it :)
+from your life, please mail us your shipping address to matrix at matrix.org and
+we'll try to fix it :)
 
 Sign off
 ~~~~~~~~
@@ -125,7 +143,7 @@ the contribution or otherwise have the right to contribute it to Matrix::
     personal information I submit with it, including my sign-off) is
     maintained indefinitely and may be redistributed consistent with
     this project or the open source license(s) involved.
 
 If you agree to this for your contribution, then all that's needed is to
 include the line in your commit or pull request comment::
 
@@ -143,4 +161,9 @@ flag to ``git commit``, which uses the name and email set in your
 Conclusion
 ~~~~~~~~~~
 
-That's it! Matrix is a very open and collaborative project as you might expect given our obsession with open communication. If we're going to successfully matrix together all the fragmented communication technologies out there we are reliant on contributions and collaboration from the community to do so. So please get involved - and we hope you have as much fun hacking on Matrix as we do!
+That's it! Matrix is a very open and collaborative project as you might expect
+given our obsession with open communication. If we're going to successfully
+matrix together all the fragmented communication technologies out there we are
+reliant on contributions and collaboration from the community to do so. So
+please get involved - and we hope you have as much fun hacking on Matrix as we
+do!
Dockerfile (deleted, 19 lines)
@@ -1,19 +0,0 @@
FROM docker.io/python:2-alpine3.7

RUN apk add --no-cache --virtual .nacl_deps su-exec build-base libffi-dev zlib-dev libressl-dev libjpeg-turbo-dev linux-headers postgresql-dev libxslt-dev

COPY . /synapse

# A wheel cache may be provided in ./cache for faster build
RUN cd /synapse \
 && pip install --upgrade pip setuptools psycopg2 lxml \
 && mkdir -p /synapse/cache \
 && pip install -f /synapse/cache --upgrade --process-dependency-links . \
 && mv /synapse/contrib/docker/start.py /synapse/contrib/docker/conf / \
 && rm -rf setup.py setup.cfg synapse

VOLUME ["/data"]

EXPOSE 8008/tcp 8448/tcp

ENTRYPOINT ["/start.py"]
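For reference, an image built from this removed Dockerfile would have been run roughly as follows; the flags are inferred from its ``VOLUME``, ``EXPOSE`` and ``ENTRYPOINT`` directives, and the tag and host path are illustrative::

    docker build -t synapse-legacy .
    docker run -v /path/to/data:/data -p 8008:8008 -p 8448:8448 synapse-legacy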
MANIFEST.in (13 changed lines)
@@ -2,6 +2,7 @@ include synctl
 include LICENSE
 include VERSION
 include *.rst
+include *.md
 include demo/README
 include demo/demo.tls.dh
 include demo/*.py
@@ -11,26 +12,28 @@ recursive-include synapse/storage/schema *.sql
 recursive-include synapse/storage/schema *.py
 
 recursive-include docs *
 recursive-include res *
 recursive-include scripts *
 recursive-include scripts-dev *
 recursive-include synapse *.pyi
 recursive-include tests *.py
 
 recursive-include synapse/res *
 recursive-include synapse/static *.css
 recursive-include synapse/static *.gif
 recursive-include synapse/static *.html
 recursive-include synapse/static *.js
 
 exclude jenkins.sh
 exclude jenkins*.sh
 exclude jenkins*
 exclude Dockerfile
 exclude .dockerignore
 recursive-exclude jenkins *.sh
 exclude test_postgresql.sh
 
 include pyproject.toml
 recursive-include changelog.d *
 
 prune .github
 prune demo/etc
 prune docker
 prune .circleci
 
 exclude jenkins*
 recursive-exclude jenkins *.sh
 
MAP.rst (deleted, 35 lines)
@@ -1,35 +0,0 @@
Directory Structure
===================

Warning: this may be a bit stale...

::

    .
    ├── cmdclient           Basic CLI python Matrix client
    ├── demo                Scripts for running standalone Matrix demos
    ├── docs                All doc, including the draft Matrix API spec
    │   ├── client-server   The client-server Matrix API spec
    │   ├── model           Domain-specific elements of the Matrix API spec
    │   ├── server-server   The server-server model of the Matrix API spec
    │   └── sphinx          The internal API doc of the Synapse homeserver
    ├── experiments         Early experiments of using Synapse's internal APIs
    ├── graph               Visualisation of Matrix's distributed message store
    ├── synapse             The reference Matrix homeserver implementation
    │   ├── api             Common building blocks for the APIs
    │   │   ├── events      Definition of state representation Events
    │   │   └── streams     Definition of streamable Event objects
    │   ├── app             The __main__ entry point for the homeserver
    │   ├── crypto          The PKI client/server used for secure federation
    │   │   └── resource    PKI helper objects (e.g. keys)
    │   ├── federation      Server-server state replication logic
    │   ├── handlers        The main business logic of the homeserver
    │   ├── http            Wrappers around Twisted's HTTP server & client
    │   ├── rest            Servlet-style RESTful API
    │   ├── storage         Persistence subsystem (currently only sqlite3)
    │   │   └── schema      sqlite persistence schema
    │   └── util            Synapse-specific utilities
    ├── tests               Unit tests for the Synapse homeserver
    └── webclient           Basic AngularJS Matrix web client
184
README.rst
184
README.rst
@@ -71,7 +71,7 @@ We'd like to invite you to join #matrix:matrix.org (via
|
||||
https://matrix.org/docs/projects/try-matrix-now.html), run a homeserver, take a look
|
||||
at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
|
||||
`APIs <https://matrix.org/docs/api>`_ and `Client SDKs
|
||||
<http://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.
|
||||
<https://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.
|
||||
|
||||
Thanks for using Matrix!
|
||||
|
||||
@@ -81,7 +81,7 @@ Thanks for using Matrix!
|
||||
Synapse Installation
|
||||
====================
|
||||
|
||||
Synapse is the reference python/twisted Matrix homeserver implementation.
|
||||
Synapse is the reference Python/Twisted Matrix homeserver implementation.
|
||||
|
||||
System requirements:
|
||||
|
||||
@@ -91,12 +91,13 @@ System requirements:
|
||||
|
||||
Installing from source
|
||||
----------------------
|
||||
|
||||
(Prebuilt packages are available for some platforms - see `Platform-Specific
|
||||
Instructions`_.)
|
||||
|
||||
Synapse is written in python but some of the libraries it uses are written in
|
||||
C. So before we can install synapse itself we need a working C compiler and the
|
||||
header files for python C extensions.
|
||||
Synapse is written in Python but some of the libraries it uses are written in
|
||||
C. So before we can install Synapse itself we need a working C compiler and the
|
||||
header files for Python C extensions.
|
||||
|
||||
Installing prerequisites on Ubuntu or Debian::
|
||||
|
||||
@@ -143,29 +144,43 @@ Installing prerequisites on OpenBSD::
|
||||
doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
|
||||
libxslt
|
||||
|
||||
To install the synapse homeserver run::
|
||||
To install the Synapse homeserver run::
|
||||
|
||||
virtualenv -p python2.7 ~/.synapse
|
||||
source ~/.synapse/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade setuptools
|
||||
pip install https://github.com/matrix-org/synapse/tarball/master
|
||||
pip install matrix-synapse
|
||||
|
||||
This installs synapse, along with the libraries it uses, into a virtual
|
||||
This installs Synapse, along with the libraries it uses, into a virtual
|
||||
environment under ``~/.synapse``. Feel free to pick a different directory
|
||||
if you prefer.
|
||||
|
||||
This Synapse installation can then be later upgraded by using pip again with the
|
||||
update flag::
|
||||
|
||||
source ~/.synapse/bin/activate
|
||||
pip install -U matrix-synapse
|
||||
|
||||
In case of problems, please see the _`Troubleshooting` section below.
|
||||
|
||||
There is an offical synapse image available at https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with the docker-compose file available at `contrib/docker`. Further information on this including configuration options is available in `contrib/docker/README.md`.
|
||||
There is an offical synapse image available at
|
||||
https://hub.docker.com/r/matrixdotorg/synapse/tags/ which can be used with
|
||||
the docker-compose file available at `contrib/docker <contrib/docker>`_. Further information on
|
||||
this including configuration options is available in the README on
|
||||
hub.docker.com.
|
||||
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a Dockerfile to automate a synapse server in a single Docker image, at https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||
Dockerfile to automate a synapse server in a single Docker image, at
|
||||
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||
|
||||
Also, Martin Giess has created an auto-deployment process with vagrant/ansible,
|
||||
tested with VirtualBox/AWS/DigitalOcean - see https://github.com/EMnify/matrix-synapse-auto-deploy
|
||||
for details.
|
||||
Slavi Pantaleev has created an Ansible playbook,
|
||||
which installs the offical Docker image of Matrix Synapse
|
||||
along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
|
||||
For more details, see
|
||||
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||
|
||||
Configuring synapse
|
||||
Configuring Synapse
|
||||
-------------------
|
||||
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
@@ -247,26 +262,6 @@ Setting up a TURN server
|
||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||
a TURN server. See `<docs/turn-howto.rst>`_ for details.
|
||||
|
||||
IPv6
|
||||
----
|
||||
|
||||
As of Synapse 0.19 we finally support IPv6, many thanks to @kyrias and @glyph
|
||||
for providing PR #1696.
|
||||
|
||||
However, for federation to work on hosts with IPv6 DNS servers you **must**
|
||||
be running Twisted 17.1.0 or later - see https://github.com/matrix-org/synapse/issues/1002
|
||||
for details. We can't make Synapse depend on Twisted 17.1 by default
|
||||
yet as it will break most older distributions (see https://github.com/matrix-org/synapse/pull/1909)
|
||||
so if you are using operating system dependencies you'll have to install your
|
||||
own Twisted 17.1 package via pip or backports etc.
|
||||
|
||||
If you're running in a virtualenv then pip should have installed the newest
|
||||
Twisted automatically, but if your virtualenv is old you will need to manually
|
||||
upgrade to a newer Twisted dependency via:
|
||||
|
||||
pip install Twisted>=17.1.0
|
||||
|
||||
|
||||
Running Synapse
|
||||
===============
|
||||
|
||||
@@ -283,7 +278,7 @@ Connecting to Synapse from a client
|
||||
|
||||
The easiest way to try out your new Synapse installation is by connecting to it
|
||||
from a web client. The easiest option is probably the one at
|
||||
http://riot.im/app. You will need to specify a "Custom server" when you log on
|
||||
https://riot.im/app. You will need to specify a "Custom server" when you log on
|
||||
or register: set this to ``https://domain.tld`` if you setup a reverse proxy
|
||||
following the recommended setup, or ``https://localhost:8448`` - remember to specify the
|
||||
port (``:8448``) if not ``:443`` unless you changed the configuration. (Leave the identity
|
||||
@@ -329,7 +324,7 @@ Security Note
|
||||
=============
|
||||
|
||||
Matrix serves raw user generated data in some APIs - specifically the `content
|
||||
repository endpoints <http://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid>`_.
|
||||
repository endpoints <https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid>`_.
|
||||
|
||||
Whilst we have tried to mitigate against possible XSS attacks (e.g.
|
||||
https://github.com/matrix-org/synapse/pull/1021) we recommend running
|
||||
@@ -348,7 +343,7 @@ Platform-Specific Instructions
|
||||
Debian
|
||||
------
|
||||
|
||||
Matrix provides official Debian packages via apt from http://matrix.org/packages/debian/.
|
||||
Matrix provides official Debian packages via apt from https://matrix.org/packages/debian/.
|
||||
Note that these packages do not include a client - choose one from
|
||||
https://matrix.org/docs/projects/try-matrix-now.html (or build your own with one of our SDKs :)
|
||||
|
||||
@@ -362,6 +357,19 @@ Synapse is in the Fedora repositories as ``matrix-synapse``::
|
||||
Oleg Girko provides Fedora RPMs at
|
||||
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||
|
||||
OpenSUSE
|
||||
--------
|
||||
|
||||
Synapse is in the OpenSUSE repositories as ``matrix-synapse``::
|
||||
|
||||
sudo zypper install matrix-synapse
|
||||
|
||||
SUSE Linux Enterprise Server
|
||||
----------------------------
|
||||
|
||||
Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||
|
||||
ArchLinux
|
||||
---------
|
||||
|
||||
@@ -429,8 +437,7 @@ settings require a slightly more difficult installation process.
|
||||
using the ``.`` command, rather than ``bash``'s ``source``.
|
||||
5) Optionally, use ``pip`` to install ``lxml``, which Synapse needs to parse
|
||||
webpages for their titles.
|
||||
6) Use ``pip`` to install this repository: ``pip install
|
||||
https://github.com/matrix-org/synapse/tarball/master``
|
||||
6) Use ``pip`` to install this repository: ``pip install matrix-synapse``
|
||||
7) Optionally, change ``_synapse``'s shell to ``/bin/false`` to reduce the
|
||||
chance of a compromised Synapse server being used to take over your box.
|
||||
|
||||
@@ -444,37 +451,13 @@ https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-
|
||||
|
||||
Windows Install
|
||||
---------------
|
||||
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
||||
|
||||
- gcc
|
||||
- git
|
||||
- libffi-devel
|
||||
- openssl (and openssl-devel, python-openssl)
|
||||
- python
|
||||
- python-setuptools
|
||||
|
||||
The content repository requires additional packages and will be unable to process
|
||||
uploads without them:
|
||||
|
||||
- libjpeg8
|
||||
- libjpeg8-devel
|
||||
- zlib
|
||||
|
||||
If you choose to install Synapse without these packages, you will need to reinstall
|
||||
``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
|
||||
pillow --user``
|
||||

Troubleshooting:

- You may need to upgrade ``setuptools`` to get this to work correctly:
  ``pip install setuptools --upgrade``.
- You may encounter errors indicating that ``ffi.h`` is missing, even with
  ``libffi-devel`` installed. If you do, copy the ``.h`` files:
  ``cp /usr/lib/libffi-3.0.13/include/*.h /usr/include``
- You may need to install libsodium from source in order to install PyNacl. If
  you do, you may need to create a symlink to ``libsodium.a`` so ``ld`` can find
  it: ``ln -s /usr/local/lib/libsodium.a /usr/lib/libsodium.a``

If you wish to run or develop Synapse on Windows, the Windows Subsystem For
Linux provides a Linux environment on Windows 10 which is capable of using the
Debian, Fedora, or source installation methods. More information about WSL can
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
for Windows Server.

Troubleshooting
===============

@@ -482,7 +465,7 @@ Troubleshooting
Troubleshooting Installation
----------------------------

Synapse requires pip 1.7 or later, so if your OS provides too old a version you
Synapse requires pip 8 or later, so if your OS provides too old a version you
may need to manually upgrade it::

    sudo pip install --upgrade pip

@@ -517,28 +500,6 @@ failing, e.g.::

    pip install twisted

On OS X, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
will need to export CFLAGS=-Qunused-arguments.

Troubleshooting Running
-----------------------

If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
encryption and digital signatures.
Unfortunately PyNACL currently has a few issues
(https://github.com/pyca/pynacl/issues/53) and
(https://github.com/pyca/pynacl/issues/79) that mean it may not install
correctly, causing all tests to fail with errors about missing "sodium.h". To
fix try re-installing from PyPI or directly from
(https://github.com/pyca/pynacl)::

    # Install from PyPI
    pip install --user --upgrade --force pynacl

    # Install from github
    pip install --user https://github.com/pyca/pynacl/tarball/master

Running out of File Handles
~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -672,8 +633,8 @@ useful just for development purposes. See `<demo/README>`_.
Using PostgreSQL
================

As of Synapse 0.9, `PostgreSQL <http://www.postgresql.org>`_ is supported as an
alternative to the `SQLite <http://sqlite.org/>`_ database that Synapse has
As of Synapse 0.9, `PostgreSQL <https://www.postgresql.org>`_ is supported as an
alternative to the `SQLite <https://sqlite.org/>`_ database that Synapse has
traditionally used for convenience and simplicity.

The advantages of Postgres include:

@@ -696,8 +657,9 @@ Using a reverse proxy with Synapse

It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_ or
`HAProxy <http://www.haproxy.org/>`_ in front of Synapse. One advantage of
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/proxy>`_ or
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.

@@ -727,6 +689,26 @@ so an example nginx configuration might look like::

        }
    }

an example Caddy configuration might look like::

    matrix.example.com {
      proxy /_matrix http://localhost:8008 {
        transparent
      }
    }

and an example Apache configuration might look like::

    <VirtualHost *:443>
        SSLEngine on
        ServerName matrix.example.com;

        <Location /_matrix>
            ProxyPass http://127.0.0.1:8008/_matrix nocanon
            ProxyPassReverse http://127.0.0.1:8008/_matrix
        </Location>
    </VirtualHost>

You will also want to set ``bind_addresses: ['127.0.0.1']`` and ``x_forwarded: true``
for port 8008 in ``homeserver.yaml`` to ensure that client IP addresses are
recorded correctly.
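A quick way to confirm the proxy is forwarding ``/_matrix`` traffic is to hit
the versions endpoint through it; a minimal sketch, assuming the placeholder
hostname ``matrix.example.com`` from the examples above and the ``requests``
library::

    # Sketch: check that the reverse proxy forwards /_matrix to Synapse.
    # matrix.example.com is the placeholder hostname used above.
    import requests

    resp = requests.get("https://matrix.example.com/_matrix/client/versions")
    resp.raise_for_status()
    print(resp.json())  # e.g. {"versions": ["r0.0.1", ...]}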
@@ -881,7 +863,7 @@ to install using pip and a virtualenv::

    virtualenv -p python2.7 env
    source env/bin/activate
    python synapse/python_dependencies.py | xargs pip install
    python -m synapse.python_dependencies | xargs pip install
    pip install lxml mock

This will run a process of downloading and installing all the needed
@@ -936,5 +918,13 @@ variable. The default is 0.5, which can be decreased to reduce RAM usage
in memory constrained environments, or increased if performance starts to
degrade.

Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
improvement in overall memory use, and especially in terms of giving back RAM
to the OS. To use it, the library must simply be put in the LD_PRELOAD
environment variable when launching Synapse. On Debian, this can be done
by installing the ``libjemalloc1`` package and adding this line to
``/etc/default/matrix-synapse``::

    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
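To verify the preload actually took effect, one option (a sketch, not part of
the official instructions) is to check the process's memory maps on Linux::

    # Sketch: confirm libjemalloc is mapped into a running process.
    def jemalloc_loaded(pid="self"):
        with open("/proc/%s/maps" % pid) as maps:
            return any("libjemalloc" in line for line in maps)

    print("jemalloc loaded:", jemalloc_loaded())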

.. _`key_management`: https://matrix.org/docs/spec/server_server/unstable.html#retrieving-server-keys
19
UPGRADE.rst
@@ -18,7 +18,7 @@ instructions that may be required are listed later in this document.

.. code:: bash

    pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
    pip install --upgrade --process-dependency-links matrix-synapse

    # restart synapse
    synctl restart

@@ -48,11 +48,24 @@ returned by the Client-Server API:

    # configured on port 443.
    curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"

Upgrading to $NEXT_VERSION
Upgrading to v0.33.7
====================

This release removes the example email notification templates from
``res/templates`` (they are now internal to the python package). This should
only affect you if you (a) deploy your Synapse instance from a git checkout or
a github snapshot URL, and (b) have email notifications enabled.

If you have email notifications enabled, you should ensure that
``email.template_dir`` is either configured to point at a directory where you
have installed customised templates, or left unset to use the default
templates.

Upgrading to v0.27.3
====================

This release expands the anonymous usage stats sent if the opt-in
``report_stats`` configuration is set to ``true``. We now capture RSS memory
and cpu use at a very coarse level. This requires administrators to install
the optional ``psutil`` python module.
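For illustration, the coarse figures in question are the ones ``psutil``
exposes per process; a hypothetical sketch of the values captured:

.. code:: python

    # Sketch: the kind of coarse RSS/cpu figures psutil makes available.
    import psutil

    proc = psutil.Process()
    print(proc.memory_info().rss)  # resident set size, in bytes
    print(proc.cpu_times().user)   # user-mode CPU seconds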
1
changelog.d/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
!.gitignore
@@ -1,29 +1,5 @@
# Synapse Docker

The `matrixdotorg/synapse` Docker image will run Synapse as a single process. It does not provide a
database server or a TURN server, you should run these separately.

If you run a Postgres server, you should simply include it in the same Compose
project or set the proper environment variables and the image will automatically
use that server.

## Build

Build the docker image with the `docker build` command from the root of the synapse repository.

```
docker build -t docker.io/matrixdotorg/synapse .
```

The `-t` option sets the image tag. Official images are tagged `matrixdotorg/synapse:<version>` where `<version>` is the same as the release tag in the synapse git repository.

You may have a local Python wheel cache available, in which case copy the relevant packages in the ``cache/`` directory at the root of the project.

## Run

This image is designed to run either with an automatically generated configuration
file or with a custom configuration that requires manual editing.

### Automated configuration

It is recommended that you use Docker Compose to run your containers, including
@@ -60,94 +36,6 @@ Then, customize your configuration and run the server:
docker-compose up -d
```

### Without Compose
### More information

If you do not wish to use Compose, you may still run this image using plain
Docker commands. Note that the following is just a guideline and you may need
to add parameters to the docker run command to account for the network situation
with your postgres database.

```
docker run \
    -d \
    --name synapse \
    -v ${DATA_PATH}:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=yes \
    docker.io/matrixdotorg/synapse:latest
```

## Volumes

The image expects a single volume, located at ``/data``, that will hold:

* temporary files during uploads;
* uploaded media and thumbnails;
* the SQLite database if you do not configure postgres;
* the appservices configuration.

You are free to use separate volumes depending on storage endpoints at your
disposal. For instance, ``/data/media`` could be stored on a large but low
performance HDD storage while other files could be stored on high performance
endpoints.

In order to set up an application service, simply create an ``appservices``
directory in the data volume and write the application service Yaml
configuration file there. Multiple application services are supported.

## Environment

Unless you specify a custom path for the configuration file, a very generic
file will be generated, based on the following environment settings.
These are a good starting point for setting up your own deployment.

Global settings:

* ``UID``, the user id Synapse will run as [default 991]
* ``GID``, the group id Synapse will run as [default 991]
* ``SYNAPSE_CONFIG_PATH``, path to a custom config file

If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file
then customize it manually. No other environment variable is required.

Otherwise, a dynamic configuration file will be used. The following environment
variables are available for configuration:

* ``SYNAPSE_SERVER_NAME`` (mandatory), the current server public hostname.
* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous
  statistics reporting back to the Matrix project which helps us to get funding.
* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if
  you run your own TLS-capable reverse proxy).
* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on
  the Synapse instance.
* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server.
* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`].
* ``SYNAPSE_CACHE_FACTOR``, the cache factor [default `0.5`].
* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public
  key in order to enable recaptcha upon registration.
* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private
  key in order to enable recaptcha upon registration.
* ``SYNAPSE_TURN_URIS``, set this variable to the comma-separated list of TURN
  uris to enable TURN for this homeserver.
* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required.

Shared secrets, which will be initialized to random values if not set:

* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registering users if
  registration is disabled.
* ``SYNAPSE_MACAROON_SECRET_KEY``, secret for signing access tokens
  to the server.

Database specific values (will use SQLite if not set):

* `POSTGRES_DB` - The database name for the synapse postgres database. [default: `synapse`]
* `POSTGRES_HOST` - The host of the postgres database if you wish to use postgresql instead of sqlite3. [default: `db` which is useful when using a container on the same docker network in a compose file where the postgres service is called `db`]
* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If this is set then postgres will be used instead of sqlite3.** [default: none] **NOTE**: You are highly encouraged to use postgresql! Please use the compose file to make it easier to deploy.
* `POSTGRES_USER` - The user for the synapse postgres database. [default: `matrix`]

Mail server specific values (will not send emails if not set):

* ``SYNAPSE_SMTP_HOST``, hostname to the mail server.
* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default ``25``].
* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if any.
* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail server if any.
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)

@@ -6,6 +6,7 @@ version: '3'
services:

  synapse:
    build: ../..
    image: docker.io/matrixdotorg/synapse:latest
    # Since synapse does not retry to connect to the database, restart upon
    # failure
6
contrib/grafana/README.md
Normal file
@@ -0,0 +1,6 @@
# Using the Synapse Grafana dashboard

0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
3. Set up additional recording rules
5211
contrib/grafana/synapse.json
Normal file
File diff suppressed because it is too large
63
docker/Dockerfile
Normal file
@@ -0,0 +1,63 @@
ARG PYTHON_VERSION=2

###
### Stage 0: builder
###
FROM docker.io/python:${PYTHON_VERSION}-alpine3.8 as builder

# install the OS build deps

RUN apk add \
    build-base \
    libffi-dev \
    libjpeg-turbo-dev \
    libressl-dev \
    libxslt-dev \
    linux-headers \
    postgresql-dev \
    zlib-dev

# build things which have slow build steps, before we copy synapse, so that
# the layer can be cached.
#
# (we really just care about caching a wheel here, as the "pip install" below
# will install them again.)

RUN pip install --prefix="/install" --no-warn-script-location \
    cryptography \
    msgpack-python \
    pillow \
    pynacl

# now install synapse and all of the python deps to /install.

COPY . /synapse
RUN pip install --prefix="/install" --no-warn-script-location \
    lxml \
    psycopg2 \
    /synapse

###
### Stage 1: runtime
###

FROM docker.io/python:${PYTHON_VERSION}-alpine3.8

RUN apk add --no-cache --virtual .runtime_deps \
    libffi \
    libjpeg-turbo \
    libressl \
    libxslt \
    libpq \
    zlib \
    su-exec

COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

VOLUME ["/data"]

EXPOSE 8008/tcp 8448/tcp

ENTRYPOINT ["/start.py"]
12
docker/Dockerfile-pgtests
Normal file
@@ -0,0 +1,12 @@
# Use the Sytest image that comes with a lot of the build dependencies
# pre-installed
FROM matrixdotorg/sytest:latest

# The Sytest image doesn't come with python, so install that
RUN apt-get -qq install -y python python-dev python-pip

# We need tox to run the tests in run_pg_tests.sh
RUN pip install tox

ADD run_pg_tests.sh /pg_tests.sh
ENTRYPOINT /pg_tests.sh
125
docker/README.md
Normal file
@@ -0,0 +1,125 @@
# Synapse Docker

This Docker image will run Synapse as a single process. It does not provide a database
server or a TURN server, you should run these separately.

## Run

We do not currently offer a `latest` image, as this has somewhat undefined semantics.
We instead release only tagged versions so upgrading between releases is entirely
within your control.

### Using docker-compose (easier)

This image is designed to run either with an automatically generated configuration
file or with a custom configuration that requires manual editing.

An easy way to make use of this image is via docker-compose. See the
[contrib/docker](../contrib/docker)
section of the synapse project for examples.

### Without Compose (harder)

If you do not wish to use Compose, you may still run this image using plain
Docker commands. Note that the following is just a guideline and you may need
to add parameters to the docker run command to account for the network situation
with your postgres database.

```
docker run \
    -d \
    --name synapse \
    -v ${DATA_PATH}:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=yes \
    docker.io/matrixdotorg/synapse:latest
```

## Volumes

The image expects a single volume, located at ``/data``, that will hold:

* temporary files during uploads;
* uploaded media and thumbnails;
* the SQLite database if you do not configure postgres;
* the appservices configuration.

You are free to use separate volumes depending on storage endpoints at your
disposal. For instance, ``/data/media`` could be stored on a large but low
performance HDD storage while other files could be stored on high performance
endpoints.

In order to set up an application service, simply create an ``appservices``
directory in the data volume and write the application service Yaml
configuration file there. Multiple application services are supported.

## Environment

Unless you specify a custom path for the configuration file, a very generic
file will be generated, based on the following environment settings.
These are a good starting point for setting up your own deployment.

Global settings:

* ``UID``, the user id Synapse will run as [default 991]
* ``GID``, the group id Synapse will run as [default 991]
* ``SYNAPSE_CONFIG_PATH``, path to a custom config file

If ``SYNAPSE_CONFIG_PATH`` is set, you should generate a configuration file
then customize it manually. No other environment variable is required.

Otherwise, a dynamic configuration file will be used. The following environment
variables are available for configuration:

* ``SYNAPSE_SERVER_NAME`` (mandatory), the current server public hostname.
* ``SYNAPSE_REPORT_STATS``, (mandatory, ``yes`` or ``no``), enable anonymous
  statistics reporting back to the Matrix project which helps us to get funding.
* ``SYNAPSE_NO_TLS``, set this variable to disable TLS in Synapse (use this if
  you run your own TLS-capable reverse proxy).
* ``SYNAPSE_ENABLE_REGISTRATION``, set this variable to enable registration on
  the Synapse instance.
* ``SYNAPSE_ALLOW_GUEST``, set this variable to allow guest joining this server.
* ``SYNAPSE_EVENT_CACHE_SIZE``, the event cache size [default `10K`].
* ``SYNAPSE_CACHE_FACTOR``, the cache factor [default `0.5`].
* ``SYNAPSE_RECAPTCHA_PUBLIC_KEY``, set this variable to the recaptcha public
  key in order to enable recaptcha upon registration.
* ``SYNAPSE_RECAPTCHA_PRIVATE_KEY``, set this variable to the recaptcha private
  key in order to enable recaptcha upon registration.
* ``SYNAPSE_TURN_URIS``, set this variable to the comma-separated list of TURN
  uris to enable TURN for this homeserver.
* ``SYNAPSE_TURN_SECRET``, set this to the TURN shared secret if required.
* ``SYNAPSE_MAX_UPLOAD_SIZE``, set this variable to change the max upload size [default `10M`].

Shared secrets, which will be initialized to random values if not set:

* ``SYNAPSE_REGISTRATION_SHARED_SECRET``, secret for registering users if
  registration is disabled.
* ``SYNAPSE_MACAROON_SECRET_KEY``, secret for signing access tokens
  to the server.

Database specific values (will use SQLite if not set):

* `POSTGRES_DB` - The database name for the synapse postgres database. [default: `synapse`]
* `POSTGRES_HOST` - The host of the postgres database if you wish to use postgresql instead of sqlite3. [default: `db` which is useful when using a container on the same docker network in a compose file where the postgres service is called `db`]
* `POSTGRES_PASSWORD` - The password for the synapse postgres database. **If this is set then postgres will be used instead of sqlite3.** [default: none] **NOTE**: You are highly encouraged to use postgresql! Please use the compose file to make it easier to deploy.
* `POSTGRES_USER` - The user for the synapse postgres database. [default: `matrix`]

Mail server specific values (will not send emails if not set):

* ``SYNAPSE_SMTP_HOST``, hostname to the mail server.
* ``SYNAPSE_SMTP_PORT``, TCP port for accessing the mail server [default ``25``].
* ``SYNAPSE_SMTP_USER``, username for authenticating against the mail server if any.
* ``SYNAPSE_SMTP_PASSWORD``, password for authenticating against the mail server if any.

## Build

Build the docker image with the `docker build` command from the root of the synapse repository.

```
docker build -t docker.io/matrixdotorg/synapse . -f docker/Dockerfile
```

The `-t` option sets the image tag. Official images are tagged `matrixdotorg/synapse:<version>` where `<version>` is the same as the release tag in the synapse git repository.

You may have a local Python wheel cache available, in which case copy the relevant
packages in the ``cache/`` directory at the root of the project.
@@ -21,7 +21,7 @@ listeners:

{% if not SYNAPSE_NO_TLS %}
  -
    port: 8448
    bind_addresses: ['0.0.0.0']
    bind_addresses: ['::']
    type: http
    tls: true
    x_forwarded: false

@@ -34,7 +34,7 @@ listeners:

  - port: 8008
    tls: false
    bind_addresses: ['0.0.0.0']
    bind_addresses: ['::']
    type: http
    x_forwarded: false

@@ -85,7 +85,7 @@ federation_rc_concurrent: 3

media_store_path: "/data/media"
uploads_path: "/data/uploads"
max_upload_size: "10M"
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "10M" }}"
max_image_pixels: "32M"
dynamic_thumbnails: false

@@ -211,7 +211,9 @@ email:

  require_transport_security: False
  notif_from: "{{ SYNAPSE_SMTP_FROM or "hostmaster@" + SYNAPSE_SERVER_NAME }}"
  app_name: Matrix
  template_dir: res/templates
  # if template_dir is unset, uses the example templates that are part of
  # the Synapse distribution.
  #template_dir: res/templates
  notif_template_html: notif_mail.html
  notif_template_text: notif_mail.txt
  notif_for_new_users: True
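The `{{ VAR or "default" }}` pattern in the template above is plain jinja2; a
minimal sketch of how the generated config resolves its defaults (jinja2 is
already used by `docker/start.py`):

```python
# Sketch: render the templated line with environment defaults.
import os
from jinja2 import Template

line = 'max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "10M" }}"'
print(Template(line).render(**os.environ))  # falls back to "10M" if unset
```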
20
docker/run_pg_tests.sh
Executable file
@@ -0,0 +1,20 @@
#!/bin/bash

# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the
# caller script). It will set up the database, run it, and then use the tox
# configuration to run the tests.

set -e

# Set PGUSER so Synapse's tests know what user to connect to the database with
export PGUSER=postgres

# Initialise & start the database
su -c '/usr/lib/postgresql/9.6/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="en_US.UTF-8" --lc-ctype="en_US.UTF-8" --username=postgres' postgres
su -c '/usr/lib/postgresql/9.6/bin/pg_ctl -w -D /var/lib/postgresql/data start' postgres

# Run the tests
cd /src
export TRIAL_FLAGS="-j 4"
tox --workdir=/tmp -e py27-postgres
@@ -5,6 +5,7 @@ import os
import sys
import subprocess
import glob
import codecs

# Utility functions
convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))

@@ -23,7 +24,7 @@ def generate_secrets(environ, secrets):
        with open(filename) as handle: value = handle.read()
    else:
        print("Generating a random secret for {}".format(name))
        value = os.urandom(32).encode("hex")
        value = codecs.encode(os.urandom(32), "hex").decode()
        with open(filename, "w") as handle: handle.write(value)
    environ[secret] = value

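The `codecs` change above is a Python 2/3 compatibility fix; a short sketch of
why (on Python 3, `bytes` has no `.encode("hex")`):

```python
# Sketch: hex-encoding random bytes portably across Python 2 and 3.
import codecs
import os

raw = os.urandom(32)
secret = codecs.encode(raw, "hex").decode()  # works on both; 64 hex chars
assert len(secret) == 64
```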
63
docs/admin_api/register_api.rst
Normal file
@@ -0,0 +1,63 @@
Shared-Secret Registration
==========================

This API allows for the creation of users in an administrative and
non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts.

To authenticate yourself to the server, you will need both the shared secret
(``registration_shared_secret`` in the homeserver configuration), and a
one-time nonce. If the registration shared secret is not configured, this API
is not enabled.

To fetch the nonce, you need to request one from the API::

    > GET /_matrix/client/r0/admin/register

    < {"nonce": "thisisanonce"}

Once you have the nonce, you can make a ``POST`` to the same URL with a JSON
body containing the nonce, username, password, whether they are an admin
(optional, False by default), and a HMAC digest of the content.

As an example::

    > POST /_matrix/client/r0/admin/register
    > {
        "nonce": "thisisanonce",
        "username": "pepper_roni",
        "password": "pizza",
        "admin": true,
        "mac": "mac_digest_here"
      }

    < {
        "access_token": "token_here",
        "user_id": "@pepper_roni:localhost",
        "home_server": "test",
        "device_id": "device_id_here"
      }

The MAC is the hex digest output of the HMAC-SHA1 algorithm, with the key being
the shared secret and the content being the nonce, user, password, and either
the string "admin" or "notadmin", each separated by NULs. For an example of
generation in Python::

    import hmac, hashlib

    def generate_mac(nonce, user, password, admin=False):

        mac = hmac.new(
            key=shared_secret,
            digestmod=hashlib.sha1,
        )

        mac.update(nonce.encode('utf8'))
        mac.update(b"\x00")
        mac.update(user.encode('utf8'))
        mac.update(b"\x00")
        mac.update(password.encode('utf8'))
        mac.update(b"\x00")
        mac.update(b"admin" if admin else b"notadmin")

        return mac.hexdigest()
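A hedged end-to-end sketch of the flow above (fetch the nonce, compute the MAC
with ``generate_mac``, then POST; the base URL and the secret value are
placeholders, not part of the API)::

    # Hypothetical usage of the documented flow; URL and secret are placeholders.
    import requests

    shared_secret = b"<registration_shared_secret from homeserver.yaml>"
    url = "https://localhost:8448/_matrix/client/r0/admin/register"

    nonce = requests.get(url, verify=False).json()["nonce"]
    body = {
        "nonce": nonce,
        "username": "pepper_roni",
        "password": "pizza",
        "admin": True,
        "mac": generate_mac(nonce, "pepper_roni", "pizza", admin=True),
    }
    print(requests.post(url, json=body, verify=False).json())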
@@ -74,7 +74,7 @@ replication endpoints that it's talking to on the main synapse process.
``worker_replication_port`` should point to the TCP replication listener port and
``worker_replication_http_port`` should point to the HTTP replication port.

Currently, only the ``event_creator`` worker requires specifying
Currently, the ``event_creator`` and ``federation_reader`` workers require specifying
``worker_replication_http_port``.

For instance::
@@ -173,10 +173,23 @@ endpoints matching the following regular expressions::
    ^/_matrix/federation/v1/backfill/
    ^/_matrix/federation/v1/get_missing_events/
    ^/_matrix/federation/v1/publicRooms
    ^/_matrix/federation/v1/query/
    ^/_matrix/federation/v1/make_join/
    ^/_matrix/federation/v1/make_leave/
    ^/_matrix/federation/v1/send_join/
    ^/_matrix/federation/v1/send_leave/
    ^/_matrix/federation/v1/invite/
    ^/_matrix/federation/v1/query_auth/
    ^/_matrix/federation/v1/event_auth/
    ^/_matrix/federation/v1/exchange_third_party_invite/
    ^/_matrix/federation/v1/send/

The above endpoints should all be routed to the federation_reader worker by the
reverse-proxy configuration.

The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single
instance.
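For illustration only (not from the docs), the routing decision a reverse
proxy makes for this worker amounts to a prefix-regex match on the request
path::

    # Sketch: route a request path to the federation_reader worker.
    import re

    FEDERATION_READER_PATTERNS = [
        r"^/_matrix/federation/v1/backfill/",
        r"^/_matrix/federation/v1/send/",
        # ... the remaining patterns listed above
    ]

    def routes_to_federation_reader(path):
        return any(re.match(p, path) for p in FEDERATION_READER_PATTERNS)

    assert routes_to_federation_reader("/_matrix/federation/v1/send/12345")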

``synapse.app.federation_sender``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -206,6 +219,10 @@ Handles client API endpoints. It can handle REST endpoints matching the
following regular expressions::

    ^/_matrix/client/(api/v1|r0|unstable)/publicRooms$
    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/joined_members$
    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/context/.*$
    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/members$
    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/state$

``synapse.app.user_dir``
~~~~~~~~~~~~~~~~~~~~~~~~

@@ -224,6 +241,14 @@ regular expressions::

    ^/_matrix/client/(api/v1|r0|unstable)/keys/upload

If ``use_presence`` is False in the homeserver config, it can also handle REST
endpoints matching the following regular expressions::

    ^/_matrix/client/(api/v1|r0|unstable)/presence/[^/]+/status

This "stub" presence handler will pass through ``GET`` request but make the
|
||||
``PUT`` effectively a no-op.
|
||||

It will proxy any requests it cannot handle to the main synapse instance. It
must therefore be configured with the location of the main instance, via
the ``worker_main_http_uri`` setting in the frontend_proxy worker configuration

@@ -240,6 +265,7 @@ Handles some event creation. It can handle REST endpoints matching::

    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send
    ^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
    ^/_matrix/client/(api/v1|r0|unstable)/join/
    ^/_matrix/client/(api/v1|r0|unstable)/profile/

It will create events locally and then send them on to the main synapse
instance to be persisted and handled.
@@ -1,23 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

export HAPROXY_BIN=/home/haproxy/haproxy-1.6.11/haproxy

./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./jenkins/clone.sh dendron https://github.com/matrix-org/dendron.git
./dendron/jenkins/build_dendron.sh
./sytest/jenkins/prep_sytest_for_postgres.sh

./sytest/jenkins/install_and_run.sh \
    --python $WORKSPACE/.tox/py27/bin/python \
    --synapse-directory $WORKSPACE \
    --dendron $WORKSPACE/dendron/bin/dendron \
    --haproxy \

@@ -1,20 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git
./jenkins/clone.sh dendron https://github.com/matrix-org/dendron.git
./dendron/jenkins/build_dendron.sh
./sytest/jenkins/prep_sytest_for_postgres.sh

./sytest/jenkins/install_and_run.sh \
    --python $WORKSPACE/.tox/py27/bin/python \
    --synapse-directory $WORKSPACE \
    --dendron $WORKSPACE/dendron/bin/dendron \

@@ -1,22 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"

# Output flake8 violations to violations.flake8.log
export PEP8SUFFIX="--output-file=violations.flake8.log"

rm .coverage* || echo "No coverage files to remove"

tox -e packaging -e pep8

@@ -1,18 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git

./sytest/jenkins/prep_sytest_for_postgres.sh

./sytest/jenkins/install_and_run.sh \
    --python $WORKSPACE/.tox/py27/bin/python \
    --synapse-directory $WORKSPACE \

@@ -1,16 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export WORKSPACE
export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

./jenkins/prepare_synapse.sh
./jenkins/clone.sh sytest https://github.com/matrix-org/sytest.git

./sytest/jenkins/install_and_run.sh \
    --python $WORKSPACE/.tox/py27/bin/python \
    --synapse-directory $WORKSPACE \

@@ -1,30 +0,0 @@
#!/bin/bash

set -eux

: ${WORKSPACE:="$(pwd)"}

export PYTHONDONTWRITEBYTECODE=yep
export SYNAPSE_CACHE_FACTOR=1

# Output test results as junit xml
export TRIAL_FLAGS="--reporter=subunit"
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
# Write coverage reports to a separate file for each process
export COVERAGE_OPTS="-p"
export DUMP_COVERAGE_COMMAND="coverage help"

# Output flake8 violations to violations.flake8.log
# Don't exit with non-0 status code on Jenkins,
# so that the build steps continue and a later step can decided whether to
# UNSTABLE or FAILURE this build.
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"

rm .coverage* || echo "No coverage files to remove"

tox --notest -e py27
TOX_BIN=$WORKSPACE/.tox/py27/bin
python synapse/python_dependencies.py | xargs -n1 $TOX_BIN/pip install
$TOX_BIN/pip install lxml

tox -e py27

@@ -1,44 +0,0 @@
#! /bin/bash

# This clones a project from github into a named subdirectory
# If the project has a branch with the same name as this branch
# then it will checkout that branch after cloning.
# Otherwise it will checkout "origin/develop."
# The first argument is the name of the directory to checkout
# the branch into.
# The second argument is the URL of the remote repository to checkout.
# Usually something like https://github.com/matrix-org/sytest.git

set -eux

NAME=$1
PROJECT=$2
BASE=".$NAME-base"

# Update our mirror.
if [ ! -d ".$NAME-base" ]; then
    # Create a local mirror of the source repository.
    # This saves us from having to download the entire repository
    # when this script is next run.
    git clone "$PROJECT" "$BASE" --mirror
else
    # Fetch any updates from the source repository.
    (cd "$BASE"; git fetch -p)
fi

# Remove the existing repository so that we have a clean copy
rm -rf "$NAME"
# Cloning with --shared means that we will share portions of the
# .git directory with our local mirror.
git clone "$BASE" "$NAME" --shared

# Jenkins may have supplied us with the name of the branch in the
# environment. Otherwise we will have to guess based on the current
# commit.
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
cd "$NAME"
# check out the relevant branch
git checkout "${GIT_BRANCH}" || (
    echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop"
    git checkout "origin/develop"
)

@@ -31,5 +31,5 @@ $TOX_BIN/pip install 'setuptools>=18.5'
$TOX_BIN/pip install 'pip>=10'

{ python synapse/python_dependencies.py
  echo lxml psycopg2
  echo lxml
} | xargs $TOX_BIN/pip install

@@ -1,5 +1,30 @@
[tool.towncrier]
    package = "synapse"
    filename = "CHANGES.rst"
    filename = "CHANGES.md"
    directory = "changelog.d"
    issue_format = "`#{issue} <https://github.com/matrix-org/synapse/issues/{issue}>`_"
    issue_format = "[\\#{issue}](https://github.com/matrix-org/synapse/issues/{issue})"

    [[tool.towncrier.type]]
        directory = "feature"
        name = "Features"
        showcontent = true

    [[tool.towncrier.type]]
        directory = "bugfix"
        name = "Bugfixes"
        showcontent = true

    [[tool.towncrier.type]]
        directory = "doc"
        name = "Improved Documentation"
        showcontent = true

    [[tool.towncrier.type]]
        directory = "removal"
        name = "Deprecations and Removals"
        showcontent = true

    [[tool.towncrier.type]]
        directory = "misc"
        name = "Internal Changes"
        showcontent = true

@@ -1,21 +1,20 @@
from synapse.events import FrozenEvent
from synapse.api.auth import Auth

from mock import Mock
from __future__ import print_function

import argparse
import itertools
import json
import sys

from mock import Mock

from synapse.api.auth import Auth
from synapse.events import FrozenEvent


def check_auth(auth, auth_chain, events):
    auth_chain.sort(key=lambda e: e.depth)

    auth_map = {
        e.event_id: e
        for e in auth_chain
    }
    auth_map = {e.event_id: e for e in auth_chain}

    create_events = {}
    for e in auth_chain:

@@ -25,31 +24,26 @@ def check_auth(auth, auth_chain, events):
    for e in itertools.chain(auth_chain, events):
        auth_events_list = [auth_map[i] for i, _ in e.auth_events]

        auth_events = {
            (e.type, e.state_key): e
            for e in auth_events_list
        }
        auth_events = {(e.type, e.state_key): e for e in auth_events_list}

        auth_events[("m.room.create", "")] = create_events[e.room_id]

        try:
            auth.check(e, auth_events=auth_events)
        except Exception as ex:
            print "Failed:", e.event_id, e.type, e.state_key
            print "Auth_events:", auth_events
            print ex
            print json.dumps(e.get_dict(), sort_keys=True, indent=4)
            print("Failed:", e.event_id, e.type, e.state_key)
            print("Auth_events:", auth_events)
            print(ex)
            print(json.dumps(e.get_dict(), sort_keys=True, indent=4))
            # raise
        print "Success:", e.event_id, e.type, e.state_key
        print("Success:", e.event_id, e.type, e.state_key)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument(
        'json',
        nargs='?',
        type=argparse.FileType('r'),
        default=sys.stdin,
        'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
    )

    args = parser.parse_args()

@@ -1,10 +1,15 @@
from synapse.crypto.event_signing import *
from unpaddedbase64 import encode_base64

import argparse
import hashlib
import sys
import json
import logging
import sys

from unpaddedbase64 import encode_base64

from synapse.crypto.event_signing import (
    check_event_content_hash,
    compute_event_reference_hash,
)


class dictobj(dict):

@@ -24,27 +29,26 @@ class dictobj(dict):

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
                        default=sys.stdin)
    parser.add_argument(
        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
    )
    args = parser.parse_args()
    logging.basicConfig()

    event_json = dictobj(json.load(args.input_json))

    algorithms = {
        "sha256": hashlib.sha256,
    }
    algorithms = {"sha256": hashlib.sha256}

    for alg_name in event_json.hashes:
        if check_event_content_hash(event_json, algorithms[alg_name]):
            print "PASS content hash %s" % (alg_name,)
            print("PASS content hash %s" % (alg_name,))
        else:
            print "FAIL content hash %s" % (alg_name,)
            print("FAIL content hash %s" % (alg_name,))

    for algorithm in algorithms.values():
        name, h_bytes = compute_event_reference_hash(event_json, algorithm)
        print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
        print("Reference hash %s: %s" % (name, encode_base64(h_bytes)))

if __name__=="__main__":

if __name__ == "__main__":
    main()

@@ -1,15 +1,15 @@

from signedjson.sign import verify_signed_json
import argparse
import json
import logging
import sys
import urllib2

import dns.resolver
from signedjson.key import decode_verify_key_bytes, write_signing_keys
from signedjson.sign import verify_signed_json
from unpaddedbase64 import decode_base64

import urllib2
import json
import sys
import dns.resolver
import pprint
import argparse
import logging

def get_targets(server_name):
    if ":" in server_name:

@@ -23,6 +23,7 @@ def get_targets(server_name):
    except dns.resolver.NXDOMAIN:
        yield (server_name, 8448)


def get_server_keys(server_name, target, port):
    url = "https://%s:%i/_matrix/key/v1" % (target, port)
    keys = json.load(urllib2.urlopen(url))

@@ -33,12 +34,14 @@ def get_server_keys(server_name, target, port):
        verify_keys[key_id] = verify_key
    return verify_keys


def main():

    parser = argparse.ArgumentParser()
    parser.add_argument("signature_name")
    parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'),
                        default=sys.stdin)
    parser.add_argument(
        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
    )

    args = parser.parse_args()
    logging.basicConfig()

@@ -48,24 +51,23 @@ def main():
    for target, port in get_targets(server_name):
        try:
            keys = get_server_keys(server_name, target, port)
            print "Using keys from https://%s:%s/_matrix/key/v1" % (target, port)
            print("Using keys from https://%s:%s/_matrix/key/v1" % (target, port))
            write_signing_keys(sys.stdout, keys.values())
            break
        except:
        except Exception:
            logging.exception("Error talking to %s:%s", target, port)

    json_to_check = json.load(args.input_json)
    print "Checking JSON:"
    print("Checking JSON:")
    for key_id in json_to_check["signatures"][args.signature_name]:
        try:
            key = keys[key_id]
            verify_signed_json(json_to_check, args.signature_name, key)
            print "PASS %s" % (key_id,)
        except:
            print("PASS %s" % (key_id,))
        except Exception:
            logging.exception("Check for key %s failed" % (key_id,))
            print "FAIL %s" % (key_id,)
            print("FAIL %s" % (key_id,))


if __name__ == '__main__':
    main()

@@ -1,13 +1,21 @@
import hashlib
import json
import sys
import time

import six

import psycopg2
import yaml
import sys
import json
import time
import hashlib
from unpaddedbase64 import encode_base64
from canonicaljson import encode_canonical_json
from signedjson.key import read_signing_keys
from signedjson.sign import sign_json
from canonicaljson import encode_canonical_json
from unpaddedbase64 import encode_base64

if six.PY2:
    db_type = six.moves.builtins.buffer
else:
    db_type = memoryview


def select_v1_keys(connection):

@@ -39,7 +47,9 @@ def select_v2_json(connection):
    cursor.close()
    results = {}
    for server_name, key_id, key_json in rows:
        results.setdefault(server_name, {})[key_id] = json.loads(str(key_json).decode("utf-8"))
        results.setdefault(server_name, {})[key_id] = json.loads(
            str(key_json).decode("utf-8")
        )
    return results


@@ -47,10 +57,7 @@ def convert_v1_to_v2(server_name, valid_until, keys, certificate):
    return {
        "old_verify_keys": {},
        "server_name": server_name,
        "verify_keys": {
            key_id: {"key": key}
            for key_id, key in keys.items()
        },
        "verify_keys": {key_id: {"key": key} for key_id, key in keys.items()},
        "valid_until_ts": valid_until,
        "tls_fingerprints": [fingerprint(certificate)],
    }

@@ -65,7 +72,7 @@ def rows_v2(server, json):
    valid_until = json["valid_until_ts"]
    key_json = encode_canonical_json(json)
    for key_id in json["verify_keys"]:
        yield (server, key_id, "-", valid_until, valid_until, buffer(key_json))
        yield (server, key_id, "-", valid_until, valid_until, db_type(key_json))


def main():

@@ -87,7 +94,7 @@ def main():

    result = {}
    for server in keys:
        if not server in json:
        if server not in json:
            v2_json = convert_v1_to_v2(
                server, valid_until, keys[server], certificates[server]
            )

@@ -96,10 +103,7 @@ def main():

    yaml.safe_dump(result, sys.stdout, default_flow_style=False)

    rows = list(
        row for server, json in result.items()
        for row in rows_v2(server, json)
    )
    rows = list(row for server, json in result.items() for row in rows_v2(server, json))

    cursor = connection.cursor()
    cursor.executemany(

@@ -107,7 +111,7 @@ def main():
        " server_name, key_id, from_server,"
        " ts_added_ms, ts_valid_until_ms, key_json"
        ") VALUES (%s, %s, %s, %s, %s, %s)",
        rows
        rows,
    )
    connection.commit()

@@ -1,33 +0,0 @@
#!/usr/bin/perl -pi
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

$copyright = <<EOT;
/* Copyright 2016 OpenMarket Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
EOT

s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);

@@ -1,33 +0,0 @@
#!/usr/bin/perl -pi
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

$copyright = <<EOT;
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

EOT

s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);
@@ -1,8 +1,16 @@
#! /usr/bin/python

from __future__ import print_function

import argparse
import ast
import os
import re
import sys

import yaml


class DefinitionVisitor(ast.NodeVisitor):
    def __init__(self):
        super(DefinitionVisitor, self).__init__()

@@ -42,15 +50,18 @@ def non_empty(defs):
    functions = {name: non_empty(f) for name, f in defs['def'].items()}
    classes = {name: non_empty(f) for name, f in defs['class'].items()}
    result = {}
    if functions: result['def'] = functions
    if classes: result['class'] = classes
    if functions:
        result['def'] = functions
    if classes:
        result['class'] = classes
    names = defs['names']
    uses = []
    for name in names.get('Load', ()):
        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
            uses.append(name)
    uses.extend(defs['attrs'])
    if uses: result['uses'] = uses
    if uses:
        result['uses'] = uses
    result['names'] = names
    result['attrs'] = defs['attrs']
    return result

@@ -95,7 +106,6 @@ def used_names(prefix, item, defs, names):


if __name__ == '__main__':
    import sys, os, argparse, re

    parser = argparse.ArgumentParser(description='Find definitions.')
    parser.add_argument(

@@ -105,24 +115,28 @@ if __name__ == '__main__':
        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
    )
    parser.add_argument(
        "--pattern", action="append", metavar="REGEXP",
        help="Search for a pattern"
        "--pattern", action="append", metavar="REGEXP", help="Search for a pattern"
    )
    parser.add_argument(
        "directories", nargs='+', metavar="DIR",
        help="Directories to search for definitions"
        "directories",
        nargs='+',
        metavar="DIR",
        help="Directories to search for definitions",
    )
    parser.add_argument(
        "--referrers", default=0, type=int,
        help="Include referrers up to the given depth"
        "--referrers",
        default=0,
        type=int,
        help="Include referrers up to the given depth",
    )
    parser.add_argument(
        "--referred", default=0, type=int,
        help="Include referred down to the given depth"
        "--referred",
        default=0,
        type=int,
        help="Include referred down to the given depth",
    )
    parser.add_argument(
        "--format", default="yaml",
        help="Output format, one of 'yaml' or 'dot'"
        "--format", default="yaml", help="Output format, one of 'yaml' or 'dot'"
    )
    args = parser.parse_args()

@@ -162,7 +176,7 @@ if __name__ == '__main__':
        for used_by in entry.get("used", ()):
            referrers.add(used_by)
    for name, definition in names.items():
        if not name in referrers:
        if name not in referrers:
            continue
        if ignore and any(pattern.match(name) for pattern in ignore):
            continue

@@ -176,7 +190,7 @@ if __name__ == '__main__':
        for uses in entry.get("uses", ()):
            referred.add(uses)
    for name, definition in names.items():
        if not name in referred:
        if name not in referred:
            continue
        if ignore and any(pattern.match(name) for pattern in ignore):
            continue

@@ -185,12 +199,12 @@ if __name__ == '__main__':
    if args.format == 'yaml':
        yaml.dump(result, sys.stdout, default_flow_style=False)
    elif args.format == 'dot':
        print "digraph {"
        print("digraph {")
        for name, entry in result.items():
            print name
            print(name)
            for used_by in entry.get("used", ()):
                if used_by in result:
                    print used_by, "->", name
        print "}"
                    print(used_by, "->", name)
        print("}")
    else:
        raise ValueError("Unknown format %r" % (args.format))

@@ -1,8 +1,11 @@
#!/usr/bin/env python2

import pymacaroons
from __future__ import print_function

import sys

import pymacaroons

if len(sys.argv) == 1:
sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
sys.exit(1)
@@ -11,14 +14,14 @@ macaroon_string = sys.argv[1]
key = sys.argv[2] if len(sys.argv) > 2 else None

macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
print macaroon.inspect()
print(macaroon.inspect())

print ""
print("")

verifier = pymacaroons.Verifier()
verifier.satisfy_general(lambda c: True)
try:
verifier.verify(macaroon, key)
print "Signature is correct"
print("Signature is correct")
except Exception as e:
print e.message
print(str(e))
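For reference, a macaroon suitable for feeding to this dump script can be built with pymacaroons directly. This is a minimal sketch; the location, identifier, and key are made-up illustrative values, not anything Synapse actually uses:

    import pymacaroons

    # All three values here are arbitrary illustrations, not real Synapse secrets.
    macaroon = pymacaroons.Macaroon(
        location="example.test",
        identifier="test-key-1",
        key="a-secret-signing-key",
    )
    macaroon.add_first_party_caveat("gen = 1")

    serialized = macaroon.serialize()
    print(serialized)  # pass this string as argv[1] of the dump script

    # Verifying with the same key mirrors what the script does with argv[2]:
    verifier = pymacaroons.Verifier()
    verifier.satisfy_general(lambda caveat: True)  # accept any caveat, as the script does
    verifier.verify(pymacaroons.Macaroon.deserialize(serialized), "a-secret-signing-key")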
@@ -18,21 +18,21 @@
from __future__ import print_function

import argparse
import base64
import json
import sys
from urlparse import urlparse, urlunparse

import nacl.signing
import json
import base64
import requests
import sys

from requests.adapters import HTTPAdapter
import srvlookup
import yaml
from requests.adapters import HTTPAdapter

# uncomment the following to enable debug logging of http requests
#from httplib import HTTPConnection
#HTTPConnection.debuglevel = 1
# from httplib import HTTPConnection
# HTTPConnection.debuglevel = 1


def encode_base64(input_bytes):
"""Encode bytes as a base64 string without any padding."""
@@ -58,15 +58,15 @@ def decode_base64(input_string):

def encode_canonical_json(value):
return json.dumps(
value,
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
separators=(',',':'),
# Sort the keys of dictionaries.
sort_keys=True,
# Encode the resulting unicode as UTF-8 bytes.
).encode("UTF-8")
value,
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
separators=(',', ':'),
# Sort the keys of dictionaries.
sort_keys=True,
# Encode the resulting unicode as UTF-8 bytes.
).encode("UTF-8")
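As a quick illustration of what this canonical encoding produces, here is a sketch of the same json.dumps recipe as a standalone function (this mirrors the snippet above, not Synapse's canonicaljson library):

    import json

    def encode_canonical_json(value):
        # Same recipe as above: compact separators, sorted keys, raw UTF-8.
        return json.dumps(
            value, ensure_ascii=False, separators=(',', ':'), sort_keys=True
        ).encode("UTF-8")

    # Key order and whitespace are normalised; non-ASCII stays as UTF-8 bytes:
    print(encode_canonical_json({"b": 1, "a": "£"}))
    # b'{"a":"\xc2\xa3","b":1}'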


def sign_json(json_object, signing_key, signing_name):
@@ -88,6 +88,7 @@ def sign_json(json_object, signing_key, signing_name):

NACL_ED25519 = "ed25519"


def decode_signing_key_base64(algorithm, version, key_base64):
"""Decode a base64 encoded signing key
Args:
@@ -143,14 +144,12 @@ def request_json(method, origin_name, origin_key, destination, path, content):
authorization_headers = []

for key, sig in signed_json["signatures"][origin_name].items():
header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
origin_name, key, sig,
)
header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
authorization_headers.append(bytes(header))
print ("Authorization: %s" % header, file=sys.stderr)
print("Authorization: %s" % header, file=sys.stderr)

dest = "matrix://%s%s" % (destination, path)
print ("Requesting %s" % dest, file=sys.stderr)
print("Requesting %s" % dest, file=sys.stderr)

s = requests.Session()
s.mount("matrix://", MatrixConnectionAdapter())
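On the receiving side, a homeserver checks this X-Matrix header by rebuilding the canonical JSON of the request (with the "signatures" key removed) and verifying the ed25519 signature against the origin's published key. A rough sketch with PyNaCl, assuming verify_key_bytes was obtained from the origin's published server keys; the helper name is illustrative, and decode_base64 is this script's own unpadded decoder:

    import nacl.exceptions
    import nacl.signing

    def check_x_matrix_signature(verify_key_bytes, signed_bytes, sig_base64):
        # signed_bytes: the canonical JSON of the request, minus "signatures"
        verify_key = nacl.signing.VerifyKey(verify_key_bytes)
        try:
            verify_key.verify(signed_bytes, decode_base64(sig_base64))
            return True
        except nacl.exceptions.BadSignatureError:
            return False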
@@ -158,10 +157,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
result = s.request(
method=method,
url=dest,
headers={
"Host": destination,
"Authorization": authorization_headers[0]
},
headers={"Host": destination, "Authorization": authorization_headers[0]},
verify=False,
data=content,
)
@@ -171,50 +167,50 @@ def request_json(method, origin_name, origin_key, destination, path, content):

def main():
parser = argparse.ArgumentParser(
description=
"Signs and sends a federation request to a matrix homeserver",
description="Signs and sends a federation request to a matrix homeserver"
)

parser.add_argument(
"-N", "--server-name",
"-N",
"--server-name",
help="Name to give as the local homeserver. If unspecified, will be "
"read from the config file.",
"read from the config file.",
)

parser.add_argument(
"-k", "--signing-key-path",
"-k",
"--signing-key-path",
help="Path to the file containing the private ed25519 key to sign the "
"request with.",
"request with.",
)

parser.add_argument(
"-c", "--config",
"-c",
"--config",
default="homeserver.yaml",
help="Path to server config file. Ignored if --server-name and "
"--signing-key-path are both given.",
"--signing-key-path are both given.",
)

parser.add_argument(
"-d", "--destination",
"-d",
"--destination",
default="matrix.org",
help="name of the remote homeserver. We will do SRV lookups and "
"connect appropriately.",
"connect appropriately.",
)

parser.add_argument(
"-X", "--method",
"-X",
"--method",
help="HTTP method to use for the request. Defaults to GET if --data is"
"unspecified, POST if it is."
"unspecified, POST if it is.",
)

parser.add_argument(
"--body",
help="Data to send as the body of the HTTP request"
)
parser.add_argument("--body", help="Data to send as the body of the HTTP request")

parser.add_argument(
"path",
help="request path. We will add '/_matrix/federation/v1/' to this."
"path", help="request path. We will add '/_matrix/federation/v1/' to this."
)

args = parser.parse_args()
@@ -227,13 +223,15 @@ def main():

result = request_json(
args.method,
args.server_name, key, args.destination,
args.server_name,
key,
args.destination,
"/_matrix/federation/v1/" + args.path,
content=args.body,
)

json.dump(result, sys.stdout)
print ("")
print("")


def read_args_from_config(args):
@@ -253,7 +251,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
return s, 8448

if ":" in s:
out = s.rsplit(":",1)
out = s.rsplit(":", 1)
try:
port = int(out[1])
except ValueError:
@@ -263,7 +261,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
try:
srv = srvlookup.lookup("matrix", "tcp", s)[0]
return srv.host, srv.port
except:
except Exception:
return s, 8448

def get_connection(self, url, proxies=None):
@@ -272,10 +270,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
(host, port) = self.lookup(parsed.netloc)
netloc = "%s:%d" % (host, port)
print("Connecting to %s" % (netloc,), file=sys.stderr)
url = urlunparse((
"https", netloc, parsed.path, parsed.params, parsed.query,
parsed.fragment,
))
url = urlunparse(
("https", netloc, parsed.path, parsed.params, parsed.query, parsed.fragment)
)
return super(MatrixConnectionAdapter, self).get_connection(url, proxies)
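The adapter is what lets plain requests speak the matrix:// pseudo-scheme: lookup() resolves a server name to (host, port) by trying an explicit port, then the matrix SRV record, then the default federation port 8448. A standalone sketch of that resolution order (srvlookup raises when no record exists, hence the broad fallback; minor details such as IPv6 literals are glossed over):

    import srvlookup

    def resolve_matrix_host(server_name, default_port=8448):
        # 1. An explicit port wins: "example.org:1234"
        if ":" in server_name:
            host, _, port = server_name.rpartition(":")
            try:
                return host, int(port)
            except ValueError:
                pass
        # 2. Otherwise try the _matrix._tcp SRV record
        try:
            srv = srvlookup.lookup("matrix", "tcp", server_name)[0]
            return srv.host, srv.port
        except Exception:
            # 3. No SRV record: fall back to the well-known federation port
            return server_name, default_port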
@@ -1,23 +1,31 @@
from synapse.storage.pdu import PduStore
from synapse.storage.signatures import SignatureStore
from synapse.storage._base import SQLBaseStore
from synapse.federation.units import Pdu
from synapse.crypto.event_signing import (
add_event_pdu_content_hash, compute_pdu_event_reference_hash
)
from synapse.api.events.utils import prune_pdu
from unpaddedbase64 import encode_base64, decode_base64
from canonicaljson import encode_canonical_json
from __future__ import print_function

import sqlite3
import sys

from unpaddedbase64 import decode_base64, encode_base64

from synapse.crypto.event_signing import (
add_event_pdu_content_hash,
compute_pdu_event_reference_hash,
)
from synapse.federation.units import Pdu
from synapse.storage._base import SQLBaseStore
from synapse.storage.pdu import PduStore
from synapse.storage.signatures import SignatureStore


class Store(object):
_get_pdu_tuples = PduStore.__dict__["_get_pdu_tuples"]
_get_pdu_content_hashes_txn = SignatureStore.__dict__["_get_pdu_content_hashes_txn"]
_get_prev_pdu_hashes_txn = SignatureStore.__dict__["_get_prev_pdu_hashes_txn"]
_get_pdu_origin_signatures_txn = SignatureStore.__dict__["_get_pdu_origin_signatures_txn"]
_get_pdu_origin_signatures_txn = SignatureStore.__dict__[
"_get_pdu_origin_signatures_txn"
]
_store_pdu_content_hash_txn = SignatureStore.__dict__["_store_pdu_content_hash_txn"]
_store_pdu_reference_hash_txn = SignatureStore.__dict__["_store_pdu_reference_hash_txn"]
_store_pdu_reference_hash_txn = SignatureStore.__dict__[
"_store_pdu_reference_hash_txn"
]
_store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
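This Store class borrows individual methods off other classes without inheriting from them: pulling a function out of a class's __dict__ yields the plain function object (not a bound or, on Python 2, unbound method), so assigning it into another class body works exactly like a normal method definition. A toy sketch of the trick:

    class A(object):
        def greet(self):
            return "hello from %s" % type(self).__name__

    class B(object):
        # A.__dict__["greet"] is the raw function, so B adopts it as its own
        # method; plain A.greet would be an unbound method on Python 2.
        greet = A.__dict__["greet"]

    print(B().greet())  # "hello from B"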
@@ -26,9 +34,7 @@ store = Store()


def select_pdus(cursor):
cursor.execute(
"SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
)
cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC")

ids = cursor.fetchall()

@@ -41,23 +47,30 @@ def select_pdus(cursor):
for pdu in pdus:
try:
if pdu.prev_pdus:
print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
print("PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
for pdu_id, origin, hashes in pdu.prev_pdus:
ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
hashes[ref_alg] = encode_base64(ref_hsh)
store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh)
print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
store._store_prev_pdu_hash_txn(
cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh
)
print("SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus)
pdu = add_event_pdu_content_hash(pdu)
ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh)
store._store_pdu_reference_hash_txn(
cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
)

for alg, hsh_base64 in pdu.hashes.items():
print alg, hsh_base64
store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64))
print(alg, hsh_base64)
store._store_pdu_content_hash_txn(
cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)
)

except Exception:
print("FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus)

except:
print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus

def main():
conn = sqlite3.connect(sys.argv[1])
@@ -65,5 +78,6 @@ def main():
select_pdus(cursor)
conn.commit()

if __name__=='__main__':

if __name__ == '__main__':
main()
@@ -1,18 +1,17 @@
#! /usr/bin/python

import ast
import argparse
import ast
import os
import sys

import yaml

PATTERNS_V1 = []
PATTERNS_V2 = []

RESULT = {
"v1": PATTERNS_V1,
"v2": PATTERNS_V2,
}
RESULT = {"v1": PATTERNS_V1, "v2": PATTERNS_V2}


class CallVisitor(ast.NodeVisitor):
def visit_Call(self, node):
@@ -21,7 +20,6 @@ class CallVisitor(ast.NodeVisitor):
else:
return


if name == "client_path_patterns":
PATTERNS_V1.append(node.args[0].s)
elif name == "client_v2_patterns":
@@ -42,8 +40,10 @@ def find_patterns_in_file(filepath):
parser = argparse.ArgumentParser(description='Find url patterns.')

parser.add_argument(
"directories", nargs='+', metavar="DIR",
help="Directories to search for definitions"
"directories",
nargs='+',
metavar="DIR",
help="Directories to search for definitions",
)

args = parser.parse_args()
scripts-dev/next_github_number.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
#!/bin/bash

set -e

# Fetch the current GitHub issue number, add one to it -- presto! The likely
# next PR number.
CURRENT_NUMBER=`curl -s "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1" | jq -r ".[0].number"`
CURRENT_NUMBER=$((CURRENT_NUMBER+1))
echo $CURRENT_NUMBER
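The same guess can be made without curl and jq; a Python sketch against the public GitHub API (unauthenticated, so subject to rate limits; issues and PRs share one number sequence, which is why this works):

    import requests

    def next_github_number(repo="matrix-org/synapse"):
        # The newest issue-or-PR number, plus one, is the likely next PR number.
        resp = requests.get(
            "https://api.github.com/repos/%s/issues" % repo,
            params={"state": "all", "per_page": 1},
        )
        resp.raise_for_status()
        return resp.json()[0]["number"] + 1

    print(next_github_number())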
@@ -1,57 +0,0 @@
#!/bin/bash

## CAUTION:
## This script will remove (hopefully) all trace of the given room ID from
## your homeserver.db

## Do not run it lightly.

set -e

if [ "$1" == "-h" ] || [ "$1" == "" ]; then
echo "Call with ROOM_ID as first option and then pipe it into the database. So for instance you might run"
echo " nuke-room-from-db.sh <room_id> | sqlite3 homeserver.db"
echo "or"
echo " nuke-room-from-db.sh <room_id> | psql --dbname=synapse"
exit
fi

ROOMID="$1"

cat <<EOF
DELETE FROM event_forward_extremities WHERE room_id = '$ROOMID';
DELETE FROM event_backward_extremities WHERE room_id = '$ROOMID';
DELETE FROM event_edges WHERE room_id = '$ROOMID';
DELETE FROM room_depth WHERE room_id = '$ROOMID';
DELETE FROM state_forward_extremities WHERE room_id = '$ROOMID';
DELETE FROM events WHERE room_id = '$ROOMID';
DELETE FROM event_json WHERE room_id = '$ROOMID';
DELETE FROM state_events WHERE room_id = '$ROOMID';
DELETE FROM current_state_events WHERE room_id = '$ROOMID';
DELETE FROM room_memberships WHERE room_id = '$ROOMID';
DELETE FROM feedback WHERE room_id = '$ROOMID';
DELETE FROM topics WHERE room_id = '$ROOMID';
DELETE FROM room_names WHERE room_id = '$ROOMID';
DELETE FROM rooms WHERE room_id = '$ROOMID';
DELETE FROM room_hosts WHERE room_id = '$ROOMID';
DELETE FROM room_aliases WHERE room_id = '$ROOMID';
DELETE FROM state_groups WHERE room_id = '$ROOMID';
DELETE FROM state_groups_state WHERE room_id = '$ROOMID';
DELETE FROM receipts_graph WHERE room_id = '$ROOMID';
DELETE FROM receipts_linearized WHERE room_id = '$ROOMID';
DELETE FROM event_search WHERE room_id = '$ROOMID';
DELETE FROM guest_access WHERE room_id = '$ROOMID';
DELETE FROM history_visibility WHERE room_id = '$ROOMID';
DELETE FROM room_tags WHERE room_id = '$ROOMID';
DELETE FROM room_tags_revisions WHERE room_id = '$ROOMID';
DELETE FROM room_account_data WHERE room_id = '$ROOMID';
DELETE FROM event_push_actions WHERE room_id = '$ROOMID';
DELETE FROM local_invites WHERE room_id = '$ROOMID';
DELETE FROM pusher_throttle WHERE room_id = '$ROOMID';
DELETE FROM event_reports WHERE room_id = '$ROOMID';
DELETE FROM public_room_list_stream WHERE room_id = '$ROOMID';
DELETE FROM stream_ordering_to_exterm WHERE room_id = '$ROOMID';
DELETE FROM event_auth WHERE room_id = '$ROOMID';
DELETE FROM appservice_room_list WHERE room_id = '$ROOMID';
VACUUM;
EOF
@@ -1,8 +1,9 @@
import requests
import collections
import json
import sys
import time
import json

import requests

Entry = collections.namedtuple("Entry", "name position rows")

@@ -30,11 +31,11 @@ def parse_response(content):


def replicate(server, streams):
return parse_response(requests.get(
server + "/_synapse/replication",
verify=False,
params=streams
).content)
return parse_response(
requests.get(
server + "/_synapse/replication", verify=False, params=streams
).content
)


def main():
@@ -45,16 +46,16 @@ def main():
try:
streams = {
row.name: row.position
for row in replicate(server, {"streams":"-1"})["streams"].rows
for row in replicate(server, {"streams": "-1"})["streams"].rows
}
except requests.exceptions.ConnectionError as e:
except requests.exceptions.ConnectionError:
time.sleep(0.1)

while True:
try:
results = replicate(server, streams)
except:
sys.stdout.write("connection_lost("+ repr(streams) + ")\n")
except Exception:
sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
break
for update in results.values():
for row in update.rows:
@@ -62,6 +63,5 @@ def main():
streams[update.name] = update.position


if __name__=='__main__':
if __name__ == '__main__':
main()
@@ -1,12 +1,10 @@
#!/usr/bin/env python

import argparse

import getpass
import sys

import bcrypt
import getpass

import yaml

bcrypt_rounds=12
@@ -52,4 +50,3 @@ if __name__ == "__main__":
password = prompt_for_pass()

print bcrypt.hashpw(password + password_pepper, bcrypt.gensalt(bcrypt_rounds))
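Note the final line is still a Python 2 print with str concatenation; under Python 3 bcrypt wants bytes. A sketch of the equivalent, with password_pepper assumed to come from the homeserver config:

    import bcrypt

    bcrypt_rounds = 12
    password_pepper = ""  # assumed to be read from the homeserver config

    def hash_password(password):
        # bcrypt operates on bytes in Python 3, so encode before hashing
        hashed = bcrypt.hashpw(
            (password + password_pepper).encode("utf8"),
            bcrypt.gensalt(bcrypt_rounds),
        )
        return hashed.decode("ascii")

    print(hash_password("correct horse battery staple"))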
@@ -36,12 +36,9 @@ from __future__ import print_function

import argparse
import logging

import sys

import os

import shutil
import sys

from synapse.rest.media.v1.filepath import MediaFilePaths

@@ -77,24 +74,23 @@ def move_media(origin_server, file_id, src_paths, dest_paths):
if not os.path.exists(original_file):
logger.warn(
"Original for %s/%s (%s) does not exist",
origin_server, file_id, original_file,
origin_server,
file_id,
original_file,
)
else:
mkdir_and_move(
original_file,
dest_paths.remote_media_filepath(origin_server, file_id),
original_file, dest_paths.remote_media_filepath(origin_server, file_id)
)

# now look for thumbnails
original_thumb_dir = src_paths.remote_media_thumbnail_dir(
origin_server, file_id,
)
original_thumb_dir = src_paths.remote_media_thumbnail_dir(origin_server, file_id)
if not os.path.exists(original_thumb_dir):
return

mkdir_and_move(
original_thumb_dir,
dest_paths.remote_media_thumbnail_dir(origin_server, file_id)
dest_paths.remote_media_thumbnail_dir(origin_server, file_id),
)


@@ -109,24 +105,16 @@ def mkdir_and_move(original_file, dest_file):

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class = argparse.RawDescriptionHelpFormatter,
)
parser.add_argument(
"-v", action='store_true', help='enable debug logging')
parser.add_argument(
"src_repo",
help="Path to source content repo",
)
parser.add_argument(
"dest_repo",
help="Path to source content repo",
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("-v", action='store_true', help='enable debug logging')
parser.add_argument("src_repo", help="Path to source content repo")
parser.add_argument("dest_repo", help="Path to source content repo")
args = parser.parse_args()

logging_config = {
"level": logging.DEBUG if args.v else logging.INFO,
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
}
logging.basicConfig(**logging_config)
@@ -14,161 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import getpass
import hashlib
import hmac
import json
import sys
import urllib2
import yaml


def request_registration(user, password, server_location, shared_secret, admin=False):
mac = hmac.new(
key=shared_secret,
digestmod=hashlib.sha1,
)

mac.update(user)
mac.update("\x00")
mac.update(password)
mac.update("\x00")
mac.update("admin" if admin else "notadmin")

mac = mac.hexdigest()

data = {
"user": user,
"password": password,
"mac": mac,
"type": "org.matrix.login.shared_secret",
"admin": admin,
}

server_location = server_location.rstrip("/")

print "Sending registration request..."

req = urllib2.Request(
"%s/_matrix/client/api/v1/register" % (server_location,),
data=json.dumps(data),
headers={'Content-Type': 'application/json'}
)
try:
if sys.version_info[:3] >= (2, 7, 9):
# As of version 2.7.9, urllib2 now checks SSL certs
import ssl
f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
else:
f = urllib2.urlopen(req)
f.read()
f.close()
print "Success."
except urllib2.HTTPError as e:
print "ERROR! Received %d %s" % (e.code, e.reason,)
if 400 <= e.code < 500:
if e.info().type == "application/json":
resp = json.load(e)
if "error" in resp:
print resp["error"]
sys.exit(1)


def register_new_user(user, password, server_location, shared_secret, admin):
if not user:
try:
default_user = getpass.getuser()
except:
default_user = None

if default_user:
user = raw_input("New user localpart [%s]: " % (default_user,))
if not user:
user = default_user
else:
user = raw_input("New user localpart: ")

if not user:
print "Invalid user name"
sys.exit(1)

if not password:
password = getpass.getpass("Password: ")

if not password:
print "Password cannot be blank."
sys.exit(1)

confirm_password = getpass.getpass("Confirm password: ")

if password != confirm_password:
print "Passwords do not match"
sys.exit(1)

if not admin:
admin = raw_input("Make admin [no]: ")
if admin in ("y", "yes", "true"):
admin = True
else:
admin = False

request_registration(user, password, server_location, shared_secret, bool(admin))

from synapse._scripts.register_new_matrix_user import main

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Used to register new users with a given home server when"
" registration has been disabled. The home server must be"
" configured with the 'registration_shared_secret' option"
" set.",
)
parser.add_argument(
"-u", "--user",
default=None,
help="Local part of the new user. Will prompt if omitted.",
)
parser.add_argument(
"-p", "--password",
default=None,
help="New password for user. Will prompt if omitted.",
)
parser.add_argument(
"-a", "--admin",
action="store_true",
help="Register new user as an admin. Will prompt if omitted.",
)

group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
"-c", "--config",
type=argparse.FileType('r'),
help="Path to server config file. Used to read in shared secret.",
)

group.add_argument(
"-k", "--shared-secret",
help="Shared secret as defined in server config file.",
)

parser.add_argument(
"server_url",
default="https://localhost:8448",
nargs='?',
help="URL to use to talk to the home server. Defaults to "
" 'https://localhost:8448'.",
)

args = parser.parse_args()

if "config" in args and args.config:
config = yaml.safe_load(args.config)
secret = config.get("registration_shared_secret", None)
if not secret:
print "No 'registration_shared_secret' defined in config."
sys.exit(1)
else:
secret = args.shared_secret

register_new_user(args.user, args.password, args.server_url, secret, args.admin)
main()
@@ -15,23 +15,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from twisted.internet import defer, reactor
from twisted.enterprise import adbapi

from synapse.storage._base import LoggingTransaction, SQLBaseStore
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database

import argparse
import curses
import logging
import sys
import time
import traceback
import yaml

from six import string_types

import yaml

from twisted.enterprise import adbapi
from twisted.internet import defer, reactor

from synapse.storage._base import LoggingTransaction, SQLBaseStore
from synapse.storage.engines import create_engine
from synapse.storage.prepare_database import prepare_database

logger = logging.getLogger("synapse_port_db")

@@ -105,6 +105,7 @@ class Store(object):

*All* database interactions should go through this object.
"""

def __init__(self, db_pool, engine):
self.db_pool = db_pool
self.database_engine = engine
@@ -135,7 +136,8 @@ class Store(object):
txn = conn.cursor()
return func(
LoggingTransaction(txn, desc, self.database_engine, [], []),
*args, **kwargs
*args,
**kwargs
)
except self.database_engine.module.DatabaseError as e:
if self.database_engine.is_deadlock(e):
@@ -158,22 +160,20 @@ class Store(object):
def r(txn):
txn.execute(sql, args)
return txn.fetchall()

return self.runInteraction("execute_sql", r)

def insert_many_txn(self, txn, table, headers, rows):
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
table,
", ".join(k for k in headers),
", ".join("%s" for _ in headers)
", ".join("%s" for _ in headers),
)

try:
txn.executemany(sql, rows)
except:
logger.exception(
"Failed to insert: %s",
table,
)
except Exception:
logger.exception("Failed to insert: %s", table)
raise


@@ -206,7 +206,7 @@ class Porter(object):
"table_name": table,
"forward_rowid": 1,
"backward_rowid": 0,
}
},
)

forward_chunk = 1
@@ -221,10 +221,10 @@ class Porter(object):
table, forward_chunk, backward_chunk
)
else:

def delete_all(txn):
txn.execute(
"DELETE FROM port_from_sqlite3 WHERE table_name = %s",
(table,)
"DELETE FROM port_from_sqlite3 WHERE table_name = %s", (table,)
)
txn.execute("TRUNCATE %s CASCADE" % (table,))

@@ -232,11 +232,7 @@ class Porter(object):

yield self.postgres_store._simple_insert(
table="port_from_sqlite3",
values={
"table_name": table,
"forward_rowid": 1,
"backward_rowid": 0,
}
values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
)

forward_chunk = 1
@@ -251,12 +247,16 @@ class Porter(object):
)

@defer.inlineCallbacks
def handle_table(self, table, postgres_size, table_size, forward_chunk,
backward_chunk):
def handle_table(
self, table, postgres_size, table_size, forward_chunk, backward_chunk
):
logger.info(
"Table %s: %i/%i (rows %i-%i) already ported",
table, postgres_size, table_size,
backward_chunk+1, forward_chunk-1,
table,
postgres_size,
table_size,
backward_chunk + 1,
forward_chunk - 1,
)

if not table_size:
@@ -271,7 +271,9 @@ class Porter(object):
return

if table in (
"user_directory", "user_directory_search", "users_who_share_rooms",
"user_directory",
"user_directory_search",
"users_who_share_rooms",
"users_in_pubic_room",
):
# We don't port these tables, as they're a faff and we can regenerate
@@ -283,37 +285,35 @@ class Porter(object):
# We need to make sure there is a single row, `(X, null), as that is
# what synapse expects to be there.
yield self.postgres_store._simple_insert(
table=table,
values={"stream_id": None},
table=table, values={"stream_id": None}
)
self.progress.update(table, table_size)  # Mark table as done
return

forward_select = (
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
% (table,)
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?" % (table,)
)

backward_select = (
"SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?"
% (table,)
"SELECT rowid, * FROM %s WHERE rowid <= ? ORDER BY rowid LIMIT ?" % (table,)
)

do_forward = [True]
do_backward = [True]

while True:

def r(txn):
forward_rows = []
backward_rows = []
if do_forward[0]:
txn.execute(forward_select, (forward_chunk, self.batch_size,))
txn.execute(forward_select, (forward_chunk, self.batch_size))
forward_rows = txn.fetchall()
if not forward_rows:
do_forward[0] = False

if do_backward[0]:
txn.execute(backward_select, (backward_chunk, self.batch_size,))
txn.execute(backward_select, (backward_chunk, self.batch_size))
backward_rows = txn.fetchall()
if not backward_rows:
do_backward[0] = False
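handle_table walks each table in two directions at once: forward_chunk scans up from the resume point while backward_chunk scans down, so rows written below the checkpoint by an earlier run still get picked up. A minimal sketch of the same two-cursor pagination over rowids; the fetch helpers are assumed, not Synapse's own:

    def iter_chunks(fetch_forward, fetch_backward, forward_chunk=1, backward_chunk=0):
        # fetch_forward(from_rowid) / fetch_backward(to_rowid) are assumed helpers
        # returning at most one batch of (rowid, ...) tuples per call.
        do_forward, do_backward = True, True
        while do_forward or do_backward:
            if do_forward:
                rows = fetch_forward(forward_chunk)
                if rows:
                    forward_chunk = max(r[0] for r in rows) + 1
                    yield rows
                else:
                    do_forward = False
            if do_backward:
                rows = fetch_backward(backward_chunk)
                if rows:
                    backward_chunk = min(r[0] for r in rows) - 1
                    yield rows
                else:
                    do_backward = False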
@@ -325,9 +325,7 @@ class Porter(object):

return headers, forward_rows, backward_rows

headers, frows, brows = yield self.sqlite_store.runInteraction(
"select", r
)
headers, frows, brows = yield self.sqlite_store.runInteraction("select", r)

if frows or brows:
if frows:
@@ -339,9 +337,7 @@ class Porter(object):
rows = self._convert_rows(table, headers, rows)

def insert(txn):
self.postgres_store.insert_many_txn(
txn, table, headers[1:], rows
)
self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)

self.postgres_store._simple_update_one_txn(
txn,
@@ -362,8 +358,9 @@ class Porter(object):
return

@defer.inlineCallbacks
def handle_search_table(self, postgres_size, table_size, forward_chunk,
backward_chunk):
def handle_search_table(
self, postgres_size, table_size, forward_chunk, backward_chunk
):
select = (
"SELECT es.rowid, es.*, e.origin_server_ts, e.stream_ordering"
" FROM event_search as es"
@@ -373,8 +370,9 @@ class Porter(object):
)

while True:

def r(txn):
txn.execute(select, (forward_chunk, self.batch_size,))
txn.execute(select, (forward_chunk, self.batch_size))
rows = txn.fetchall()
headers = [column[0] for column in txn.description]

@@ -402,18 +400,21 @@ class Porter(object):
else:
rows_dict.append(d)

txn.executemany(sql, [
(
row["event_id"],
row["room_id"],
row["key"],
row["sender"],
row["value"],
row["origin_server_ts"],
row["stream_ordering"],
)
for row in rows_dict
])
txn.executemany(
sql,
[
(
row["event_id"],
row["room_id"],
row["key"],
row["sender"],
row["value"],
row["origin_server_ts"],
row["stream_ordering"],
)
for row in rows_dict
],
)

self.postgres_store._simple_update_one_txn(
txn,
@@ -437,7 +438,8 @@ class Porter(object):
def setup_db(self, db_config, database_engine):
db_conn = database_engine.module.connect(
**{
k: v for k, v in db_config.get("args", {}).items()
k: v
for k, v in db_config.get("args", {}).items()
if not k.startswith("cp_")
}
)
@@ -450,13 +452,11 @@ class Porter(object):
def run(self):
try:
sqlite_db_pool = adbapi.ConnectionPool(
self.sqlite_config["name"],
**self.sqlite_config["args"]
self.sqlite_config["name"], **self.sqlite_config["args"]
)

postgres_db_pool = adbapi.ConnectionPool(
self.postgres_config["name"],
**self.postgres_config["args"]
self.postgres_config["name"], **self.postgres_config["args"]
)

sqlite_engine = create_engine(sqlite_config)
@@ -465,9 +465,7 @@ class Porter(object):
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
self.postgres_store = Store(postgres_db_pool, postgres_engine)

yield self.postgres_store.execute(
postgres_engine.check_database
)
yield self.postgres_store.execute(postgres_engine.check_database)

# Step 1. Set up databases.
self.progress.set_state("Preparing SQLite3")
@@ -477,6 +475,7 @@ class Porter(object):
self.setup_db(postgres_config, postgres_engine)

self.progress.set_state("Creating port tables")

def create_port_table(txn):
txn.execute(
"CREATE TABLE IF NOT EXISTS port_from_sqlite3 ("
@@ -501,10 +500,9 @@ class Porter(object):
)

try:
yield self.postgres_store.runInteraction(
"alter_table", alter_table
)
except Exception as e:
yield self.postgres_store.runInteraction("alter_table", alter_table)
except Exception:
# On Error Resume Next
pass

yield self.postgres_store.runInteraction(
@@ -514,11 +512,7 @@ class Porter(object):
# Step 2. Get tables.
self.progress.set_state("Fetching tables")
sqlite_tables = yield self.sqlite_store._simple_select_onecol(
table="sqlite_master",
keyvalues={
"type": "table",
},
retcol="name",
table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
)

postgres_tables = yield self.postgres_store._simple_select_onecol(
@@ -545,18 +539,14 @@ class Porter(object):
# Step 4. Do the copying.
self.progress.set_state("Copying to postgres")
yield defer.gatherResults(
[
self.handle_table(*res)
for res in setup_res
],
consumeErrors=True,
[self.handle_table(*res) for res in setup_res], consumeErrors=True
)

# Step 5. Do final post-processing
yield self._setup_state_group_id_seq()

self.progress.done()
except:
except Exception:
global end_error_exec_info
end_error_exec_info = sys.exc_info()
logger.exception("")
@@ -566,9 +556,7 @@ class Porter(object):
def _convert_rows(self, table, headers, rows):
bool_col_names = BOOLEAN_COLUMNS.get(table, [])

bool_cols = [
i for i, h in enumerate(headers) if h in bool_col_names
]
bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]

class BadValueException(Exception):
pass
@@ -577,18 +565,21 @@ class Porter(object):
if j in bool_cols:
return bool(col)
elif isinstance(col, string_types) and "\0" in col:
logger.warn("DROPPING ROW: NUL value in table %s col %s: %r", table, headers[j], col)
raise BadValueException();
logger.warn(
"DROPPING ROW: NUL value in table %s col %s: %r",
table,
headers[j],
col,
)
raise BadValueException()
return col

outrows = []
for i, row in enumerate(rows):
try:
outrows.append(tuple(
conv(j, col)
for j, col in enumerate(row)
if j > 0
))
outrows.append(
tuple(conv(j, col) for j, col in enumerate(row) if j > 0)
)
except BadValueException:
pass

@@ -616,9 +607,7 @@ class Porter(object):

return headers, [r for r in rows if r[ts_ind] < yesterday]

headers, rows = yield self.sqlite_store.runInteraction(
"select", r,
)
headers, rows = yield self.sqlite_store.runInteraction("select", r)

rows = self._convert_rows("sent_transactions", headers, rows)

@@ -639,7 +628,7 @@ class Porter(object):
txn.execute(
"SELECT rowid FROM sent_transactions WHERE ts >= ?"
" ORDER BY rowid ASC LIMIT 1",
(yesterday,)
(yesterday,),
)

rows = txn.fetchall()
@@ -657,21 +646,17 @@ class Porter(object):
"table_name": "sent_transactions",
"forward_rowid": next_chunk,
"backward_rowid": 0,
}
},
)

def get_sent_table_size(txn):
txn.execute(
"SELECT count(*) FROM sent_transactions"
" WHERE ts >= ?",
(yesterday,)
"SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
)
size, = txn.fetchone()
return int(size)

remaining_count = yield self.sqlite_store.execute(
get_sent_table_size
)
remaining_count = yield self.sqlite_store.execute(get_sent_table_size)

total_count = remaining_count + inserted_rows

@@ -680,13 +665,11 @@ class Porter(object):
@defer.inlineCallbacks
def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
frows = yield self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,),
forward_chunk,
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
)

brows = yield self.sqlite_store.execute_sql(
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,),
backward_chunk,
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
)

defer.returnValue(frows[0][0] + brows[0][0])
@@ -694,7 +677,7 @@ class Porter(object):
@defer.inlineCallbacks
def _get_already_ported_count(self, table):
rows = yield self.postgres_store.execute_sql(
"SELECT count(*) FROM %s" % (table,),
"SELECT count(*) FROM %s" % (table,)
)

defer.returnValue(rows[0][0])
@@ -717,22 +700,21 @@ class Porter(object):
def _setup_state_group_id_seq(self):
def r(txn):
txn.execute("SELECT MAX(id) FROM state_groups")
next_id = txn.fetchone()[0]+1
txn.execute(
"ALTER SEQUENCE state_group_id_seq RESTART WITH %s",
(next_id,),
)
next_id = txn.fetchone()[0] + 1
txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))

return self.postgres_store.runInteraction("setup_state_group_id_seq", r)


##############################################
###### The following is simply UI stuff ######
# The following is simply UI stuff
##############################################


class Progress(object):
"""Used to report progress of the port
"""

def __init__(self):
self.tables = {}

@@ -758,6 +740,7 @@ class Progress(object):
class CursesProgress(Progress):
"""Reports progress to a curses window
"""

def __init__(self, stdscr):
self.stdscr = stdscr

@@ -801,7 +784,7 @@ class CursesProgress(Progress):
duration = int(now) - int(self.start_time)

minutes, seconds = divmod(duration, 60)
duration_str = '%02dm %02ds' % (minutes, seconds,)
duration_str = '%02dm %02ds' % (minutes, seconds)

if self.finished:
status = "Time spent: %s (Done!)" % (duration_str,)
@@ -814,16 +797,12 @@ class CursesProgress(Progress):
est_remaining_str = '%02dm %02ds remaining' % divmod(est_remaining, 60)
else:
est_remaining_str = "Unknown"
status = (
"Time spent: %s (est. remaining: %s)"
% (duration_str, est_remaining_str,)
status = "Time spent: %s (est. remaining: %s)" % (
duration_str,
est_remaining_str,
)

self.stdscr.addstr(
0, 0,
status,
curses.A_BOLD,
)
self.stdscr.addstr(0, 0, status, curses.A_BOLD)

max_len = max([len(t) for t in self.tables.keys()])

@@ -831,9 +810,7 @@ class CursesProgress(Progress):
middle_space = 1

items = self.tables.items()
items.sort(
key=lambda i: (i[1]["perc"], i[0]),
)
items.sort(key=lambda i: (i[1]["perc"], i[0]))

for i, (table, data) in enumerate(items):
if i + 2 >= rows:
@@ -844,9 +821,7 @@ class CursesProgress(Progress):
color = curses.color_pair(2) if perc == 100 else curses.color_pair(1)

self.stdscr.addstr(
i + 2, left_margin + max_len - len(table),
table,
curses.A_BOLD | color,
i + 2, left_margin + max_len - len(table), table, curses.A_BOLD | color
)

size = 20
@@ -857,15 +832,13 @@ class CursesProgress(Progress):
)

self.stdscr.addstr(
i + 2, left_margin + max_len + middle_space,
i + 2,
left_margin + max_len + middle_space,
"%s %3d%% (%d/%d)" % (progress, perc, data["num_done"], data["total"]),
)

if self.finished:
self.stdscr.addstr(
rows - 1, 0,
"Press any key to exit...",
)
self.stdscr.addstr(rows - 1, 0, "Press any key to exit...")

self.stdscr.refresh()
self.last_update = time.time()
@@ -877,29 +850,25 @@ class CursesProgress(Progress):

def set_state(self, state):
self.stdscr.clear()
self.stdscr.addstr(
0, 0,
state + "...",
curses.A_BOLD,
)
self.stdscr.addstr(0, 0, state + "...", curses.A_BOLD)
self.stdscr.refresh()


class TerminalProgress(Progress):
"""Just prints progress to the terminal
"""

def update(self, table, num_done):
super(TerminalProgress, self).update(table, num_done)

data = self.tables[table]

print "%s: %d%% (%d/%d)" % (
table, data["perc"],
data["num_done"], data["total"],
print(
"%s: %d%% (%d/%d)" % (table, data["perc"], data["num_done"], data["total"])
)

def set_state(self, state):
print state + "..."
print(state + "...")


##############################################
@@ -909,34 +878,38 @@ class TerminalProgress(Progress):
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="A script to port an existing synapse SQLite database to"
" a new PostgreSQL database."
" a new PostgreSQL database."
)
parser.add_argument("-v", action='store_true')
parser.add_argument(
"--sqlite-database", required=True,
"--sqlite-database",
required=True,
help="The snapshot of the SQLite database file. This must not be"
" currently used by a running synapse server"
" currently used by a running synapse server",
)
parser.add_argument(
"--postgres-config", type=argparse.FileType('r'), required=True,
help="The database config file for the PostgreSQL database"
"--postgres-config",
type=argparse.FileType('r'),
required=True,
help="The database config file for the PostgreSQL database",
)
parser.add_argument(
"--curses", action='store_true',
help="display a curses based progress UI"
"--curses", action='store_true', help="display a curses based progress UI"
)

parser.add_argument(
"--batch-size", type=int, default=1000,
"--batch-size",
type=int,
default=1000,
help="The number of rows to select from the SQLite table each"
" iteration [default=1000]",
" iteration [default=1000]",
)

args = parser.parse_args()

logging_config = {
"level": logging.DEBUG if args.v else logging.INFO,
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
}

if args.curses:
setup.cfg (24 changed lines)
@@ -16,6 +16,24 @@ ignore =

[flake8]
max-line-length = 90
# W503 requires that binary operators be at the end, not start, of lines. Erik doesn't like it.
# E203 is contrary to PEP8.
ignore = W503,E203

# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
ignore=W503,W504,E203,E731

[isort]
line_length = 89
not_skip = __init__.py
sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
default_section=THIRDPARTY
known_first_party = synapse
known_tests=tests
known_compat = mock,six
known_twisted=twisted,OpenSSL
multi_line_output=3
include_trailing_comma=true
combine_as_imports=true
setup.py (6 changed lines)
@@ -1,6 +1,8 @@
#!/usr/bin/env python

# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2014-2017 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2017-2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -86,7 +88,7 @@ setup(
name="matrix-synapse",
version=version,
packages=find_packages(exclude=["tests", "tests.*"]),
description="Reference Synapse Home Server",
description="Reference homeserver for the Matrix decentralised comms protocol",
install_requires=dependencies['requirements'](include_conditional=True).keys(),
dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
include_package_data=True,
@@ -17,4 +17,14 @@
""" This is a reference implementation of a Matrix home server.
"""

__version__ = "0.32.0"
try:
from twisted.internet import protocol
from twisted.internet.protocol import Factory
from twisted.names.dns import DNSDatagramProtocol
protocol.Factory.noisy = False
Factory.noisy = False
DNSDatagramProtocol.noisy = False
except ImportError:
pass

__version__ = "0.33.8"
synapse/_scripts/__init__.py (new file, 0 lines)
synapse/_scripts/register_new_matrix_user.py (new file, 215 lines)
@@ -0,0 +1,215 @@
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

import argparse
import getpass
import hashlib
import hmac
import logging
import sys

from six.moves import input

import requests as _requests
import yaml


def request_registration(
user,
password,
server_location,
shared_secret,
admin=False,
requests=_requests,
_print=print,
exit=sys.exit,
):

url = "%s/_matrix/client/r0/admin/register" % (server_location,)

# Get the nonce
r = requests.get(url, verify=False)
if r.status_code != 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
|
||||
if 400 <= r.status_code < 500:
|
||||
try:
|
||||
_print(r.json()["error"])
|
||||
except Exception:
|
||||
pass
|
||||
return exit(1)
|
||||
|
||||
nonce = r.json()["nonce"]
|
||||
|
||||
mac = hmac.new(key=shared_secret.encode('utf8'), digestmod=hashlib.sha1)
|
||||
|
||||
mac.update(nonce.encode('utf8'))
|
||||
mac.update(b"\x00")
|
||||
mac.update(user.encode('utf8'))
|
||||
mac.update(b"\x00")
|
||||
mac.update(password.encode('utf8'))
|
||||
mac.update(b"\x00")
|
||||
mac.update(b"admin" if admin else b"notadmin")
|
||||
|
||||
mac = mac.hexdigest()
|
||||
|
||||
data = {
|
||||
"nonce": nonce,
|
||||
"username": user,
|
||||
"password": password,
|
||||
"mac": mac,
|
||||
"admin": admin,
|
||||
}
|
||||
|
||||
_print("Sending registration request...")
|
||||
r = requests.post(url, json=data, verify=False)
|
||||
|
||||
if r.status_code != 200:
_print("ERROR! Received %d %s" % (r.status_code, r.reason))
|
||||
if 400 <= r.status_code < 500:
|
||||
try:
|
||||
_print(r.json()["error"])
|
||||
except Exception:
|
||||
pass
|
||||
return exit(1)
|
||||
|
||||
_print("Success!")
|
||||
|
||||
|
||||
def register_new_user(user, password, server_location, shared_secret, admin):
|
||||
if not user:
|
||||
try:
|
||||
default_user = getpass.getuser()
|
||||
except Exception:
|
||||
default_user = None
|
||||
|
||||
if default_user:
|
||||
user = input("New user localpart [%s]: " % (default_user,))
|
||||
if not user:
|
||||
user = default_user
|
||||
else:
|
||||
user = input("New user localpart: ")
|
||||
|
||||
if not user:
|
||||
print("Invalid user name")
|
||||
sys.exit(1)
|
||||
|
||||
if not password:
|
||||
password = getpass.getpass("Password: ")
|
||||
|
||||
if not password:
|
||||
print("Password cannot be blank.")
|
||||
sys.exit(1)
|
||||
|
||||
confirm_password = getpass.getpass("Confirm password: ")
|
||||
|
||||
if password != confirm_password:
|
||||
print("Passwords do not match")
|
||||
sys.exit(1)
|
||||
|
||||
if admin is None:
|
||||
admin = input("Make admin [no]: ")
|
||||
if admin in ("y", "yes", "true"):
|
||||
admin = True
|
||||
else:
|
||||
admin = False
|
||||
|
||||
request_registration(user, password, server_location, shared_secret, bool(admin))
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
logging.captureWarnings(True)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Used to register new users with a given home server when"
|
||||
" registration has been disabled. The home server must be"
|
||||
" configured with the 'registration_shared_secret' option"
|
||||
" set."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-u",
|
||||
"--user",
|
||||
default=None,
|
||||
help="Local part of the new user. Will prompt if omitted.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
"--password",
|
||||
default=None,
|
||||
help="New password for user. Will prompt if omitted.",
|
||||
)
|
||||
admin_group = parser.add_mutually_exclusive_group()
|
||||
admin_group.add_argument(
|
||||
"-a",
|
||||
"--admin",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Register new user as an admin. "
|
||||
"Will prompt if --no-admin is not set either."
|
||||
),
|
||||
)
|
||||
admin_group.add_argument(
|
||||
"--no-admin",
|
||||
action="store_true",
|
||||
help=(
|
||||
"Register new user as a regular user. "
|
||||
"Will prompt if --admin is not set either."
|
||||
),
|
||||
)
|
||||
|
||||
group = parser.add_mutually_exclusive_group(required=True)
|
||||
group.add_argument(
|
||||
"-c",
|
||||
"--config",
|
||||
type=argparse.FileType('r'),
|
||||
help="Path to server config file. Used to read in shared secret.",
|
||||
)
|
||||
|
||||
group.add_argument(
|
||||
"-k", "--shared-secret", help="Shared secret as defined in server config file."
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"server_url",
|
||||
default="https://localhost:8448",
|
||||
nargs='?',
|
||||
help="URL to use to talk to the home server. Defaults to "
|
||||
" 'https://localhost:8448'.",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if "config" in args and args.config:
|
||||
config = yaml.safe_load(args.config)
|
||||
secret = config.get("registration_shared_secret", None)
|
||||
if not secret:
|
||||
print("No 'registration_shared_secret' defined in config.")
|
||||
sys.exit(1)
|
||||
else:
|
||||
secret = args.shared_secret
|
||||
|
||||
admin = None
|
||||
if args.admin or args.no_admin:
|
||||
admin = args.admin
|
||||
|
||||
register_new_user(args.user, args.password, args.server_url, secret, admin)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
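The nonce-based MAC above is easy to reproduce when debugging registration failures. A standalone sketch of just the MAC computation, mirroring the code in this file (the input values are placeholders):

    import hashlib
    import hmac

    def registration_mac(shared_secret, nonce, user, password, admin=False):
        # HMAC-SHA1 over nonce, user and password, NUL-separated, plus the
        # admin flag, keyed with the homeserver's registration_shared_secret.
        mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
        mac.update(nonce.encode("utf8"))
        mac.update(b"\x00")
        mac.update(user.encode("utf8"))
        mac.update(b"\x00")
        mac.update(password.encode("utf8"))
        mac.update(b"\x00")
        mac.update(b"admin" if admin else b"notadmin")
        return mac.hexdigest()

    print(registration_mac("s3kr1t", "a-nonce", "alice", "wonderland"))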
@@ -18,15 +18,17 @@ import logging
 from six import itervalues

 import pymacaroons
-from twisted.internet import defer
+from netaddr import IPAddress
+
+from twisted.internet import defer

 import synapse.types
 from synapse import event_auth
-from synapse.api.constants import EventTypes, Membership, JoinRules
-from synapse.api.errors import AuthError, Codes
+from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.errors import AuthError, Codes, ResourceLimitError
+from synapse.config.server import is_threepid_reserved
 from synapse.types import UserID
-from synapse.util.caches import register_cache, CACHE_SIZE_FACTOR
+from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.metrics import Measure

@@ -64,8 +66,9 @@ class Auth(object):

     @defer.inlineCallbacks
     def check_from_context(self, event, context, do_sig_check=True):
+        prev_state_ids = yield context.get_prev_state_ids(self.store)
         auth_events_ids = yield self.compute_auth_events(
-            event, context.prev_state_ids, for_verification=True,
+            event, prev_state_ids, for_verification=True,
         )
         auth_events = yield self.store.get_events(auth_events_ids)
         auth_events = {
@@ -192,7 +195,7 @@ class Auth(object):
                 synapse.types.create_requester(user_id, app_service=app_service)
             )

-        access_token = get_access_token_from_request(
+        access_token = self.get_access_token_from_request(
             request, self.TOKEN_NOT_FOUND_HTTP_STATUS
         )

@@ -209,9 +212,9 @@ class Auth(object):
             user_agent = request.requestHeaders.getRawHeaders(
                 b"User-Agent",
                 default=[b""]
-            )[0]
+            )[0].decode('ascii', 'surrogateescape')
             if user and access_token and ip_addr:
-                self.store.insert_client_ip(
+                yield self.store.insert_client_ip(
                     user_id=user.to_string(),
                     access_token=access_token,
                     ip=ip_addr,
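Editor's note: a short sketch of why the new `.decode('ascii', 'surrogateescape')` matters on Python 3 — Twisted hands back raw header values as bytes, and surrogateescape makes the decode total and losslessly reversible, so a client sending a non-ASCII User-Agent cannot crash the handler:

```python
# Header bytes with a stray non-ASCII byte, as a client might send them.
raw = b"Mozilla/5.0 (\xe2 broken bytes)"
user_agent = raw.decode('ascii', 'surrogateescape')

# The decode never raises, and round-trips exactly.
print(user_agent.encode('ascii', 'surrogateescape') == raw)  # True
```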
@@ -238,7 +241,7 @@ class Auth(object):
     @defer.inlineCallbacks
     def _get_appservice_user_id(self, request):
         app_service = self.store.get_app_service_by_token(
-            get_access_token_from_request(
+            self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
             )
         )
@@ -250,10 +253,10 @@ class Auth(object):
         if ip_address not in app_service.ip_range_whitelist:
             defer.returnValue((None, None))

-        if "user_id" not in request.args:
+        if b"user_id" not in request.args:
             defer.returnValue((app_service.sender, app_service))

-        user_id = request.args["user_id"][0]
+        user_id = request.args[b"user_id"][0].decode('utf8')
         if app_service.sender == user_id:
             defer.returnValue((app_service.sender, app_service))

@@ -512,7 +515,7 @@ class Auth(object):

     def get_appservice_by_req(self, request):
         try:
-            token = get_access_token_from_request(
+            token = self.get_access_token_from_request(
                 request, self.TOKEN_NOT_FOUND_HTTP_STATUS
             )
             service = self.store.get_app_service_by_token(token)
@@ -543,7 +546,8 @@ class Auth(object):

     @defer.inlineCallbacks
     def add_auth_events(self, builder, context):
-        auth_ids = yield self.compute_auth_events(builder, context.prev_state_ids)
+        prev_state_ids = yield context.get_prev_state_ids(self.store)
+        auth_ids = yield self.compute_auth_events(builder, prev_state_ids)

         auth_events_entries = yield self.store.add_event_hashes(
             auth_ids
@@ -672,67 +676,156 @@ class Auth(object):
                 " edit its room list entry"
             )

-
-def has_access_token(request):
-    """Checks if the request has an access_token.
-
-    Returns:
-        bool: False if no access_token was given, True otherwise.
-    """
-    query_params = request.args.get("access_token")
-    auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
-    return bool(query_params) or bool(auth_headers)
-
-
-def get_access_token_from_request(request, token_not_found_http_status=401):
-    """Extracts the access_token from the request.
-
-    Args:
-        request: The http request.
-        token_not_found_http_status(int): The HTTP status code to set in the
-            AuthError if the token isn't found. This is used in some of the
-            legacy APIs to change the status code to 403 from the default of
-            401 since some of the old clients depended on auth errors returning
-            403.
-    Returns:
-        str: The access_token
-    Raises:
-        AuthError: If there isn't an access_token in the request.
-    """
-
-    auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
-    query_params = request.args.get(b"access_token")
-    if auth_headers:
-        # Try the get the access_token from a "Authorization: Bearer"
-        # header
-        if query_params is not None:
-            raise AuthError(
-                token_not_found_http_status,
-                "Mixing Authorization headers and access_token query parameters.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-        if len(auth_headers) > 1:
-            raise AuthError(
-                token_not_found_http_status,
-                "Too many Authorization headers.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-        parts = auth_headers[0].split(" ")
-        if parts[0] == "Bearer" and len(parts) == 2:
-            return parts[1]
-        else:
-            raise AuthError(
-                token_not_found_http_status,
-                "Invalid Authorization header.",
-                errcode=Codes.MISSING_TOKEN,
-            )
-    else:
-        # Try to get the access_token from the query params.
-        if not query_params:
-            raise AuthError(
-                token_not_found_http_status,
-                "Missing access token.",
-                errcode=Codes.MISSING_TOKEN
-            )
-
-        return query_params[0]
+    @staticmethod
+    def has_access_token(request):
+        """Checks if the request has an access_token.
+
+        Returns:
+            bool: False if no access_token was given, True otherwise.
+        """
+        query_params = request.args.get(b"access_token")
+        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
+        return bool(query_params) or bool(auth_headers)
+
+    @staticmethod
+    def get_access_token_from_request(request, token_not_found_http_status=401):
+        """Extracts the access_token from the request.
+
+        Args:
+            request: The http request.
+            token_not_found_http_status(int): The HTTP status code to set in the
+                AuthError if the token isn't found. This is used in some of the
+                legacy APIs to change the status code to 403 from the default of
+                401 since some of the old clients depended on auth errors returning
+                403.
+        Returns:
+            unicode: The access_token
+        Raises:
+            AuthError: If there isn't an access_token in the request.
+        """
+
+        auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
+        query_params = request.args.get(b"access_token")
+        if auth_headers:
+            # Try the get the access_token from a "Authorization: Bearer"
+            # header
+            if query_params is not None:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Mixing Authorization headers and access_token query parameters.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
+            if len(auth_headers) > 1:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Too many Authorization headers.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
+            parts = auth_headers[0].split(b" ")
+            if parts[0] == b"Bearer" and len(parts) == 2:
+                return parts[1].decode('ascii')
+            else:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Invalid Authorization header.",
+                    errcode=Codes.MISSING_TOKEN,
+                )
+        else:
+            # Try to get the access_token from the query params.
+            if not query_params:
+                raise AuthError(
+                    token_not_found_http_status,
+                    "Missing access token.",
+                    errcode=Codes.MISSING_TOKEN
+                )
+
+            return query_params[0].decode('ascii')
+
+    @defer.inlineCallbacks
+    def check_in_room_or_world_readable(self, room_id, user_id):
+        """Checks that the user is or was in the room or the room is world
+        readable. If it isn't then an exception is raised.
+
+        Returns:
+            Deferred[tuple[str, str|None]]: Resolves to the current membership of
+                the user in the room and the membership event ID of the user. If
+                the user is not in the room and never has been, then
+                `(Membership.JOIN, None)` is returned.
+        """
+
+        try:
+            # check_user_was_in_room will return the most recent membership
+            # event for the user if:
+            #  * The user is a non-guest user, and was ever in the room
+            #  * The user is a guest user, and has joined the room
+            # else it will throw.
+            member_event = yield self.check_user_was_in_room(room_id, user_id)
+            defer.returnValue((member_event.membership, member_event.event_id))
+        except AuthError:
+            visibility = yield self.state.get_current_state(
+                room_id, EventTypes.RoomHistoryVisibility, ""
+            )
+            if (
+                visibility and
+                visibility.content["history_visibility"] == "world_readable"
+            ):
+                defer.returnValue((Membership.JOIN, None))
+                return
+            raise AuthError(
+                403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
+            )
+
+    @defer.inlineCallbacks
+    def check_auth_blocking(self, user_id=None, threepid=None):
+        """Checks if the user should be rejected for some external reason,
+        such as monthly active user limiting or global disable flag
+
+        Args:
+            user_id(str|None): If present, checks for presence against existing
+                MAU cohort
+
+            threepid(dict|None): If present, checks for presence against configured
+                reserved threepid. Used in cases where the user is trying register
+                with a MAU blocked server, normally they would be rejected but their
+                threepid is on the reserved list. user_id and
+                threepid should never be set at the same time.
+        """
+
+        # Never fail an auth check for the server notices users
+        # This can be a problem where event creation is prohibited due to blocking
+        if user_id == self.hs.config.server_notices_mxid:
+            return
+
+        if self.hs.config.hs_disabled:
+            raise ResourceLimitError(
+                403, self.hs.config.hs_disabled_message,
+                errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+                admin_contact=self.hs.config.admin_contact,
+                limit_type=self.hs.config.hs_disabled_limit_type
+            )
+        if self.hs.config.limit_usage_by_mau is True:
+            assert not (user_id and threepid)
+
+            # If the user is already part of the MAU cohort or a trial user
+            if user_id:
+                timestamp = yield self.store.user_last_seen_monthly_active(user_id)
+                if timestamp:
+                    return
+
+                is_trial = yield self.store.is_trial_user(user_id)
+                if is_trial:
+                    return
+            elif threepid:
+                # If the user does not exist yet, but is signing up with a
+                # reserved threepid then pass auth check
+                if is_threepid_reserved(self.hs.config, threepid):
+                    return
+            # Else if there is no room in the MAU bucket, bail
+            current_mau = yield self.store.get_monthly_active_count()
+            if current_mau >= self.hs.config.max_mau_value:
+                raise ResourceLimitError(
+                    403, "Monthly Active User Limit Exceeded",
+                    admin_contact=self.hs.config.admin_contact,
+                    errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+                    limit_type="monthly_active_user"
+                )
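Editor's note: the admission order in check_auth_blocking is easy to miss in diff form. A plain-function sketch of the same decision sequence (names hypothetical, storage calls elided):

```python
# Hypothetical standalone restatement of the MAU admission order above.
def mau_allows(user_in_cohort, is_trial, threepid_reserved,
               current_mau, max_mau):
    # 1. Users already counted this month, and trial users, always pass.
    if user_in_cohort or is_trial:
        return True
    # 2. A reserved threepid may register even on a full server.
    if threepid_reserved:
        return True
    # 3. Otherwise admission depends on remaining MAU capacity.
    return current_mau < max_mau

print(mau_allows(False, False, False, current_mau=50, max_mau=50))  # False
print(mau_allows(False, False, True, current_mau=50, max_mau=50))   # True
```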
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 # Copyright 2017 Vector Creations Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -77,6 +78,7 @@ class EventTypes(object):
     Name = "m.room.name"

+    ServerACL = "m.room.server_acl"
     Pinned = "m.room.pinned_events"


 class RejectedReason(object):
@@ -94,3 +96,19 @@ class RoomCreationPreset(object):
 class ThirdPartyEntityKind(object):
     USER = "user"
     LOCATION = "location"
+
+
+class RoomVersions(object):
+    V1 = "1"
+    VDH_TEST = "vdh-test-version"
+
+
+# the version we will give rooms which are created on this server
+DEFAULT_ROOM_VERSION = RoomVersions.V1
+
+# vdh-test-version is a placeholder to get room versioning support working and tested
+# until we have a working v2.
+KNOWN_ROOM_VERSIONS = {RoomVersions.V1, RoomVersions.VDH_TEST}
+
+ServerNoticeMsgType = "m.server_notice"
+ServerNoticeLimitReached = "m.server_notice.usage_limit_reached"
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
+# Copyright 2018 New Vector Ltd.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,11 +18,11 @@

 import logging

-from canonicaljson import json
-
 from six import iteritems
 from six.moves import http_client

+from canonicaljson import json
+

 logger = logging.getLogger(__name__)

@@ -55,6 +56,10 @@ class Codes(object):
     SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED"
     CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN"
     CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
+    RESOURCE_LIMIT_EXCEEDED = "M_RESOURCE_LIMIT_EXCEEDED"
+    UNSUPPORTED_ROOM_VERSION = "M_UNSUPPORTED_ROOM_VERSION"
+    INCOMPATIBLE_ROOM_VERSION = "M_INCOMPATIBLE_ROOM_VERSION"
+    WRONG_ROOM_KEYS_VERSION = "M_WRONG_ROOM_KEYS_VERSION"


 class CodeMessageException(RuntimeError):
@@ -69,20 +74,6 @@ class CodeMessageException(RuntimeError):
         self.code = code
         self.msg = msg

-    def error_dict(self):
-        return cs_error(self.msg)
-
-
-class MatrixCodeMessageException(CodeMessageException):
-    """An error from a general matrix endpoint, eg. from a proxied Matrix API call.
-
-    Attributes:
-        errcode (str): Matrix error code e.g 'M_FORBIDDEN'
-    """
-    def __init__(self, code, msg, errcode=Codes.UNKNOWN):
-        super(MatrixCodeMessageException, self).__init__(code, msg)
-        self.errcode = errcode
-

 class SynapseError(CodeMessageException):
     """A base exception type for matrix errors which have an errcode and error
@@ -108,38 +99,28 @@ class SynapseError(CodeMessageException):
             self.errcode,
         )

-    @classmethod
-    def from_http_response_exception(cls, err):
-        """Make a SynapseError based on an HTTPResponseException
-
-        This is useful when a proxied request has failed, and we need to
-        decide how to map the failure onto a matrix error to send back to the
-        client.
-
-        An attempt is made to parse the body of the http response as a matrix
-        error. If that succeeds, the errcode and error message from the body
-        are used as the errcode and error message in the new synapse error.
-
-        Otherwise, the errcode is set to M_UNKNOWN, and the error message is
-        set to the reason code from the HTTP response.
-
-        Args:
-            err (HttpResponseException):
-
-        Returns:
-            SynapseError:
-        """
-        # try to parse the body as json, to get better errcode/msg, but
-        # default to M_UNKNOWN with the HTTP status as the error text
-        try:
-            j = json.loads(err.response)
-        except ValueError:
-            j = {}
-        errcode = j.get('errcode', Codes.UNKNOWN)
-        errmsg = j.get('error', err.msg)
-
-        res = SynapseError(err.code, errmsg, errcode)
-        return res
+
+class ProxiedRequestError(SynapseError):
+    """An error from a general matrix endpoint, eg. from a proxied Matrix API call.
+
+    Attributes:
+        errcode (str): Matrix error code e.g 'M_FORBIDDEN'
+    """
+    def __init__(self, code, msg, errcode=Codes.UNKNOWN, additional_fields=None):
+        super(ProxiedRequestError, self).__init__(
+            code, msg, errcode
+        )
+        if additional_fields is None:
+            self._additional_fields = {}
+        else:
+            self._additional_fields = dict(additional_fields)
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            **self._additional_fields
+        )


 class ConsentNotGivenError(SynapseError):
@@ -251,6 +232,30 @@ class AuthError(SynapseError):
         super(AuthError, self).__init__(*args, **kwargs)


+class ResourceLimitError(SynapseError):
+    """
+    Any error raised when there is a problem with resource usage.
+    For instance, the monthly active user limit for the server has been exceeded
+    """
+    def __init__(
+        self, code, msg,
+        errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
+        admin_contact=None,
+        limit_type=None,
+    ):
+        self.admin_contact = admin_contact
+        self.limit_type = limit_type
+        super(ResourceLimitError, self).__init__(code, msg, errcode=errcode)
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            admin_contact=self.admin_contact,
+            limit_type=self.limit_type
+        )
+
+
 class EventSizeError(SynapseError):
     """An error raised when an event is too big."""
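Editor's note: assuming cs_error() merges its keyword arguments into the error JSON object (as its uses in this diff imply), a ResourceLimitError surfaces to clients roughly as the following body (values hypothetical):

```python
# Approximate Matrix error body from ResourceLimitError.error_dict().
err = {
    "error": "Monthly Active User Limit Exceeded",
    "errcode": "M_RESOURCE_LIMIT_EXCEEDED",
    "admin_contact": "mailto:admin@example.com",  # hypothetical value
    "limit_type": "monthly_active_user",
}
print(err["errcode"])  # M_RESOURCE_LIMIT_EXCEEDED
```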
@@ -308,12 +313,39 @@ class LimitExceededError(SynapseError):
         )


-def cs_exception(exception):
-    if isinstance(exception, CodeMessageException):
-        return exception.error_dict()
-    else:
-        logger.error("Unknown exception type: %s", type(exception))
-        return {}
+class RoomKeysVersionError(SynapseError):
+    """A client has tried to upload to a non-current version of the room_keys store
+    """
+    def __init__(self, current_version):
+        """
+        Args:
+            current_version (str): the current version of the store they should have used
+        """
+        super(RoomKeysVersionError, self).__init__(
+            403, "Wrong room_keys version", Codes.WRONG_ROOM_KEYS_VERSION
+        )
+        self.current_version = current_version
+
+
+class IncompatibleRoomVersionError(SynapseError):
+    """A server is trying to join a room whose version it does not support."""
+
+    def __init__(self, room_version):
+        super(IncompatibleRoomVersionError, self).__init__(
+            code=400,
+            msg="Your homeserver does not support the features required to "
+            "join this room",
+            errcode=Codes.INCOMPATIBLE_ROOM_VERSION,
+        )
+
+        self._room_version = room_version
+
+    def error_dict(self):
+        return cs_error(
+            self.msg,
+            self.errcode,
+            room_version=self._room_version,
+        )


 def cs_error(msg, code=Codes.UNKNOWN, **kwargs):
@@ -372,7 +404,7 @@ class HttpResponseException(CodeMessageException):
     Represents an HTTP-level failure of an outbound request

     Attributes:
-        response (str): body of response
+        response (bytes): body of response
     """
     def __init__(self, code, msg, response):
         """
@@ -380,7 +412,39 @@ class HttpResponseException(CodeMessageException):
         Args:
             code (int): HTTP status code
             msg (str): reason phrase from HTTP response status line
-            response (str): body of response
+            response (bytes): body of response
         """
         super(HttpResponseException, self).__init__(code, msg)
         self.response = response
+
+    def to_synapse_error(self):
+        """Make a SynapseError based on an HTTPResponseException
+
+        This is useful when a proxied request has failed, and we need to
+        decide how to map the failure onto a matrix error to send back to the
+        client.
+
+        An attempt is made to parse the body of the http response as a matrix
+        error. If that succeeds, the errcode and error message from the body
+        are used as the errcode and error message in the new synapse error.
+
+        Otherwise, the errcode is set to M_UNKNOWN, and the error message is
+        set to the reason code from the HTTP response.
+
+        Returns:
+            SynapseError:
+        """
+        # try to parse the body as json, to get better errcode/msg, but
+        # default to M_UNKNOWN with the HTTP status as the error text
+        try:
+            j = json.loads(self.response)
+        except ValueError:
+            j = {}
+
+        if not isinstance(j, dict):
+            j = {}
+
+        errcode = j.pop('errcode', Codes.UNKNOWN)
+        errmsg = j.pop('error', self.msg)
+
+        return ProxiedRequestError(self.code, errmsg, errcode, j)
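Editor's note: callers that previously used SynapseError.from_http_response_exception(err) now call err.to_synapse_error() instead. The body-parsing core of the new method, run in isolation — note how pop() strips the standard fields so that whatever is left rides along as additional_fields:

```python
import json

# Standalone restatement of the parsing in to_synapse_error() above.
response = b'{"errcode": "M_FORBIDDEN", "error": "denied", "room_id": "!a:hs"}'
try:
    j = json.loads(response)
except ValueError:
    j = {}
if not isinstance(j, dict):
    j = {}
errcode = j.pop('errcode', 'M_UNKNOWN')
errmsg = j.pop('error', 'Forbidden')
print(errcode, errmsg, j)  # M_FORBIDDEN denied {'room_id': '!a:hs'}
```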
@@ -12,15 +12,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from synapse.api.errors import SynapseError
-from synapse.storage.presence import UserPresenceState
-from synapse.types import UserID, RoomID
-import jsonschema
-from canonicaljson import json
-from jsonschema import FormatChecker

 from twisted.internet import defer

+from canonicaljson import json
+
+import jsonschema
+from jsonschema import FormatChecker
+
+from synapse.api.errors import SynapseError
+from synapse.storage.presence import UserPresenceState
+from synapse.types import RoomID, UserID

 FILTER_SCHEMA = {
     "additionalProperties": False,
@@ -113,7 +113,13 @@ ROOM_EVENT_FILTER_SCHEMA = {
         },
         "contains_url": {
             "type": "boolean"
-        }
+        },
+        "lazy_load_members": {
+            "type": "boolean"
+        },
+        "include_redundant_members": {
+            "type": "boolean"
+        },
     }
 }
@@ -166,7 +172,10 @@ USER_FILTER_SCHEMA = {
                 # events a lot easier as we can then use a negative lookbehind
                 # assertion to split '\.' If we allowed \\ then it would
                 # incorrectly split '\\.' See synapse.events.utils.serialize_event
-                "pattern": "^((?!\\\).)*$"
+                #
+                # Note that because this is a regular expression, we have to escape
+                # each backslash in the pattern.
+                "pattern": r"^((?!\\\\).)*$"
             }
         }
     },
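Editor's note: the corrected raw-string pattern is easiest to verify directly. One backslash (an escaped dot in a field name) is allowed; a doubled backslash is rejected, which keeps the later split on unescaped dots unambiguous:

```python
import re

# Same pattern as the new schema line above.
PATTERN = re.compile(r"^((?!\\\\).)*$")

print(bool(PATTERN.match("content.body")))            # True
print(bool(PATTERN.match(r"content.m\.relates_to")))  # True: '\.' allowed
print(bool(PATTERN.match("content.a\\\\b")))          # False: '\\' rejected
```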
@@ -220,7 +229,7 @@ class Filtering(object):
             jsonschema.validate(user_filter_json, USER_FILTER_SCHEMA,
                                 format_checker=FormatChecker())
         except jsonschema.ValidationError as e:
-            raise SynapseError(400, e.message)
+            raise SynapseError(400, str(e))


 class FilterCollection(object):
@@ -245,6 +254,7 @@ class FilterCollection(object):
             "include_leave", False
         )
         self.event_fields = filter_json.get("event_fields", [])
+        self.event_format = filter_json.get("event_format", "client")

     def __repr__(self):
         return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
@@ -261,6 +271,12 @@ class FilterCollection(object):
     def ephemeral_limit(self):
         return self._room_ephemeral_filter.limit()

+    def lazy_load_members(self):
+        return self._room_state_filter.lazy_load_members()
+
+    def include_redundant_members(self):
+        return self._room_state_filter.include_redundant_members()
+
     def filter_presence(self, events):
         return self._presence_filter.filter(events)

@@ -417,6 +433,12 @@ class Filter(object):
     def limit(self):
         return self.filter_json.get("limit", 10)

+    def lazy_load_members(self):
+        return self.filter_json.get("lazy_load_members", False)
+
+    def include_redundant_members(self):
+        return self.filter_json.get("include_redundant_members", False)
+

 def _matches_wildcard(actual_value, filter_value):
     if filter_value.endswith("*"):
@@ -72,7 +72,7 @@ class Ratelimiter(object):
         return allowed, time_allowed

     def prune_message_counts(self, time_now_s):
-        for user_id in self.message_counts.keys():
+        for user_id in list(self.message_counts.keys()):
             message_count, time_start, msg_rate_hz = (
                 self.message_counts[user_id]
             )
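Editor's note: the list(...) wrapper is the standard Python 3 fix — deleting entries while iterating a live dict view raises "RuntimeError: dictionary changed size during iteration". In isolation:

```python
# Snapshot the keys before mutating, exactly as the patched loop does.
counts = {"@a:hs": 0, "@b:hs": 3}
for user_id in list(counts.keys()):  # snapshot; safe to delete below
    if counts[user_id] == 0:
        del counts[user_id]
print(counts)  # {'@b:hs': 3}
```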
@@ -15,8 +15,8 @@
 # limitations under the License.

 """Contains the URL paths to prefix various aspects of the server with. """
-from hashlib import sha256
 import hmac
+from hashlib import sha256

 from six.moves.urllib.parse import urlencode

@@ -64,7 +64,7 @@ class ConsentURIBuilder(object):
         """
         mac = hmac.new(
             key=self._hmac_secret,
-            msg=user_id,
+            msg=user_id.encode('ascii'),
             digestmod=sha256,
         ).hexdigest()
         consent_uri = "%s_matrix/consent?%s" % (
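Editor's note: hmac.new() requires bytes for both key and message on Python 3, hence the .encode('ascii') added above. A self-contained sketch (secret value hypothetical):

```python
import hmac
from hashlib import sha256

# Mirrors the consent-URI MAC construction above with dummy inputs.
mac = hmac.new(
    key=b"form-secret",  # hypothetical shared secret
    msg="@alice:example.com".encode('ascii'),
    digestmod=sha256,
).hexdigest()
print(len(mac))  # 64 hex characters
```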
@@ -14,15 +14,17 @@
 # limitations under the License.

 import sys

+from synapse import python_dependencies  # noqa: E402
+
 sys.dont_write_bytecode = True

-from synapse import python_dependencies  # noqa: E402
-
 try:
     python_dependencies.check_requirements()
 except python_dependencies.MissingRequirementError as e:
     message = "\n".join([
-        "Missing Requirement: %s" % (e.message,),
+        "Missing Requirement: %s" % (str(e),),
         "To install run:",
         "  pip install --upgrade --force \"%s\"" % (e.dependency,),
        "",
@@ -17,15 +17,13 @@ import gc
 import logging
 import sys

-try:
-    import affinity
-except Exception:
-    affinity = None
-
+import psutil
 from daemonize import Daemonize

+from twisted.internet import error, reactor
+
 from synapse.util import PreserveLoggingContext
 from synapse.util.rlimit import change_resource_limit
-from twisted.internet import error, reactor

 logger = logging.getLogger(__name__)

@@ -86,15 +84,20 @@ def start_reactor(
         with PreserveLoggingContext():
             logger.info("Running")
             if cpu_affinity is not None:
-                if not affinity:
-                    quit_with_error(
-                        "Missing package 'affinity' required for cpu_affinity\n"
-                        "option\n\n"
-                        "Install by running:\n\n"
-                        "   pip install affinity\n\n"
-                    )
                 logger.info("Setting CPU affinity to %s" % cpu_affinity)
-                affinity.set_process_affinity_mask(0, cpu_affinity)
+                # Turn the bitmask into bits, reverse it so we go from 0 up
+                mask_to_bits = bin(cpu_affinity)[2:][::-1]
+
+                cpus = []
+                cpu_num = 0
+
+                for i in mask_to_bits:
+                    if i == "1":
+                        cpus.append(cpu_num)
+                    cpu_num += 1
+
+                p = psutil.Process()
+                p.cpu_affinity(cpus)
+
                 change_resource_limit(soft_file_limit)
                 if gc_thresholds:
                     gc.set_threshold(*gc_thresholds)
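Editor's note: the psutil-based affinity code wants an explicit CPU list rather than the old affinity module's raw bitmask. The decoding step in isolation (mask value hypothetical):

```python
# Bitmask 0b101 selects CPUs 0 and 2; reversing puts the LSB first.
cpu_affinity = 0x5  # hypothetical mask: binary 101
mask_to_bits = bin(cpu_affinity)[2:][::-1]  # '101'
cpus = [cpu_num for cpu_num, bit in enumerate(mask_to_bits) if bit == "1"]
print(cpus)  # [0, 2]
```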
@@ -137,7 +140,7 @@ def listen_metrics(bind_addresses, port):
         logger.info("Metrics now reporting on %s:%d", host, port)


-def listen_tcp(bind_addresses, port, factory, backlog=50):
+def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
     """
     Create a TCP socket for a port and several addresses
     """
@@ -153,7 +156,9 @@ def listen_tcp(bind_addresses, port, factory, backlog=50):
             check_bind_error(e, address, bind_addresses)


-def listen_ssl(bind_addresses, port, factory, context_factory, backlog=50):
+def listen_ssl(
+    bind_addresses, port, factory, context_factory, reactor=reactor, backlog=50
+):
     """
     Create an SSL socket for a port and several addresses
     """
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import defer, reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.app import _base
@@ -36,8 +39,6 @@ from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import reactor, defer
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.appservice")

@@ -50,10 +51,7 @@ class AppserviceSlaveStore(


 class AppserviceServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = AppserviceSlaveStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = AppserviceSlaveStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -116,8 +114,9 @@ class ASReplicationHandler(ReplicationClientHandler):
         super(ASReplicationHandler, self).__init__(hs.get_datastore())
         self.appservice_handler = hs.get_application_service_handler()

+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)
+        yield super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)

         if stream_name == "events":
             max_stream_id = self.store.get_room_max_stream_ordering()
@@ -137,7 +136,7 @@ def start(config_options):
             "Synapse appservice", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.appservice"
@@ -173,7 +172,6 @@ def start(config_options):

     def start():
         ps.get_datastore().start_profiling()
-        ps.get_state_handler().start_caching()

     reactor.callWhenRunning(start)
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.app import _base
@@ -28,6 +31,7 @@ from synapse.http.site import SynapseSite
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
+from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
 from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
 from synapse.replication.slave.storage.directory import DirectoryStore
@@ -35,29 +39,34 @@ from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.keys import SlavedKeyStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
-from synapse.rest.client.v1.room import PublicRoomListRestServlet
+from synapse.rest.client.v1.room import (
+    JoinedRoomMemberListRestServlet,
+    PublicRoomListRestServlet,
+    RoomEventContextServlet,
+    RoomMemberListRestServlet,
+    RoomStateRestServlet,
+)
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import reactor
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.client_reader")


 class ClientReaderSlavedStore(
+    SlavedAccountDataStore,
     SlavedEventStore,
     SlavedKeyStore,
     RoomStore,
     DirectoryStore,
     SlavedApplicationServiceStore,
     SlavedRegistrationStore,
-    TransactionStore,
+    SlavedTransactionStore,
     SlavedClientIpStore,
     BaseSlavedStore,
 ):
@@ -65,10 +74,7 @@ class ClientReaderSlavedStore(


 class ClientReaderServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = ClientReaderSlavedStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = ClientReaderSlavedStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -81,7 +87,13 @@ class ClientReaderServer(HomeServer):
                     resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                 elif name == "client":
                     resource = JsonResource(self, canonical_json=False)
+
                     PublicRoomListRestServlet(self).register(resource)
+                    RoomMemberListRestServlet(self).register(resource)
+                    JoinedRoomMemberListRestServlet(self).register(resource)
+                    RoomStateRestServlet(self).register(resource)
+                    RoomEventContextServlet(self).register(resource)
+
                     resources.update({
                         "/_matrix/client/r0": resource,
                         "/_matrix/client/unstable": resource,
@@ -141,7 +153,7 @@ def start(config_options):
             "Synapse client reader", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.client_reader"
@@ -153,11 +165,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)

     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

     ss = ClientReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -167,7 +181,6 @@ def start(config_options):
     ss.start_listening(config.worker_listeners)

     def start():
-        ss.get_state_handler().start_caching()
         ss.get_datastore().start_profiling()

     reactor.callWhenRunning(start)
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.app import _base
@@ -40,27 +43,36 @@ from synapse.replication.slave.storage.pushers import SlavedPusherStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.rest.client.v1.profile import (
+    ProfileAvatarURLRestServlet,
+    ProfileDisplaynameRestServlet,
+    ProfileRestServlet,
+)
 from synapse.rest.client.v1.room import (
-    RoomSendEventRestServlet, RoomMembershipRestServlet, RoomStateEventRestServlet,
     JoinRoomAliasServlet,
+    RoomMembershipRestServlet,
+    RoomSendEventRestServlet,
+    RoomStateEventRestServlet,
 )
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
+from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import reactor
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.event_creator")


 class EventCreatorSlavedStore(
+    # FIXME(#3714): We need to add UserDirectoryStore as we write directly
+    # rather than going via the correct worker.
+    UserDirectoryStore,
     DirectoryStore,
-    TransactionStore,
+    SlavedTransactionStore,
     SlavedProfileStore,
     SlavedAccountDataStore,
     SlavedPusherStore,
@@ -78,10 +90,7 @@ class EventCreatorSlavedStore(


 class EventCreatorServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = EventCreatorSlavedStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = EventCreatorSlavedStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -98,6 +107,9 @@ class EventCreatorServer(HomeServer):
                     RoomMembershipRestServlet(self).register(resource)
                     RoomStateEventRestServlet(self).register(resource)
                     JoinRoomAliasServlet(self).register(resource)
+                    ProfileAvatarURLRestServlet(self).register(resource)
+                    ProfileDisplaynameRestServlet(self).register(resource)
+                    ProfileRestServlet(self).register(resource)
                     resources.update({
                         "/_matrix/client/r0": resource,
                         "/_matrix/client/unstable": resource,
@@ -157,7 +169,7 @@ def start(config_options):
             "Synapse event creator", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.event_creator"
@@ -166,16 +178,21 @@ def start(config_options):

     setup_logging(config, use_worker_options=True)

+    # This should only be done on the user directory worker or the master
+    config.update_user_directory = False
+
     events.USE_FROZEN_DICTS = config.use_frozen_dicts

     database_engine = create_engine(config.database_config)

     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

     ss = EventCreatorServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -185,7 +202,6 @@ def start(config_options):
     ss.start_listening(config.worker_listeners)

     def start():
-        ss.get_state_handler().start_caching()
         ss.get_datastore().start_profiling()

     reactor.callWhenRunning(start)
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.api.urls import FEDERATION_PREFIX
@@ -29,11 +32,17 @@ from synapse.http.site import SynapseSite
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
+from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
+from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
 from synapse.replication.slave.storage.directory import DirectoryStore
 from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.keys import SlavedKeyStore
+from synapse.replication.slave.storage.profile import SlavedProfileStore
+from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
+from synapse.replication.slave.storage.pushers import SlavedPusherStore
+from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.room import RoomStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
@@ -41,28 +50,29 @@ from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import reactor
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.federation_reader")


 class FederationReaderSlavedStore(
+    SlavedAccountDataStore,
+    SlavedProfileStore,
+    SlavedApplicationServiceStore,
+    SlavedPusherStore,
+    SlavedPushRuleStore,
+    SlavedReceiptsStore,
     SlavedEventStore,
     SlavedKeyStore,
     RoomStore,
     DirectoryStore,
-    TransactionStore,
+    SlavedTransactionStore,
     BaseSlavedStore,
 ):
     pass


 class FederationReaderServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = FederationReaderSlavedStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = FederationReaderSlavedStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -130,7 +140,7 @@ def start(config_options):
             "Synapse federation reader", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.federation_reader"
@@ -142,11 +152,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)

     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

     ss = FederationReaderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -156,7 +168,6 @@ def start(config_options):
     ss.start_listening(config.worker_listeners)

     def start():
-        ss.get_state_handler().start_caching()
         ss.get_datastore().start_profiling()

     reactor.callWhenRunning(start)
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import defer, reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.app import _base
@@ -33,23 +36,21 @@ from synapse.replication.slave.storage.events import SlavedEventStore
 from synapse.replication.slave.storage.presence import SlavedPresenceStore
 from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
-from synapse.replication.slave.storage.transactions import TransactionStore
+from synapse.replication.slave.storage.transactions import SlavedTransactionStore
 from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
-from synapse.util.async import Linearizer
+from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import defer, reactor
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.federation_sender")


 class FederationSenderSlaveStore(
-    SlavedDeviceInboxStore, TransactionStore, SlavedReceiptsStore, SlavedEventStore,
+    SlavedDeviceInboxStore, SlavedTransactionStore, SlavedReceiptsStore, SlavedEventStore,
     SlavedRegistrationStore, SlavedDeviceStore, SlavedPresenceStore,
 ):
     def __init__(self, db_conn, hs):
@@ -77,10 +78,7 @@ class FederationSenderSlaveStore(


 class FederationSenderServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = FederationSenderSlaveStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = FederationSenderSlaveStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -143,8 +141,9 @@ class FederationSenderReplicationHandler(ReplicationClientHandler):
         super(FederationSenderReplicationHandler, self).__init__(hs.get_datastore())
         self.send_handler = FederationSenderHandler(hs, self)

+    @defer.inlineCallbacks
     def on_rdata(self, stream_name, token, rows):
-        super(FederationSenderReplicationHandler, self).on_rdata(
+        yield super(FederationSenderReplicationHandler, self).on_rdata(
             stream_name, token, rows
         )
         self.send_handler.process_replication_rows(stream_name, token, rows)
@@ -161,7 +160,7 @@ def start(config_options):
             "Synapse federation sender", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.federation_sender"
@@ -185,11 +184,13 @@ def start(config_options):
     config.send_federation = True

     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

     ps = FederationSenderServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -200,7 +201,6 @@ def start(config_options):

     def start():
         ps.get_datastore().start_profiling()
-        ps.get_state_handler().start_caching()

     reactor.callWhenRunning(start)
     _base.start_worker_reactor("synapse-federation-sender", config)
@@ -16,6 +16,9 @@
 import logging
 import sys

+from twisted.internet import defer, reactor
+from twisted.web.resource import NoResource
+
 import synapse
 from synapse import events
 from synapse.api.errors import SynapseError
@@ -25,9 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.crypto import context_factory
 from synapse.http.server import JsonResource
-from synapse.http.servlet import (
-    RestServlet, parse_json_object_from_request,
-)
+from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.http.site import SynapseSite
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -37,6 +38,7 @@ from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
 from synapse.replication.slave.storage.devices import SlavedDeviceStore
 from synapse.replication.slave.storage.registration import SlavedRegistrationStore
 from synapse.replication.tcp.client import ReplicationClientHandler
+from synapse.rest.client.v1.base import ClientV1RestServlet, client_path_patterns
 from synapse.rest.client.v2_alpha._base import client_v2_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
@@ -44,12 +46,39 @@ from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
-from twisted.internet import defer, reactor
-from twisted.web.resource import NoResource

 logger = logging.getLogger("synapse.app.frontend_proxy")


+class PresenceStatusStubServlet(ClientV1RestServlet):
+    PATTERNS = client_path_patterns("/presence/(?P<user_id>[^/]*)/status")
+
+    def __init__(self, hs):
+        super(PresenceStatusStubServlet, self).__init__(hs)
+        self.http_client = hs.get_simple_http_client()
+        self.auth = hs.get_auth()
+        self.main_uri = hs.config.worker_main_http_uri
+
+    @defer.inlineCallbacks
+    def on_GET(self, request, user_id):
+        # Pass through the auth headers, if any, in case the access token
+        # is there.
+        auth_headers = request.requestHeaders.getRawHeaders("Authorization", [])
+        headers = {
+            "Authorization": auth_headers,
+        }
+        result = yield self.http_client.get_json(
+            self.main_uri + request.uri.decode('ascii'),
+            headers=headers,
+        )
+        defer.returnValue((200, result))
+
+    @defer.inlineCallbacks
+    def on_PUT(self, request, user_id):
+        yield self.auth.get_user_by_req(request)
+        defer.returnValue((200, {}))
+
+
 class KeyUploadServlet(RestServlet):
     PATTERNS = client_v2_patterns("/keys/upload(/(?P<device_id>[^/]+))?$")

@@ -96,7 +125,7 @@ class KeyUploadServlet(RestServlet):
             "Authorization": auth_headers,
         }
         result = yield self.http_client.post_json_get_json(
-            self.main_uri + request.uri,
+            self.main_uri + request.uri.decode('ascii'),
             body,
             headers=headers,
         )
@@ -119,10 +148,7 @@ class FrontendProxySlavedStore(


 class FrontendProxyServer(HomeServer):
-    def setup(self):
-        logger.info("Setting up.")
-        self.datastore = FrontendProxySlavedStore(self.get_db_conn(), self)
-        logger.info("Finished setting up.")
+    DATASTORE_CLASS = FrontendProxySlavedStore

     def _listen_http(self, listener_config):
         port = listener_config["port"]
@@ -136,6 +162,12 @@ class FrontendProxyServer(HomeServer):
                 elif name == "client":
                     resource = JsonResource(self, canonical_json=False)
                     KeyUploadServlet(self).register(resource)
+
+                    # If presence is disabled, use the stub servlet that does
+                    # not allow sending presence
+                    if not self.config.use_presence:
+                        PresenceStatusStubServlet(self).register(resource)
+
                     resources.update({
                         "/_matrix/client/r0": resource,
                         "/_matrix/client/unstable": resource,
@@ -154,7 +186,8 @@ class FrontendProxyServer(HomeServer):
                     listener_config,
                     root_resource,
                     self.version_string,
-                )
+                ),
+                reactor=self.get_reactor()
             )

             logger.info("Synapse client reader now listening on port %d", port)
@@ -195,7 +228,7 @@ def start(config_options):
             "Synapse frontend proxy", config_options
         )
     except ConfigError as e:
-        sys.stderr.write("\n" + e.message + "\n")
+        sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)

     assert config.worker_app == "synapse.app.frontend_proxy"
@@ -209,11 +242,13 @@ def start(config_options):
     database_engine = create_engine(config.database_config)

     tls_server_context_factory = context_factory.ServerContextFactory(config)
+    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

     ss = FrontendProxyServer(
         config.server_name,
         db_config=config.database_config,
         tls_server_context_factory=tls_server_context_factory,
+        tls_client_options_factory=tls_client_options_factory,
         config=config,
         version_string="Synapse/" + get_version_string(synapse),
         database_engine=database_engine,
@@ -223,7 +258,6 @@ def start(config_options):
     ss.start_listening(config.worker_listeners)

     def start():
-        ss.get_state_handler().start_caching()
         ss.get_datastore().start_profiling()

     reactor.callWhenRunning(start)
@@ -18,34 +18,52 @@ import logging
 import os
 import sys

+from six import iteritems
+
 import psutil
+from prometheus_client import Gauge
+
+from twisted.application import service
+from twisted.internet import defer, reactor
+from twisted.web.resource import EncodingResourceWrapper, NoResource
+from twisted.web.server import GzipEncoderFactory
+from twisted.web.static import File
+
 import synapse
 import synapse.config.logger
 from synapse import events
-from synapse.api.urls import CONTENT_REPO_PREFIX, FEDERATION_PREFIX, \
-    LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, SERVER_KEY_PREFIX, SERVER_KEY_V2_PREFIX, \
-    STATIC_PREFIX, WEB_CLIENT_PREFIX
+from synapse.api.urls import (
+    CONTENT_REPO_PREFIX,
+    FEDERATION_PREFIX,
+    LEGACY_MEDIA_PREFIX,
+    MEDIA_PREFIX,
+    SERVER_KEY_PREFIX,
+    SERVER_KEY_V2_PREFIX,
+    STATIC_PREFIX,
+    WEB_CLIENT_PREFIX,
+)
 from synapse.app import _base
-from synapse.app._base import quit_with_error, listen_ssl, listen_tcp
+from synapse.app._base import listen_ssl, listen_tcp, quit_with_error
 from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
 from synapse.federation.transport.server import TransportLayerServer
-from synapse.module_api import ModuleApi
 from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
 from synapse.metrics import RegistryProxy
+from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
-from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, \
-    check_requirements
-from synapse.replication.http import ReplicationRestResource, REPLICATION_PREFIX
+from synapse.module_api import ModuleApi
+from synapse.python_dependencies import CONDITIONAL_REQUIREMENTS, check_requirements
+from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
 from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.rest import ClientRestResource
 from synapse.rest.key.v1.server_key_resource import LocalKey
 from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.rest.media.v0.content_repository import ContentRepoResource
 from synapse.server import HomeServer
-from synapse.storage import are_all_users_on_domain
+from synapse.storage import DataStore, are_all_users_on_domain
 from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
 from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
 from synapse.util.caches import CACHE_SIZE_FACTOR
@@ -55,11 +73,6 @@ from synapse.util.manhole import manhole
 from synapse.util.module_loader import load_module
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string
-from twisted.application import service
-from twisted.internet import defer, reactor
-from twisted.web.resource import EncodingResourceWrapper, NoResource
-from twisted.web.server import GzipEncoderFactory
-from twisted.web.static import File

 logger = logging.getLogger("synapse.app.homeserver")

@@ -99,6 +112,8 @@ def build_resource_for_web_client(hs):


 class SynapseHomeServer(HomeServer):
+    DATASTORE_CLASS = DataStore
+
     def _listener_http(self, config, listener_config):
         port = listener_config["port"]
         bind_addresses = listener_config["bind_addresses"]
@@ -287,7 +302,16 @@ class SynapseHomeServer(HomeServer):
|
||||
try:
|
||||
database_engine.check_database(db_conn.cursor())
|
||||
except IncorrectDatabaseSetup as e:
|
||||
quit_with_error(e.message)
|
||||
quit_with_error(str(e))
|
||||
|
||||
|
||||
# Gauges to expose monthly active user control metrics
|
||||
current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
|
||||
max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
|
||||
registered_reserved_users_mau_gauge = Gauge(
|
||||
"synapse_admin_mau:registered_reserved_users",
|
||||
"Registered users with reserved threepids"
|
||||
)
|
||||
|
||||
|
||||
def setup(config_options):
|
||||
@@ -305,7 +329,7 @@ def setup(config_options):
|
||||
config_options,
|
||||
)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\n" + e.message + "\n")
|
||||
sys.stderr.write("\n" + str(e) + "\n")
|
||||
sys.exit(1)
|
||||
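A note on the recurring e.message -> str(e) substitutions in these hunks: BaseException.message was removed in Python 3, so str(e) is the portable way to get an exception's text. A minimal sketch of the pattern (the ConfigError here is illustrative, defined locally rather than imported from Synapse):

import sys

class ConfigError(Exception):
    pass

try:
    raise ConfigError("missing homeserver.yaml")
except ConfigError as e:
    # e.message only exists on Python 2; str(e) works on both 2 and 3
    sys.stderr.write("\n" + str(e) + "\n")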

    if not config:
@@ -321,6 +345,7 @@ def setup(config_options):
    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    database_engine = create_engine(config.database_config)
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
@@ -329,6 +354,7 @@ def setup(config_options):
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
@@ -337,13 +363,13 @@ def setup(config_options):
    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = hs.get_db_conn(run_new_connection=False)
        prepare_database(db_conn, database_engine, config=config)
        database_engine.on_new_connection(db_conn)
        with hs.get_db_conn(run_new_connection=False) as db_conn:
            prepare_database(db_conn, database_engine, config=config)
            database_engine.on_new_connection(db_conn)

        hs.run_startup_checks(db_conn, database_engine)
            hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
            db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
@@ -359,10 +385,8 @@ def setup(config_options):

    def start():
        hs.get_pusherpool().start()
        hs.get_state_handler().start_caching()
        hs.get_datastore().start_profiling()
        hs.get_datastore().start_doing_background_updates()
        hs.get_federation_client().start_get_pdu_cache()

    reactor.callWhenRunning(start)

@@ -418,6 +442,9 @@ def run(hs):
    # currently either 0 or 1
    stats_process = []

    def start_phone_stats_home():
        return run_as_background_process("phone_stats_home", phone_stats_home)

    @defer.inlineCallbacks
    def phone_stats_home():
        logger.info("Gathering stats for reporting")
@@ -429,13 +456,17 @@ def run(hs):
        stats["homeserver"] = hs.config.server_name
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        version = sys.version_info
        stats["python_version"] = "{}.{}.{}".format(
            version.major, version.minor, version.micro
        )
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        total_nonbridged_users = yield hs.get_datastore().count_nonbridged_users()
        stats["total_nonbridged_users"] = total_nonbridged_users

        daily_user_type_results = yield hs.get_datastore().count_daily_user_type()
        for name, count in daily_user_type_results.iteritems():
        for name, count in iteritems(daily_user_type_results):
            stats["daily_user_type_" + name] = count

        room_count = yield hs.get_datastore().get_room_count()
@@ -446,7 +477,7 @@ def run(hs):
        stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()

        r30_results = yield hs.get_datastore().count_r30_users()
        for name, count in r30_results.iteritems():
        for name, count in iteritems(r30_results):
            stats["r30_users_" + name] = count
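The two loops above swap dict.iteritems(), which is Python 2 only, for six.iteritems, which dispatches to iteritems() on Python 2 and items() on Python 3. A minimal sketch (assuming six is installed; the sample data is made up):

from six import iteritems

r30_results = {"all": 12, "android": 5, "web": 7}
stats = {}
for name, count in iteritems(r30_results):
    # equivalent to d.iteritems() on py2 and d.items() on py3
    stats["r30_users_" + name] = count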

        daily_sent_messages = yield hs.get_datastore().count_daily_sent_messages()
@@ -472,7 +503,6 @@ def run(hs):

    def performance_stats_init():
        try:
            import psutil
            process = psutil.Process()
            # Ensure we can fetch both, and make the initial request for cpu_percent
            # so the next request will use this as the initial point.
@@ -480,25 +510,57 @@ def run(hs):
            process.cpu_percent(interval=None)
            logger.info("report_stats can use psutil")
            stats_process.append(process)
        except (ImportError, AttributeError):
            logger.warn(
                "report_stats enabled but psutil is not installed or incorrect version."
                " Disabling reporting of memory/cpu stats."
                " Ensuring psutil is available will help matrix.org track performance"
                " changes across releases."
        except (AttributeError):
            logger.warning(
                "Unable to read memory/cpu stats. Disabling reporting."
            )

    def generate_user_daily_visit_stats():
        hs.get_datastore().generate_user_daily_visits()
        return run_as_background_process(
            "generate_user_daily_visits",
            hs.get_datastore().generate_user_daily_visits,
        )

    # Rather than update on per session basis, batch up the requests.
    # If you increase the loop period, the accuracy of user_daily_visits
    # table will decrease
    clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)
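The change above stops the looping job from poking the datastore directly and instead routes it through run_as_background_process, which runs the work with its own log context and metrics. A rough stand-in showing only the shape of that wrapper, not Synapse's real implementation:

import logging

logger = logging.getLogger(__name__)

def run_as_background_process(desc, func, *args):
    # simplified: the real version also tracks per-process metrics
    # and runs func under a fresh logging context
    logger.info("Starting background process %s", desc)
    return func(*args)

def generate_user_daily_visits():
    logger.info("Updating user_daily_visits table")

def generate_user_daily_visit_stats():
    return run_as_background_process(
        "generate_user_daily_visits", generate_user_daily_visits,
    )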
    # monthly active user limiting functionality
    def reap_monthly_active_users():
        return run_as_background_process(
            "reap_monthly_active_users",
            hs.get_datastore().reap_monthly_active_users,
        )
    clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
    reap_monthly_active_users()

    @defer.inlineCallbacks
    def generate_monthly_active_users():
        current_mau_count = 0
        reserved_count = 0
        store = hs.get_datastore()
        if hs.config.limit_usage_by_mau:
            current_mau_count = yield store.get_monthly_active_count()
            reserved_count = yield store.get_registered_reserved_users_count()
        current_mau_gauge.set(float(current_mau_count))
        registered_reserved_users_mau_gauge.set(float(reserved_count))
        max_mau_gauge.set(float(hs.config.max_mau_value))

    def start_generate_monthly_active_users():
        return run_as_background_process(
            "generate_monthly_active_users",
            generate_monthly_active_users,
        )

    start_generate_monthly_active_users()
    if hs.config.limit_usage_by_mau:
        clock.looping_call(start_generate_monthly_active_users, 5 * 60 * 1000)
    # End of monthly active user settings

    if hs.config.report_stats:
        logger.info("Scheduling stats reporting for 3 hour intervals")
        clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000)
        clock.looping_call(start_phone_stats_home, 3 * 60 * 60 * 1000)

        # We need to defer this init for the cases that we daemonize
        # otherwise the process ID we get is that of the non-daemon process
@@ -506,10 +568,10 @@ def run(hs):

        # We wait 5 minutes to send the first set of stats as the server can
        # be quite busy the first few minutes
        clock.call_later(5 * 60, phone_stats_home)
        clock.call_later(5 * 60, start_phone_stats_home)

    if hs.config.daemonize and hs.config.print_pidfile:
        print (hs.config.pid_file)
        print(hs.config.pid_file)

    _base.start_reactor(
        "synapse-homeserver",

@@ -16,11 +16,12 @@
import logging
import sys

from twisted.internet import reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.api.urls import (
    CONTENT_REPO_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX
)
from synapse.api.urls import CONTENT_REPO_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX
from synapse.app import _base
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
@@ -33,7 +34,7 @@ from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.slave.storage.transactions import TransactionStore
from synapse.replication.slave.storage.transactions import SlavedTransactionStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.rest.media.v0.content_repository import ContentRepoResource
from synapse.server import HomeServer
@@ -43,8 +44,6 @@ from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string
from twisted.internet import reactor
from twisted.web.resource import NoResource

logger = logging.getLogger("synapse.app.media_repository")

@@ -53,7 +52,7 @@ class MediaRepositorySlavedStore(
    SlavedApplicationServiceStore,
    SlavedRegistrationStore,
    SlavedClientIpStore,
    TransactionStore,
    SlavedTransactionStore,
    BaseSlavedStore,
    MediaRepositoryStore,
):
@@ -61,10 +60,7 @@ class MediaRepositorySlavedStore(


class MediaRepositoryServer(HomeServer):
    def setup(self):
        logger.info("Setting up.")
        self.datastore = MediaRepositorySlavedStore(self.get_db_conn(), self)
        logger.info("Finished setting up.")
    DATASTORE_CLASS = MediaRepositorySlavedStore
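Across these worker files the imperative setup() override is replaced by a declarative DATASTORE_CLASS attribute, so the HomeServer base class owns datastore construction. A toy illustration of the pattern (classes simplified, not the real Synapse ones):

class HomeServer(object):
    DATASTORE_CLASS = None  # each server subclass declares its store

    def __init__(self, db_conn):
        # the base class builds the datastore; subclasses only pick the class
        self.datastore = self.DATASTORE_CLASS(db_conn, self)

class MediaRepositoryStore(object):
    def __init__(self, db_conn, hs):
        self.db_conn = db_conn

class MediaRepositoryServer(HomeServer):
    DATASTORE_CLASS = MediaRepositoryStore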
    def _listen_http(self, listener_config):
        port = listener_config["port"]
@@ -137,7 +133,7 @@ def start(config_options):
            "Synapse media repository", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.media_repository"
@@ -156,11 +152,13 @@ def start(config_options):
    database_engine = create_engine(config.database_config)

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ss = MediaRepositoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
@@ -170,7 +168,6 @@ def start(config_options):
    ss.start_listening(config.worker_listeners)

    def start():
        ss.get_state_handler().start_caching()
        ss.get_datastore().start_profiling()

    reactor.callWhenRunning(start)

@@ -16,6 +16,9 @@
import logging
import sys

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.app import _base
@@ -25,6 +28,7 @@ from synapse.config.logger import setup_logging
from synapse.http.site import SynapseSite
from synapse.metrics import RegistryProxy
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
from synapse.replication.slave.storage._base import __func__
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.pushers import SlavedPusherStore
@@ -37,8 +41,6 @@ from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext, run_in_background
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string
from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

logger = logging.getLogger("synapse.app.pusher")

@@ -48,39 +50,36 @@ class PusherSlaveStore(
    SlavedAccountDataStore
):
    update_pusher_last_stream_ordering_and_success = (
        DataStore.update_pusher_last_stream_ordering_and_success.__func__
        __func__(DataStore.update_pusher_last_stream_ordering_and_success)
    )

    update_pusher_failing_since = (
        DataStore.update_pusher_failing_since.__func__
        __func__(DataStore.update_pusher_failing_since)
    )

    update_pusher_last_stream_ordering = (
        DataStore.update_pusher_last_stream_ordering.__func__
        __func__(DataStore.update_pusher_last_stream_ordering)
    )

    get_throttle_params_by_room = (
        DataStore.get_throttle_params_by_room.__func__
        __func__(DataStore.get_throttle_params_by_room)
    )

    set_throttle_params = (
        DataStore.set_throttle_params.__func__
        __func__(DataStore.set_throttle_params)
    )

    get_time_of_last_push_action_before = (
        DataStore.get_time_of_last_push_action_before.__func__
        __func__(DataStore.get_time_of_last_push_action_before)
    )

    get_profile_displayname = (
        DataStore.get_profile_displayname.__func__
        __func__(DataStore.get_profile_displayname)
    )

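The __func__(...) helper imported from synapse.replication.slave.storage._base exists because DataStore.method.__func__ unwraps an unbound method on Python 2, while on Python 3 class attribute access already yields a plain function with no __func__ attribute. Roughly, the helper looks like this (a sketch of the idea, with a stand-in DataStore):

import six

def __func__(inp):
    # py3: class attributes are plain functions already
    # py2: unwrap the unbound method so it can be re-bound elsewhere
    if six.PY3:
        return inp
    return inp.__func__

class DataStore(object):
    def get_profile_displayname(self, user_id):
        return "displayname for %s" % (user_id,)

class PusherSlaveStore(object):
    # borrow the master store's implementation on this slaved store
    get_profile_displayname = __func__(DataStore.get_profile_displayname)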
class PusherServer(HomeServer):
    def setup(self):
        logger.info("Setting up.")
        self.datastore = PusherSlaveStore(self.get_db_conn(), self)
        logger.info("Finished setting up.")
    DATASTORE_CLASS = PusherSlaveStore

    def remove_pusher(self, app_id, push_key, user_id):
        self.get_tcp_replication().send_remove_pusher(app_id, push_key, user_id)
@@ -147,8 +146,9 @@ class PusherReplicationHandler(ReplicationClientHandler):

        self.pusher_pool = hs.get_pusherpool()

    @defer.inlineCallbacks
    def on_rdata(self, stream_name, token, rows):
        super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows)
        yield super(PusherReplicationHandler, self).on_rdata(stream_name, token, rows)
        run_in_background(self.poke_pushers, stream_name, token, rows)

    @defer.inlineCallbacks
@@ -183,7 +183,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
    def start_pusher(self, user_id, app_id, pushkey):
        key = "%s:%s" % (app_id, pushkey)
        logger.info("Starting pusher %r / %r", user_id, key)
        return self.pusher_pool._refresh_pusher(app_id, pushkey, user_id)
        return self.pusher_pool.start_pusher_by_id(app_id, pushkey, user_id)


def start(config_options):
@@ -192,7 +192,7 @@ def start(config_options):
            "Synapse pusher", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.pusher"
@@ -229,7 +229,6 @@ def start(config_options):
    def start():
        ps.get_pusherpool().start()
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)


@@ -17,6 +17,11 @@ import contextlib
import logging
import sys

from six import iteritems

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse.api.constants import EventTypes
from synapse.app import _base
@@ -28,7 +33,7 @@ from synapse.http.server import JsonResource
from synapse.http.site import SynapseSite
from synapse.metrics import RegistryProxy
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
from synapse.replication.slave.storage._base import BaseSlavedStore
from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
from synapse.replication.slave.storage.account_data import SlavedAccountDataStore
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.client_ips import SlavedClientIpStore
@@ -36,12 +41,12 @@ from synapse.replication.slave.storage.deviceinbox import SlavedDeviceInboxStore
from synapse.replication.slave.storage.devices import SlavedDeviceStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.filtering import SlavedFilteringStore
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
from synapse.replication.slave.storage.presence import SlavedPresenceStore
from synapse.replication.slave.storage.push_rule import SlavedPushRuleStore
from synapse.replication.slave.storage.receipts import SlavedReceiptsStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.slave.storage.room import RoomStore
from synapse.replication.slave.storage.groups import SlavedGroupServerStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.rest.client.v1 import events
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
@@ -50,16 +55,11 @@ from synapse.rest.client.v2_alpha import sync
from synapse.server import HomeServer
from synapse.storage.engines import create_engine
from synapse.storage.presence import UserPresenceState
from synapse.storage.roommember import RoomMemberStore
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext, run_in_background
from synapse.util.manhole import manhole
from synapse.util.stringutils import random_string
from synapse.util.versionstring import get_version_string
from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

from six import iteritems

logger = logging.getLogger("synapse.app.synchrotron")

@@ -80,9 +80,7 @@ class SynchrotronSlavedStore(
    RoomStore,
    BaseSlavedStore,
):
    did_forget = (
        RoomMemberStore.__dict__["did_forget"]
    )
    pass


UPDATE_SYNCING_USERS_MS = 10 * 1000
@@ -116,7 +114,10 @@ class SynchrotronPresence(object):
        logger.info("Presence process_id is %r", self.process_id)

    def send_user_sync(self, user_id, is_syncing, last_sync_ms):
        self.hs.get_tcp_replication().send_user_sync(user_id, is_syncing, last_sync_ms)
        if self.hs.config.use_presence:
            self.hs.get_tcp_replication().send_user_sync(
                user_id, is_syncing, last_sync_ms
            )

    def mark_as_coming_online(self, user_id):
        """A user has started syncing. Send a UserSync to the master, unless they
@@ -146,7 +147,7 @@ class SynchrotronPresence(object):
        and haven't come back yet. If there are poke the master about them.
        """
        now = self.clock.time_msec()
        for user_id, last_sync_ms in self.users_going_offline.items():
        for user_id, last_sync_ms in list(self.users_going_offline.items()):
            if now - last_sync_ms > 10 * 1000:
                self.users_going_offline.pop(user_id, None)
                self.send_user_sync(user_id, False, last_sync_ms)
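Wrapping .items() in list(...) above is not cosmetic: the loop body pops entries out of users_going_offline, and on Python 3 items() returns a live view, so mutating the dict mid-iteration raises RuntimeError. A small demonstration with made-up data:

users_going_offline = {"@a:hs": 1000, "@b:hs": 99000}
now = 100000

# list(...) snapshots the items, so popping inside the loop is safe
for user_id, last_sync_ms in list(users_going_offline.items()):
    if now - last_sync_ms > 10 * 1000:
        users_going_offline.pop(user_id, None)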
@@ -155,9 +156,9 @@ class SynchrotronPresence(object):
        # TODO Hows this supposed to work?
        pass

    get_states = PresenceHandler.get_states.__func__
    get_state = PresenceHandler.get_state.__func__
    current_state_for_users = PresenceHandler.current_state_for_users.__func__
    get_states = __func__(PresenceHandler.get_states)
    get_state = __func__(PresenceHandler.get_state)
    current_state_for_users = __func__(PresenceHandler.current_state_for_users)

    def user_syncing(self, user_id, affect_presence):
        if affect_presence:
@@ -207,16 +208,19 @@ class SynchrotronPresence(object):
        ) for row in rows]

        for state in states:
            self.user_to_current_state[row.user_id] = state
            self.user_to_current_state[state.user_id] = state

        stream_id = token
        yield self.notify_from_replication(states, stream_id)

    def get_currently_syncing_users(self):
        return [
            user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
            if count > 0
        ]
        if self.hs.config.use_presence:
            return [
                user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
                if count > 0
            ]
        else:
            return set()


class SynchrotronTyping(object):
@@ -245,10 +249,7 @@ class SynchrotronApplicationService(object):


class SynchrotronServer(HomeServer):
    def setup(self):
        logger.info("Setting up.")
        self.datastore = SynchrotronSlavedStore(self.get_db_conn(), self)
        logger.info("Finished setting up.")
    DATASTORE_CLASS = SynchrotronSlavedStore

    def _listen_http(self, listener_config):
        port = listener_config["port"]
@@ -334,8 +335,9 @@ class SyncReplicationHandler(ReplicationClientHandler):
        self.presence_handler = hs.get_presence_handler()
        self.notifier = hs.get_notifier()

    @defer.inlineCallbacks
    def on_rdata(self, stream_name, token, rows):
        super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows)
        yield super(SyncReplicationHandler, self).on_rdata(stream_name, token, rows)
        run_in_background(self.process_and_notify, stream_name, token, rows)

    def get_streams_to_replicate(self):
@@ -408,7 +410,7 @@ def start(config_options):
            "Synapse synchrotron", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.synchrotron"
@@ -433,7 +435,6 @@ def start(config_options):

    def start():
        ss.get_datastore().start_profiling()
        ss.get_state_handler().start_caching()

    reactor.callWhenRunning(start)

@@ -1,281 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import collections
import glob
import os
import os.path
import signal
import subprocess
import sys
import yaml
import errno
import time

SYNAPSE = [sys.executable, "-B", "-m", "synapse.app.homeserver"]

GREEN = "\x1b[1;32m"
YELLOW = "\x1b[1;33m"
RED = "\x1b[1;31m"
NORMAL = "\x1b[m"


def pid_running(pid):
    try:
        os.kill(pid, 0)
        return True
    except OSError as err:
        if err.errno == errno.EPERM:
            return True
        return False


def write(message, colour=NORMAL, stream=sys.stdout):
    if colour == NORMAL:
        stream.write(message + "\n")
    else:
        stream.write(colour + message + NORMAL + "\n")


def abort(message, colour=RED, stream=sys.stderr):
    write(message, colour, stream)
    sys.exit(1)


def start(configfile):
    write("Starting ...")
    args = SYNAPSE
    args.extend(["--daemonize", "-c", configfile])

    try:
        subprocess.check_call(args)
        write("started synapse.app.homeserver(%r)" %
              (configfile,), colour=GREEN)
    except subprocess.CalledProcessError as e:
        write(
            "error starting (exit code: %d); see above for logs" % e.returncode,
            colour=RED,
        )


def start_worker(app, configfile, worker_configfile):
    args = [
        "python", "-B",
        "-m", app,
        "-c", configfile,
        "-c", worker_configfile
    ]

    try:
        subprocess.check_call(args)
        write("started %s(%r)" % (app, worker_configfile), colour=GREEN)
    except subprocess.CalledProcessError as e:
        write(
            "error starting %s(%r) (exit code: %d); see above for logs" % (
                app, worker_configfile, e.returncode,
            ),
            colour=RED,
        )


def stop(pidfile, app):
    if os.path.exists(pidfile):
        pid = int(open(pidfile).read())
        try:
            os.kill(pid, signal.SIGTERM)
            write("stopped %s" % (app,), colour=GREEN)
        except OSError as err:
            if err.errno == errno.ESRCH:
                write("%s not running" % (app,), colour=YELLOW)
            elif err.errno == errno.EPERM:
                abort("Cannot stop %s: Operation not permitted" % (app,))
            else:
                abort("Cannot stop %s: Unknown error" % (app,))


Worker = collections.namedtuple("Worker", [
    "app", "configfile", "pidfile", "cache_factor"
])


def main():

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "action",
        choices=["start", "stop", "restart"],
        help="whether to start, stop or restart the synapse",
    )
    parser.add_argument(
        "configfile",
        nargs="?",
        default="homeserver.yaml",
        help="the homeserver config file, defaults to homeserver.yaml",
    )
    parser.add_argument(
        "-w", "--worker",
        metavar="WORKERCONFIG",
        help="start or stop a single worker",
    )
    parser.add_argument(
        "-a", "--all-processes",
        metavar="WORKERCONFIGDIR",
        help="start or stop all the workers in the given directory"
             " and the main synapse process",
    )

    options = parser.parse_args()

    if options.worker and options.all_processes:
        write(
            'Cannot use "--worker" with "--all-processes"',
            stream=sys.stderr
        )
        sys.exit(1)

    configfile = options.configfile

    if not os.path.exists(configfile):
        write(
            "No config file found\n"
            "To generate a config file, run '%s -c %s --generate-config"
            " --server-name=<server name>'\n" % (
                " ".join(SYNAPSE), options.configfile
            ),
            stream=sys.stderr,
        )
        sys.exit(1)

    with open(configfile) as stream:
        config = yaml.load(stream)

    pidfile = config["pid_file"]
    cache_factor = config.get("synctl_cache_factor")
    start_stop_synapse = True

    if cache_factor:
        os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)

    cache_factors = config.get("synctl_cache_factors", {})
    for cache_name, factor in cache_factors.iteritems():
        os.environ["SYNAPSE_CACHE_FACTOR_" + cache_name.upper()] = str(factor)

    worker_configfiles = []
    if options.worker:
        start_stop_synapse = False
        worker_configfile = options.worker
        if not os.path.exists(worker_configfile):
            write(
                "No worker config found at %r" % (worker_configfile,),
                stream=sys.stderr,
            )
            sys.exit(1)
        worker_configfiles.append(worker_configfile)

    if options.all_processes:
        # To start the main synapse with -a you need to add a worker file
        # with worker_app == "synapse.app.homeserver"
        start_stop_synapse = False
        worker_configdir = options.all_processes
        if not os.path.isdir(worker_configdir):
            write(
                "No worker config directory found at %r" % (worker_configdir,),
                stream=sys.stderr,
            )
            sys.exit(1)
        worker_configfiles.extend(sorted(glob.glob(
            os.path.join(worker_configdir, "*.yaml")
        )))

    workers = []
    for worker_configfile in worker_configfiles:
        with open(worker_configfile) as stream:
            worker_config = yaml.load(stream)
        worker_app = worker_config["worker_app"]
        if worker_app == "synapse.app.homeserver":
            # We need to special case all of this to pick up options that may
            # be set in the main config file or in this worker config file.
            worker_pidfile = (
                worker_config.get("pid_file")
                or pidfile
            )
            worker_cache_factor = worker_config.get("synctl_cache_factor") or cache_factor
            daemonize = worker_config.get("daemonize") or config.get("daemonize")
            assert daemonize, "Main process must have daemonize set to true"

            # The master process doesn't support using worker_* config.
            for key in worker_config:
                if key == "worker_app":  # But we allow worker_app
                    continue
                assert not key.startswith("worker_"), \
                    "Main process cannot use worker_* config"
        else:
            worker_pidfile = worker_config["worker_pid_file"]
            worker_daemonize = worker_config["worker_daemonize"]
            assert worker_daemonize, "In config %r: expected '%s' to be True" % (
                worker_configfile, "worker_daemonize")
            worker_cache_factor = worker_config.get("synctl_cache_factor")
        workers.append(Worker(
            worker_app, worker_configfile, worker_pidfile, worker_cache_factor,
        ))

    action = options.action

    if action == "stop" or action == "restart":
        for worker in workers:
            stop(worker.pidfile, worker.app)

        if start_stop_synapse:
            stop(pidfile, "synapse.app.homeserver")

    # Wait for synapse to actually shutdown before starting it again
    if action == "restart":
        running_pids = []
        if start_stop_synapse and os.path.exists(pidfile):
            running_pids.append(int(open(pidfile).read()))
        for worker in workers:
            if os.path.exists(worker.pidfile):
                running_pids.append(int(open(worker.pidfile).read()))
        if len(running_pids) > 0:
            write("Waiting for process to exit before restarting...")
            for running_pid in running_pids:
                while pid_running(running_pid):
                    time.sleep(0.2)
            write("All processes exited; now restarting...")

    if action == "start" or action == "restart":
        if start_stop_synapse:
            # Check if synapse is already running
            if os.path.exists(pidfile) and pid_running(int(open(pidfile).read())):
                abort("synapse.app.homeserver already running")
            start(configfile)

        for worker in workers:
            if worker.cache_factor:
                os.environ["SYNAPSE_CACHE_FACTOR"] = str(worker.cache_factor)

            start_worker(worker.app, configfile, worker.configfile)

            if cache_factor:
                os.environ["SYNAPSE_CACHE_FACTOR"] = str(cache_factor)
            else:
                os.environ.pop("SYNAPSE_CACHE_FACTOR", None)


if __name__ == "__main__":
    main()
@@ -17,6 +17,9 @@
import logging
import sys

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.app import _base
@@ -43,8 +46,6 @@ from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext, run_in_background
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string
from twisted.internet import reactor, defer
from twisted.web.resource import NoResource

logger = logging.getLogger("synapse.app.user_dir")

@@ -93,10 +94,7 @@ class UserDirectorySlaveStore(


class UserDirectoryServer(HomeServer):
    def setup(self):
        logger.info("Setting up.")
        self.datastore = UserDirectorySlaveStore(self.get_db_conn(), self)
        logger.info("Finished setting up.")
    DATASTORE_CLASS = UserDirectorySlaveStore

    def _listen_http(self, listener_config):
        port = listener_config["port"]
@@ -168,8 +166,9 @@ class UserDirectoryReplicationHandler(ReplicationClientHandler):
        super(UserDirectoryReplicationHandler, self).__init__(hs.get_datastore())
        self.user_directory = hs.get_user_directory_handler()

    @defer.inlineCallbacks
    def on_rdata(self, stream_name, token, rows):
        super(UserDirectoryReplicationHandler, self).on_rdata(
        yield super(UserDirectoryReplicationHandler, self).on_rdata(
            stream_name, token, rows
        )
        if stream_name == "current_state_deltas":
@@ -189,7 +188,7 @@ def start(config_options):
            "Synapse user directory", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + e.message + "\n")
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    assert config.worker_app == "synapse.app.user_dir"
@@ -213,11 +212,13 @@ def start(config_options):
    config.update_user_directory = True

    tls_server_context_factory = context_factory.ServerContextFactory(config)
    tls_client_options_factory = context_factory.ClientTLSOptionsFactory(config)

    ps = UserDirectoryServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        tls_client_options_factory=tls_client_options_factory,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
@@ -228,7 +229,6 @@ def start(config_options):

    def start():
        ps.get_datastore().start_profiling()
        ps.get_state_handler().start_caching()

    reactor.callWhenRunning(start)


@@ -12,17 +12,17 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.api.constants import EventTypes
from synapse.util.caches.descriptors import cachedInlineCallbacks
from synapse.types import GroupID, get_domain_from_id

from twisted.internet import defer

import logging
import re

from six import string_types

from twisted.internet import defer

from synapse.api.constants import EventTypes
from synapse.types import GroupID, get_domain_from_id
from synapse.util.caches.descriptors import cachedInlineCallbacks

logger = logging.getLogger(__name__)


@@ -12,19 +12,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from six.moves import urllib

from prometheus_client import Counter

from twisted.internet import defer

from synapse.api.constants import ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException
from synapse.http.client import SimpleHttpClient
from synapse.events.utils import serialize_event
from synapse.util.caches.response_cache import ResponseCache
from synapse.http.client import SimpleHttpClient
from synapse.types import ThirdPartyInstanceID

import logging
import urllib

from prometheus_client import Counter
from synapse.util.caches.response_cache import ResponseCache

logger = logging.getLogger(__name__)

@@ -98,7 +99,7 @@ class ApplicationServiceApi(SimpleHttpClient):
    def query_user(self, service, user_id):
        if service.url is None:
            defer.returnValue(False)
        uri = service.url + ("/users/%s" % urllib.quote(user_id))
        uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
        response = None
        try:
            response = yield self.get_json(uri, {
@@ -119,7 +120,7 @@ class ApplicationServiceApi(SimpleHttpClient):
    def query_alias(self, service, alias):
        if service.url is None:
            defer.returnValue(False)
        uri = service.url + ("/rooms/%s" % urllib.quote(alias))
        uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
        response = None
        try:
            response = yield self.get_json(uri, {
@@ -153,7 +154,7 @@ class ApplicationServiceApi(SimpleHttpClient):
            service.url,
            APP_SERVICE_PREFIX,
            kind,
            urllib.quote(protocol)
            urllib.parse.quote(protocol)
        )
        try:
            response = yield self.get_json(uri, fields)
@@ -188,7 +189,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        uri = "%s%s/thirdparty/protocol/%s" % (
            service.url,
            APP_SERVICE_PREFIX,
            urllib.quote(protocol)
            urllib.parse.quote(protocol)
        )
        try:
            info = yield self.get_json(uri, {})
@@ -228,7 +229,7 @@ class ApplicationServiceApi(SimpleHttpClient):
        txn_id = str(txn_id)

        uri = service.url + ("/transactions/%s" %
                             urllib.quote(txn_id))
                             urllib.parse.quote(txn_id))
        try:
            yield self.put_json(
                uri=uri,
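urllib.quote moved to urllib.parse.quote in Python 3; the rewritten header imports urllib from six.moves, which maps urllib.parse onto the right module on both interpreters. For example:

from six.moves import urllib

user_id = "@alice:example.com"
# six.moves.urllib.parse is urllib on py2 and urllib.parse on py3
uri = "/users/%s" % urllib.parse.quote(user_id)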
@@ -48,14 +48,14 @@ UP & quit +---------- YES SUCCESS
This is all tied together by the AppServiceScheduler which DIs the required
components.
"""
import logging

from twisted.internet import defer

from synapse.appservice import ApplicationServiceState
from synapse.util.logcontext import run_in_background
from synapse.util.metrics import Measure

import logging

logger = logging.getLogger(__name__)


@@ -25,10 +25,10 @@ if __name__ == "__main__":
        try:
            config = HomeServerConfig.load_config("", sys.argv[3:])
        except ConfigError as e:
            sys.stderr.write("\n" + e.message + "\n")
            sys.stderr.write("\n" + str(e) + "\n")
            sys.exit(1)

        print (getattr(config, key))
        print(getattr(config, key))
        sys.exit(0)
    else:
        sys.stderr.write("Unknown command %r\n" % (action,))

@@ -16,11 +16,12 @@
import argparse
import errno
import os
import yaml
from textwrap import dedent

from six import integer_types

import yaml


class ConfigError(Exception):
    pass
@@ -105,10 +106,7 @@ class Config(object):
    @classmethod
    def check_file(cls, file_path, config_name):
        if file_path is None:
            raise ConfigError(
                "Missing config for %s."
                % (config_name,)
            )
            raise ConfigError("Missing config for %s." % (config_name,))
        try:
            os.stat(file_path)
        except OSError as e:
@@ -127,9 +125,7 @@ class Config(object):
            if e.errno != errno.EEXIST:
                raise
        if not os.path.isdir(dir_path):
            raise ConfigError(
                "%s is not a directory" % (dir_path,)
            )
            raise ConfigError("%s is not a directory" % (dir_path,))
        return dir_path

    @classmethod
@@ -155,21 +151,20 @@ class Config(object):
        return results

    def generate_config(
        self,
        config_dir_path,
        server_name,
        is_generating_file,
        report_stats=None,
        self, config_dir_path, server_name, is_generating_file, report_stats=None
    ):
        default_config = "# vim:ft=yaml\n"

        default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
            "default_config",
            config_dir_path=config_dir_path,
            server_name=server_name,
            is_generating_file=is_generating_file,
            report_stats=report_stats,
        ))
        default_config += "\n\n".join(
            dedent(conf)
            for conf in self.invoke_all(
                "default_config",
                config_dir_path=config_dir_path,
                server_name=server_name,
                is_generating_file=is_generating_file,
                report_stats=report_stats,
            )
        )

        config = yaml.load(default_config)

@@ -177,23 +172,22 @@ class Config(object):

    @classmethod
    def load_config(cls, description, argv):
        config_parser = argparse.ArgumentParser(
            description=description,
        )
        config_parser = argparse.ArgumentParser(description=description)
        config_parser.add_argument(
            "-c", "--config-path",
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files."
            " may specify directories containing *.yaml files.",
        )

        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Where files such as certs and signing keys are stored when"
            " their location is given explicitly in the config."
            " Defaults to the directory containing the last config file",
            " their location is given explicitly in the config."
            " Defaults to the directory containing the last config file",
        )

        config_args = config_parser.parse_args(argv)
@@ -202,9 +196,7 @@ class Config(object):

        obj = cls()
        obj.read_config_files(
            config_files,
            keys_directory=config_args.keys_directory,
            generate_keys=False,
            config_files, keys_directory=config_args.keys_directory, generate_keys=False
        )
        return obj

@@ -212,38 +204,38 @@ class Config(object):
    def load_or_generate_config(cls, description, argv):
        config_parser = argparse.ArgumentParser(add_help=False)
        config_parser.add_argument(
            "-c", "--config-path",
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files."
            " may specify directories containing *.yaml files.",
        )
        config_parser.add_argument(
            "--generate-config",
            action="store_true",
            help="Generate a config file for the server name"
            help="Generate a config file for the server name",
        )
        config_parser.add_argument(
            "--report-stats",
            action="store",
            help="Whether the generated config reports anonymized usage statistics",
            choices=["yes", "no"]
            choices=["yes", "no"],
        )
        config_parser.add_argument(
            "--generate-keys",
            action="store_true",
            help="Generate any missing key files then exit"
            help="Generate any missing key files then exit",
        )
        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Used with 'generate-*' options to specify where files such as"
            " certs and signing keys should be stored in, unless explicitly"
            " specified in the config."
            " certs and signing keys should be stored in, unless explicitly"
            " specified in the config.",
        )
        config_parser.add_argument(
            "-H", "--server-name",
            help="The server name to generate a config file for"
            "-H", "--server-name", help="The server name to generate a config file for"
        )
        config_args, remaining_args = config_parser.parse_known_args(argv)

@@ -256,8 +248,8 @@ class Config(object):
        if config_args.generate_config:
            if config_args.report_stats is None:
                config_parser.error(
                    "Please specify either --report-stats=yes or --report-stats=no\n\n" +
                    MISSING_REPORT_STATS_SPIEL
                    "Please specify either --report-stats=yes or --report-stats=no\n\n"
                    + MISSING_REPORT_STATS_SPIEL
                )
            if not config_files:
                config_parser.error(
@@ -286,26 +278,32 @@ class Config(object):
                        config_dir_path=config_dir_path,
                        server_name=server_name,
                        report_stats=(config_args.report_stats == "yes"),
                        is_generating_file=True
                        is_generating_file=True,
                    )
                    obj.invoke_all("generate_files", config)
                    config_file.write(config_str)
                print((
                    "A config file has been generated in %r for server name"
                    " %r with corresponding SSL keys and self-signed"
                    " certificates. Please review this file and customise it"
                    " to your needs."
                ) % (config_path, server_name))
                print(
                    (
                        "A config file has been generated in %r for server name"
                        " %r with corresponding SSL keys and self-signed"
                        " certificates. Please review this file and customise it"
                        " to your needs."
                    )
                    % (config_path, server_name)
                )
                print(
                    "If this server name is incorrect, you will need to"
                    " regenerate the SSL certificates"
                )
                return
            else:
                print((
                    "Config file %r already exists. Generating any missing key"
                    " files."
                ) % (config_path,))
                print(
                    (
                        "Config file %r already exists. Generating any missing key"
                        " files."
                    )
                    % (config_path,)
                )
                generate_keys = True

        parser = argparse.ArgumentParser(
@@ -337,8 +335,7 @@ class Config(object):

        return obj

    def read_config_files(self, config_files, keys_directory=None,
                          generate_keys=False):
    def read_config_files(self, config_files, keys_directory=None, generate_keys=False):
        if not keys_directory:
            keys_directory = os.path.dirname(config_files[-1])

@@ -363,8 +360,9 @@ class Config(object):

        if "report_stats" not in config:
            raise ConfigError(
                MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" +
                MISSING_REPORT_STATS_SPIEL
                MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS
                + "\n"
                + MISSING_REPORT_STATS_SPIEL
            )

        if generate_keys:
@@ -398,16 +396,16 @@ def find_config_files(search_paths):
            for entry in os.listdir(config_path):
                entry_path = os.path.join(config_path, entry)
                if not os.path.isfile(entry_path):
                    print (
                        "Found subdirectory in config directory: %r. IGNORING."
                    ) % (entry_path, )
                    err = "Found subdirectory in config directory: %r. IGNORING."
                    print(err % (entry_path,))
                    continue

                if not entry.endswith(".yaml"):
                    print (
                        "Found file in config directory that does not"
                        " end in '.yaml': %r. IGNORING."
                    ) % (entry_path, )
                    err = (
                        "Found file in config directory that does not end in "
                        "'.yaml': %r. IGNORING."
                    )
                    print(err % (entry_path,))
                    continue

                files.append(entry_path)

@@ -12,10 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from ._base import Config

from synapse.api.constants import EventTypes

from ._base import Config


class ApiConfig(Config):


@@ -12,19 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from ._base import Config, ConfigError

from synapse.appservice import ApplicationService
from synapse.types import UserID

from netaddr import IPSet

import yaml
import logging

from six import string_types
from six.moves.urllib import parse as urlparse

import yaml
from netaddr import IPSet

from synapse.appservice import ApplicationService
from synapse.types import UserID

from ._base import Config, ConfigError

logger = logging.getLogger(__name__)


@@ -13,10 +13,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import print_function

# This file can't be called email.py because if it is, we cannot:
import email.utils
import logging
import os

from ._base import Config
import pkg_resources

from ._base import Config, ConfigError

logger = logging.getLogger(__name__)


class EmailConfig(Config):
@@ -38,7 +46,6 @@ class EmailConfig(Config):
                "smtp_host",
                "smtp_port",
                "notif_from",
                "template_dir",
                "notif_template_html",
                "notif_template_text",
            ]
@@ -62,9 +69,26 @@ class EmailConfig(Config):
            self.email_smtp_host = email_config["smtp_host"]
            self.email_smtp_port = email_config["smtp_port"]
            self.email_notif_from = email_config["notif_from"]
            self.email_template_dir = email_config["template_dir"]
            self.email_notif_template_html = email_config["notif_template_html"]
            self.email_notif_template_text = email_config["notif_template_text"]

            template_dir = email_config.get("template_dir")
            # we need an absolute path, because we change directory after starting (and
            # we don't yet know what auxilliary templates like mail.css we will need).
            # (Note that loading as package_resources with jinja.PackageLoader doesn't
            # work for the same reason.)
            if not template_dir:
                template_dir = pkg_resources.resource_filename(
                    'synapse', 'res/templates'
                )
            template_dir = os.path.abspath(template_dir)

            for f in self.email_notif_template_text, self.email_notif_template_html:
                p = os.path.join(template_dir, f)
                if not os.path.isfile(p):
                    raise ConfigError("Unable to find email template file %s" % (p, ))
            self.email_template_dir = template_dir

            self.email_notif_for_new_users = email_config.get(
                "notif_for_new_users", True
            )
@@ -113,7 +137,9 @@ class EmailConfig(Config):
        #   require_transport_security: False
        #   notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
        #   app_name: Matrix
        #   template_dir: res/templates
        #   # if template_dir is unset, uses the example templates that are part of
        #   # the Synapse distribution.
        #   #template_dir: res/templates
        #   notif_template_html: notif_mail.html
        #   notif_template_text: notif_mail.txt
        #   notif_for_new_users: True

@@ -13,32 +13,33 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .tls import TlsConfig
from .server import ServerConfig
from .logger import LoggingConfig
from .database import DatabaseConfig
from .ratelimiting import RatelimitConfig
from .repository import ContentRepositoryConfig
from .captcha import CaptchaConfig
from .voip import VoipConfig
from .registration import RegistrationConfig
from .metrics import MetricsConfig
from .api import ApiConfig
from .appservice import AppServiceConfig
from .key import KeyConfig
from .saml2 import SAML2Config
from .captcha import CaptchaConfig
from .cas import CasConfig
from .password import PasswordConfig
from .jwt import JWTConfig
from .password_auth_providers import PasswordAuthProviderConfig
from .emailconfig import EmailConfig
from .workers import WorkerConfig
from .push import PushConfig
from .spam_checker import SpamCheckerConfig
from .groups import GroupsConfig
from .user_directory import UserDirectoryConfig
from .consent_config import ConsentConfig
from .database import DatabaseConfig
from .emailconfig import EmailConfig
from .groups import GroupsConfig
from .jwt_config import JWTConfig
from .key import KeyConfig
from .logger import LoggingConfig
from .metrics import MetricsConfig
from .password import PasswordConfig
from .password_auth_providers import PasswordAuthProviderConfig
from .push import PushConfig
from .ratelimiting import RatelimitConfig
from .registration import RegistrationConfig
from .repository import ContentRepositoryConfig
from .room_directory import RoomDirectoryConfig
from .saml2 import SAML2Config
from .server import ServerConfig
from .server_notices_config import ServerNoticesConfig
from .spam_checker import SpamCheckerConfig
from .tls import TlsConfig
from .user_directory import UserDirectoryConfig
from .voip import VoipConfig
from .workers import WorkerConfig


class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
@@ -49,7 +50,7 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
                       WorkerConfig, PasswordAuthProviderConfig, PushConfig,
                       SpamCheckerConfig, GroupsConfig, UserDirectoryConfig,
                       ConsentConfig,
                       ServerNoticesConfig,
                       ServerNoticesConfig, RoomDirectoryConfig,
                       ):
    pass


@@ -15,7 +15,6 @@
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
|
||||
MISSING_JWT = (
|
||||
"""Missing jwt library. This is required for jwt login.
|
||||
|
||||
@@ -13,21 +13,24 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
from synapse.util.stringutils import random_string
|
||||
from signedjson.key import (
|
||||
generate_signing_key, is_signing_algorithm_supported,
|
||||
decode_signing_key_base64, decode_verify_key_bytes,
|
||||
read_signing_keys, write_signing_keys, NACL_ED25519
|
||||
)
|
||||
from unpaddedbase64 import decode_base64
|
||||
from synapse.util.stringutils import random_string_with_symbols
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
|
||||
from signedjson.key import (
|
||||
NACL_ED25519,
|
||||
decode_signing_key_base64,
|
||||
decode_verify_key_bytes,
|
||||
generate_signing_key,
|
||||
is_signing_algorithm_supported,
|
||||
read_signing_keys,
|
||||
write_signing_keys,
|
||||
)
|
||||
from unpaddedbase64 import decode_base64
|
||||
|
||||
from synapse.util.stringutils import random_string, random_string_with_symbols
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
synapse/config/logger.py

@@ -16,15 +16,17 @@ import logging
 import logging.config
 import os
 import signal
-from string import Template
 import sys
+from string import Template
+
+import yaml

 from twisted.logger import STDLibLogObserver, globalLogBeginner
-import yaml

 import synapse
 from synapse.util.logcontext import LoggingContextFilter
 from synapse.util.versionstring import get_version_string

 from ._base import Config

 DEFAULT_LOG_CONFIG = Template("""
@@ -166,7 +168,8 @@ def setup_logging(config, use_worker_options=False):
         if log_file:
             # TODO: Customisable file size / backup count
             handler = logging.handlers.RotatingFileHandler(
-                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
+                log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
+                encoding='utf8'
             )

         def sighup(signum, stack):
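The only behavioural change in this hunk is the encoding='utf8' argument: without it, RotatingFileHandler opens the log file with the platform default encoding, and non-ASCII log messages can fail to write. A standalone sketch (the file name and logger name are illustrative):

import logging
import logging.handlers

# Rotate at ~100 MB, keep 3 backups, and write the file as UTF-8 so
# non-ASCII characters in log messages are stored safely.
handler = logging.handlers.RotatingFileHandler(
    "homeserver.log", maxBytes=(1000 * 1000 * 100), backupCount=3,
    encoding='utf8',
)
handler.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s"))

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning(u"caf\u00e9")  # writes cleanly regardless of platform encoding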
@@ -191,9 +194,8 @@ def setup_logging(config, use_worker_options=False):

         def sighup(signum, stack):
             # it might be better to use a file watcher or something for this.
-            logging.info("Reloading log config from %s due to SIGHUP",
-                         log_config)
             load_log_config()
+            logging.info("Reloaded log config from %s due to SIGHUP", log_config)

         load_log_config()
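The reordering also fixes a subtle truthfulness issue: the message is now logged after load_log_config() returns, so a reload that throws never claims success. A hedged standalone sketch of the same SIGHUP pattern (the helper name and arguments are ours, not Synapse's):

import logging
import signal


def install_sighup_reload(load_log_config, log_config_path):
    # Hypothetical helper mirroring the diff: reload first, report second,
    # so a failed reload never logs a misleading success message.
    def sighup(signum, stack):
        load_log_config()
        logging.info("Reloaded log config from %s due to SIGHUP", log_config_path)

    # SIGHUP only exists on POSIX platforms.
    signal.signal(signal.SIGHUP, sighup)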
@@ -225,7 +227,22 @@ def setup_logging(config, use_worker_options=False):
     #
     # However this may not be too much of a problem if we are just writing to a file.
     observer = STDLibLogObserver()

+    def _log(event):
+
+        if "log_text" in event:
+            if event["log_text"].startswith("DNSDatagramProtocol starting on "):
+                return
+
+            if event["log_text"].startswith("(UDP Port "):
+                return
+
+            if event["log_text"].startswith("Timing out client"):
+                return
+
+        return observer(event)
+
     globalLogBeginner.beginLoggingTo(
-        [observer],
+        [_log],
         redirectStandardIO=not config.no_redirect_stdio,
     )

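What this hunk does: instead of registering the STDLibLogObserver directly, it registers a thin wrapper that drops a few known-noisy Twisted events before they reach stdlib logging. A self-contained sketch of the same pattern (the prefixes filtered here are illustrative):

import logging

from twisted.logger import STDLibLogObserver, globalLogBeginner

logging.basicConfig(level=logging.INFO)

_observer = STDLibLogObserver()
_NOISY_PREFIXES = ("DNSDatagramProtocol starting on ", "(UDP Port ")


def _filtered(event):
    # Legacy-style Twisted events carry their message in "log_text";
    # swallow the noisy ones, forward everything else to stdlib logging.
    text = event.get("log_text", "")
    if any(text.startswith(prefix) for prefix in _NOISY_PREFIXES):
        return
    return _observer(event)


globalLogBeginner.beginLoggingTo([_filtered], redirectStandardIO=False)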
synapse/config/password_auth_providers.py

@@ -13,10 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ._base import Config
-
 from synapse.util.module_loader import load_module

+from ._base import Config
+
 LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider'

synapse/config/registration.py

@@ -13,12 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ._base import Config
-
-from synapse.util.stringutils import random_string_with_symbols
-
 from distutils.util import strtobool

+from synapse.config._base import Config, ConfigError
+from synapse.types import RoomAlias
+from synapse.util.stringutils import random_string_with_symbols
+

 class RegistrationConfig(Config):
@@ -44,6 +44,10 @@ class RegistrationConfig(Config):
         )

         self.auto_join_rooms = config.get("auto_join_rooms", [])
+        for room_alias in self.auto_join_rooms:
+            if not RoomAlias.is_valid(room_alias):
+                raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
+        self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)

     def default_config(self, **kwargs):
         registration_shared_secret = random_string_with_symbols(50)
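The effect of the new loop is fail-fast validation: a typo'd alias in auto_join_rooms now aborts startup with a ConfigError instead of surfacing later during registration. A rough sketch of the behaviour, with a simplified stand-in for synapse.types.RoomAlias.is_valid (the real check parses the alias properly):

class ConfigError(Exception):
    """Stand-in for synapse.config._base.ConfigError."""


def is_valid_alias(alias):
    # Hypothetical simplification: a full alias looks like "#room:server".
    return alias.startswith("#") and ":" in alias


config = {"auto_join_rooms": ["#welcome:example.com", "not-an-alias"]}

try:
    for room_alias in config.get("auto_join_rooms", []):
        if not is_valid_alias(room_alias):
            raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
except ConfigError as e:
    # startup aborted: Invalid auto_join_rooms entry not-an-alias
    print("startup aborted:", e)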
@@ -98,6 +102,13 @@ class RegistrationConfig(Config):
         # to these rooms
         #auto_join_rooms:
         #    - "#example:example.com"

+        # Where auto_join_rooms are specified, setting this flag ensures that
+        # the rooms exist by creating them when the first user on the
+        # homeserver registers.
+        # Setting to false means that if the rooms are not manually created,
+        # users cannot be auto-joined since they do not exist.
+        autocreate_auto_join_rooms: true
         """ % locals()

     def add_arguments(self, parser):
synapse/config/repository.py

@@ -13,11 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ._base import Config, ConfigError
 from collections import namedtuple

 from synapse.util.module_loader import load_module

+from ._base import Config, ConfigError

 MISSING_NETADDR = (
     "Missing netaddr library. This is required for URL preview API."
@@ -178,7 +178,7 @@ class ContentRepositoryConfig(Config):
     def default_config(self, **kwargs):
         media_store = self.default_path("media_store")
         uploads_path = self.default_path("uploads")
-        return """
+        return r"""
         # Directory where uploaded images and attachments are stored.
         media_store_path: "%(media_store)s"

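Why r""" here: making the template a raw string means any backslash sequences in the generated YAML (regex examples and the like) pass through literally rather than being interpreted as Python escapes, while %(...)s substitution is unaffected. A quick illustration:

# %-substitution behaves identically in raw and non-raw strings; only
# backslash handling differs.
template = r"""
media_store_path: "%(media_store)s"
# a raw string keeps a regex like \d+ intact, no double-escaping needed
"""
print(template % {"media_store": "/data/media_store"})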
synapse/config/room_directory.py  (new file, 102 lines)

@@ -0,0 +1,102 @@
# -*- coding: utf-8 -*-
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.util import glob_to_regex

from ._base import Config, ConfigError


class RoomDirectoryConfig(Config):
    def read_config(self, config):
        alias_creation_rules = config["alias_creation_rules"]

        self._alias_creation_rules = [
            _AliasRule(rule)
            for rule in alias_creation_rules
        ]

    def default_config(self, config_dir_path, server_name, **kwargs):
        return """
        # The `alias_creation_rules` option controls who's allowed to create
        # aliases on this server.
        #
        # The format of this option is a list of rules that contain globs that
        # match against user_id and the new alias (fully qualified with server
        # name). The action in the first rule that matches is taken, which can
        # currently either be "allow" or "deny".
        #
        # If no rules match the request is denied.
        alias_creation_rules:
            - user_id: "*"
              alias: "*"
              action: allow
        """

    def is_alias_creation_allowed(self, user_id, alias):
        """Checks if the given user is allowed to create the given alias

        Args:
            user_id (str)
            alias (str)

        Returns:
            boolean: True if user is allowed to create the alias
        """
        for rule in self._alias_creation_rules:
            if rule.matches(user_id, alias):
                return rule.action == "allow"

        return False


class _AliasRule(object):
    def __init__(self, rule):
        action = rule["action"]
        user_id = rule["user_id"]
        alias = rule["alias"]

        if action in ("allow", "deny"):
            self.action = action
        else:
            raise ConfigError(
                "alias_creation_rules rules can only have action of 'allow'"
                " or 'deny'"
            )

        try:
            self._user_id_regex = glob_to_regex(user_id)
            self._alias_regex = glob_to_regex(alias)
        except Exception as e:
            raise ConfigError("Failed to parse glob into regex: %s" % (e,))

    def matches(self, user_id, alias):
        """Tests if this rule matches the given user_id and alias.

        Args:
            user_id (str)
            alias (str)

        Returns:
            boolean
        """

        # Note: The regexes are anchored at both ends
        if not self._user_id_regex.match(user_id):
            return False

        if not self._alias_regex.match(alias):
            return False

        return True
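Evaluation is first-match-wins with an implicit final deny, so rule order matters. A usage sketch against the new class (assumes a Synapse checkout on the path; the rules themselves are illustrative):

from synapse.config.room_directory import RoomDirectoryConfig

rd_config = RoomDirectoryConfig()
rd_config.read_config({
    "alias_creation_rules": [
        # first match wins: block one user outright...
        {"user_id": "@spammer:example.com", "alias": "*", "action": "deny"},
        # ...then allow everyone else to create "#official-*" aliases
        {"user_id": "*", "alias": "#official-*:example.com", "action": "allow"},
    ],
})

print(rd_config.is_alias_creation_allowed(
    "@spammer:example.com", "#official-chat:example.com"))  # False
print(rd_config.is_alias_creation_allowed(
    "@alice:example.com", "#official-chat:example.com"))    # True
print(rd_config.is_alias_creation_allowed(
    "@alice:example.com", "#random:example.com"))           # False (no rule matched)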
synapse/config/server.py

@@ -17,6 +17,7 @@
 import logging

+from synapse.http.endpoint import parse_and_validate_server_name

 from ._base import Config, ConfigError

 logger = logging.Logger(__name__)
@@ -48,6 +49,9 @@ class ServerConfig(Config):
         # "disable" federation
         self.send_federation = config.get("send_federation", True)

+        # Whether to enable user presence.
+        self.use_presence = config.get("use_presence", True)
+
         # Whether to update the user directory or not. This should be set to
         # false only if we are updating the user directory in a worker
         self.update_user_directory = config.get("update_user_directory", True)
@@ -66,6 +70,31 @@ class ServerConfig(Config):
             "block_non_admin_invites", False,
         )

+        # Options to control access by tracking MAU
+        self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
+        self.max_mau_value = 0
+        if self.limit_usage_by_mau:
+            self.max_mau_value = config.get(
+                "max_mau_value", 0,
+            )
+
+        self.mau_limits_reserved_threepids = config.get(
+            "mau_limit_reserved_threepids", []
+        )
+
+        self.mau_trial_days = config.get(
+            "mau_trial_days", 0,
+        )
+
+        # Options to disable HS
+        self.hs_disabled = config.get("hs_disabled", False)
+        self.hs_disabled_message = config.get("hs_disabled_message", "")
+        self.hs_disabled_limit_type = config.get("hs_disabled_limit_type", "")
+
+        # Admin uri to direct users at should their instance become blocked
+        # due to resource constraints
+        self.admin_contact = config.get("admin_contact", None)
+
         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
         federation_domain_whitelist = config.get(
@@ -208,6 +237,8 @@ class ServerConfig(Config):
         # different cores. See
         # https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
         #
+        # This setting requires the affinity package to be installed!
+        #
         # cpu_affinity: 0xFFFFFFFF

         # Whether to serve a web client from the HTTP/HTTPS root resource.
@@ -227,6 +258,9 @@ class ServerConfig(Config):
         # hard limit.
         soft_file_limit: 0

+        # Set to false to disable presence tracking on this homeserver.
+        use_presence: true
+
         # The GC threshold parameters to pass to `gc.set_threshold`, if defined
         # gc_thresholds: [700, 10, 10]
@@ -318,6 +352,33 @@ class ServerConfig(Config):
         #   - port: 9000
         #     bind_addresses: ['::1', '127.0.0.1']
         #     type: manhole

+        # Homeserver blocking
+        #
+        # How to reach the server admin, used in ResourceLimitError
+        # admin_contact: 'mailto:admin@server.com'
+        #
+        # Global block config
+        #
+        # hs_disabled: False
+        # hs_disabled_message: 'Human readable reason for why the HS is blocked'
+        # hs_disabled_limit_type: 'error code(str), to help clients decode reason'
+        #
+        # Monthly Active User Blocking
+        #
+        # Enables monthly active user checking
+        # limit_usage_by_mau: False
+        # max_mau_value: 50
+        # mau_trial_days: 2
+        #
+        # Sometimes the server admin will want to ensure certain accounts are
+        # never blocked by mau checking. These accounts are specified here.
+        #
+        # mau_limit_reserved_threepids:
+        # - medium: 'email'
+        #   address: 'reserved_user@example.com'
+
         """ % locals()

     def read_arguments(self, args):
@@ -343,6 +404,23 @@ class ServerConfig(Config):
                              " service on the given port.")


+def is_threepid_reserved(config, threepid):
+    """Check the threepid against the reserved threepid config
+
+    Args:
+        config (ServerConfig): to access server config attributes
+        threepid (dict): the threepid to test for
+
+    Returns:
+        boolean: True if the threepid under test is a reserved user
+    """
+    for tp in config.mau_limits_reserved_threepids:
+        if (threepid['medium'] == tp['medium']
+                and threepid['address'] == tp['address']):
+            return True
+    return False
+
+
 def read_gc_thresholds(thresholds):
     """Reads the three integer thresholds for garbage collection. Ensures that
     the thresholds are integers if thresholds are supplied.
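A small usage sketch of is_threepid_reserved; the ServerConfig object is faked with a namespace here, and the function body is restated so the snippet runs standalone:

from types import SimpleNamespace  # stand-in for a parsed ServerConfig


def is_threepid_reserved(config, threepid):
    # Same logic as the function added above: a threepid is reserved if it
    # matches any entry in mau_limit_reserved_threepids exactly.
    for tp in config.mau_limits_reserved_threepids:
        if (threepid['medium'] == tp['medium']
                and threepid['address'] == tp['address']):
            return True
    return False


config = SimpleNamespace(mau_limits_reserved_threepids=[
    {"medium": "email", "address": "reserved_user@example.com"},
])

print(is_threepid_reserved(
    config, {"medium": "email", "address": "reserved_user@example.com"}))  # True
print(is_threepid_reserved(
    config, {"medium": "email", "address": "someone_else@example.com"}))   # False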
synapse/config/server_notices_config.py

@@ -12,9 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from ._base import Config
 from synapse.types import UserID

+from ._base import Config
+
 DEFAULT_CONFIG = """\
 # Server Notices room configuration
 #

synapse/config/tls.py

@@ -13,14 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from ._base import Config
+import os
+import subprocess
+from hashlib import sha256
+
+from unpaddedbase64 import encode_base64

 from OpenSSL import crypto
-import subprocess
-import os
-
-from hashlib import sha256
-from unpaddedbase64 import encode_base64
+
+from ._base import Config

 GENERATE_DH_PARAMS = False

Some files were not shown because too many files have changed in this diff.