Compare commits
434 Commits
erikj/acl_...erikj/rest
SHA1:
ce7051df61, 301141515a, 741777235c, f87a11e0fd, 76328b85f6, 17795161c3, cf1100887b, 314aabba82,
7d55314277, 1cd65a8d1e, 973ebb66ba, e51aa4be96, 92d8d724c5, c292dba70c, 396834f1c0, 1d9036aff2,
1ee3d26432, 82b8d4b86a, 57338a9768, 60728c8c9e, 04abf53a56, 8a519ac76d, d2fc591619, dc6094b908,
3559a835a2, 7dd4f79c49, bb4dddd6c4, 7a5818ed81, 184ba0968a, a247729806, f2fcc0a8cf, 372ac60375,
527d95dea0, cc3ab0c214, ca2abf9a6e, b35baf6f3c, f17aadd1b5, 6d59ffe1ce, b6e0303c83, eb011cd99b,
6d7f291b93, 7213588083, ee2d722f0f, 49c0a0b5c4, 95c304e3f9, 0c16285989, 1e101ed4a4, 8e3bbc9bd0,
0b5c9adeb5, b105996fc1, ffe8cf7e59, eb700cdc38, 16026e60c5, 0b1a55c60a, 663b96ae96, 2048388cfd,
8148c48f11, 2c8f16257a, 1107e83b54, 3b05b67c89, d4af08a167, 3bcbabc9fb, 9fc0aad567, 929ae19d00,
9cd5b9a802, 728d07c8c1, 91cb3b630d, dffc9c4ae0, e2054ce21a, 4ba8189b74, ca32c7a065, 184a5c81f0,
30768dcf40, 4ae73d16a9, a5b41b809f, 3f60481655, e1eb1f3fb9, 09cb5c7d33, dd0867f5ba, 3c166a24c5,
bc8b25eb56, 2c746382e0, 1d579df664, c0d1f37baf, ddfe30ba83, 89ae0166de, 6485f03d91, 81a93ddcc8,
e530208e68, dd42bb78d0, 417485eefa, 2ff439cff7, 709ba99afd, 9e4dacd5e7, d23bc77e2c, 73e4ad4b8b,
076e19da28, 3ead04ceef, 227b77409f, efeeff29f6, 1002bbd732, 9ad38c9807, bdf2e5865a, fd0a919af3,
e90f32646f, aaf319820a, a9ad647fb2, 77580addc3, 8e8955bcea, 8bab7abddd, 3cdfd37d95, 7ab401d4dc,
00149c063b, ab9e01809d, 236245f7d8, 57df6fffa7, b62c1395d6, e255c2c32f, 9c8eb4a809, b854a375b0,
cd800ad99a, 3e4de64bc9, d71af2ee12, b143641b20, 4d1ea40008, 8256a8ece7, a7122692d9, b442217d91,
c961cd7736, 5371c2a1f7, 4a6d894850, ddf4d2bd98, 66ec6cf9b8, 53c2eed862, f02532baad, 25b32b63ae,
e330c802e4, c9cb354b58, 5a9e0c3682, e85c7873dc, 86fac9c95e, 3063383547, 81450fded8, 4c56928263,
6f0c344ca7, 37f0ddca5f, d3c0e48859, 6a4b650d8a, 06094591c5, fd246fde89, 4f6fa981ec, 3cab86a122,
e768d7b3a6, efdaa5dd55, da51acf0e7, f4d552589e, 90fde4b8d7, 0de2aad061, 3f6f74686a, 82145912c3,
a2355fae7e, ee3fa1a99c, 59891a294f, 3e1029fe80, af7c1397d1, 460cad7c11, 825f0875bc, a9d8bd95e7,
bfb66773a4, 57619d6058, a0b181bd17, 1925a38f95, 747535f20f, 133d90abfb, 3a20cdcd27, d046adf4ec,
1d1c303b9b, d33f31d741, c63df2d4e0, f63208a1c0, 43f2e42bfd, 4bd05573e9, 12b1a47ba4, 37403ab06c,
2e31dd2ad3, 8b52fe48b5, d9088c923f, 86cef6a91b, 1c847af28a, cf8c04948f, aa361f51dc, 037481a033,
01fc3943f1, 571ac105e6, 51c53369a3, f093873d69, 61f36d9939, f8f3d72e2b, 78323ccdb3, 457970c724,
1bd1a43073, 0f6a25f670, b9490e8cbb, 5dbd102470, fd5ad0f00e, 745b72660a, 42f12ad92f, aa3c9c7bd0,
1f7642efa9, 3e9ee62db0, 21b71b6d7c, b1e35eabf2, c7788685b0, 8c74bd8960, f483340b3e, ea570ffaeb,
7049e1564f, d5a825edee, 225c244aba, 4e706ec82c, 31621c2e06, f90ea3dc73, ce2a7ed6e4, e8cf77fa49,
cecbd636e9, b578c822e3, 3befc9ccc3, d5c31e01f2, cb8201ba12, c141d47a28, 13a6517d89, 61cd03466f,
f764f92647, ca0d28ef34, 8a951540f6, 482648123f, fd88ea19c0, bb9611bd46, 9b63def388, 23b21e5215,
9d720223f2, 617501dd2a, 099ce4bc38, 22346a0ee7, cbd053bb8f, be27d81808, ade5342752, 4cf302de5b,
c50ad14bae, a0b8e5f2fe, aadb2238c9, f9e7493ac2, ecc59ae66e, 70e265e695, 09d23b6209, daa01842f8,
7f08ebb772, d7272f8d9d, 78fa346b07, a45ec7c651, 40da1f200d, abc6986a24, ce832c38d4, 42e858daeb,
e624cdec64, c3dd2ecd5e, 38a965b816, a82938416d, 0bfdaf1f4f, a5cbd20001, 128ed32e6b, 3e6fdfda00,
ee59af9ac0, 1469141023, cacdb529ab, 2d3462714e, f704c10f29, 6e7d36a72c, d3da63f766, 8199475ce0,
0d4abf7777, e55291ce5e, 8e254862f4, 85d0bc3bdc, cfc503681f, dc2a105fca, 83eb627b5a, 776ee6d92b,
f72ed6c6a3, 8899df13bf, 8f4165628b, d3d582bc1c, 4d8e1e1f9e, afef6f5d16, 2d97e65558, 1a9510bb84,
47abebfd6d, f9d4da7f45, 30883d8409, 891dfd90bd, 68b255c5a1, 95b0f5449d, 129ee4e149, c5966b2a97,
0cceb2ac92, d6bcc68ea7, b16cd18a86, 9f7f228ec2, 3d77e56c12, d884047d34, 2bb2c02571, 3d1cdda762,
57877b01d7, 5db5677969, 7e77a82c5f, 7eb4d626ba, 06750140f6, adbd720fab, 8b7ce2945b, a6c27de1aa,
c044aca1fd, ba5d34a832, 6a191d62ed, 21ac8be5f7, 3e4e367f09, 0fbed2a8fa, 998a72d4d9, c10ac7806e,
101ee3fd00, df361d08f7, 7b0e797080, 2eb91e6694, cfa62007a3, 5ce903e2f7, a7eeb34c64, f7e2f981ea,
bcc1d34d35, f4122c64b5, 415c2f0549, f43041aacd, 73605f8070, de3b7b55d6, d46208c12c, 4f11a5b2b5,
7bbaab9432, 7b49236b37, fdb724cb70, 7e3d1c7d92, d7451e0f22, 4807616e16, 275f7c987c, b24d7ebd6e,
2df8dd9b37, a23a760b3f, 7568fe880d, 4ff0228c25, dcd5983fe4, 45610305ea, 88e03da39f, 9dba813234,
53a817518b, 6eaa116867, 4762c276cb, dc8399ee00, 1b994a97dd, 10b874067b, 017b798e4f, 2c019eea11,
bb0a475c30, dcefac3b06, 559c51debc, 6f274f7e13, 7ce71f2ffc, 8c3a62b5c7, 86eaaa885b, e0b6e49466,
2cd6cb9f65, aa88582e00, 5119e416e8, 8f04b6fa7a, 7dec0b2bee, 06218ab125, 2352974aab, 9c5385b53a,
ffab798a38, 62126c996c, 3213ff630c, 20addfa358, 9eb5b23d3a, 0211890134, ffdb8c3828, e69b669083,
0db40d3e93, e3c8e2c13c, efe60d5e8c, b2c7bd4b09, b3768ec10a, b8e386db59, fe994e728f, 0ac61b1c78,
0caf30f94b, 1d08bf7c17, 63b1eaf32c, b811c98574, 433314cc34, 8049c9a71e, f596ff402e, 2efb93af52,
953dbd28a7, 7eea3e356f, 3e1b77efc2, b52b4a84ec, 07507643cb, 185ac7ee6c, a0dea6eaed, c67ba143fa,
e7768e77f5, 883aabe423, 07ad03d5df, e124128542, c77048e12f, 2e35a733cc, 413a4c289b, 4d6cb8814e,
39e21ea51c, 2da3b1e60b, 62c010283d, 459085184c, 33d83f3615, ff7c2e41de, 66da8f60d0, 378a0f7a79,
b8d49be5a1, 90abdaf3bc
.gitignore (vendored, 4 lines changed)

@@ -42,3 +42,7 @@ build/
 localhost-800*/
 static/client/register/register_config.js
 .tox
+
+env/
+*.config
+
CHANGES.rst (141 lines changed)

@@ -1,3 +1,144 @@
+Changes in synapse v0.10.0-r2 (2015-09-16)
+==========================================
+
+* Fix bug where we always fetched remote server signing keys instead of using
+  ones in our cache.
+* Fix adding threepids to an existing account.
+* Fix bug with inviting over federation where remote server was already in
+  the room. (PR #281, SYN-392)
+
+Changes in synapse v0.10.0-r1 (2015-09-08)
+==========================================
+
+* Fix bug with python packaging
+
+Changes in synapse v0.10.0 (2015-09-03)
+=======================================
+
+No change from release candidate.
+
+Changes in synapse v0.10.0-rc6 (2015-09-02)
+===========================================
+
+* Remove some of the old database upgrade scripts.
+* Fix database port script to work with newly created sqlite databases.
+
+Changes in synapse v0.10.0-rc5 (2015-08-27)
+===========================================
+
+* Fix bug that broke downloading files with ascii filenames across federation.
+
+Changes in synapse v0.10.0-rc4 (2015-08-27)
+===========================================
+
+* Allow UTF-8 filenames for upload. (PR #259)
+
+Changes in synapse v0.10.0-rc3 (2015-08-25)
+===========================================
+
+* Add ``--keys-directory`` config option to specify where files such as
+  certs and signing keys should be stored in, when using ``--generate-config``
+  or ``--generate-keys``. (PR #250)
+* Allow ``--config-path`` to specify a directory, causing synapse to use all
+  \*.yaml files in the directory as config files. (PR #249)
+* Add ``web_client_location`` config option to specify static files to be
+  hosted by synapse under ``/_matrix/client``. (PR #245)
+* Add helper utility to synapse to read and parse the config files and extract
+  the value of a given key. For example::
+
+    $ python -m synapse.config read server_name -c homeserver.yaml
+    localhost
+
+  (PR #246)
+
+
+Changes in synapse v0.10.0-rc2 (2015-08-24)
+===========================================
+
+* Fix bug where we incorrectly populated the ``event_forward_extremities``
+  table, resulting in problems joining large remote rooms (e.g.
+  ``#matrix:matrix.org``)
+* Reduce the number of times we wake up pushers by not listening for presence
+  or typing events, reducing the CPU cost of each pusher.
+
+
+Changes in synapse v0.10.0-rc1 (2015-08-21)
+===========================================
+
+Also see v0.9.4-rc1 changelog, which has been amalgamated into this release.
+
+General:
+
+* Upgrade to Twisted 15 (PR #173)
+* Add support for serving and fetching encryption keys over federation.
+  (PR #208)
+* Add support for logging in with email address (PR #234)
+* Add support for new ``m.room.canonical_alias`` event. (PR #233)
+* Change synapse to treat user IDs case insensitively during registration and
+  login. (If two users already exist with case insensitive matching user ids,
+  synapse will continue to require them to specify their user ids exactly.)
+* Error if a user tries to register with an email already in use. (PR #211)
+* Add extra and improve existing caches (PR #212, #219, #226, #228)
+* Batch various storage requests (PR #226, #228)
+* Fix bug where we didn't correctly log the entity that triggered the request
+  if the request came in via an application service (PR #230)
+* Fix bug where we needlessly regenerated the full list of rooms an AS is
+  interested in. (PR #232)
+* Add support for AS's to use v2_alpha registration API (PR #210)
+
+
+Configuration:
+
+* Add ``--generate-keys`` that will generate any missing cert and key files in
+  the configuration files. This is equivalent to running ``--generate-config``
+  on an existing configuration file. (PR #220)
+* ``--generate-config`` now no longer requires a ``--server-name`` parameter
+  when used on existing configuration files. (PR #220)
+* Add ``--print-pidfile`` flag that controls the printing of the pid to stdout
+  of the daemonised process. (PR #213)
+
+Media Repository:
+
+* Fix bug where we picked a lower resolution image than requested. (PR #205)
+* Add support for specifying if the media repository should dynamically
+  thumbnail images or not. (PR #206)
+
+Metrics:
+
+* Add statistics from the reactor to the metrics API. (PR #224, #225)
+
+Demo Homeservers:
+
+* Fix starting the demo homeservers without rate-limiting enabled. (PR #182)
+* Fix enabling registration on demo homeservers (PR #223)
+
+
+Changes in synapse v0.9.4-rc1 (2015-07-21)
+==========================================
+
+General:
+
+* Add basic implementation of receipts. (SPEC-99)
+* Add support for configuration presets in room creation API. (PR #203)
+* Add auth event that limits the visibility of history for new users.
+  (SPEC-134)
+* Add SAML2 login/registration support. (PR #201. Thanks Muthu Subramanian!)
+* Add client side key management APIs for end to end encryption. (PR #198)
+* Change power level semantics so that you cannot kick, ban or change power
+  levels of users that have equal or greater power level than you. (SYN-192)
+* Improve performance by bulk inserting events where possible. (PR #193)
+* Improve performance by bulk verifying signatures where possible. (PR #194)
+
+
+Configuration:
+
+* Add support for including TLS certificate chains.
+
+Media Repository:
+
+* Add Content-Disposition headers to content repository responses. (SYN-150)
+
+
 Changes in synapse v0.9.3 (2015-07-01)
 ======================================
 
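As an aside on the case-insensitivity bullet in the v0.10.0-rc1 notes above, the behaviour described amounts to something like the following toy sketch (illustrative only, not synapse's actual implementation; all names are invented):

    def find_user(requested_id, existing_ids):
        # An exact match always wins.
        if requested_id in existing_ids:
            return requested_id
        # Otherwise match case-insensitively, but only when unambiguous: if
        # two users differ only by case, callers must specify the id exactly.
        matches = [u for u in existing_ids if u.lower() == requested_id.lower()]
        return matches[0] if len(matches) == 1 else None

    users = {"@Alice:example.com", "@bob:example.com"}
    assert find_user("@alice:example.com", users) == "@Alice:example.com"
    assert find_user("@BOB:example.com", users) == "@bob:example.com"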
MANIFEST.in (13 lines changed)

@@ -3,13 +3,20 @@ include LICENSE
 include VERSION
 include *.rst
 include demo/README
-include demo/demo.tls.dh
-include demo/*.py
-include demo/*.sh
 
 recursive-include synapse/storage/schema *.sql
 recursive-include synapse/storage/schema *.py
 
+recursive-include demo *.dh
+recursive-include demo *.py
+recursive-include demo *.sh
 recursive-include docs *
 recursive-include scripts *
+recursive-include scripts-dev *
 recursive-include tests *.py
 
+recursive-include static *.css
+recursive-include static *.html
+recursive-include static *.js
+
 prune demo/etc
README.rst (94 lines changed)

@@ -7,7 +7,7 @@ Matrix is an ambitious new ecosystem for open federated Instant Messaging and
 VoIP. The basics you need to know to get up and running are:
 
 - Everything in Matrix happens in a room. Rooms are distributed and do not
-  exist on any single server. Rooms can be located using convenience aliases
+  exist on any single server. Rooms can be located using convenience aliases
   like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
 
 - Matrix user IDs look like ``@matthew:matrix.org`` (although in the future

@@ -23,7 +23,7 @@ The overall architecture is::
 accessed by the web client at http://matrix.org/beta or via an IRC bridge at
 irc://irc.freenode.net/matrix.
 
-Synapse is currently in rapid development, but as of version 0.5 we believe it
+Synapse is currently in rapid development, but as of version 0.5 we believe it
 is sufficiently stable to be run as an internet-facing service for real usage!
 
 About Matrix

@@ -94,6 +94,7 @@ Synapse is the reference python/twisted Matrix homeserver implementation.
 System requirements:
 - POSIX-compliant system (tested on Linux & OS X)
 - Python 2.7
+- At least 512 MB RAM.
 
 Synapse is written in python but some of the libraries it uses are written in
 C. So before we can install synapse itself we need a working C compiler and the

@@ -104,7 +105,7 @@ Installing prerequisites on Ubuntu or Debian::
     sudo apt-get install build-essential python2.7-dev libffi-dev \
                          python-pip python-setuptools sqlite3 \
                          libssl-dev python-virtualenv libjpeg-dev
 
 Installing prerequisites on ArchLinux::
 
     sudo pacman -S base-devel python2 python-pip \

@@ -115,11 +116,12 @@ Installing prerequisites on Mac OS X::
     xcode-select --install
     sudo easy_install pip
     sudo pip install virtualenv
 
 To install the synapse homeserver run::
 
     virtualenv -p python2.7 ~/.synapse
     source ~/.synapse/bin/activate
+    pip install --upgrade setuptools
     pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
 
 This installs synapse, along with the libraries it uses, into a virtual

@@ -174,13 +176,13 @@ traditionally used for convenience and simplicity.
 
 The advantages of Postgres include:
 
-* significant performance improvements due to the superior threading and
-  caching model, smarter query optimiser
-* allowing the DB to be run on separate hardware
-* allowing basic active/backup high-availability with a "hot spare" synapse
-  pointing at the same DB master, as well as enabling DB replication in
-  synapse itself.
+* significant performance improvements due to the superior threading and
+  caching model, smarter query optimiser
+* allowing the DB to be run on separate hardware
+* allowing basic active/backup high-availability with a "hot spare" synapse
+  pointing at the same DB master, as well as enabling DB replication in
+  synapse itself.
 
 The only disadvantage is that the code is relatively new as of April 2015 and
 may have a few regressions relative to SQLite.

@@ -190,8 +192,8 @@ For information on how to install and use PostgreSQL, please see
 Running Synapse
 ===============
 
-To actually run your new homeserver, pick a working directory for Synapse to run
-(e.g. ``~/.synapse``), and::
+To actually run your new homeserver, pick a working directory for Synapse to
+run (e.g. ``~/.synapse``), and::
 
     cd ~/.synapse
     source ./bin/activate

@@ -214,13 +216,13 @@ defaults to python 3, but synapse currently assumes python 2.7 by default:
 pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
 
     sudo pip2.7 install --upgrade pip
 
 You also may need to explicitly specify python 2.7 again during the install
 request::
 
     pip2.7 install --process-dependency-links \
         https://github.com/matrix-org/synapse/tarball/master
 
 If you encounter an error with lib bcrypt causing a Wrong ELF Class:
 ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
 compile it under the right architecture. (This should not be needed if

@@ -228,7 +230,7 @@ installing under virtualenv)::
 
     sudo pip2.7 uninstall py-bcrypt
     sudo pip2.7 install py-bcrypt
 
 During setup of Synapse you need to call python2.7 directly again::
 
     cd ~/.synapse

@@ -236,25 +238,27 @@ During setup of Synapse you need to call python2.7 directly again::
         --server-name machine.my.domain.name \
         --config-path homeserver.yaml \
         --generate-config
 
 ...substituting your host and domain name as appropriate.
 
 Windows Install
 ---------------
 Synapse can be installed on Cygwin. It requires the following Cygwin packages:
 
-- gcc
-- git
-- libffi-devel
-- openssl (and openssl-devel, python-openssl)
-- python
-- python-setuptools
+- gcc
+- git
+- libffi-devel
+- openssl (and openssl-devel, python-openssl)
+- python
+- python-setuptools
 
 The content repository requires additional packages and will be unable to process
 uploads without them:
-- libjpeg8
-- libjpeg8-devel
-- zlib
+
+- libjpeg8
+- libjpeg8-devel
+- zlib
 
 If you choose to install Synapse without these packages, you will need to reinstall
 ``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
 pillow --user``

@@ -276,21 +280,26 @@ Troubleshooting
 Troubleshooting Installation
 ----------------------------
 
-Synapse requires pip 1.7 or later, so if your OS provides too old a version and
-you get errors about ``error: no such option: --process-dependency-links`` you
+Synapse requires pip 1.7 or later, so if your OS provides too old a version and
+you get errors about ``error: no such option: --process-dependency-links`` you
 may need to manually upgrade it::
 
     sudo pip install --upgrade pip
 
+Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
+You can fix this by upgrading setuptools::
+
+    pip install --upgrade setuptools
+
 If pip crashes mid-installation for any reason (e.g. lost terminal), pip may
 refuse to run until you remove the temporary installation directory it
 created. To reset the installation::
 
     rm -rf /tmp/pip_install_matrix
 
-pip seems to leak *lots* of memory during installation. For instance, a Linux
-host with 512MB of RAM may run out of memory whilst installing Twisted. If this
-happens, you will have to individually install the dependencies which are
+pip seems to leak *lots* of memory during installation. For instance, a Linux
+host with 512MB of RAM may run out of memory whilst installing Twisted. If this
+happens, you will have to individually install the dependencies which are
 failing, e.g.::
 
     pip install twisted

@@ -301,8 +310,8 @@ will need to export CFLAGS=-Qunused-arguments.
 Troubleshooting Running
 -----------------------
 
-If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
-to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
+If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
+to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
 encryption and digital signatures.
 Unfortunately PyNACL currently has a few issues
 (https://github.com/pyca/pynacl/issues/53) and

@@ -313,7 +322,7 @@ fix try re-installing from PyPI or directly from
 
     # Install from PyPI
     pip install --user --upgrade --force pynacl
 
     # Install from github
     pip install --user https://github.com/pyca/pynacl/tarball/master
 

@@ -324,7 +333,7 @@ If running `$ synctl start` fails with 'returned non-zero exit status 1',
 you will need to explicitly call Python2.7 - either running as::
 
     python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml
 
 ...or by editing synctl with the correct python executable.
 
 Synapse Development

@@ -362,14 +371,11 @@ This should end with a 'PASSED' result::
 Upgrading an existing Synapse
 =============================
 
-IMPORTANT: Before upgrading an existing synapse to a new version, please
-refer to UPGRADE.rst for any additional instructions.
-
-Otherwise, simply re-install the new codebase over the current one - e.g.
-by ``pip install --process-dependency-links
-https://github.com/matrix-org/synapse/tarball/master``
-if using pip, or by ``git pull`` if running off a git working copy.
+The instructions for upgrading synapse are in `UPGRADE.rst`_.
+Please check these instructions as upgrading may require extra steps for some
+versions of synapse.
+
+.. _UPGRADE.rst: UPGRADE.rst
 
 Setting up Federation
 =====================

@@ -431,7 +437,7 @@ private federation (``localhost:8080``, ``localhost:8081`` and
 http://localhost:8080. Simply run::
 
     demo/start.sh
 
 This is mainly useful just for development purposes.
 
 Running The Demo Web Client

@@ -494,7 +500,7 @@ time.
 Where's the spec?!
 ==================
 
-The source of the matrix spec lives at https://github.com/matrix-org/matrix-doc.
+The source of the matrix spec lives at https://github.com/matrix-org/matrix-doc.
 A recent HTML snapshot of this lives at http://matrix.org/docs/spec
UPGRADE.rst (33 lines changed)

@@ -1,3 +1,36 @@
+Upgrading Synapse
+=================
+
+Before upgrading check if any special steps are required to upgrade from
+the version you currently have installed to the current version of synapse.
+The extra instructions that may be required are listed later in this document.
+
+If synapse was installed in a virtualenv then activate that virtualenv before
+upgrading. If synapse is installed in a virtualenv in ``~/.synapse/`` then run:
+
+.. code:: bash
+
+    source ~/.synapse/bin/activate
+
+If synapse was installed using pip then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    pip install --upgrade --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
+
+If synapse was installed using git then upgrade to the latest version by
+running:
+
+.. code:: bash
+
+    # Pull the latest version of the master branch.
+    git pull
+    # Update the versions of synapse's python dependencies.
+    python synapse/python_dependencies.py | xargs -n1 pip install
+
+
 Upgrading to v0.9.0
 ===================
 
@@ -126,12 +126,26 @@ sub on_unknown_event
     if (!$bridgestate->{$room_id}->{gathered_candidates}) {
         $bridgestate->{$room_id}->{gathered_candidates} = 1;
         my $offer = $bridgestate->{$room_id}->{offer};
-        my $candidate_block = "";
+        my $candidate_block = {
+            audio => '',
+            video => '',
+        };
         foreach (@{$event->{content}->{candidates}}) {
-            $candidate_block .= "a=" . $_->{candidate} . "\r\n";
+            if ($_->{sdpMid}) {
+                $candidate_block->{$_->{sdpMid}} .= "a=" . $_->{candidate} . "\r\n";
+            }
+            else {
+                $candidate_block->{audio} .= "a=" . $_->{candidate} . "\r\n";
+                $candidate_block->{video} .= "a=" . $_->{candidate} . "\r\n";
+            }
         }
-        # XXX: collate using the right m= line - for now assume audio call
-        $offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;
+
+        # XXX: assumes audio comes first
+        #$offer =~ s/(a=rtcp-mux[\r\n]+)/$1$candidate_block->{audio}/;
+        #$offer =~ s/(a=rtcp-mux[\r\n]+)/$1$candidate_block->{video}/;
+
+        $offer =~ s/(m=video)/$candidate_block->{audio}$1/;
+        $offer =~ s/(.$)/$1\n$candidate_block->{video}$1/;
 
         my $f = send_verto_json_request("verto.invite", {
             "sdp" => $offer,

@@ -172,22 +186,18 @@ sub on_room_message
     warn "[Matrix] in $room_id: $from: " . $content->{body} . "\n";
 }
 
 my $verto_connecting = $loop->new_future;
-$bot_verto->connect(
-    %{ $CONFIG{"verto-bot"} },
-    on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
-    on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
-)->then( sub {
-    warn("[Verto] connected to websocket");
-    $verto_connecting->done($bot_verto) if not $verto_connecting->is_done;
-});
 
 Future->needs_all(
     $bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub {
         $bot_matrix->start;
     }),
-    $verto_connecting,
+    $bot_verto->connect(
+        %{ $CONFIG{"verto-bot"} },
+        on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
+        on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
+    )->on_done( sub {
+        warn("[Verto] connected to websocket");
+    }),
 )->get;
 
 $loop->attach_signal(
@@ -11,7 +11,4 @@ requires 'YAML', 0;
 requires 'JSON', 0;
 requires 'Getopt::Long', 0;
 
-on 'test' => sub {
-    requires 'Test::More', '>= 0.98';
-};
@@ -11,7 +11,9 @@ if [ -f $PID_FILE ]; then
     exit 1
 fi
 
-find "$DIR" -name "*.log" -delete
-find "$DIR" -name "*.db" -delete
+for port in 8080 8081 8082; do
+    rm -rf $DIR/$port
+    rm -rf $DIR/media_store.$port
+done
 
 rm -rf $DIR/etc
@@ -8,14 +8,6 @@ cd "$DIR/.."
 
 mkdir -p demo/etc
 
-# Check the --no-rate-limit param
-PARAMS=""
-if [ $# -eq 1 ]; then
-    if [ $1 = "--no-rate-limit" ]; then
-        PARAMS="--rc-messages-per-second 1000 --rc-message-burst-count 1000"
-    fi
-fi
-
 export PYTHONPATH=$(readlink -f $(pwd))
 

@@ -31,9 +23,20 @@ for port in 8080 8081 8082; do
     #rm $DIR/etc/$port.config
     python -m synapse.app.homeserver \
         --generate-config \
-        --enable_registration \
         -H "localhost:$https_port" \
         --config-path "$DIR/etc/$port.config" \
+        --report-stats no
+
+    # Check script parameters
+    if [ $# -eq 1 ]; then
+        if [ $1 = "--no-rate-limit" ]; then
+            # Set high limits in config file to disable rate limiting
+            perl -p -i -e 's/rc_messages_per_second.*/rc_messages_per_second: 1000/g' $DIR/etc/$port.config
+            perl -p -i -e 's/rc_message_burst_count.*/rc_message_burst_count: 1000/g' $DIR/etc/$port.config
+        fi
+    fi
+
+    perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
 
     python -m synapse.app.homeserver \
         --config-path "$DIR/etc/$port.config" \
@@ -55,9 +55,8 @@ Porting from SQLite
 Overview
 ~~~~~~~~
 
-The script ``port_from_sqlite_to_postgres.py`` allows porting an existing
-synapse server backed by SQLite to using PostgreSQL. This is done in as a two
-phase process:
+The script ``synapse_port_db`` allows porting an existing synapse server
+backed by SQLite to using PostgreSQL. This is done as a two phase process:
 
 1. Copy the existing SQLite database to a separate location (while the server
    is down) and run the port script against that offline database.

@@ -86,8 +85,7 @@ Assuming your new config file (as described in the section *Synapse config*)
 is named ``homeserver-postgres.yaml`` and the SQLite snapshot is at
 ``homeserver.db.snapshot`` then simply run::
 
-    python scripts/port_from_sqlite_to_postgres.py \
-        --sqlite-database homeserver.db.snapshot \
+    synapse_port_db --sqlite-database homeserver.db.snapshot \
         --postgres-config homeserver-postgres.yaml
 
 The flag ``--curses`` displays a coloured curses progress UI.

@@ -100,8 +98,7 @@ To complete the conversion shut down the synapse server and run the port
 script one last time, e.g. if the SQLite database is at ``homeserver.db``
 run::
 
-    python scripts/port_from_sqlite_to_postgres.py \
-        --sqlite-database homeserver.db \
+    synapse_port_db --sqlite-database homeserver.db \
         --postgres-config database_config.yaml
 
 Once that has completed, change the synapse config to point at the PostgreSQL
@@ -56,10 +56,9 @@ if __name__ == '__main__':
 
     js = json.load(args.json)
 
-
     auth = Auth(Mock())
     check_auth(
         auth,
         [FrozenEvent(d) for d in js["auth_chain"]],
-        [FrozenEvent(d) for d in js["pdus"]],
+        [FrozenEvent(d) for d in js.get("pdus", [])],
     )
@@ -1,5 +1,5 @@
 from synapse.crypto.event_signing import *
-from syutil.base64util import encode_base64
+from unpaddedbase64 import encode_base64
 
 import argparse
 import hashlib
@@ -1,9 +1,7 @@
 
-from syutil.crypto.jsonsign import verify_signed_json
-from syutil.crypto.signing_key import (
-    decode_verify_key_bytes, write_signing_keys
-)
-from syutil.base64util import decode_base64
+from signedjson.sign import verify_signed_json
+from signedjson.key import decode_verify_key_bytes, write_signing_keys
+from unpaddedbase64 import decode_base64
 
 import urllib2
 import json
@@ -4,10 +4,10 @@ import sys
 import json
 import time
 import hashlib
-from syutil.base64util import encode_base64
-from syutil.crypto.signing_key import read_signing_keys
-from syutil.crypto.jsonsign import sign_json
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64
+from signedjson.key import read_signing_keys
+from signedjson.sign import sign_json
+from canonicaljson import encode_canonical_json
 
 
 def select_v1_keys(connection):
scripts-dev/definitions.py (new executable file, 142 lines)

@@ -0,0 +1,142 @@
#! /usr/bin/python

import ast
import yaml


class DefinitionVisitor(ast.NodeVisitor):
    def __init__(self):
        super(DefinitionVisitor, self).__init__()
        self.functions = {}
        self.classes = {}
        self.names = {}
        self.attrs = set()
        self.definitions = {
            'def': self.functions,
            'class': self.classes,
            'names': self.names,
            'attrs': self.attrs,
        }

    def visit_Name(self, node):
        self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)

    def visit_Attribute(self, node):
        self.attrs.add(node.attr)
        for child in ast.iter_child_nodes(node):
            self.visit(child)

    def visit_ClassDef(self, node):
        visitor = DefinitionVisitor()
        self.classes[node.name] = visitor.definitions
        for child in ast.iter_child_nodes(node):
            visitor.visit(child)

    def visit_FunctionDef(self, node):
        visitor = DefinitionVisitor()
        self.functions[node.name] = visitor.definitions
        for child in ast.iter_child_nodes(node):
            visitor.visit(child)


def non_empty(defs):
    functions = {name: non_empty(f) for name, f in defs['def'].items()}
    classes = {name: non_empty(f) for name, f in defs['class'].items()}
    result = {}
    if functions: result['def'] = functions
    if classes: result['class'] = classes
    names = defs['names']
    uses = []
    for name in names.get('Load', ()):
        if name not in names.get('Param', ()) and name not in names.get('Store', ()):
            uses.append(name)
    uses.extend(defs['attrs'])
    if uses: result['uses'] = uses
    result['names'] = names
    result['attrs'] = defs['attrs']
    return result


def definitions_in_code(input_code):
    input_ast = ast.parse(input_code)
    visitor = DefinitionVisitor()
    visitor.visit(input_ast)
    definitions = non_empty(visitor.definitions)
    return definitions


def definitions_in_file(filepath):
    with open(filepath) as f:
        return definitions_in_code(f.read())


def defined_names(prefix, defs, names):
    for name, funcs in defs.get('def', {}).items():
        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
        defined_names(prefix + name + ".", funcs, names)

    for name, funcs in defs.get('class', {}).items():
        names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
        defined_names(prefix + name + ".", funcs, names)


def used_names(prefix, defs, names):
    for name, funcs in defs.get('def', {}).items():
        used_names(prefix + name + ".", funcs, names)

    for name, funcs in defs.get('class', {}).items():
        used_names(prefix + name + ".", funcs, names)

    for used in defs.get('uses', ()):
        if used in names:
            names[used].setdefault('used', []).append(prefix.rstrip('.'))


if __name__ == '__main__':
    import sys, os, argparse, re

    parser = argparse.ArgumentParser(description='Find definitions.')
    parser.add_argument(
        "--unused", action="store_true", help="Only list unused definitions"
    )
    parser.add_argument(
        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
    )
    parser.add_argument(
        "--pattern", action="append", metavar="REGEXP",
        help="Search for a pattern"
    )
    parser.add_argument(
        "directories", nargs='+', metavar="DIR",
        help="Directories to search for definitions"
    )
    args = parser.parse_args()

    definitions = {}
    for directory in args.directories:
        for root, dirs, files in os.walk(directory):
            for filename in files:
                if filename.endswith(".py"):
                    filepath = os.path.join(root, filename)
                    definitions[filepath] = definitions_in_file(filepath)

    names = {}
    for filepath, defs in definitions.items():
        defined_names(filepath + ":", defs, names)

    for filepath, defs in definitions.items():
        used_names(filepath + ":", defs, names)

    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
    ignore = [re.compile(pattern) for pattern in args.ignore or ()]

    result = {}
    for name, definition in names.items():
        if patterns and not any(pattern.match(name) for pattern in patterns):
            continue
        if ignore and any(pattern.match(name) for pattern in ignore):
            continue
        if args.unused and definition.get('used'):
            continue
        result[name] = definition

    yaml.dump(result, sys.stdout, default_flow_style=False)
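For a sense of what the new script above computes, here is a minimal driver for its ``definitions_in_code`` helper (the sample module is invented for illustration; run this in a context where the script's functions are defined, e.g. pasted at the bottom of the file or in a REPL after importing it):

    # Toy input: one function that is called, one that never is.
    sample = """
    def used():
        pass

    def unused():
        pass

    def main():
        used()
    """

    import textwrap
    defs = definitions_in_code(textwrap.dedent(sample))

    # Top-level definitions found by the AST visitor.
    print(sorted(defs['def']))          # ['main', 'unused', 'used']

    # 'uses' records the names a body loads; the __main__ block later joins
    # these against definitions to flag anything never referenced (--unused).
    print(defs['def']['main']['uses'])  # ['used']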
@@ -6,8 +6,8 @@ from synapse.crypto.event_signing import (
     add_event_pdu_content_hash, compute_pdu_event_reference_hash
 )
 from synapse.api.events.utils import prune_pdu
-from syutil.base64util import encode_base64, decode_base64
-from syutil.jsonutil import encode_canonical_json
+from unpaddedbase64 import encode_base64, decode_base64
+from canonicaljson import encode_canonical_json
 import sqlite3
 import sys
@@ -1,21 +0,0 @@
#!/bin/bash

# This will prepare a synapse database for running with v0.0.1 of synapse.
# It will store all the user information, but will *delete* all messages and
# room data.

set -e

cp "$1" "$1.bak"

DUMP=$(sqlite3 "$1" << 'EOF'
.dump users
.dump access_tokens
.dump presence
.dump profiles
EOF
)

rm "$1"

sqlite3 "$1" <<< "$DUMP"
@@ -1,21 +0,0 @@
#!/bin/bash

# This will prepare a synapse database for running with v0.5.0 of synapse.
# It will store all the user information, but will *delete* all messages and
# room data.

set -e

cp "$1" "$1.bak"

DUMP=$(sqlite3 "$1" << 'EOF'
.dump users
.dump access_tokens
.dump presence
.dump profiles
EOF
)

rm "$1"

sqlite3 "$1" <<< "$DUMP"
@@ -29,7 +29,7 @@ import traceback
 import yaml
 
 
-logger = logging.getLogger("port_from_sqlite_to_postgres")
+logger = logging.getLogger("synapse_port_db")
 
 
 BOOLEAN_COLUMNS = {

@@ -95,8 +95,6 @@ class Store(object):
     _simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
     _simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
 
-    _execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"]
-
     def runInteraction(self, desc, func, *args, **kwargs):
         def r(conn):
             try:

@@ -412,14 +410,17 @@ class Porter(object):
         self._convert_rows("sent_transactions", headers, rows)
 
         inserted_rows = len(rows)
-        max_inserted_rowid = max(r[0] for r in rows)
+        if inserted_rows:
+            max_inserted_rowid = max(r[0] for r in rows)
 
-        def insert(txn):
-            self.postgres_store.insert_many_txn(
-                txn, "sent_transactions", headers[1:], rows
-            )
+            def insert(txn):
+                self.postgres_store.insert_many_txn(
+                    txn, "sent_transactions", headers[1:], rows
+                )
 
-        yield self.postgres_store.execute(insert)
+            yield self.postgres_store.execute(insert)
+        else:
+            max_inserted_rowid = 0
 
         def get_start_id(txn):
             txn.execute(
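The last hunk above guards ``max()`` against an empty batch; a standalone illustration of the failure mode being fixed (variable names echo the script, values invented):

    rows = []  # e.g. a sent_transactions batch that selected no rows

    # Before the fix: max() over an empty generator raises
    # "ValueError: max() arg is an empty sequence".
    try:
        max_inserted_rowid = max(r[0] for r in rows)
    except ValueError:
        max_inserted_rowid = 0

    # The fix takes the max only when rows exist, else falls back to 0.
    max_inserted_rowid = max(r[0] for r in rows) if rows else 0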
@@ -1,331 +0,0 @@
#!/usr/bin/env python
from synapse.storage import SCHEMA_VERSION, read_schema
from synapse.storage._base import SQLBaseStore
from synapse.storage.signatures import SignatureStore
from synapse.storage.event_federation import EventFederationStore

from syutil.base64util import encode_base64, decode_base64

from synapse.crypto.event_signing import compute_event_signature

from synapse.events.builder import EventBuilder
from synapse.events.utils import prune_event

from synapse.crypto.event_signing import check_event_content_hash

from syutil.crypto.jsonsign import (
    verify_signed_json, SignatureVerifyException,
)
from syutil.crypto.signing_key import decode_verify_key_bytes

from syutil.jsonutil import encode_canonical_json

import argparse
# import dns.resolver
import hashlib
import httplib
import json
import sqlite3
import syutil
import urllib2


delta_sql = """
CREATE TABLE IF NOT EXISTS event_json(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    internal_metadata NOT NULL,
    json BLOB NOT NULL,
    CONSTRAINT ev_j_uniq UNIQUE (event_id)
);

CREATE INDEX IF NOT EXISTS event_json_id ON event_json(event_id);
CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);

PRAGMA user_version = 10;
"""


class Store(object):
    _get_event_signatures_txn = SignatureStore.__dict__["_get_event_signatures_txn"]
    _get_event_content_hashes_txn = SignatureStore.__dict__["_get_event_content_hashes_txn"]
    _get_event_reference_hashes_txn = SignatureStore.__dict__["_get_event_reference_hashes_txn"]
    _get_prev_event_hashes_txn = SignatureStore.__dict__["_get_prev_event_hashes_txn"]
    _get_prev_events_and_state = EventFederationStore.__dict__["_get_prev_events_and_state"]
    _get_auth_events = EventFederationStore.__dict__["_get_auth_events"]
    cursor_to_dict = SQLBaseStore.__dict__["cursor_to_dict"]
    _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
    _simple_select_list_txn = SQLBaseStore.__dict__["_simple_select_list_txn"]
    _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]

    def _generate_event_json(self, txn, rows):
        events = []
        for row in rows:
            d = dict(row)

            d.pop("stream_ordering", None)
            d.pop("topological_ordering", None)
            d.pop("processed", None)

            if "origin_server_ts" not in d:
                d["origin_server_ts"] = d.pop("ts", 0)
            else:
                d.pop("ts", 0)

            d.pop("prev_state", None)
            d.update(json.loads(d.pop("unrecognized_keys")))

            d["sender"] = d.pop("user_id")

            d["content"] = json.loads(d["content"])

            if "age_ts" not in d:
                # For compatibility
                d["age_ts"] = d.get("origin_server_ts", 0)

            d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts")

            outlier = d.pop("outlier", False)

            # d.pop("membership", None)

            d.pop("state_hash", None)

            d.pop("replaces_state", None)

            b = EventBuilder(d)
            b.internal_metadata.outlier = outlier

            events.append(b)

        for i, ev in enumerate(events):
            signatures = self._get_event_signatures_txn(
                txn, ev.event_id,
            )

            ev.signatures = {
                n: {
                    k: encode_base64(v) for k, v in s.items()
                }
                for n, s in signatures.items()
            }

            hashes = self._get_event_content_hashes_txn(
                txn, ev.event_id,
            )

            ev.hashes = {
                k: encode_base64(v) for k, v in hashes.items()
            }

            prevs = self._get_prev_events_and_state(txn, ev.event_id)

            ev.prev_events = [
                (e_id, h)
                for e_id, h, is_state in prevs
                if is_state == 0
            ]

            # ev.auth_events = self._get_auth_events(txn, ev.event_id)

            hashes = dict(ev.auth_events)

            for e_id, hash in ev.prev_events:
                if e_id in hashes and not hash:
                    hash.update(hashes[e_id])
            #
            # if hasattr(ev, "state_key"):
            #     ev.prev_state = [
            #         (e_id, h)
            #         for e_id, h, is_state in prevs
            #         if is_state == 1
            #     ]

        return [e.build() for e in events]


store = Store()


# def get_key(server_name):
#     print "Getting keys for: %s" % (server_name,)
#     targets = []
#     if ":" in server_name:
#         target, port = server_name.split(":")
#         targets.append((target, int(port)))
#     try:
#         answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
#         for srv in answers:
#             targets.append((srv.target, srv.port))
#     except dns.resolver.NXDOMAIN:
#         targets.append((server_name, 8448))
#     except:
#         print "Failed to lookup keys for %s" % (server_name,)
#         return {}
#
#     for target, port in targets:
#         url = "https://%s:%i/_matrix/key/v1" % (target, port)
#         try:
#             keys = json.load(urllib2.urlopen(url, timeout=2))
#             verify_keys = {}
#             for key_id, key_base64 in keys["verify_keys"].items():
#                 verify_key = decode_verify_key_bytes(
#                     key_id, decode_base64(key_base64)
#                 )
#                 verify_signed_json(keys, server_name, verify_key)
#                 verify_keys[key_id] = verify_key
#             print "Got keys for: %s" % (server_name,)
#             return verify_keys
#         except urllib2.URLError:
#             pass
#         except urllib2.HTTPError:
#             pass
#         except httplib.HTTPException:
#             pass
#
#     print "Failed to get keys for %s" % (server_name,)
#     return {}


def reinsert_events(cursor, server_name, signing_key):
    print "Running delta: v10"

    cursor.executescript(delta_sql)

    cursor.execute(
        "SELECT * FROM events ORDER BY rowid ASC"
    )

    print "Getting events..."

    rows = store.cursor_to_dict(cursor)

    events = store._generate_event_json(cursor, rows)

    print "Got events from DB."

    algorithms = {
        "sha256": hashlib.sha256,
    }

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    verify_key = signing_key.verify_key
    verify_key.alg = signing_key.alg
    verify_key.version = signing_key.version

    server_keys = {
        server_name: {
            key_id: verify_key
        }
    }

    i = 0
    N = len(events)

    for event in events:
        if i % 100 == 0:
            print "Processed: %d/%d events" % (i, N,)
        i += 1

        # for alg_name in event.hashes:
        #     if check_event_content_hash(event, algorithms[alg_name]):
        #         pass
        #     else:
        #         pass
        #         print "FAIL content hash %s %s" % (alg_name, event.event_id, )

        have_own_correctly_signed = False
        for host, sigs in event.signatures.items():
            pruned = prune_event(event)

            for key_id in sigs:
                if host not in server_keys:
                    server_keys[host] = {}  # get_key(host)
                if key_id in server_keys[host]:
                    try:
                        verify_signed_json(
                            pruned.get_pdu_json(),
                            host,
                            server_keys[host][key_id]
                        )

                        if host == server_name:
                            have_own_correctly_signed = True
                    except SignatureVerifyException:
                        print "FAIL signature check %s %s" % (
                            key_id, event.event_id
                        )

        # TODO: Re sign with our own server key
        if not have_own_correctly_signed:
            sigs = compute_event_signature(event, server_name, signing_key)
            event.signatures.update(sigs)

            pruned = prune_event(event)

            for key_id in event.signatures[server_name]:
                verify_signed_json(
                    pruned.get_pdu_json(),
                    server_name,
                    server_keys[server_name][key_id]
                )

        event_json = encode_canonical_json(
            event.get_dict()
        ).decode("UTF-8")

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()
        ).decode("UTF-8")

        store._simple_insert_txn(
            cursor,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": event_json,
            },
            or_replace=True,
        )


def main(database, server_name, signing_key):
    conn = sqlite3.connect(database)
    cursor = conn.cursor()

    # Do other deltas:
    cursor.execute("PRAGMA user_version")
    row = cursor.fetchone()

    if row and row[0]:
        user_version = row[0]
        # Run every version since after the current version.
        for v in range(user_version + 1, 10):
            print "Running delta: %d" % (v,)
            sql_script = read_schema("delta/v%d" % (v,))
            cursor.executescript(sql_script)

    reinsert_events(cursor, server_name, signing_key)

    conn.commit()

    print "Success!"


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument("database")
    parser.add_argument("server_name")
    parser.add_argument(
        "signing_key", type=argparse.FileType('r'),
    )
    args = parser.parse_args()

    signing_key = syutil.crypto.signing_key.read_signing_keys(
        args.signing_key
    )

    main(args.database, args.server_name, signing_key[0])
@@ -3,9 +3,6 @@ source-dir = docs/sphinx
 build-dir = docs/build
 all_files = 1
 
-[aliases]
-test = trial
-
 [trial]
 test_suite = tests
 

@@ -16,3 +13,6 @@ ignore =
     docs/*
     pylint.cfg
     tox.ini
+
+[flake8]
+max-line-length = 90
setup.py (44 lines changed)

@@ -16,7 +16,8 @@
 
 import glob
 import os
-from setuptools import setup, find_packages
+from setuptools import setup, find_packages, Command
+import sys
 
 
 here = os.path.abspath(os.path.dirname(__file__))

@@ -37,6 +38,39 @@ def exec_file(path_segments):
     exec(code, result)
     return result
 
+
+class Tox(Command):
+    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+    def initialize_options(self):
+        self.tox_args = None
+
+    def finalize_options(self):
+        self.test_args = []
+        self.test_suite = True
+
+    def run(self):
+        #import here, cause outside the eggs aren't loaded
+        try:
+            import tox
+        except ImportError:
+            try:
+                self.distribution.fetch_build_eggs("tox")
+                import tox
+            except:
+                raise RuntimeError(
+                    "The tests need 'tox' to run. Please install 'tox'."
+                )
+        import shlex
+        args = self.tox_args
+        if args:
+            args = shlex.split(self.tox_args)
+        else:
+            args = []
+        errno = tox.cmdline(args=args)
+        sys.exit(errno)
+
+
 version = exec_file(("synapse", "__init__.py"))["__version__"]
 dependencies = exec_file(("synapse", "python_dependencies.py"))
 long_description = read_file(("README.rst",))

@@ -47,14 +81,10 @@ setup(
     packages=find_packages(exclude=["tests", "tests.*"]),
     description="Reference Synapse Home Server",
     install_requires=dependencies['requirements'](include_conditional=True).keys(),
-    setup_requires=[
-        "Twisted==14.0.2", # Here to override setuptools_trial's dependency on Twisted>=2.4.0
-        "setuptools_trial",
-        "mock"
-    ],
-    dependency_links=dependencies["DEPENDENCY_LINKS"],
+    dependency_links=dependencies["DEPENDENCY_LINKS"].values(),
     include_package_data=True,
     zip_safe=False,
     long_description=long_description,
     scripts=["synctl"] + glob.glob("scripts/*"),
+    cmdclass={'test': Tox},
 )
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a Matrix home server.
 """
 
-__version__ = "0.9.3"
+__version__ = "0.10.0-r2"
@@ -20,9 +20,10 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.util.logutils import log_function
-from synapse.types import UserID, ClientInfo
+from synapse.types import UserID, EventID
 
 import logging
+import pymacaroons
 
 logger = logging.getLogger(__name__)

@@ -40,10 +41,21 @@ class Auth(object):
         self.store = hs.get_datastore()
         self.state = hs.get_state_handler()
         self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
+        self._KNOWN_CAVEAT_PREFIXES = set([
+            "gen = ",
+            "type = ",
+            "time < ",
+            "user_id = ",
+        ])
 
     def check(self, event, auth_events):
         """ Checks if this event is correctly authed.
 
+        Args:
+            event: the event being checked.
+            auth_events (dict: event-key -> event): the existing room state.
+
+
         Returns:
             True if the auth checks pass.
         """

@@ -60,6 +72,14 @@ class Auth(object):
             # FIXME
             return True
 
+        creation_event = auth_events.get((EventTypes.Create, ""), None)
+
+        if not creation_event:
+            raise SynapseError(
+                403,
+                "Room %r does not exist" % (event.room_id,)
+            )
+
         # FIXME: Temp hack
         if event.type == EventTypes.Aliases:
             return True

@@ -86,7 +106,7 @@ class Auth(object):
             self._check_power_levels(event, auth_events)
 
             if event.type == EventTypes.Redaction:
-                self._check_redaction(event, auth_events)
+                self.check_redaction(event, auth_events)
 
             logger.debug("Allowing! %s", event)
         except AuthError as e:

@@ -99,6 +119,20 @@ class Auth(object):
 
     @defer.inlineCallbacks
     def check_joined_room(self, room_id, user_id, current_state=None):
         """Check if the user is currently joined in the room
+        Args:
+            room_id(str): The room to check.
+            user_id(str): The user to check.
+            current_state(dict): Optional map of the current state of the room.
+                If provided then that map is used to check whether they are a
+                member of the room. Otherwise the current membership is
+                loaded from the database.
+        Raises:
+            AuthError if the user is not in the room.
+        Returns:
+            A deferred membership event for the user if the user is in
+            the room.
+        """
         if current_state:
             member = current_state.get(
                 (EventTypes.Member, user_id),

@@ -114,6 +148,43 @@ class Auth(object):
             self._check_joined_room(member, user_id, room_id)
         defer.returnValue(member)
 
+    @defer.inlineCallbacks
+    def check_user_was_in_room(self, room_id, user_id, current_state=None):
+        """Check if the user was in the room at some point.
+        Args:
+            room_id(str): The room to check.
+            user_id(str): The user to check.
+            current_state(dict): Optional map of the current state of the room.
+                If provided then that map is used to check whether they are a
+                member of the room. Otherwise the current membership is
+                loaded from the database.
+        Raises:
+            AuthError if the user was never in the room.
+        Returns:
+            A deferred membership event for the user if the user was in the
+            room. This will be the join event if they are currently joined to
+            the room. This will be the leave event if they have left the room.
+        """
+        if current_state:
+            member = current_state.get(
+                (EventTypes.Member, user_id),
+                None
+            )
+        else:
+            member = yield self.state.get_current_state(
+                room_id=room_id,
+                event_type=EventTypes.Member,
+                state_key=user_id
+            )
+        membership = member.membership if member else None
+
+        if membership not in (Membership.JOIN, Membership.LEAVE):
+            raise AuthError(403, "User %s not in room %s" % (
+                user_id, room_id
+            ))
+
+        defer.returnValue(member)
+
     @defer.inlineCallbacks
     def check_host_in_room(self, room_id, host):
         curr_state = yield self.state.get_current_state(room_id)

@@ -317,9 +388,9 @@ class Auth(object):
         Args:
             request - An HTTP request with an access_token query parameter.
         Returns:
-            tuple : of UserID and device string:
-                User ID object of the user making the request
-                Client ID object of the client instance the user is using
+            tuple of:
+                UserID (str)
+                Access token ID (str)
         Raises:
             AuthError if no user by that token exists or the token is invalid.
         """

@@ -347,16 +418,15 @@ class Auth(object):
             if not user_id:
                 raise KeyError
 
-            defer.returnValue(
-                (UserID.from_string(user_id), ClientInfo("", ""))
-            )
+            request.authenticated_entity = user_id
|
||||
|
||||
defer.returnValue((UserID.from_string(user_id), ""))
|
||||
return
|
||||
except KeyError:
|
||||
pass # normal users won't have this query parameter set
|
||||
pass # normal users won't have the user_id query parameter set.
|
||||
|
||||
user_info = yield self.get_user_by_token(access_token)
|
||||
user_info = yield self._get_user_by_access_token(access_token)
|
||||
user = user_info["user"]
|
||||
device_id = user_info["device_id"]
|
||||
token_id = user_info["token_id"]
|
||||
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
@@ -368,14 +438,13 @@ class Auth(object):
|
||||
self.store.insert_client_ip(
|
||||
user=user,
|
||||
access_token=access_token,
|
||||
device_id=user_info["device_id"],
|
||||
ip=ip_addr,
|
||||
user_agent=user_agent
|
||||
)
|
||||
|
||||
request.authenticated_entity = user.to_string()
|
||||
|
||||
defer.returnValue((user, ClientInfo(device_id, token_id)))
|
||||
defer.returnValue((user, token_id,))
|
||||
except KeyError:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
||||
@@ -383,30 +452,106 @@ class Auth(object):
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_user_by_token(self, token):
|
||||
def _get_user_by_access_token(self, token):
|
||||
""" Get a registered user's ID.
|
||||
|
||||
Args:
|
||||
token (str): The access token to get the user by.
|
||||
Returns:
|
||||
dict : dict that includes the user, device_id, and whether the
|
||||
user is a server admin.
|
||||
dict : dict that includes the user and the ID of their access token.
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
"""
|
||||
ret = yield self.store.get_user_by_token(token)
|
||||
try:
|
||||
ret = yield self._get_user_from_macaroon(token)
|
||||
except AuthError:
|
||||
# TODO(daniel): Remove this fallback when all existing access tokens
|
||||
# have been re-issued as macaroons.
|
||||
ret = yield self._look_up_user_by_access_token(token)
|
||||
defer.returnValue(ret)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_user_from_macaroon(self, macaroon_str):
|
||||
try:
|
||||
macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
|
||||
self._validate_macaroon(macaroon)
|
||||
|
||||
user_prefix = "user_id = "
|
||||
for caveat in macaroon.caveats:
|
||||
if caveat.caveat_id.startswith(user_prefix):
|
||||
user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
|
||||
# This codepath exists so that we can actually return a
|
||||
# token ID, because we use token IDs in place of device
|
||||
# identifiers throughout the codebase.
|
||||
# TODO(daniel): Remove this fallback when device IDs are
|
||||
# properly implemented.
|
||||
ret = yield self._look_up_user_by_access_token(macaroon_str)
|
||||
if ret["user"] != user:
|
||||
logger.error(
|
||||
"Macaroon user (%s) != DB user (%s)",
|
||||
user,
|
||||
ret["user"]
|
||||
)
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"User mismatch in macaroon",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
defer.returnValue(ret)
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
|
||||
def _validate_macaroon(self, macaroon):
|
||||
v = pymacaroons.Verifier()
|
||||
v.satisfy_exact("gen = 1")
|
||||
v.satisfy_exact("type = access")
|
||||
v.satisfy_general(lambda c: c.startswith("user_id = "))
|
||||
v.satisfy_general(self._verify_expiry)
|
||||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
||||
|
||||
v = pymacaroons.Verifier()
|
||||
v.satisfy_general(self._verify_recognizes_caveats)
|
||||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
||||
|
||||
def _verify_expiry(self, caveat):
|
||||
prefix = "time < "
|
||||
if not caveat.startswith(prefix):
|
||||
return False
|
||||
# TODO(daniel): Enable expiry check when clients actually know how to
|
||||
# refresh tokens. (And remember to enable the tests)
|
||||
return True
|
||||
expiry = int(caveat[len(prefix):])
|
||||
now = self.hs.get_clock().time_msec()
|
||||
return now < expiry
|
||||
|
||||
def _verify_recognizes_caveats(self, caveat):
|
||||
first_space = caveat.find(" ")
|
||||
if first_space < 0:
|
||||
return False
|
||||
second_space = caveat.find(" ", first_space + 1)
|
||||
if second_space < 0:
|
||||
return False
|
||||
return caveat[:second_space + 1] in self._KNOWN_CAVEAT_PREFIXES
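
    # A minimal sketch (not part of this diff) of minting a token that
    # _validate_macaroon above would accept, using the pymacaroons API; the
    # server name and user ID are hypothetical:
    #
    #   import pymacaroons
    #   macaroon = pymacaroons.Macaroon(
    #       location="example.com",
    #       identifier="key",
    #       key=hs.config.macaroon_secret_key,
    #   )
    #   macaroon.add_first_party_caveat("gen = 1")
    #   macaroon.add_first_party_caveat("type = access")
    #   macaroon.add_first_party_caveat("user_id = @alice:example.com")
    #   macaroon.add_first_party_caveat("time < 1440000000000")  # ms since epoch
    #   token = macaroon.serialize()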

    @defer.inlineCallbacks
    def _look_up_user_by_access_token(self, token):
        ret = yield self.store.get_user_by_access_token(token)
        if not ret:
            raise AuthError(
                self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
                errcode=Codes.UNKNOWN_TOKEN
            )
        user_info = {
            "admin": bool(ret.get("admin", False)),
            "device_id": ret.get("device_id"),
            "user": UserID.from_string(ret.get("name")),
            "token_id": ret.get("token_id", None),
        }

        defer.returnValue(user_info)

    @defer.inlineCallbacks
@@ -420,6 +565,7 @@ class Auth(object):
                "Unrecognised access token.",
                errcode=Codes.UNKNOWN_TOKEN
            )
            request.authenticated_entity = service.sender
            defer.returnValue(service)
        except KeyError:
            raise AuthError(
@@ -521,36 +667,54 @@ class Auth(object):

        # Check state_key
        if hasattr(event, "state_key"):
            if not event.state_key.startswith("_"):
                if event.state_key.startswith("@"):
                    if event.state_key != event.user_id:
            if event.state_key.startswith("@"):
                if event.state_key != event.user_id:
                    raise AuthError(
                        403,
                        "You are not allowed to set others state"
                    )
                else:
                    sender_domain = UserID.from_string(
                        event.user_id
                    ).domain

                    if sender_domain != event.state_key:
                        raise AuthError(
                            403,
                            "You are not allowed to set others state"
                        )
            else:
                sender_domain = UserID.from_string(
                    event.user_id
                ).domain

                if sender_domain != event.state_key:
                    raise AuthError(
                        403,
                        "You are not allowed to set others state"
                    )

        return True

    def _check_redaction(self, event, auth_events):
    def check_redaction(self, event, auth_events):
        """Check whether the event sender is allowed to redact the target event.

        Returns:
            True if the sender is allowed to redact the target event if the
                target event was created by them.
            False if the sender is allowed to redact the target event with no
                further checks.

        Raises:
            AuthError if the event sender is definitely not allowed to redact
                the target event.
        """
        user_level = self._get_user_power_level(event.user_id, auth_events)

        redact_level = self._get_named_level(auth_events, "redact", 50)

        if user_level < redact_level:
            raise AuthError(
                403,
                "You don't have permission to redact events"
            )
        if user_level > redact_level:
            return False

        redacter_domain = EventID.from_string(event.event_id).domain
        redactee_domain = EventID.from_string(event.redacts).domain
        if redacter_domain == redactee_domain:
            return True

        raise AuthError(
            403,
            "You don't have permission to redact events"
        )
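
    # Sketch of the domain comparison above (assumed synapse.types parsing):
    # an event ID carries its origin server as the domain part, so
    #   EventID.from_string("$1440000000000abc:example.com").domain == "example.com"
    # and a low-powered sender may redact only events that originated from
    # their own server.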

    def _check_power_levels(self, event, auth_events):
        user_list = event.content.get("users", {})

@@ -27,16 +27,6 @@ class Membership(object):
    LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)


class Feedback(object):

    """Represents the types of feedback a user can send in response to a
    message."""

    DELIVERED = u"delivered"
    READ = u"read"
    LIST = (DELIVERED, READ)


class PresenceState(object):
    """Represents the presence state of a user."""
    OFFLINE = u"offline"
@@ -73,9 +63,10 @@ class EventTypes(object):
    PowerLevels = "m.room.power_levels"
    Aliases = "m.room.aliases"
    Redaction = "m.room.redaction"
    Feedback = "m.room.message.feedback"

    RoomHistoryVisibility = "m.room.history_visibility"
    CanonicalAlias = "m.room.canonical_alias"
    RoomAvatar = "m.room.avatar"

    # These are used for validation
    Message = "m.room.message"

@@ -40,6 +40,7 @@ class Codes(object):
    TOO_LARGE = "M_TOO_LARGE"
    EXCLUSIVE = "M_EXCLUSIVE"
    THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
    THREEPID_IN_USE = "THREEPID_IN_USE"


class CodeMessageException(RuntimeError):
@@ -76,11 +77,6 @@ class SynapseError(CodeMessageException):
    )


class RoomError(SynapseError):
    """An error raised when a room event fails."""
    pass


class RegistrationError(SynapseError):
    """An error raised when a registration event fails."""
    pass

@@ -16,10 +16,23 @@

import sys
sys.dont_write_bytecode = True
from synapse.python_dependencies import check_requirements
from synapse.python_dependencies import (
    check_requirements, DEPENDENCY_LINKS, MissingRequirementError
)

if __name__ == '__main__':
    check_requirements()
    try:
        check_requirements()
    except MissingRequirementError as e:
        message = "\n".join([
            "Missing Requirement: %s" % (e.message,),
            "To install run:",
            "    pip install --upgrade --force \"%s\"" % (e.dependency,),
            "",
        ])
        sys.stderr.writelines(message)
        sys.exit(1)


from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
from synapse.storage import (
@@ -29,7 +42,7 @@ from synapse.storage import (
from synapse.server import HomeServer


from twisted.internet import reactor
from twisted.internet import reactor, task, defer
from twisted.application import service
from twisted.enterprise import adbapi
from twisted.web.resource import Resource, EncodingResourceWrapper
@@ -58,6 +71,8 @@ from synapse import events
from daemonize import Daemonize
import twisted.manhole.telnet

from multiprocessing import Process

import synapse

import contextlib
@@ -65,6 +80,7 @@ import logging
import os
import re
import resource
import signal
import subprocess
import time

@@ -72,12 +88,6 @@ import time
logger = logging.getLogger("synapse.app.homeserver")


class GzipFile(File):
    def getChild(self, path, request):
        child = File.getChild(self, path, request)
        return EncodingResourceWrapper(child, [GzipEncoderFactory()])


def gz_wrap(r):
    return EncodingResourceWrapper(r, [GzipEncoderFactory()])

@@ -97,14 +107,31 @@ class SynapseHomeServer(HomeServer):
        return JsonResource(self)

    def build_resource_for_web_client(self):
        import syweb
        syweb_path = os.path.dirname(syweb.__file__)
        webclient_path = os.path.join(syweb_path, "webclient")
        webclient_path = self.get_config().web_client_location
        if not webclient_path:
            try:
                import syweb
            except ImportError:
                quit_with_error(
                    "Could not find a webclient.\n\n"
                    "Please either install the matrix-angular-sdk or configure\n"
                    "the location of the source to serve via the configuration\n"
                    "option `web_client_location`\n\n"
                    "To install the `matrix-angular-sdk` via pip, run:\n\n"
                    "    pip install '%(dep)s'\n"
                    "\n"
                    "You can also disable hosting of the webclient via the\n"
                    "configuration option `web_client`\n"
                    % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
                )
            syweb_path = os.path.dirname(syweb.__file__)
            webclient_path = os.path.join(syweb_path, "webclient")
        # GZip is disabled here due to
        # https://twistedmatrix.com/trac/ticket/7678
        # (It can stay enabled for the API resources: they call
        # write() with the whole body and then finish() straight
        # after and so do not trigger the bug.)
        # GzipFile was removed in commit 184ba09
        # return GzipFile(webclient_path)  # TODO configurable?
        return File(webclient_path)  # TODO configurable?
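
        # Config sketch for the fallback above (hedged; both keys live in
        # homeserver.yaml, the path is hypothetical):
        #
        #   web_client: True
        #   web_client_location: /srv/matrix/webclient   # if unset, the syweb
        #                                                # package is served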

@@ -205,7 +232,7 @@ class SynapseHomeServer(HomeServer):
                    listener_config,
                    root_resource,
                ),
                self.tls_context_factory,
                self.tls_server_context_factory,
                interface=bind_address
            )
        else:
@@ -259,11 +286,10 @@ class SynapseHomeServer(HomeServer):

def quit_with_error(error_string):
    message_lines = error_string.split("\n")
    line_length = max([len(l) for l in message_lines]) + 2
    line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
    sys.stderr.write("*" * line_length + '\n')
    for line in message_lines:
        if line.strip():
            sys.stderr.write(" %s\n" % (line.strip(),))
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + '\n')
    sys.exit(1)

@@ -326,7 +352,7 @@ def get_version_string():
            )
        ).encode("ascii")
    except Exception as e:
        logger.warn("Failed to check for git repository: %s", e)
        logger.info("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")

@@ -345,16 +371,16 @@ def change_resource_limit(soft_file_no):
        logger.warn("Failed to set file limit: %s", e)


def setup(config_options):
def load_config(config_options):
    """
    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.
        should_run (bool): Whether to start the reactor.

    Returns:
        HomeServer
        HomeServerConfig
    """

    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
@@ -363,9 +389,17 @@ def setup(config_options):

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)
    return config


def setup(config):
    """
    Args:
        config (Homeserver)

    Returns:
        HomeServer
    """
    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
@@ -373,7 +407,7 @@ def setup(config_options):

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_context_factory = context_factory.ServerContextFactory(config)
    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
@@ -381,14 +415,14 @@ def setup(config_options):
    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_context_factory=tls_context_factory,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %r...", config.database_config)
    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        db_conn = database_engine.module.connect(
@@ -410,7 +444,7 @@ def setup(config_options):
        )
        sys.exit(1)

    logger.info("Database prepared in %r.", config.database_config)
    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.start_listening()

@@ -419,6 +453,42 @@ def setup(config_options):
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    start_time = time.time()

    @defer.inlineCallbacks
    def phone_stats_home():
        now = int(time.time())
        uptime = int(now - start_time)
        if uptime < 0:
            uptime = 0

        stats = {}
        stats["homeserver"] = config.server_name
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        all_rooms = yield hs.get_datastore().get_rooms(False)
        stats["total_room_count"] = len(all_rooms)

        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
        daily_messages = yield hs.get_datastore().count_daily_messages()
        if daily_messages is not None:
            stats["daily_messages"] = daily_messages

        logger.info("Reporting stats to matrix.org: %s" % (stats,))
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push",
                stats
            )
        except Exception as e:
            logger.warn("Error reporting stats: %s", e)

    if hs.config.report_stats:
        phone_home_task = task.LoopingCall(phone_stats_home)
        phone_home_task.start(60 * 60 * 24, now=False)

    return hs
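
    # Sketch of the twisted LoopingCall used above: start(interval, now=False)
    # schedules the first call one full interval after startup, so the stats
    # report goes out 24 hours after boot and daily thereafter.
    #
    #   from twisted.internet import task
    #   loop = task.LoopingCall(phone_stats_home)
    #   d = loop.start(60 * 60 * 24, now=False)  # d fires if the loop stops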

@@ -627,7 +697,7 @@ def _resource_id(resource, path_seg):
    return "%s-%s" % (resource, path_seg)


def run(hs):
def run(config):
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:
        def profile(func):
@@ -641,7 +711,7 @@ def run(hs):
            profile.disable()
            ident = current_thread().ident
            profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
                hs.hostname, func.__name__, ident
                config.server_name, func.__name__, ident
            ))

            return profiled
@@ -651,18 +721,51 @@ def run(hs):
        reactor.run = profile(reactor.run)

    def in_thread():
        hs = setup(config)
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:
    def start_in_process_checker():
        p = None
        should_restart = [True]

        print hs.config.pid_file
        def proxy_signal(signum, stack):
            logger.info("Got signal: %r", signum)
            if p is not None:
                os.kill(p.pid, signum)

            if signum == signal.SIGTERM:
                should_restart[0] = False

        if getattr(signal, "SIGHUP"):
            signal.signal(signal.SIGHUP, proxy_signal)
        signal.signal(signal.SIGTERM, proxy_signal)

        last_start = 0
        next_delay = 1

        while should_restart[0]:
            last_start = time.time()
            p = Process(target=in_thread, args=())
            p.start()
            p.join()

            if time.time() - last_start < 120:
                next_delay = min(next_delay * 5, 5 * 60)
            else:
                next_delay = 1

            time.sleep(next_delay)
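
            # Restart/backoff behaviour of the loop above: a child that dies
            # within 120s of starting multiplies the sleep by 5 (capped at
            # 5 minutes); a longer-lived child resets it to 1 second. So
            # repeated fast crashes sleep 5s, 25s, 125s, 300s, 300s, ...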

    if config.daemonize:
        if config.print_pidfile:
            print config.pid_file

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=lambda: in_thread(),
            pid=config.pid_file,
            action=lambda: start_in_process_checker(),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
@@ -677,8 +780,8 @@ def main():
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        hs = setup(sys.argv[1:])
        run(hs)
        config = load_config(sys.argv[1:])
        run(config)


if __name__ == '__main__':

@@ -16,57 +16,67 @@

import sys
import os
import os.path
import subprocess
import signal
import yaml

SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]

CONFIGFILE = "homeserver.yaml"

GREEN = "\x1b[1;32m"
RED = "\x1b[1;31m"
NORMAL = "\x1b[m"

if not os.path.exists(CONFIGFILE):
    sys.stderr.write(
        "No config file found\n"
        "To generate a config file, run '%s -c %s --generate-config"
        " --server-name=<server name>'\n" % (
            " ".join(SYNAPSE), CONFIGFILE
        )
    )
    sys.exit(1)

CONFIG = yaml.load(open(CONFIGFILE))
PIDFILE = CONFIG["pid_file"]


def start():
def start(configfile):
    print "Starting ...",
    args = SYNAPSE
    args.extend(["--daemonize", "-c", CONFIGFILE])
    subprocess.check_call(args)
    print GREEN + "started" + NORMAL
    args.extend(["--daemonize", "-c", configfile])
    cwd = os.path.dirname(os.path.abspath(__file__))
    try:
        subprocess.check_call(args, cwd=cwd)
        print GREEN + "started" + NORMAL
    except subprocess.CalledProcessError as e:
        print (
            RED +
            "error starting (exit code: %d); see above for logs" % e.returncode +
            NORMAL
        )


def stop():
    if os.path.exists(PIDFILE):
        pid = int(open(PIDFILE).read())
def stop(pidfile):
    if os.path.exists(pidfile):
        pid = int(open(pidfile).read())
        os.kill(pid, signal.SIGTERM)
        print GREEN + "stopped" + NORMAL


def main():
    configfile = sys.argv[2] if len(sys.argv) == 3 else "homeserver.yaml"

    if not os.path.exists(configfile):
        sys.stderr.write(
            "No config file found\n"
            "To generate a config file, run '%s -c %s --generate-config"
            " --server-name=<server name>'\n" % (
                " ".join(SYNAPSE), configfile
            )
        )
        sys.exit(1)

    config = yaml.load(open(configfile))
    pidfile = config["pid_file"]

    action = sys.argv[1] if sys.argv[1:] else "usage"
    if action == "start":
        start()
        start(configfile)
    elif action == "stop":
        stop()
        stop(pidfile)
    elif action == "restart":
        stop()
        start()
        stop(pidfile)
        start(configfile)
    else:
        sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
        sys.stderr.write("Usage: %s [start|stop|restart] [configfile]\n" % (sys.argv[0],))
        sys.exit(1)
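
# Usage after this change (the config file argument is now optional and
# defaults to homeserver.yaml in the current directory; the path below is
# hypothetical):
#
#   synctl start
#   synctl stop /etc/synapse/homeserver.yaml
#   synctl restart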

synapse/config/__main__.py (new file)
@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

if __name__ == "__main__":
    import sys
    from homeserver import HomeServerConfig

    action = sys.argv[1]

    if action == "read":
        key = sys.argv[2]
        config = HomeServerConfig.load_config("", sys.argv[3:])

        print getattr(config, key)
        sys.exit(0)
    else:
        sys.stderr.write("Unknown command %r\n" % (action,))
        sys.exit(1)
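
    # Usage sketch for this new helper (reads one key out of the loaded
    # config; module-style invocation assumed from the file's location):
    #
    #   python -m synapse.config read server_name -c homeserver.yaml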

@@ -26,6 +26,16 @@ class ConfigError(Exception):

class Config(object):

    stats_reporting_begging_spiel = (
        "We would really appreciate it if you could help our project out by"
        " reporting anonymized usage statistics from your homeserver. Only very"
        " basic aggregate data (e.g. number of users) will be reported, but it"
        " helps us to track the growth of the Matrix community, and helps us to"
        " make Matrix a success, as well as to convince other networks that they"
        " should peer with us."
        "\nThank you."
    )

    @staticmethod
    def parse_size(value):
        if isinstance(value, int) or isinstance(value, long):
@@ -111,11 +121,14 @@ class Config(object):
            results.append(getattr(cls, name)(self, *args, **kargs))
        return results

    def generate_config(self, config_dir_path, server_name):
    def generate_config(self, config_dir_path, server_name, report_stats=None):
        default_config = "# vim:ft=yaml\n"

        default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
            "default_config", config_dir_path, server_name
            "default_config",
            config_dir_path=config_dir_path,
            server_name=server_name,
            report_stats=report_stats,
        ))

        config = yaml.load(default_config)
@@ -131,71 +144,120 @@ class Config(object):
            "-c", "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file"
            help="Specify config file. Can be given multiple times and"
                 " may specify directories containing *.yaml files."
        )
        config_parser.add_argument(
            "--generate-config",
            action="store_true",
            help="Generate a config file for the server name"
        )
        config_parser.add_argument(
            "--report-stats",
            action="store",
            help="Stuff",
            choices=["yes", "no"]
        )
        config_parser.add_argument(
            "--generate-keys",
            action="store_true",
            help="Generate any missing key files then exit"
        )
        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Used with 'generate-*' options to specify where files such as"
                 " certs and signing keys should be stored in, unless explicitly"
                 " specified in the config."
        )
        config_parser.add_argument(
            "-H", "--server-name",
            help="The server name to generate a config file for"
        )
        config_args, remaining_args = config_parser.parse_known_args(argv)

        generate_keys = config_args.generate_keys

        config_files = []
        if config_args.config_path:
            for config_path in config_args.config_path:
                if os.path.isdir(config_path):
                    # We accept specifying directories as config paths, we search
                    # inside that directory for all files matching *.yaml, and then
                    # we apply them in *sorted* order.
                    files = []
                    for entry in os.listdir(config_path):
                        entry_path = os.path.join(config_path, entry)
                        if not os.path.isfile(entry_path):
                            print (
                                "Found subdirectory in config directory: %r. IGNORING."
                            ) % (entry_path, )
                            continue

                        if not entry.endswith(".yaml"):
                            print (
                                "Found file in config directory that does not"
                                " end in '.yaml': %r. IGNORING."
                            ) % (entry_path, )
                            continue

                        files.append(entry_path)

                    config_files.extend(sorted(files))
                else:
                    config_files.append(config_path)
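
            # Layering sketch for the directory handling above: with
            # `-c /etc/synapse/conf.d` (a directory; hypothetical paths),
            # the *.yaml files are applied in sorted order, so
            #
            #   /etc/synapse/conf.d/00-base.yaml
            #   /etc/synapse/conf.d/10-registration.yaml
            #
            # behaves like `-c 00-base.yaml -c 10-registration.yaml`, with
            # later files overriding earlier keys.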

        if config_args.generate_config:
            if not config_args.config_path:
            if config_args.report_stats is None:
                config_parser.error(
                    "Please specify either --report-stats=yes or --report-stats=no\n\n" +
                    cls.stats_reporting_begging_spiel
                )
            if not config_files:
                config_parser.error(
                    "Must supply a config file.\nA config file can be automatically"
                    " generated using \"--generate-config -H SERVER_NAME"
                    " -c CONFIG-FILE\""
                )
            (config_path,) = config_files
            if not os.path.exists(config_path):
                if config_args.keys_directory:
                    config_dir_path = config_args.keys_directory
                else:
                    config_dir_path = os.path.dirname(config_path)
                config_dir_path = os.path.abspath(config_dir_path)

            config_dir_path = os.path.dirname(config_args.config_path[0])
            config_dir_path = os.path.abspath(config_dir_path)

                server_name = config_args.server_name
                if not server_name:
                    print "Must specify a server_name to generate a config for."
                    sys.exit(1)
            (config_path,) = config_args.config_path
                if not os.path.exists(config_dir_path):
                    os.makedirs(config_dir_path)
            if os.path.exists(config_path):
                print "Config file %r already exists" % (config_path,)
                yaml_config = cls.read_config_file(config_path)
                yaml_name = yaml_config["server_name"]
                if server_name != yaml_name:
                    print (
                        "Config file %r has a different server_name: "
                        " %r != %r" % (config_path, server_name, yaml_name)
                    )
            server_name = config_args.server_name
            if not server_name:
                print "Must specify a server_name to generate a config for."
                sys.exit(1)
                config_bytes, config = obj.generate_config(
                    config_dir_path, server_name
                )
                config.update(yaml_config)
                print "Generating any missing keys for %r" % (server_name,)
                obj.invoke_all("generate_files", config)
                sys.exit(0)
            with open(config_path, "wb") as config_file:
                config_bytes, config = obj.generate_config(
                    config_dir_path, server_name
                )
                obj.invoke_all("generate_files", config)
                config_file.write(config_bytes)
                if not os.path.exists(config_dir_path):
                    os.makedirs(config_dir_path)
                with open(config_path, "wb") as config_file:
                    config_bytes, config = obj.generate_config(
                        config_dir_path=config_dir_path,
                        server_name=server_name,
                        report_stats=(config_args.report_stats == "yes"),
                    )
                    obj.invoke_all("generate_files", config)
                    config_file.write(config_bytes)
                print (
                    "A config file has been generated in %s for server name"
                    " '%s' with corresponding SSL keys and self-signed"
                    " certificates. Please review this file and customise it to"
                    " your needs."
                    "A config file has been generated in %r for server name"
                    " %r with corresponding SSL keys and self-signed"
                    " certificates. Please review this file and customise it"
                    " to your needs."
                ) % (config_path, server_name)
                print (
                    "If this server name is incorrect, you will need to regenerate"
                    " the SSL certificates"
                )
                sys.exit(0)
                print (
                    "If this server name is incorrect, you will need to"
                    " regenerate the SSL certificates"
                )
                sys.exit(0)
            else:
                print (
                    "Config file %r already exists. Generating any missing key"
                    " files."
                ) % (config_path,)
                generate_keys = True

        parser = argparse.ArgumentParser(
            parents=[config_parser],
@@ -206,25 +268,43 @@ class Config(object):
        obj.invoke_all("add_arguments", parser)
        args = parser.parse_args(remaining_args)

        if not config_args.config_path:
        if not config_files:
            config_parser.error(
                "Must supply a config file.\nA config file can be automatically"
                " generated using \"--generate-config -H SERVER_NAME"
                " -c CONFIG-FILE\""
            )

        config_dir_path = os.path.dirname(config_args.config_path[0])
        if config_args.keys_directory:
            config_dir_path = config_args.keys_directory
        else:
            config_dir_path = os.path.dirname(config_args.config_path[-1])
        config_dir_path = os.path.abspath(config_dir_path)

        specified_config = {}
        for config_path in config_args.config_path:
            yaml_config = cls.read_config_file(config_path)
        for config_file in config_files:
            yaml_config = cls.read_config_file(config_file)
            specified_config.update(yaml_config)

        server_name = specified_config["server_name"]
        _, config = obj.generate_config(config_dir_path, server_name)
        _, config = obj.generate_config(
            config_dir_path=config_dir_path,
            server_name=server_name
        )
        config.pop("log_config")
        config.update(specified_config)
        if "report_stats" not in config:
            sys.stderr.write(
                "Please opt in or out of reporting anonymized homeserver usage "
                "statistics, by setting the report_stats key in your config file "
                " ( " + config_path + " ) " +
                "to either True or False.\n\n" +
                Config.stats_reporting_begging_spiel + "\n")
            sys.exit(1)

        if generate_keys:
            obj.invoke_all("generate_files", config)
            sys.exit(0)

        obj.invoke_all("read_config", config)

@@ -20,7 +20,7 @@ class AppServiceConfig(Config):
    def read_config(self, config):
        self.app_service_config_files = config.get("app_service_config_files", [])

    def default_config(cls, config_dir_path, server_name):
    def default_config(cls, **kwargs):
        return """\
        # A list of application service config files to use
        app_service_config_files: []

@@ -24,7 +24,7 @@ class CaptchaConfig(Config):
        self.captcha_bypass_secret = config.get("captcha_bypass_secret")
        self.recaptcha_siteverify_api = config["recaptcha_siteverify_api"]

    def default_config(self, config_dir_path, server_name):
    def default_config(self, **kwargs):
        return """\
        ## Captcha ##

@@ -45,7 +45,7 @@ class DatabaseConfig(Config):

        self.set_databasepath(config.get("database_path"))

    def default_config(self, config, config_dir_path):
    def default_config(self, **kwargs):
        database_path = self.abspath("homeserver.db")
        return """\
        # Database configuration

@@ -13,14 +13,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from ._base import Config, ConfigError
import syutil.crypto.signing_key
from syutil.crypto.signing_key import (
    is_signing_algorithm_supported, decode_verify_key_bytes
)
from syutil.base64util import decode_base64

from synapse.util.stringutils import random_string
from signedjson.key import (
    generate_signing_key, is_signing_algorithm_supported,
    decode_signing_key_base64, decode_verify_key_bytes,
    read_signing_keys, write_signing_keys, NACL_ED25519
)
from unpaddedbase64 import decode_base64

import os


class KeyConfig(Config):
@@ -37,7 +40,7 @@ class KeyConfig(Config):
            config["perspectives"]
        )

    def default_config(self, config_dir_path, server_name):
    def default_config(self, config_dir_path, server_name, **kwargs):
        base_key_name = os.path.join(config_dir_path, server_name)
        return """\
        ## Signing Keys ##
@@ -83,9 +86,7 @@ class KeyConfig(Config):
    def read_signing_key(self, signing_key_path):
        signing_keys = self.read_file(signing_key_path, "signing_key")
        try:
            return syutil.crypto.signing_key.read_signing_keys(
                signing_keys.splitlines(True)
            )
            return read_signing_keys(signing_keys.splitlines(True))
        except Exception:
            raise ConfigError(
                "Error reading signing_key."
@@ -112,22 +113,18 @@ class KeyConfig(Config):
        if not os.path.exists(signing_key_path):
            with open(signing_key_path, "w") as signing_key_file:
                key_id = "a_" + random_string(4)
                syutil.crypto.signing_key.write_signing_keys(
                    signing_key_file,
                    (syutil.crypto.signing_key.generate_signing_key(key_id),),
                write_signing_keys(
                    signing_key_file, (generate_signing_key(key_id),),
                )
        else:
            signing_keys = self.read_file(signing_key_path, "signing_key")
            if len(signing_keys.split("\n")[0].split()) == 1:
                # handle keys in the old format.
                key_id = "a_" + random_string(4)
                key = syutil.crypto.signing_key.decode_signing_key_base64(
                    syutil.crypto.signing_key.NACL_ED25519,
                    key_id,
                    signing_keys.split("\n")[0]
                key = decode_signing_key_base64(
                    NACL_ED25519, key_id, signing_keys.split("\n")[0]
                )
                with open(signing_key_path, "w") as signing_key_file:
                    syutil.crypto.signing_key.write_signing_keys(
                        signing_key_file,
                        (key,),
                    write_signing_keys(
                        signing_key_file, (key,),
                    )
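
                # Round-trip sketch of the signedjson key helpers used above
                # (file name hypothetical):
                #
                #   from signedjson.key import (
                #       generate_signing_key, write_signing_keys, read_signing_keys
                #   )
                #   key = generate_signing_key("a_abcd")  # "a_" + random_string(4)
                #   with open("example.signing.key", "w") as f:
                #       write_signing_keys(f, (key,))
                #   keys = read_signing_keys(open("example.signing.key").readlines())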

@@ -21,6 +21,7 @@ import logging.config
import yaml
from string import Template
import os
import signal


DEFAULT_LOG_CONFIG = Template("""
@@ -69,7 +70,7 @@ class LoggingConfig(Config):
        self.log_config = self.abspath(config.get("log_config"))
        self.log_file = self.abspath(config.get("log_file"))

    def default_config(self, config_dir_path, server_name):
    def default_config(self, config_dir_path, server_name, **kwargs):
        log_file = self.abspath("homeserver.log")
        log_config = self.abspath(
            os.path.join(config_dir_path, server_name + ".log.config")
@@ -142,6 +143,19 @@ class LoggingConfig(Config):
            handler = logging.handlers.RotatingFileHandler(
                self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
            )

            def sighup(signum, stack):
                logger.info("Closing log file due to SIGHUP")
                handler.doRollover()
                logger.info("Opened new log file due to SIGHUP")

            # TODO(paul): obviously this is a terrible mechanism for
            #   stealing SIGHUP, because it means no other part of synapse
            #   can use it instead. If we want to catch SIGHUP anywhere
            #   else as well, I'd suggest we find a nicer way to broadcast
            #   it around.
            if getattr(signal, "SIGHUP"):
                signal.signal(signal.SIGHUP, sighup)
        else:
            handler = logging.StreamHandler()
        handler.setFormatter(formatter)

@@ -19,13 +19,15 @@ from ._base import Config
class MetricsConfig(Config):
    def read_config(self, config):
        self.enable_metrics = config["enable_metrics"]
        self.report_stats = config.get("report_stats", None)
        self.metrics_port = config.get("metrics_port")
        self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1")

    def default_config(self, config_dir_path, server_name):
        return """\
    def default_config(self, report_stats=None, **kwargs):
        suffix = "" if report_stats is None else "report_stats: %(report_stats)s\n"
        return ("""\
        ## Metrics ###

        # Enable collection and rendering of performance metrics
        enable_metrics: False
        """
        """ + suffix) % locals()

@@ -27,7 +27,7 @@ class RatelimitConfig(Config):
        self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
        self.federation_rc_concurrent = config["federation_rc_concurrent"]

    def default_config(self, config_dir_path, server_name):
    def default_config(self, **kwargs):
        return """\
        ## Ratelimiting ##

@@ -32,9 +32,11 @@ class RegistrationConfig(Config):
        )

        self.registration_shared_secret = config.get("registration_shared_secret")
        self.macaroon_secret_key = config.get("macaroon_secret_key")

    def default_config(self, config_dir, server_name):
    def default_config(self, **kwargs):
        registration_shared_secret = random_string_with_symbols(50)
        macaroon_secret_key = random_string_with_symbols(50)
        return """\
        ## Registration ##

@@ -44,6 +46,8 @@ class RegistrationConfig(Config):
        # If set, allows registration by anyone who also has the shared
        # secret, even if registration is otherwise disabled.
        registration_shared_secret: "%(registration_shared_secret)s"

        macaroon_secret_key: "%(macaroon_secret_key)s"
        """ % locals()

    def add_arguments(self, parser):

@@ -14,6 +14,39 @@
# limitations under the License.

from ._base import Config
from collections import namedtuple

ThumbnailRequirement = namedtuple(
    "ThumbnailRequirement", ["width", "height", "method", "media_type"]
)


def parse_thumbnail_requirements(thumbnail_sizes):
    """ Takes a list of dictionaries with "width", "height", and "method" keys
    and creates a map from image media types to the thumbnail size, thumbnailing
    method, and thumbnail media type to precalculate

    Args:
        thumbnail_sizes(list): List of dicts with "width", "height", and
            "method" keys
    Returns:
        Dictionary mapping from media type string to list of
        ThumbnailRequirement tuples.
    """
    requirements = {}
    for size in thumbnail_sizes:
        width = size["width"]
        height = size["height"]
        method = size["method"]
        jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
        png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
        requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
        requirements.setdefault("image/gif", []).append(png_thumbnail)
        requirements.setdefault("image/png", []).append(png_thumbnail)
    return {
        media_type: tuple(thumbnails)
        for media_type, thumbnails in requirements.items()
    }
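
# Worked example of the mapping above, for a single configured size:
#
#   parse_thumbnail_requirements([{"width": 32, "height": 32, "method": "crop"}])
#   => {
#       "image/jpeg": (ThumbnailRequirement(32, 32, "crop", "image/jpeg"),),
#       "image/gif":  (ThumbnailRequirement(32, 32, "crop", "image/png"),),
#       "image/png":  (ThumbnailRequirement(32, 32, "crop", "image/png"),),
#   }
#
# Note that GIFs are thumbnailed as PNGs.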


class ContentRepositoryConfig(Config):
@@ -22,8 +55,12 @@ class ContentRepositoryConfig(Config):
        self.max_image_pixels = self.parse_size(config["max_image_pixels"])
        self.media_store_path = self.ensure_directory(config["media_store_path"])
        self.uploads_path = self.ensure_directory(config["uploads_path"])
        self.dynamic_thumbnails = config["dynamic_thumbnails"]
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config["thumbnail_sizes"]
        )

    def default_config(self, config_dir_path, server_name):
    def default_config(self, **kwargs):
        media_store = self.default_path("media_store")
        uploads_path = self.default_path("uploads")
        return """
@@ -38,4 +75,26 @@ class ContentRepositoryConfig(Config):

        # Maximum number of pixels that will be thumbnailed
        max_image_pixels: "32M"

        # Whether to generate new thumbnails on the fly to precisely match
        # the resolution requested by the client. If true then whenever
        # a new resolution is requested by the client the server will
        # generate a new thumbnail. If false the server will pick a thumbnail
        # from a precalculated list.
        dynamic_thumbnails: false

        # List of thumbnails to precalculate when an image is uploaded.
        thumbnail_sizes:
        - width: 32
          height: 32
          method: crop
        - width: 96
          height: 96
          method: crop
        - width: 320
          height: 240
          method: scale
        - width: 640
          height: 480
          method: scale
        """ % locals()

@@ -41,7 +41,7 @@ class SAML2Config(Config):
        self.saml2_config_path = None
        self.saml2_idp_redirect_url = None

    def default_config(self, config_dir_path, server_name):
    def default_config(self, config_dir_path, server_name, **kwargs):
        return """
        # Enable SAML2 for registration and login. Uses pysaml2
        # config_path: Path to the sp_conf.py configuration file

@@ -22,8 +22,10 @@ class ServerConfig(Config):
        self.server_name = config["server_name"]
        self.pid_file = self.abspath(config.get("pid_file"))
        self.web_client = config["web_client"]
        self.web_client_location = config.get("web_client_location", None)
        self.soft_file_limit = config["soft_file_limit"]
        self.daemonize = config.get("daemonize")
        self.print_pidfile = config.get("print_pidfile")
        self.use_frozen_dicts = config.get("use_frozen_dicts", True)

        self.listeners = config.get("listeners", [])
@@ -115,7 +117,7 @@ class ServerConfig(Config):

        self.content_addr = content_addr

    def default_config(self, config_dir_path, server_name):
    def default_config(self, server_name, **kwargs):
        if ":" in server_name:
            bind_port = int(server_name.split(":")[1])
            unsecure_port = bind_port - 400
@@ -208,12 +210,18 @@ class ServerConfig(Config):
            self.manhole = args.manhole
        if args.daemonize is not None:
            self.daemonize = args.daemonize
        if args.print_pidfile is not None:
            self.print_pidfile = args.print_pidfile

    def add_arguments(self, parser):
        server_group = parser.add_argument_group("server")
        server_group.add_argument("-D", "--daemonize", action='store_true',
                                  default=None,
                                  help="Daemonize the home server")
        server_group.add_argument("--print-pidfile", action='store_true',
                                  default=None,
                                  help="Print the path to the pidfile just"
                                  " before daemonizing")
        server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
                                  type=int,
                                  help="Turn on the twisted telnet manhole"

@@ -42,7 +42,15 @@ class TlsConfig(Config):
            config.get("tls_dh_params_path"), "tls_dh_params"
        )

    def default_config(self, config_dir_path, server_name):
        # This config option applies to non-federation HTTP clients
        # (e.g. for talking to recaptcha, identity servers, and such)
        # It should never be used in production, and is intended for
        # use only when running tests.
        self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get(
            "use_insecure_ssl_client_just_for_testing_do_not_use"
        )

    def default_config(self, config_dir_path, server_name, **kwargs):
        base_key_name = os.path.join(config_dir_path, server_name)

        tls_certificate_path = base_key_name + ".tls.crt"

@@ -22,7 +22,7 @@ class VoipConfig(Config):
        self.turn_shared_secret = config["turn_shared_secret"]
        self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])

    def default_config(self, config_dir_path, server_name):
    def default_config(self, **kwargs):
        return """\
        ## Turn ##

@@ -15,11 +15,12 @@
# limitations under the License.


from synapse.events.utils import prune_event
from syutil.jsonutil import encode_canonical_json
from syutil.base64util import encode_base64, decode_base64
from syutil.crypto.jsonsign import sign_json
from synapse.api.errors import SynapseError, Codes
from synapse.events.utils import prune_event

from canonicaljson import encode_canonical_json
from unpaddedbase64 import encode_base64, decode_base64
from signedjson.sign import sign_json

import hashlib
import logging

@@ -14,21 +14,21 @@
# limitations under the License.

from synapse.crypto.keyclient import fetch_server_key
from twisted.internet import defer
from syutil.crypto.jsonsign import (
    verify_signed_json, signature_ids, sign_json, encode_canonical_json
)
from syutil.crypto.signing_key import (
    is_signing_algorithm_supported, decode_verify_key_bytes
)
from syutil.base64util import decode_base64, encode_base64
from synapse.api.errors import SynapseError, Codes

from synapse.util.retryutils import get_retry_limiter
from synapse.util import unwrapFirstError

from synapse.util.async import ObservableDeferred

from twisted.internet import defer

from signedjson.sign import (
    verify_signed_json, signature_ids, sign_json, encode_canonical_json
)
from signedjson.key import (
    is_signing_algorithm_supported, decode_verify_key_bytes
)
from unpaddedbase64 import decode_base64, encode_base64

from OpenSSL import crypto

from collections import namedtuple
@@ -162,7 +162,9 @@ class Keyring(object):
        def remove_deferreds(res, server_name, group_id):
            server_to_gids[server_name].discard(group_id)
            if not server_to_gids[server_name]:
                server_to_deferred.pop(server_name).callback(None)
                d = server_to_deferred.pop(server_name, None)
                if d:
                    d.callback(None)
            return res

        for g_id, deferred in deferreds.items():
@@ -200,8 +202,15 @@ class Keyring(object):
                else:
                    break

            for server_name, deferred in server_to_deferred:
                self.key_downloads[server_name] = ObservableDeferred(deferred)
            for server_name, deferred in server_to_deferred.items():
                d = ObservableDeferred(deferred)
                self.key_downloads[server_name] = d

                def rm(r, server_name):
                    self.key_downloads.pop(server_name, None)
                    return r

                d.addBoth(rm, server_name)

    def get_server_verify_keys(self, group_id_to_group, group_id_to_deferred):
        """Takes a dict of KeyGroups and tries to find at least one key for
@@ -219,11 +228,9 @@ class Keyring(object):
        def do_iterations():
            merged_results = {}

            missing_keys = {
                group.server_name: key_id
                for group in group_id_to_group.values()
                for key_id in group.key_ids
            }
            missing_keys = {}
            for group in group_id_to_group.values():
                missing_keys.setdefault(group.server_name, set()).union(group.key_ids)
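
                # NB: set.union() returns a new set rather than updating in
                # place, so as written the key_ids are discarded; the line
                # above presumably intends `.update(group.key_ids)`.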
|
||||
|
||||
for fn in key_fetch_fns:
|
||||
results = yield fn(missing_keys.items())
|
||||
@@ -279,16 +286,15 @@ class Keyring(object):
|
||||
def get_keys_from_store(self, server_name_and_key_ids):
|
||||
res = yield defer.gatherResults(
|
||||
[
|
||||
self.store.get_server_verify_keys(server_name, key_ids)
|
||||
self.store.get_server_verify_keys(
|
||||
server_name, key_ids
|
||||
).addCallback(lambda ks, server: (server, ks), server_name)
|
||||
for server_name, key_ids in server_name_and_key_ids
|
||||
],
|
||||
consumeErrors=True,
|
||||
).addErrback(unwrapFirstError)
|
||||
|
||||
defer.returnValue(dict(zip(
|
||||
[server_name for server_name, _ in server_name_and_key_ids],
|
||||
res
|
||||
)))
|
||||
defer.returnValue(dict(res))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_keys_from_perspectives(self, server_name_and_key_ids):
|
||||
@@ -463,7 +469,7 @@ class Keyring(object):
|
||||
continue
|
||||
|
||||
(response, tls_certificate) = yield fetch_server_key(
|
||||
server_name, self.hs.tls_context_factory,
|
||||
server_name, self.hs.tls_server_context_factory,
|
||||
path=(b"/_matrix/key/v2/server/%s" % (
|
||||
urllib.quote(requested_key_id),
|
||||
)).encode("ascii"),
|
||||
@@ -597,7 +603,7 @@ class Keyring(object):
|
||||
# Try to fetch the key from the remote server.
|
||||
|
||||
(response, tls_certificate) = yield fetch_server_key(
|
||||
server_name, self.hs.tls_context_factory
|
||||
server_name, self.hs.tls_server_context_factory
|
||||
)
|
||||
|
||||
# Check the response.
|
||||
|
||||
@@ -90,7 +90,7 @@ class EventBase(object):
|
||||
d = dict(self._event_dict)
|
||||
d.update({
|
||||
"signatures": self.signatures,
|
||||
"unsigned": self.unsigned,
|
||||
"unsigned": dict(self.unsigned),
|
||||
})
|
||||
|
||||
return d
|
||||
@@ -109,6 +109,9 @@ class EventBase(object):
|
||||
pdu_json.setdefault("unsigned", {})["age"] = int(age)
|
||||
del pdu_json["unsigned"]["age_ts"]
|
||||
|
||||
# This may be a frozen event
|
||||
pdu_json["unsigned"].pop("redacted_because", None)
|
||||
|
||||
return pdu_json
|
||||
|
||||
def __set__(self, instance, value):
|
||||
|
||||
@@ -23,7 +23,7 @@ from synapse.api.errors import (
     CodeMessageException, HttpResponseException, SynapseError,
 )
 from synapse.util import unwrapFirstError
-from synapse.util.expiringcache import ExpiringCache
+from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.logutils import log_function
 from synapse.events import FrozenEvent
 import synapse.metrics
@@ -134,6 +134,36 @@ class FederationClient(FederationBase):
             destination, query_type, args, retry_on_dns_fail=retry_on_dns_fail
         )

+    @log_function
+    def query_client_keys(self, destination, content):
+        """Query device keys for a device hosted on a remote server.
+
+        Args:
+            destination (str): Domain name of the remote homeserver
+            content (dict): The query content.
+
+        Returns:
+            a Deferred which will eventually yield a JSON object from the
+            response
+        """
+        sent_queries_counter.inc("client_device_keys")
+        return self.transport_layer.query_client_keys(destination, content)
+
+    @log_function
+    def claim_client_keys(self, destination, content):
+        """Claims one-time keys for a device hosted on a remote server.
+
+        Args:
+            destination (str): Domain name of the remote homeserver
+            content (dict): The query content.
+
+        Returns:
+            a Deferred which will eventually yield a JSON object from the
+            response
+        """
+        sent_queries_counter.inc("client_one_time_keys")
+        return self.transport_layer.claim_client_keys(destination, content)
+
     @defer.inlineCallbacks
     @log_function
     def backfill(self, dest, context, limit, extremities):
@@ -27,6 +27,7 @@ from synapse.api.errors import FederationError, SynapseError

 from synapse.crypto.event_signing import compute_event_signature

+import simplejson as json
 import logging

@@ -312,6 +313,48 @@ class FederationServer(FederationBase):
             (200, send_content)
         )

+    @defer.inlineCallbacks
+    @log_function
+    def on_query_client_keys(self, origin, content):
+        query = []
+        for user_id, device_ids in content.get("device_keys", {}).items():
+            if not device_ids:
+                query.append((user_id, None))
+            else:
+                for device_id in device_ids:
+                    query.append((user_id, device_id))
+
+        results = yield self.store.get_e2e_device_keys(query)
+
+        json_result = {}
+        for user_id, device_keys in results.items():
+            for device_id, json_bytes in device_keys.items():
+                json_result.setdefault(user_id, {})[device_id] = json.loads(
+                    json_bytes
+                )
+
+        defer.returnValue({"device_keys": json_result})
+
+    @defer.inlineCallbacks
+    @log_function
+    def on_claim_client_keys(self, origin, content):
+        query = []
+        for user_id, device_keys in content.get("one_time_keys", {}).items():
+            for device_id, algorithm in device_keys.items():
+                query.append((user_id, device_id, algorithm))
+
+        results = yield self.store.claim_e2e_one_time_keys(query)
+
+        json_result = {}
+        for user_id, device_keys in results.items():
+            for device_id, keys in device_keys.items():
+                for key_id, json_bytes in keys.items():
+                    json_result.setdefault(user_id, {})[device_id] = {
+                        key_id: json.loads(json_bytes)
+                    }
+
+        defer.returnValue({"one_time_keys": json_result})
+
     @defer.inlineCallbacks
     @log_function
     def on_get_missing_events(self, origin, room_id, earliest_events,
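Note: both handlers flatten the nested federation payload into the row tuples the store expects. The claim-side flattening, restated with a hypothetical payload:

    content = {"one_time_keys": {"@bob:remote.example": {"DEV1": "curve25519"}}}
    query = [
        (user_id, device_id, algorithm)
        for user_id, device_keys in content["one_time_keys"].items()
        for device_id, algorithm in device_keys.items()
    ]
    # query == [("@bob:remote.example", "DEV1", "curve25519")]

One caveat visible in on_claim_client_keys: each (device_id, key_id) assignment replaces the device's dict wholesale, so if the store ever returned more than one key per device only the last would survive.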
@@ -222,6 +222,76 @@ class TransportLayerClient(object):

         defer.returnValue(content)

+    @defer.inlineCallbacks
+    @log_function
+    def query_client_keys(self, destination, query_content):
+        """Query the device keys for a list of user ids hosted on a remote
+        server.
+
+        Request:
+            {
+              "device_keys": {
+                "<user_id>": ["<device_id>"]
+            } }
+
+        Response:
+            {
+              "device_keys": {
+                "<user_id>": {
+                  "<device_id>": {...}
+            } } }
+
+        Args:
+            destination(str): The server to query.
+            query_content(dict): The user ids to query.
+        Returns:
+            A dict containing the device keys.
+        """
+        path = PREFIX + "/user/keys/query"
+
+        content = yield self.client.post_json(
+            destination=destination,
+            path=path,
+            data=query_content,
+        )
+        defer.returnValue(content)
+
+    @defer.inlineCallbacks
+    @log_function
+    def claim_client_keys(self, destination, query_content):
+        """Claim one-time keys for a list of devices hosted on a remote server.
+
+        Request:
+            {
+              "one_time_keys": {
+                "<user_id>": {
+                  "<device_id>": "<algorithm>"
+            } } }
+
+        Response:
+            {
+              "one_time_keys": {
+                "<user_id>": {
+                  "<device_id>": {
+                    "<algorithm>:<key_id>": "<key_base64>"
+            } } } }
+
+        Args:
+            destination(str): The server to query.
+            query_content(dict): The user ids to query.
+        Returns:
+            A dict containing the one-time keys.
+        """
+
+        path = PREFIX + "/user/keys/claim"
+
+        content = yield self.client.post_json(
+            destination=destination,
+            path=path,
+            data=query_content,
+        )
+        defer.returnValue(content)
+
     @defer.inlineCallbacks
     @log_function
     def get_missing_events(self, destination, room_id, earliest_events,
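A hypothetical call site for the new transport methods (assumes a TransportLayerClient instance named transport, inside an inlineCallbacks function):

    content = yield transport.claim_client_keys(
        "remote.example",
        {"one_time_keys": {"@bob:remote.example": {"DEV1": "curve25519"}}},
    )
    # content should resemble:
    # {"one_time_keys": {"@bob:remote.example":
    #     {"DEV1": {"curve25519:AAAA": "<key_base64>"}}}}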
@@ -325,6 +325,24 @@ class FederationInviteServlet(BaseFederationServlet):
         defer.returnValue((200, content))


+class FederationClientKeysQueryServlet(BaseFederationServlet):
+    PATH = "/user/keys/query"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query):
+        response = yield self.handler.on_query_client_keys(origin, content)
+        defer.returnValue((200, response))
+
+
+class FederationClientKeysClaimServlet(BaseFederationServlet):
+    PATH = "/user/keys/claim"
+
+    @defer.inlineCallbacks
+    def on_POST(self, origin, content, query):
+        response = yield self.handler.on_claim_client_keys(origin, content)
+        defer.returnValue((200, response))
+
+
 class FederationQueryAuthServlet(BaseFederationServlet):
     PATH = "/query_auth/([^/]*)/([^/]*)"

@@ -373,4 +391,6 @@ SERVLET_CLASSES = (
     FederationQueryAuthServlet,
     FederationGetMissingEventsServlet,
     FederationEventAuthServlet,
+    FederationClientKeysQueryServlet,
+    FederationClientKeysClaimServlet,
 )
@@ -22,7 +22,6 @@ from .room import (
 from .message import MessageHandler
 from .events import EventStreamHandler, EventHandler
 from .federation import FederationHandler
-from .login import LoginHandler
 from .profile import ProfileHandler
 from .presence import PresenceHandler
 from .directory import DirectoryHandler
@@ -54,7 +53,6 @@ class Handlers(object):
         self.profile_handler = ProfileHandler(hs)
         self.presence_handler = PresenceHandler(hs)
         self.room_list_handler = RoomListHandler(hs)
-        self.login_handler = LoginHandler(hs)
         self.directory_handler = DirectoryHandler(hs)
         self.typing_notification_handler = TypingNotificationHandler(hs)
         self.admin_handler = AdminHandler(hs)
@@ -15,10 +15,10 @@

 from twisted.internet import defer

-from synapse.api.errors import LimitExceededError, SynapseError
+from synapse.api.errors import LimitExceededError, SynapseError, AuthError
 from synapse.crypto.event_signing import add_hashes_and_signatures
 from synapse.api.constants import Membership, EventTypes
-from synapse.types import UserID
+from synapse.types import UserID, RoomAlias

 from synapse.util.logcontext import PreserveLoggingContext

@@ -107,6 +107,22 @@ class BaseHandler(object):
         if not suppress_auth:
             self.auth.check(event, auth_events=context.current_state)

+        if event.type == EventTypes.CanonicalAlias:
+            # Check the alias is actually valid (at this time at least)
+            room_alias_str = event.content.get("alias", None)
+            if room_alias_str:
+                room_alias = RoomAlias.from_string(room_alias_str)
+                directory_handler = self.hs.get_handlers().directory_handler
+                mapping = yield directory_handler.get_association(room_alias)
+
+                if mapping["room_id"] != event.room_id:
+                    raise SynapseError(
+                        400,
+                        "Room alias %s does not point to the room" % (
+                            room_alias_str,
+                        )
+                    )
+
         (event_stream_id, max_stream_id) = yield self.store.persist_event(
             event, context=context
         )
@@ -130,6 +146,21 @@ class BaseHandler(object):
                     returned_invite.signatures
                 )

+        if event.type == EventTypes.Redaction:
+            if self.auth.check_redaction(event, auth_events=context.current_state):
+                original_event = yield self.store.get_event(
+                    event.redacts,
+                    check_redacted=False,
+                    get_prev_content=False,
+                    allow_rejected=False,
+                    allow_none=False
+                )
+                if event.user_id != original_event.user_id:
+                    raise AuthError(
+                        403,
+                        "You don't have permission to redact events"
+                    )
+
         destinations = set(extra_destinations)
         for k, s in context.current_state.items():
             try:
@@ -34,6 +34,7 @@ class AdminHandler(BaseHandler):

         d = {}
         for r in res:
+            # Note that device_id is always None
             device = d.setdefault(r["device_id"], {})
             session = device.setdefault(r["access_token"], [])
             session.append({
@@ -19,13 +19,13 @@ from ._base import BaseHandler
 from synapse.api.constants import LoginType
 from synapse.types import UserID
 from synapse.api.errors import LoginError, Codes
-from synapse.http.client import SimpleHttpClient
 from synapse.util.async import run_on_reactor

 from twisted.web.client import PartialDownloadError

 import logging
 import bcrypt
+import pymacaroons
 import simplejson

 import synapse.util.stringutils as stringutils
@@ -47,17 +47,24 @@ class AuthHandler(BaseHandler):
         self.sessions = {}

     @defer.inlineCallbacks
-    def check_auth(self, flows, clientdict, clientip=None):
+    def check_auth(self, flows, clientdict, clientip):
         """
         Takes a dictionary sent by the client in the login / registration
         protocol and handles the login flow.

+        As a side effect, this function fills in the 'creds' key on the user's
+        session with a map, which maps each auth-type (str) to the relevant
+        identity authenticated by that auth-type (mostly str, but for captcha, bool).
+
         Args:
-            flows: list of list of stages
-            authdict: The dictionary from the client root level, not the
-                      'auth' key: this method prompts for auth if none is sent.
+            flows (list): A list of login flows. Each flow is an ordered list of
+                          strings representing auth-types. At least one full
+                          flow must be completed in order for auth to be successful.
+            clientdict: The dictionary from the client root level, not the
+                        'auth' key: this method prompts for auth if none is sent.
+            clientip (str): The IP address of the client.
         Returns:
-            A tuple of authed, dict, dict where authed is true if the client
+            A tuple of (authed, dict, dict) where authed is true if the client
             has successfully completed an auth flow. If it is true, the first
             dict contains the authenticated credentials of each stage.
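Illustrative only: a flows argument as check_auth expects it, built from the standard LoginType constants. Auth succeeds once the session's accumulated creds cover every stage of at least one flow:

    from synapse.api.constants import LoginType

    flows = [
        [LoginType.PASSWORD],                             # one-stage flow
        [LoginType.RECAPTCHA, LoginType.EMAIL_IDENTITY],  # two-stage flow
    ]
    # check_auth(flows, clientdict, clientip)
    #   -> (authed, creds_or_challenge, clientdict)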
@@ -75,7 +82,7 @@ class AuthHandler(BaseHandler):
             del clientdict['auth']
             if 'session' in authdict:
                 sid = authdict['session']
-                sess = self._get_session_info(sid)
+                session = self._get_session_info(sid)

         if len(clientdict) > 0:
             # This was designed to allow the client to omit the parameters
@@ -87,20 +94,19 @@ class AuthHandler(BaseHandler):
             # on a home server.
             # Revisit: Assuming the REST APIs do sensible validation, the data
             # isn't arbitrary.
-            sess['clientdict'] = clientdict
-            self._save_session(sess)
-            pass
-        elif 'clientdict' in sess:
-            clientdict = sess['clientdict']
+            session['clientdict'] = clientdict
+            self._save_session(session)
+        elif 'clientdict' in session:
+            clientdict = session['clientdict']

         if not authdict:
             defer.returnValue(
-                (False, self._auth_dict_for_flows(flows, sess), clientdict)
+                (False, self._auth_dict_for_flows(flows, session), clientdict)
             )

-        if 'creds' not in sess:
-            sess['creds'] = {}
-        creds = sess['creds']
+        if 'creds' not in session:
+            session['creds'] = {}
+        creds = session['creds']

         # check auth type currently being presented
         if 'type' in authdict:
@@ -109,15 +115,15 @@ class AuthHandler(BaseHandler):
             result = yield self.checkers[authdict['type']](authdict, clientip)
             if result:
                 creds[authdict['type']] = result
                self._save_session(session)

         for f in flows:
             if len(set(f) - set(creds.keys())) == 0:
                 logger.info("Auth completed with creds: %r", creds)
-                self._remove_session(sess)
+                self._remove_session(session)
                 defer.returnValue((True, creds, clientdict))

-        ret = self._auth_dict_for_flows(flows, sess)
+        ret = self._auth_dict_for_flows(flows, session)
         ret['completed'] = creds.keys()
         defer.returnValue((False, ret, clientdict))

@@ -151,22 +157,14 @@ class AuthHandler(BaseHandler):
         if "user" not in authdict or "password" not in authdict:
             raise LoginError(400, "", Codes.MISSING_PARAM)

-        user = authdict["user"]
+        user_id = authdict["user"]
         password = authdict["password"]
-        if not user.startswith('@'):
-            user = UserID.create(user, self.hs.hostname).to_string()
+        if not user_id.startswith('@'):
+            user_id = UserID.create(user_id, self.hs.hostname).to_string()

-        user_info = yield self.store.get_user_by_id(user_id=user)
-        if not user_info:
-            logger.warn("Attempted to login as %s but they do not exist", user)
-            raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
-
-        stored_hash = user_info["password_hash"]
-        if bcrypt.checkpw(password, stored_hash):
-            defer.returnValue(user)
-        else:
-            logger.warn("Failed password login for user %s", user)
-            raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
+        user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
+        self._check_password(user_id, password, password_hash)
+        defer.returnValue(user_id)

     @defer.inlineCallbacks
     def _check_recaptcha(self, authdict, clientip):
@@ -188,7 +186,7 @@ class AuthHandler(BaseHandler):
         # TODO: get this from the homeserver rather than creating a new one for
         # each request
         try:
-            client = SimpleHttpClient(self.hs)
+            client = self.hs.get_simple_http_client()
             resp_body = yield client.post_urlencoded_get_json(
                 self.hs.config.recaptcha_siteverify_api,
                 args={
@@ -270,6 +268,120 @@ class AuthHandler(BaseHandler):

         return self.sessions[session_id]

+    @defer.inlineCallbacks
+    def login_with_password(self, user_id, password):
+        """
+        Authenticates the user with their username and password.
+
+        Used only by the v1 login API.
+
+        Args:
+            user_id (str): User ID
+            password (str): Password
+        Returns:
+            A tuple of:
+                The user's ID.
+                The access token for the user's session.
+                The refresh token for the user's session.
+        Raises:
+            StoreError if there was a problem storing the token.
+            LoginError if there was an authentication problem.
+        """
+        user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
+        self._check_password(user_id, password, password_hash)
+
+        logger.info("Logging in user %s", user_id)
+        access_token = yield self.issue_access_token(user_id)
+        refresh_token = yield self.issue_refresh_token(user_id)
+        defer.returnValue((user_id, access_token, refresh_token))
+
+    @defer.inlineCallbacks
+    def _find_user_id_and_pwd_hash(self, user_id):
+        """Checks to see if a user with the given id exists. Will check case
+        insensitively, but will throw if there are multiple inexact matches.
+
+        Returns:
+            tuple: A 2-tuple of `(canonical_user_id, password_hash)`
+        """
+        user_infos = yield self.store.get_users_by_id_case_insensitive(user_id)
+        if not user_infos:
+            logger.warn("Attempted to login as %s but they do not exist", user_id)
+            raise LoginError(403, "", errcode=Codes.FORBIDDEN)
+
+        if len(user_infos) > 1:
+            if user_id not in user_infos:
+                logger.warn(
+                    "Attempted to login as %s but it matches more than one user "
+                    "inexactly: %r",
+                    user_id, user_infos.keys()
+                )
+                raise LoginError(403, "", errcode=Codes.FORBIDDEN)
+
+            defer.returnValue((user_id, user_infos[user_id]))
+        else:
+            defer.returnValue(user_infos.popitem())
+
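The selection rule above, restated as a plain function for clarity (a sketch only: the real method returns through a Deferred, raises LoginError, and has already rejected the empty case):

    def pick_user(requested, user_infos):
        # user_infos maps canonical user ids, matched case-insensitively
        # against the requested id, to password hashes.
        if len(user_infos) > 1:
            if requested not in user_infos:
                raise ValueError("ambiguous login for %r" % (requested,))
            return requested, user_infos[requested]
        return user_infos.popitem()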
+    def _check_password(self, user_id, password, stored_hash):
+        """Checks that user_id has passed password, raises LoginError if not."""
+        if not self.validate_hash(password, stored_hash):
+            logger.warn("Failed password login for user %s", user_id)
+            raise LoginError(403, "", errcode=Codes.FORBIDDEN)
+
+    @defer.inlineCallbacks
+    def issue_access_token(self, user_id):
+        access_token = self.generate_access_token(user_id)
+        yield self.store.add_access_token_to_user(user_id, access_token)
+        defer.returnValue(access_token)
+
+    @defer.inlineCallbacks
+    def issue_refresh_token(self, user_id):
+        refresh_token = self.generate_refresh_token(user_id)
+        yield self.store.add_refresh_token_to_user(user_id, refresh_token)
+        defer.returnValue(refresh_token)
+
+    def generate_access_token(self, user_id):
+        macaroon = self._generate_base_macaroon(user_id)
+        macaroon.add_first_party_caveat("type = access")
+        now = self.hs.get_clock().time_msec()
+        expiry = now + (60 * 60 * 1000)
+        macaroon.add_first_party_caveat("time < %d" % (expiry,))
+        return macaroon.serialize()
+
+    def generate_refresh_token(self, user_id):
+        m = self._generate_base_macaroon(user_id)
+        m.add_first_party_caveat("type = refresh")
+        # Important to add a nonce, because otherwise every refresh token for a
+        # user will be the same.
+        m.add_first_party_caveat("nonce = %s" % (
+            stringutils.random_string_with_symbols(16),
+        ))
+        return m.serialize()
+
+    def _generate_base_macaroon(self, user_id):
+        macaroon = pymacaroons.Macaroon(
+            location=self.hs.config.server_name,
+            identifier="key",
+            key=self.hs.config.macaroon_secret_key)
+        macaroon.add_first_party_caveat("gen = 1")
+        macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
+        return macaroon
+
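The tokens are ordinary pymacaroons objects, so the matching check is a caveat-by-caveat verification along these lines (an assumption about the eventual verifier; this diff does not add one):

    import pymacaroons

    def verify_access_token(serialized, user_id, now_ms, secret_key):
        macaroon = pymacaroons.Macaroon.deserialize(serialized)
        v = pymacaroons.Verifier()
        v.satisfy_exact("gen = 1")
        v.satisfy_exact("user_id = %s" % (user_id,))
        v.satisfy_exact("type = access")
        # Accept any expiry caveat that is still in the future.
        v.satisfy_general(
            lambda caveat: caveat.startswith("time < ")
            and now_ms < int(caveat[len("time < "):])
        )
        return v.verify(macaroon, secret_key)

The nonce caveat on refresh tokens exists only to make otherwise-identical tokens distinct; no verifier needs to inspect its value.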
+    @defer.inlineCallbacks
+    def set_password(self, user_id, newpassword):
+        password_hash = self.hash(newpassword)
+
+        yield self.store.user_set_password_hash(user_id, password_hash)
+        yield self.store.user_delete_access_tokens(user_id)
+        yield self.hs.get_pusherpool().remove_pushers_by_user(user_id)
+        yield self.store.flush_user(user_id)
+
+    @defer.inlineCallbacks
+    def add_threepid(self, user_id, medium, address, validated_at):
+        yield self.store.user_add_threepid(
+            user_id, medium, address, validated_at,
+            self.hs.get_clock().time_msec()
+        )
+
     def _save_session(self, session):
         # TODO: Persistent storage
         logger.debug("Saving session %s", session)
@@ -278,3 +390,26 @@ class AuthHandler(BaseHandler):
     def _remove_session(self, session):
         logger.debug("Removing session %s", session)
         del self.sessions[session["id"]]
+
+    def hash(self, password):
+        """Computes a secure hash of password.
+
+        Args:
+            password (str): Password to hash.
+
+        Returns:
+            Hashed password (str).
+        """
+        return bcrypt.hashpw(password, bcrypt.gensalt())
+
+    def validate_hash(self, password, stored_hash):
+        """Validates that self.hash(password) == stored_hash.
+
+        Args:
+            password (str): Password to hash.
+            stored_hash (str): Expected hash value.
+
+        Returns:
+            Whether self.hash(password) == stored_hash (bool).
+        """
+        return bcrypt.checkpw(password, stored_hash)
@@ -49,7 +49,12 @@ class EventStreamHandler(BaseHandler):
     @defer.inlineCallbacks
     @log_function
     def get_stream(self, auth_user_id, pagin_config, timeout=0,
-                   as_client_event=True, affect_presence=True):
+                   as_client_event=True, affect_presence=True,
+                   only_room_events=False):
+        """Fetches the events stream for a given user.
+
+        If `only_room_events` is `True` only room events will be returned.
+        """
         auth_user = UserID.from_string(auth_user_id)

         try:
@@ -70,7 +75,15 @@ class EventStreamHandler(BaseHandler):
             self._streams_per_user[auth_user] += 1

             rm_handler = self.hs.get_handlers().room_member_handler
-            room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)
+
+            app_service = yield self.store.get_app_service_by_user_id(
+                auth_user.to_string()
+            )
+            if app_service:
+                rooms = yield self.store.get_app_service_rooms(app_service)
+                room_ids = set(r.room_id for r in rooms)
+            else:
+                room_ids = yield rm_handler.get_joined_rooms_for_user(auth_user)

             if timeout:
                 # If they've set a timeout set a minimum limit.
@@ -81,7 +94,8 @@ class EventStreamHandler(BaseHandler):
                 timeout = random.randint(int(timeout*0.9), int(timeout*1.1))

             events, tokens = yield self.notifier.get_events_for(
-                auth_user, room_ids, pagin_config, timeout
+                auth_user, room_ids, pagin_config, timeout,
+                only_room_events=only_room_events
             )

             time_now = self.clock.time_msec()
@@ -229,15 +229,15 @@ class FederationHandler(BaseHandler):

     @defer.inlineCallbacks
     def _filter_events_for_server(self, server_name, room_id, events):
-        states = yield self.store.get_state_for_events(
-            room_id, [e.event_id for e in events],
+        event_to_state = yield self.store.get_state_for_events(
+            room_id, frozenset(e.event_id for e in events),
             types=(
                 (EventTypes.RoomHistoryVisibility, ""),
                 (EventTypes.Member, None),
             )
         )

-        events_and_states = zip(events, states)
-
-        def redact_disallowed(event_and_state):
-            event, state = event_and_state
-
+        def redact_disallowed(event, state):
             if not state:
                 return event

@@ -271,11 +271,10 @@ class FederationHandler(BaseHandler):

             return event

-        res = map(redact_disallowed, events_and_states)
-
-        logger.info("_filter_events_for_server %r", res)
-
-        defer.returnValue(res)
+        defer.returnValue([
+            redact_disallowed(e, event_to_state[e.event_id])
+            for e in events
+        ])

     @log_function
     @defer.inlineCallbacks
@@ -503,7 +502,7 @@ class FederationHandler(BaseHandler):
         event_ids = list(extremities.keys())

         states = yield defer.gatherResults([
-            self.state_handler.resolve_state_groups([e])
+            self.state_handler.resolve_state_groups(room_id, [e])
            for e in event_ids
         ])
         states = dict(zip(event_ids, [s[1] for s in states]))
@@ -875,7 +874,7 @@ class FederationHandler(BaseHandler):
             raise AuthError(403, "Host not in room.")

         state_groups = yield self.store.get_state_groups(
-            [event_id]
+            room_id, [event_id]
         )

         if state_groups:
@@ -1457,52 +1456,3 @@ class FederationHandler(BaseHandler):
             },
             "missing": [e.event_id for e in missing_locals],
         })
-
-    @defer.inlineCallbacks
-    def _handle_auth_events(self, origin, auth_events):
-        auth_ids_to_deferred = {}
-
-        def process_auth_ev(ev):
-            auth_ids = [e_id for e_id, _ in ev.auth_events]
-
-            prev_ds = [
-                auth_ids_to_deferred[i]
-                for i in auth_ids
-                if i in auth_ids_to_deferred
-            ]
-
-            d = defer.Deferred()
-
-            auth_ids_to_deferred[ev.event_id] = d
-
-            @defer.inlineCallbacks
-            def f(*_):
-                ev.internal_metadata.outlier = True
-
-                try:
-                    auth = {
-                        (e.type, e.state_key): e for e in auth_events
-                        if e.event_id in auth_ids
-                    }
-
-                    yield self._handle_new_event(
-                        origin, ev, auth_events=auth
-                    )
-                except:
-                    logger.exception(
-                        "Failed to handle auth event %s",
-                        ev.event_id,
-                    )
-
-                d.callback(None)
-
-            if prev_ds:
-                dx = defer.DeferredList(prev_ds)
-                dx.addBoth(f)
-            else:
-                f()
-
-        for e in auth_events:
-            process_auth_ev(e)
-
-        yield defer.DeferredList(auth_ids_to_deferred.values())
@@ -117,3 +117,28 @@ class IdentityHandler(BaseHandler):
         except CodeMessageException as e:
             data = json.loads(e.msg)
         defer.returnValue(data)
+
+    @defer.inlineCallbacks
+    def requestEmailToken(self, id_server, email, client_secret, send_attempt, **kwargs):
+        yield run_on_reactor()
+        http_client = SimpleHttpClient(self.hs)
+
+        params = {
+            'email': email,
+            'client_secret': client_secret,
+            'send_attempt': send_attempt,
+        }
+        params.update(kwargs)
+
+        try:
+            data = yield http_client.post_urlencoded_get_json(
+                "https://%s%s" % (
+                    id_server,
+                    "/_matrix/identity/api/v1/validate/email/requestToken"
+                ),
+                params
+            )
+            defer.returnValue(data)
+        except CodeMessageException as e:
+            logger.info("Proxied requestToken failed: %r", e)
+            raise e
@@ -1,83 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2014, 2015 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from twisted.internet import defer
-
-from ._base import BaseHandler
-from synapse.api.errors import LoginError, Codes
-
-import bcrypt
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class LoginHandler(BaseHandler):
-
-    def __init__(self, hs):
-        super(LoginHandler, self).__init__(hs)
-        self.hs = hs
-
-    @defer.inlineCallbacks
-    def login(self, user, password):
-        """Login as the specified user with the specified password.
-
-        Args:
-            user (str): The user ID.
-            password (str): The password.
-        Returns:
-            The newly allocated access token.
-        Raises:
-            StoreError if there was a problem storing the token.
-            LoginError if there was an authentication problem.
-        """
-        # TODO do this better, it can't go in __init__ else it cyclic loops
-        if not hasattr(self, "reg_handler"):
-            self.reg_handler = self.hs.get_handlers().registration_handler
-
-        # pull out the hash for this user if they exist
-        user_info = yield self.store.get_user_by_id(user_id=user)
-        if not user_info:
-            logger.warn("Attempted to login as %s but they do not exist", user)
-            raise LoginError(403, "", errcode=Codes.FORBIDDEN)
-
-        stored_hash = user_info["password_hash"]
-        if bcrypt.checkpw(password, stored_hash):
-            # generate an access token and store it.
-            token = self.reg_handler._generate_token(user)
-            logger.info("Adding token %s for user %s", token, user)
-            yield self.store.add_access_token_to_user(user, token)
-            defer.returnValue(token)
-        else:
-            logger.warn("Failed password login for user %s", user)
-            raise LoginError(403, "", errcode=Codes.FORBIDDEN)
-
-    @defer.inlineCallbacks
-    def set_password(self, user_id, newpassword, token_id=None):
-        password_hash = bcrypt.hashpw(newpassword, bcrypt.gensalt())
-
-        yield self.store.user_set_password_hash(user_id, password_hash)
-        yield self.store.user_delete_access_tokens_apart_from(user_id, token_id)
-        yield self.hs.get_pusherpool().remove_pushers_by_user_access_token(
-            user_id, token_id
-        )
-        yield self.store.flush_user(user_id)
-
-    @defer.inlineCallbacks
-    def add_threepid(self, user_id, medium, address, validated_at):
-        yield self.store.user_add_threepid(
-            user_id, medium, address, validated_at,
-            self.hs.get_clock().time_msec()
-        )
@@ -16,13 +16,13 @@
 from twisted.internet import defer

 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import RoomError, SynapseError
+from synapse.api.errors import SynapseError
 from synapse.streams.config import PaginationConfig
 from synapse.events.utils import serialize_event
 from synapse.events.validator import EventValidator
 from synapse.util import unwrapFirstError
 from synapse.util.logcontext import PreserveLoggingContext
-from synapse.types import UserID, RoomStreamToken
+from synapse.types import UserID, RoomStreamToken, StreamToken

 from ._base import BaseHandler
@@ -71,7 +71,7 @@ class MessageHandler(BaseHandler):

     @defer.inlineCallbacks
     def get_messages(self, user_id=None, room_id=None, pagin_config=None,
-                     feedback=False, as_client_event=True):
+                     as_client_event=True):
         """Get messages in a room.

         Args:
@@ -79,26 +79,52 @@ class MessageHandler(BaseHandler):
             room_id (str): The room they want messages from.
             pagin_config (synapse.api.streams.PaginationConfig): The pagination
                 config rules to apply, if any.
-            feedback (bool): True to get compressed feedback with the messages
             as_client_event (bool): True to get events in client-server format.
         Returns:
             dict: Pagination API results
         """
-        yield self.auth.check_joined_room(room_id, user_id)
+        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)

         data_source = self.hs.get_event_sources().sources["room"]

-        if not pagin_config.from_token:
+        if pagin_config.from_token:
+            room_token = pagin_config.from_token.room_key
+        else:
             pagin_config.from_token = (
                 yield self.hs.get_event_sources().get_current_token(
                     direction='b'
                 )
             )
+            room_token = pagin_config.from_token.room_key

-        room_token = RoomStreamToken.parse(pagin_config.from_token.room_key)
+        room_token = RoomStreamToken.parse(room_token)
         if room_token.topological is None:
             raise SynapseError(400, "Invalid token")

         pagin_config.from_token = pagin_config.from_token.copy_and_replace(
             "room_key", str(room_token)
         )

+        source_config = pagin_config.get_source_config("room")
+
+        if member_event.membership == Membership.LEAVE:
+            # If they have left the room then clamp the token to be before
+            # they left the room
+            leave_token = yield self.store.get_topological_token_for_event(
+                member_event.event_id
+            )
+            leave_token = RoomStreamToken.parse(leave_token)
+            if leave_token.topological < room_token.topological:
+                source_config.from_key = str(leave_token)
+
+            if source_config.direction == "f":
+                if source_config.to_key is None:
+                    source_config.to_key = str(leave_token)
+                else:
+                    to_token = RoomStreamToken.parse(source_config.to_key)
+                    if leave_token.topological < to_token.topological:
+                        source_config.to_key = str(leave_token)
+
         yield self.hs.get_handlers().federation_handler.maybe_backfill(
             room_id, room_token.topological
         )
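Note: the topological check leans on the two room token forms (inferred here from their use in this hunk): "t<topological>-<stream>" for tokens that can anchor pagination and "s<stream>" for pure stream positions:

    from synapse.types import RoomStreamToken

    RoomStreamToken.parse("t5-1234").topological  # 5
    RoomStreamToken.parse("s1234").topological    # None -> the 400 above

With both tokens in topological form, the clamp is a plain integer comparison, so a user who has left the room can never paginate past the point at which they left.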
@@ -106,7 +132,7 @@ class MessageHandler(BaseHandler):
         user = UserID.from_string(user_id)

         events, next_key = yield data_source.get_pagination_rows(
-            user, pagin_config.get_source_config("room"), room_id
+            user, source_config, room_id
         )

         next_token = pagin_config.from_token.copy_and_replace(
@@ -137,15 +163,15 @@ class MessageHandler(BaseHandler):

     @defer.inlineCallbacks
     def _filter_events_for_client(self, user_id, room_id, events):
-        states = yield self.store.get_state_for_events(
-            room_id, [e.event_id for e in events],
+        event_id_to_state = yield self.store.get_state_for_events(
+            room_id, frozenset(e.event_id for e in events),
             types=(
                 (EventTypes.RoomHistoryVisibility, ""),
                 (EventTypes.Member, user_id),
             )
         )

-        events_and_states = zip(events, states)
-
-        def allowed(event_and_state):
-            event, state = event_and_state
-
+        def allowed(event, state):
             if event.type == EventTypes.RoomHistoryVisibility:
                 return True

@@ -175,15 +201,15 @@ class MessageHandler(BaseHandler):

             return True

-        events_and_states = filter(allowed, events_and_states)
         defer.returnValue([
-            ev
-            for ev, _ in events_and_states
+            event
+            for event in events
+            if allowed(event, event_id_to_state[event.event_id])
         ])
     @defer.inlineCallbacks
     def create_and_send_event(self, event_dict, ratelimit=True,
-                              client=None, txn_id=None):
+                              token_id=None, txn_id=None):
         """ Given a dict from a client, create and handle a new event.

         Creates a FrozenEvent object, filling out auth_events, prev_events,
@@ -217,11 +243,8 @@ class MessageHandler(BaseHandler):
             builder.content
         )

-        if client is not None:
-            if client.token_id is not None:
-                builder.internal_metadata.token_id = client.token_id
-            if client.device_id is not None:
-                builder.internal_metadata.device_id = client.device_id
+        if token_id is not None:
+            builder.internal_metadata.token_id = token_id

         if txn_id is not None:
             builder.internal_metadata.txn_id = txn_id
@@ -258,29 +281,26 @@ class MessageHandler(BaseHandler):
         Raises:
             SynapseError if something went wrong.
         """
-        have_joined = yield self.auth.check_joined_room(room_id, user_id)
-        if not have_joined:
-            raise RoomError(403, "User not in room.")
+        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)

-        data = yield self.state_handler.get_current_state(
-            room_id, event_type, state_key
-        )
+        if member_event.membership == Membership.JOIN:
+            data = yield self.state_handler.get_current_state(
+                room_id, event_type, state_key
+            )
+        elif member_event.membership == Membership.LEAVE:
+            key = (event_type, state_key)
+            room_state = yield self.store.get_state_for_events(
+                room_id, [member_event.event_id], [key]
+            )
+            data = room_state[member_event.event_id].get(key)
+
         defer.returnValue(data)

-    @defer.inlineCallbacks
-    def get_feedback(self, event_id):
-        # yield self.auth.check_joined_room(room_id, user_id)
-
-        # Pull out the feedback from the db
-        fb = yield self.store.get_feedback(event_id)
-
-        if fb:
-            defer.returnValue(fb)
-        defer.returnValue(None)
-
     @defer.inlineCallbacks
     def get_state_events(self, user_id, room_id):
-        """Retrieve all state events for a given room.
+        """Retrieve all state events for a given room. If the user is
+        joined to the room then return the current state. If the user has
+        left the room return the state events from when they left.

         Args:
             user_id(str): The user requesting state events.
@@ -288,18 +308,23 @@ class MessageHandler(BaseHandler):
         Returns:
             A list of dicts representing state events. [{}, {}, {}]
         """
-        yield self.auth.check_joined_room(room_id, user_id)
+        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)

+        if member_event.membership == Membership.JOIN:
+            room_state = yield self.state_handler.get_current_state(room_id)
+        elif member_event.membership == Membership.LEAVE:
+            room_state = yield self.store.get_state_for_events(
+                room_id, [member_event.event_id], None
+            )
+            room_state = room_state[member_event.event_id]
+
-        # TODO: This is duplicating logic from snapshot_all_rooms
-        current_state = yield self.state_handler.get_current_state(room_id)
         now = self.clock.time_msec()
         defer.returnValue(
-            [serialize_event(c, now) for c in current_state.values()]
+            [serialize_event(c, now) for c in room_state.values()]
         )

     @defer.inlineCallbacks
-    def snapshot_all_rooms(self, user_id=None, pagin_config=None,
-                           feedback=False, as_client_event=True):
+    def snapshot_all_rooms(self, user_id=None, pagin_config=None, as_client_event=True):
         """Retrieve a snapshot of all rooms the user is invited or has joined.

         This snapshot may include messages for all rooms where the user is
@@ -309,7 +334,6 @@ class MessageHandler(BaseHandler):
             user_id (str): The ID of the user making the request.
             pagin_config (synapse.api.streams.PaginationConfig): The pagination
                 config used to determine how many messages *PER ROOM* to return.
-            feedback (bool): True to get feedback along with these messages.
             as_client_event (bool): True to get events in client-server format.
         Returns:
             A list of dicts with "room_id" and "membership" keys for all rooms
@@ -319,7 +343,9 @@ class MessageHandler(BaseHandler):
         """
         room_list = yield self.store.get_rooms_for_user_where_membership_is(
             user_id=user_id,
-            membership_list=[Membership.INVITE, Membership.JOIN]
+            membership_list=[
+                Membership.INVITE, Membership.JOIN, Membership.LEAVE
+            ]
         )

         user = UserID.from_string(user_id)
@@ -361,19 +387,32 @@ class MessageHandler(BaseHandler):

             rooms_ret.append(d)

-            if event.membership != Membership.JOIN:
+            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                 return

             try:
+                if event.membership == Membership.JOIN:
+                    room_end_token = now_token.room_key
+                    deferred_room_state = self.state_handler.get_current_state(
+                        event.room_id
+                    )
+                elif event.membership == Membership.LEAVE:
+                    room_end_token = "s%d" % (event.stream_ordering,)
+                    deferred_room_state = self.store.get_state_for_events(
+                        event.room_id, [event.event_id], None
+                    )
+                    deferred_room_state.addCallback(
+                        lambda states: states[event.event_id]
+                    )
+
                 (messages, token), current_state = yield defer.gatherResults(
                     [
                         self.store.get_recent_events_for_room(
                             event.room_id,
                             limit=limit,
-                            end_token=now_token.room_key,
+                            end_token=room_end_token,
                         ),
-                        self.state_handler.get_current_state(
-                            event.room_id
-                        )
+                        deferred_room_state,
                     ]
                 ).addErrback(unwrapFirstError)

@@ -401,10 +440,14 @@ class MessageHandler(BaseHandler):
             except:
                 logger.exception("Failed to get snapshot")

-        yield defer.gatherResults(
-            [handle_room(e) for e in room_list],
-            consumeErrors=True
-        ).addErrback(unwrapFirstError)
+        # Only do N rooms at once
+        n = 5
+        d_list = [handle_room(e) for e in room_list]
+        for i in range(0, len(d_list), n):
+            yield defer.gatherResults(
+                d_list[i:i + n],
+                consumeErrors=True
+            ).addErrback(unwrapFirstError)
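A note on the batching: handle_room(e) is invoked for every room when d_list is built, so all the snapshots start running immediately; the loop only waits on them five at a time. A variant that genuinely bounds concurrency would create the Deferreds lazily, per batch (a sketch, not part of this diff):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def gather_in_batches(factories, batch_size=5):
        # factories: zero-argument callables returning Deferreds, so no
        # work starts until its batch is reached.
        results = []
        for i in range(0, len(factories), batch_size):
            batch = [f() for f in factories[i:i + batch_size]]
            results.extend((yield defer.gatherResults(batch, consumeErrors=True)))
        defer.returnValue(results)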
         ret = {
             "rooms": rooms_ret,
@@ -416,15 +459,85 @@ class MessageHandler(BaseHandler):
         defer.returnValue(ret)

     @defer.inlineCallbacks
-    def room_initial_sync(self, user_id, room_id, pagin_config=None,
-                          feedback=False):
-        current_state = yield self.state.get_current_state(
-            room_id=room_id,
+    def room_initial_sync(self, user_id, room_id, pagin_config=None):
+        """Capture a snapshot of a room. If the user is currently a member of
+        the room this will be what is currently in the room. If the user left
+        the room this will be what was in the room when they left.
+
+        Args:
+            user_id(str): The user to get a snapshot for.
+            room_id(str): The room to get a snapshot of.
+            pagin_config(synapse.streams.config.PaginationConfig):
+                The pagination config used to determine how many messages to
+                return.
+        Raises:
+            AuthError if the user wasn't in the room.
+        Returns:
+            A JSON serialisable dict with the snapshot of the room.
+        """
+
+        member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
+
+        if member_event.membership == Membership.JOIN:
+            result = yield self._room_initial_sync_joined(
+                user_id, room_id, pagin_config, member_event
+            )
+        elif member_event.membership == Membership.LEAVE:
+            result = yield self._room_initial_sync_parted(
+                user_id, room_id, pagin_config, member_event
+            )
+        defer.returnValue(result)
+
+    @defer.inlineCallbacks
+    def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
+                                  member_event):
+        room_state = yield self.store.get_state_for_events(
+            member_event.room_id, [member_event.event_id], None
         )

-        yield self.auth.check_joined_room(
-            room_id, user_id,
-            current_state=current_state
-        )
+        room_state = room_state[member_event.event_id]
+
+        limit = pagin_config.limit if pagin_config else None
+        if limit is None:
+            limit = 10
+
+        stream_token = yield self.store.get_stream_token_for_event(
+            member_event.event_id
+        )
+
+        messages, token = yield self.store.get_recent_events_for_room(
+            room_id,
+            limit=limit,
+            end_token=stream_token
+        )
+
+        messages = yield self._filter_events_for_client(
+            user_id, room_id, messages
+        )
+
+        start_token = StreamToken(token[0], 0, 0, 0)
+        end_token = StreamToken(token[1], 0, 0, 0)
+
+        time_now = self.clock.time_msec()
+
+        defer.returnValue({
+            "membership": member_event.membership,
+            "room_id": room_id,
+            "messages": {
+                "chunk": [serialize_event(m, time_now) for m in messages],
+                "start": start_token.to_string(),
+                "end": end_token.to_string(),
+            },
+            "state": [serialize_event(s, time_now) for s in room_state.values()],
+            "presence": [],
+            "receipts": [],
+        })
+
+    @defer.inlineCallbacks
+    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
+                                  member_event):
+        current_state = yield self.state.get_current_state(
+            room_id=room_id,
+        )

         # TODO(paul): I wish I was called with user objects not user_id
@@ -438,8 +551,6 @@ class MessageHandler(BaseHandler):
             for x in current_state.values()
         ]

-        member_event = current_state.get((EventTypes.Member, user_id,))
-
         now_token = yield self.hs.get_event_sources().get_current_token()

         limit = pagin_config.limit if pagin_config else None
@@ -456,20 +567,14 @@ class MessageHandler(BaseHandler):

         @defer.inlineCallbacks
         def get_presence():
-            presence_defs = yield defer.DeferredList(
-                [
-                    presence_handler.get_state(
-                        target_user=UserID.from_string(m.user_id),
-                        auth_user=auth_user,
-                        as_event=True,
-                        check_auth=False,
-                    )
-                    for m in room_members
-                ],
-                consumeErrors=True,
+            states = yield presence_handler.get_states(
+                target_users=[UserID.from_string(m.user_id) for m in room_members],
+                auth_user=auth_user,
+                as_event=True,
+                check_auth=False,
             )

-            defer.returnValue([p for success, p in presence_defs if success])
+            defer.returnValue(states.values())

         receipts_handler = self.hs.get_handlers().receipts_handler

@@ -192,6 +192,20 @@ class PresenceHandler(BaseHandler):

     @defer.inlineCallbacks
     def get_state(self, target_user, auth_user, as_event=False, check_auth=True):
+        """Get the current presence state of the given user.
+
+        Args:
+            target_user (UserID): The user whose presence we want
+            auth_user (UserID): The user requesting the presence, used for
+                checking if said user is allowed to see the presence of the
+                `target_user`
+            as_event (bool): Format the return as an event or not?
+            check_auth (bool): Perform the auth checks or not?
+
+        Returns:
+            dict: The presence state of the `target_user`, whose format depends
+            on the `as_event` argument.
+        """
         if self.hs.is_mine(target_user):
             if check_auth:
                 visible = yield self.is_presence_visible(
@@ -232,6 +246,81 @@ class PresenceHandler(BaseHandler):
         else:
             defer.returnValue(state)

+    @defer.inlineCallbacks
+    def get_states(self, target_users, auth_user, as_event=False, check_auth=True):
+        """A batched version of the `get_state` method that accepts a list of
+        `target_users`
+
+        Args:
+            target_users (list): The list of UserIDs whose presence we want
+            auth_user (UserID): The user requesting the presence, used for
+                checking if said user is allowed to see the presence of the
+                `target_users`
+            as_event (bool): Format the return as an event or not?
+            check_auth (bool): Perform the auth checks or not?
+
+        Returns:
+            dict: A mapping from user -> presence_state
+        """
+        local_users, remote_users = partitionbool(
+            target_users,
+            lambda u: self.hs.is_mine(u)
+        )
+
+        if check_auth:
+            for user in local_users:
+                visible = yield self.is_presence_visible(
+                    observer_user=auth_user,
+                    observed_user=user
+                )
+
+                if not visible:
+                    raise SynapseError(404, "Presence information not visible")
+
+        results = {}
+        if local_users:
+            for user in local_users:
+                if user in self._user_cachemap:
+                    results[user] = self._user_cachemap[user].get_state()
+
+            local_to_user = {u.localpart: u for u in local_users}
+
+            states = yield self.store.get_presence_states(
+                [u.localpart for u in local_users if u not in results]
+            )
+
+            for local_part, state in states.items():
+                if state is None:
+                    continue
+                res = {"presence": state["state"]}
+                if "status_msg" in state and state["status_msg"]:
+                    res["status_msg"] = state["status_msg"]
+                results[local_to_user[local_part]] = res
+
+        for user in remote_users:
+            # TODO(paul): Have remote server send us permissions set
+            results[user] = self._get_or_offline_usercache(user).get_state()
+
+        for state in results.values():
+            if "last_active" in state:
+                state["last_active_ago"] = int(
+                    self.clock.time_msec() - state.pop("last_active")
+                )
+
+        if as_event:
+            for user, state in results.items():
+                content = state
+                content["user_id"] = user.to_string()
+
+                if "last_active" in content:
+                    content["last_active_ago"] = int(
+                        self.clock.time_msec() - content.pop("last_active")
+                    )
+
+                results[user] = {"type": "m.presence", "content": content}
+
+        defer.returnValue(results)
+
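For reference, partitionbool (imported from synapse.util) splits a list on a predicate; a behaviour-equivalent sketch:

    def partitionbool(items, predicate):
        # Returns (matching, non_matching), preserving input order.
        yes, no = [], []
        for item in items:
            (yes if predicate(item) else no).append(item)
        return yes, no

    # e.g. local_users, remote_users = partitionbool(users, is_mine)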
     @defer.inlineCallbacks
     @log_function
     def set_state(self, target_user, auth_user, state):
@@ -171,7 +171,6 @@ class ReceiptEventSource(object):

     @defer.inlineCallbacks
     def get_new_events_for_user(self, user, from_key, limit):
-        defer.returnValue(([], from_key))
         from_key = int(from_key)
         to_key = yield self.get_current_key()

@@ -194,7 +193,6 @@ class ReceiptEventSource(object):
     @defer.inlineCallbacks
     def get_pagination_rows(self, user, config, key):
         to_key = int(config.from_key)
-        defer.returnValue(([], to_key))

         if config.to_key:
             from_key = int(config.to_key)
@@ -25,8 +25,6 @@ import synapse.util.stringutils as stringutils
 from synapse.util.async import run_on_reactor
 from synapse.http.client import CaptchaServerHttpClient

-import base64
-import bcrypt
 import logging
 import urllib

@@ -57,8 +55,8 @@ class RegistrationHandler(BaseHandler):

         yield self.check_user_id_is_valid(user_id)

-        u = yield self.store.get_user_by_id(user_id)
-        if u:
+        users = yield self.store.get_users_by_id_case_insensitive(user_id)
+        if users:
             raise SynapseError(
                 400,
                 "User ID already taken.",
@@ -83,7 +81,7 @@ class RegistrationHandler(BaseHandler):
         yield run_on_reactor()
         password_hash = None
         if password:
-            password_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+            password_hash = self.auth_handler().hash(password)

         if localpart:
             yield self.check_username(localpart)
@@ -91,7 +89,7 @@ class RegistrationHandler(BaseHandler):
             user = UserID(localpart, self.hs.hostname)
             user_id = user.to_string()

-            token = self._generate_token(user_id)
+            token = self.auth_handler().generate_access_token(user_id)
             yield self.store.register(
                 user_id=user_id,
                 token=token,
@@ -111,7 +109,7 @@ class RegistrationHandler(BaseHandler):
                 user_id = user.to_string()
                 yield self.check_user_id_is_valid(user_id)

-                token = self._generate_token(user_id)
+                token = self.auth_handler().generate_access_token(user_id)
                 yield self.store.register(
                     user_id=user_id,
                     token=token,
@@ -161,7 +159,7 @@ class RegistrationHandler(BaseHandler):
                 400, "Invalid user localpart for this application service.",
                 errcode=Codes.EXCLUSIVE
             )
-        token = self._generate_token(user_id)
+        token = self.auth_handler().generate_access_token(user_id)
         yield self.store.register(
             user_id=user_id,
             token=token,
@@ -208,7 +206,7 @@ class RegistrationHandler(BaseHandler):
         user_id = user.to_string()

         yield self.check_user_id_is_valid(user_id)
-        token = self._generate_token(user_id)
+        token = self.auth_handler().generate_access_token(user_id)
         try:
             yield self.store.register(
                 user_id=user_id,
@@ -273,13 +271,6 @@ class RegistrationHandler(BaseHandler):
                 errcode=Codes.EXCLUSIVE
             )

-    def _generate_token(self, user_id):
-        # urlsafe variant uses _ and - so use . as the separator and replace
-        # all =s with .s so http clients don't quote =s when it is used as
-        # query params.
-        return (base64.urlsafe_b64encode(user_id).replace('=', '.') + '.' +
-                stringutils.random_string(18))
-
     def _generate_user_id(self):
         return "-" + stringutils.random_string(18)

@@ -322,3 +313,6 @@ class RegistrationHandler(BaseHandler):
             }
         )
         defer.returnValue(data)
+
+    def auth_handler(self):
+        return self.hs.get_handlers().auth_handler
@@ -25,7 +25,6 @@ from synapse.api.constants import (
 from synapse.api.errors import StoreError, SynapseError
 from synapse.util import stringutils, unwrapFirstError
-from synapse.util.async import run_on_reactor
 from synapse.events.utils import serialize_event

 from collections import OrderedDict
 import logging

@@ -39,7 +38,7 @@ class RoomCreationHandler(BaseHandler):
     PRESETS_DICT = {
         RoomCreationPreset.PRIVATE_CHAT: {
             "join_rules": JoinRules.INVITE,
-            "history_visibility": "invited",
+            "history_visibility": "shared",
             "original_invitees_have_ops": False,
         },
         RoomCreationPreset.PUBLIC_CHAT: {

@@ -247,9 +246,11 @@ class RoomCreationHandler(BaseHandler):
             },
             "users_default": 0,
             "events": {
-                EventTypes.Name: 100,
+                EventTypes.Name: 50,
                 EventTypes.PowerLevels: 100,
                 EventTypes.RoomHistoryVisibility: 100,
+                EventTypes.CanonicalAlias: 50,
+                EventTypes.RoomAvatar: 50,
             },
             "events_default": 0,
             "state_default": 50,

@@ -340,41 +341,6 @@ class RoomMemberHandler(BaseHandler):
         if remotedomains is not None:
             remotedomains.add(member.domain)

-    @defer.inlineCallbacks
-    def get_room_members_as_pagination_chunk(self, room_id=None, user_id=None,
-                                             limit=0, start_tok=None,
-                                             end_tok=None):
-        """Retrieve a list of room members in the room.
-
-        Args:
-            room_id (str): The room to get the member list for.
-            user_id (str): The ID of the user making the request.
-            limit (int): The max number of members to return.
-            start_tok (str): Optional. The start token if known.
-            end_tok (str): Optional. The end token if known.
-        Returns:
-            dict: A Pagination streamable dict.
-        Raises:
-            SynapseError if something goes wrong.
-        """
-        yield self.auth.check_joined_room(room_id, user_id)
-
-        member_list = yield self.store.get_room_members(room_id=room_id)
-        time_now = self.clock.time_msec()
-        event_list = [
-            serialize_event(entry, time_now)
-            for entry in member_list
-        ]
-        chunk_data = {
-            "start": "START",  # FIXME (erikj): START is no longer valid
-            "end": "END",
-            "chunk": event_list
-        }
-        # TODO honor Pagination stream params
-        # TODO snapshot this list to return on subsequent requests when
-        # paginating
-        defer.returnValue(chunk_data)
@defer.inlineCallbacks
|
||||
def change_membership(self, event, context, do_auth=True):
|
||||
""" Change the membership status of a user in a room.
|
||||
@@ -526,46 +492,14 @@ class RoomMemberHandler(BaseHandler):
            "user_joined_room", user=user, room_id=room_id
        )

    @defer.inlineCallbacks
    def _should_invite_join(self, room_id, prev_state, do_auth):
        logger.debug("_should_invite_join: room_id: %s", room_id)

        # XXX: We don't do an auth check if we are doing an invite
        # join dance for now, since we're kinda implicitly checking
        # that we are allowed to join when we decide whether or not we
        # need to do the invite/join dance.

        # Only do an invite join dance if a) we were invited,
        # b) the person inviting was from a different HS and c) we are
        # not currently in the room
        room_host = None
        if prev_state and prev_state.membership == Membership.INVITE:
            room = yield self.store.get_room(room_id)
            inviter = UserID.from_string(
                prev_state.sender
            )

            is_remote_invite_join = not self.hs.is_mine(inviter) and not room
            room_host = inviter.domain
        else:
            is_remote_invite_join = False

        defer.returnValue((is_remote_invite_join, room_host))

    @defer.inlineCallbacks
    def get_joined_rooms_for_user(self, user):
        """Returns a list of roomids that the user has any of the given
        membership states in."""

        app_service = yield self.store.get_app_service_by_user_id(
            user.to_string()
        rooms = yield self.store.get_rooms_for_user(
            user.to_string(),
        )
        if app_service:
            rooms = yield self.store.get_app_service_rooms(app_service)
        else:
            rooms = yield self.store.get_rooms_for_user(
                user.to_string(),
            )

        # For some reason the list of events contains duplicates
        # TODO(paul): work out why because I really don't think it should
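The `get_joined_rooms_for_user` change makes room resolution depend on whether the requesting user belongs to an application service: AS users are interested in every room their namespaces cover, not just rooms they have joined. A minimal sketch of the branching, using the same store methods the diff introduces (the `store` object here is an assumed stand-in):

```python
from twisted.internet import defer

@defer.inlineCallbacks
def rooms_for(store, user_id):
    # Application-service users see every room matching their namespaces;
    # ordinary users only see rooms they have actually joined.
    app_service = yield store.get_app_service_by_user_id(user_id)
    if app_service:
        rooms = yield store.get_app_service_rooms(app_service)
    else:
        rooms = yield store.get_rooms_for_user(user_id)
    defer.returnValue([r.room_id for r in rooms])
```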
@@ -650,7 +584,6 @@ class RoomEventSource(object):
            to_key=config.to_key,
            direction=config.direction,
            limit=config.limit,
            with_feedback=True
        )

        defer.returnValue((events, next_key))
@@ -28,7 +28,6 @@ logger = logging.getLogger(__name__)

SyncConfig = collections.namedtuple("SyncConfig", [
    "user",
    "client_info",
    "limit",
    "gap",
    "sort",
@@ -96,9 +95,18 @@ class SyncHandler(BaseHandler):
            return self.current_sync_for_user(sync_config, since_token)

        rm_handler = self.hs.get_handlers().room_member_handler
        room_ids = yield rm_handler.get_joined_rooms_for_user(
            sync_config.user

        app_service = yield self.store.get_app_service_by_user_id(
            sync_config.user.to_string()
        )
        if app_service:
            rooms = yield self.store.get_app_service_rooms(app_service)
            room_ids = set(r.room_id for r in rooms)
        else:
            room_ids = yield rm_handler.get_joined_rooms_for_user(
                sync_config.user
            )

        result = yield self.notifier.wait_for_events(
            sync_config.user, room_ids,
            sync_config.filter, timeout, current_sync_callback
@@ -229,7 +237,16 @@ class SyncHandler(BaseHandler):
        logger.debug("Typing %r", typing_by_room)

        rm_handler = self.hs.get_handlers().room_member_handler
        room_ids = yield rm_handler.get_joined_rooms_for_user(sync_config.user)
        app_service = yield self.store.get_app_service_by_user_id(
            sync_config.user.to_string()
        )
        if app_service:
            rooms = yield self.store.get_app_service_rooms(app_service)
            room_ids = set(r.room_id for r in rooms)
        else:
            room_ids = yield rm_handler.get_joined_rooms_for_user(
                sync_config.user
            )

        # TODO (mjark): Does public mean "published"?
        published_rooms = yield self.store.get_rooms(is_public=True)
@@ -294,15 +311,15 @@ class SyncHandler(BaseHandler):

    @defer.inlineCallbacks
    def _filter_events_for_client(self, user_id, room_id, events):
        states = yield self.store.get_state_for_events(
            room_id, [e.event_id for e in events],
        event_id_to_state = yield self.store.get_state_for_events(
            room_id, frozenset(e.event_id for e in events),
            types=(
                (EventTypes.RoomHistoryVisibility, ""),
                (EventTypes.Member, user_id),
            )
        )

        events_and_states = zip(events, states)

        def allowed(event_and_state):
            event, state = event_and_state

        def allowed(event, state):
            if event.type == EventTypes.RoomHistoryVisibility:
                return True

@@ -331,10 +348,11 @@ class SyncHandler(BaseHandler):
                return membership == Membership.INVITE

            return True
        events_and_states = filter(allowed, events_and_states)

        defer.returnValue([
            ev
            for ev, _ in events_and_states
            event
            for event in events
            if allowed(event, event_id_to_state[event.event_id])
        ])

    @defer.inlineCallbacks
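The rewrite keys the per-event state by event ID instead of zipping two parallel lists, so each visibility decision becomes an O(1) dict lookup and the events are filtered in a single comprehension. A stripped-down illustration of the same pattern with plain dicts standing in for the store and event objects:

```python
# State is fetched once, keyed by event ID; each event is then checked
# against the state that was current *at that event*.
events = [{"event_id": "$a", "visible": True}, {"event_id": "$b", "visible": False}]
event_id_to_state = {e["event_id"]: {"visible": e["visible"]} for e in events}

def allowed(event, state):
    return state["visible"]

filtered = [
    event
    for event in events
    if allowed(event, event_id_to_state[event["event_id"]])
]
assert [e["event_id"] for e in filtered] == ["$a"]
```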
@@ -204,15 +204,11 @@ class TypingNotificationHandler(BaseHandler):
        )

    def _push_update_local(self, room_id, user, typing):
        if room_id not in self._room_serials:
            self._room_serials[room_id] = 0
            self._room_typing[room_id] = set()

        room_set = self._room_typing[room_id]
        room_set = self._room_typing.setdefault(room_id, set())
        if typing:
            room_set.add(user)
        elif user in room_set:
            room_set.remove(user)
        else:
            room_set.discard(user)

        self._latest_room_serial += 1
        self._room_serials[room_id] = self._latest_room_serial
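`dict.setdefault` collapses the "initialise on first sight" dance into one call, and `set.discard` removes an element without first testing for membership: it is a no-op when the element is absent, where `remove` would raise `KeyError`. A quick illustration of both:

```python
room_typing = {}

# setdefault returns the existing set, or inserts and returns a new one.
room_set = room_typing.setdefault("!room:example.org", set())
room_set.add("@alice:example.org")

# discard is safe whether or not the user is present; remove would raise.
room_set.discard("@bob:example.org")    # no-op: @bob was never typing
room_set.discard("@alice:example.org")
assert room_typing["!room:example.org"] == set()
```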
@@ -260,8 +256,8 @@ class TypingNotificationEventSource(object):
        )

        events = []
        for room_id in handler._room_serials:
            if room_id not in joined_room_ids:
        for room_id in joined_room_ids:
            if room_id not in handler._room_serials:
                continue
            if handler._room_serials[room_id] <= from_key:
                continue
@@ -12,13 +12,16 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from OpenSSL import SSL
from OpenSSL.SSL import VERIFY_NONE

from synapse.api.errors import CodeMessageException
from synapse.util.logcontext import preserve_context_over_fn
from syutil.jsonutil import encode_canonical_json
import synapse.metrics

from twisted.internet import defer, reactor
from canonicaljson import encode_canonical_json

from twisted.internet import defer, reactor, ssl
from twisted.web.client import (
    Agent, readBody, FileBodyProducer, PartialDownloadError,
    HTTPConnectionPool,
@@ -58,7 +61,12 @@ class SimpleHttpClient(object):
        # 'like a browser'
        pool = HTTPConnectionPool(reactor)
        pool.maxPersistentPerHost = 10
        self.agent = Agent(reactor, pool=pool)
        self.agent = Agent(
            reactor,
            pool=pool,
            connectTimeout=15,
            contextFactory=hs.get_http_client_context_factory()
        )
        self.version_string = hs.version_string

    def request(self, method, uri, *args, **kwargs):
@@ -251,3 +259,18 @@ def _print_ex(e):
        _print_ex(ex)
    else:
        logger.exception(e)


class InsecureInterceptableContextFactory(ssl.ContextFactory):
    """
    Factory for PyOpenSSL SSL contexts which accepts any certificate for any domain.

    Do not use this since it allows an attacker to intercept your communications.
    """

    def __init__(self):
        self._context = SSL.Context(SSL.SSLv23_METHOD)
        self._context.set_verify(VERIFY_NONE, lambda *_: None)

    def getContext(self, hostname, port):
        return self._context
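`VERIFY_NONE` with an always-accepting callback disables certificate checking entirely, which is why the docstring warns so loudly: the factory only makes sense for debugging through a proxy you control. A hypothetical wiring, mirroring the `Agent(..., contextFactory=...)` constructor call shown in the `SimpleHttpClient` hunk above:

```python
from twisted.internet import reactor
from twisted.web.client import Agent

# DANGER: every certificate is accepted, so a man-in-the-middle can read
# and alter the traffic. Only for local debugging via e.g. mitmproxy.
agent = Agent(reactor, contextFactory=InsecureInterceptableContextFactory())
```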
@@ -16,7 +16,7 @@

from twisted.internet import defer, reactor, protocol
from twisted.internet.error import DNSLookupError
from twisted.web.client import readBody, _AgentBase, _URI, HTTPConnectionPool
from twisted.web.client import readBody, HTTPConnectionPool, Agent
from twisted.web.http_headers import Headers
from twisted.web._newclient import ResponseDone

@@ -25,13 +25,13 @@ from synapse.util.async import sleep
from synapse.util.logcontext import preserve_context_over_fn
import synapse.metrics

from syutil.jsonutil import encode_canonical_json
from canonicaljson import encode_canonical_json

from synapse.api.errors import (
    SynapseError, Codes, HttpResponseException,
)

from syutil.crypto.jsonsign import sign_json
from signedjson.sign import sign_json

import simplejson as json
import logging
@@ -55,41 +55,17 @@ incoming_responses_counter = metrics.register_counter(
)


class MatrixFederationHttpAgent(_AgentBase):
class MatrixFederationEndpointFactory(object):
    def __init__(self, hs):
        self.tls_server_context_factory = hs.tls_server_context_factory

    def __init__(self, reactor, pool=None):
        _AgentBase.__init__(self, reactor, pool)
    def endpointForURI(self, uri):
        destination = uri.netloc

    def request(self, destination, endpoint, method, path, params, query,
                headers, body_producer):

        outgoing_requests_counter.inc(method)

        host = b""
        port = 0
        fragment = b""

        parsed_URI = _URI(b"http", destination, host, port, path, params,
                          query, fragment)

        # Set the connection pool key to be the destination.
        key = destination

        d = self._requestWithEndpoint(key, endpoint, method, parsed_URI,
                                      headers, body_producer,
                                      parsed_URI.originForm)

        def _cb(response):
            incoming_responses_counter.inc(method, response.code)
            return response

        def _eb(failure):
            incoming_responses_counter.inc(method, "ERR")
            return failure

        d.addCallbacks(_cb, _eb)

        return d
        return matrix_federation_endpoint(
            reactor, destination, timeout=10,
            ssl_context_factory=self.tls_server_context_factory
        )
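The hand-rolled `_AgentBase` subclass is replaced by Twisted's public `Agent.usingEndpointFactory` hook (available from Twisted 15.0, which is presumably why this branch also bumps the Twisted requirement): the agent keeps handling pooling and request framing, and the factory only answers "which endpoint do I connect to for this URI?". A minimal self-contained sketch of the pattern, using a plain TCP endpoint in place of `matrix_federation_endpoint`:

```python
from twisted.internet import reactor
from twisted.internet.endpoints import TCP4ClientEndpoint
from twisted.web.client import Agent

class FixedHostEndpointFactory(object):
    """Route every request to one host, ignoring the URI's own netloc."""
    def __init__(self, reactor, host, port):
        self.reactor = reactor
        self.host = host
        self.port = port

    def endpointForURI(self, uri):
        # uri.netloc carries the logical destination; here we override it.
        return TCP4ClientEndpoint(self.reactor, self.host, self.port)

agent = Agent.usingEndpointFactory(
    reactor, FixedHostEndpointFactory(reactor, "localhost", 8080)
)
d = agent.request(b"GET", b"http://ignored.example/path")
```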
class MatrixFederationHttpClient(object):
@@ -107,12 +83,18 @@ class MatrixFederationHttpClient(object):
        self.server_name = hs.hostname
        pool = HTTPConnectionPool(reactor)
        pool.maxPersistentPerHost = 10
        self.agent = MatrixFederationHttpAgent(reactor, pool=pool)
        self.agent = Agent.usingEndpointFactory(
            reactor, MatrixFederationEndpointFactory(hs), pool=pool
        )
        self.clock = hs.get_clock()
        self.version_string = hs.version_string

        self._next_id = 1

    def _create_url(self, destination, path_bytes, param_bytes, query_bytes):
        return urlparse.urlunparse(
            ("matrix", destination, path_bytes, param_bytes, query_bytes, "")
        )

    @defer.inlineCallbacks
    def _create_request(self, destination, method, path_bytes,
                        body_callback, headers_dict={}, param_bytes=b"",
@@ -123,8 +105,8 @@ class MatrixFederationHttpClient(object):
        headers_dict[b"User-Agent"] = [self.version_string]
        headers_dict[b"Host"] = [destination]

        url_bytes = urlparse.urlunparse(
            ("", "", path_bytes, param_bytes, query_bytes, "",)
        url_bytes = self._create_url(
            destination, path_bytes, param_bytes, query_bytes
        )

        txn_id = "%s-O-%s" % (method, self._next_id)
@@ -139,8 +121,8 @@ class MatrixFederationHttpClient(object):
        # (once we have reliable transactions in place)
        retries_left = 5

        endpoint = preserve_context_over_fn(
            self._getEndpoint, reactor, destination
        http_url_bytes = urlparse.urlunparse(
            ("", "", path_bytes, param_bytes, query_bytes, "")
        )

        log_result = None
@@ -148,17 +130,14 @@ class MatrixFederationHttpClient(object):
        while True:
            producer = None
            if body_callback:
                producer = body_callback(method, url_bytes, headers_dict)
                producer = body_callback(method, http_url_bytes, headers_dict)

            try:
                def send_request():
                    request_deferred = self.agent.request(
                        destination,
                        endpoint,
                    request_deferred = preserve_context_over_fn(
                        self.agent.request,
                        method,
                        path_bytes,
                        param_bytes,
                        query_bytes,
                        url_bytes,
                        Headers(headers_dict),
                        producer
                    )
@@ -452,12 +431,6 @@ class MatrixFederationHttpClient(object):

        defer.returnValue((length, headers))

    def _getEndpoint(self, reactor, destination):
        return matrix_federation_endpoint(
            reactor, destination, timeout=10,
            ssl_context_factory=self.hs.tls_context_factory
        )


class _ReadBodyToFileProtocol(protocol.Protocol):
    def __init__(self, stream, deferred, max_size):
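The destination now travels inside the request URL itself: `_create_url` builds a synthetic `matrix://<destination>/<path>` URI, and `MatrixFederationEndpointFactory.endpointForURI` reads the destination back out of `uri.netloc` to pick the federation endpoint. A tiny sketch of that round trip using only the stdlib:

```python
import urlparse  # Python 2, as in the rest of this codebase

def create_url(destination, path, params, query):
    # The "matrix" scheme is synthetic; it exists only to smuggle the
    # destination through the Agent to the endpoint factory.
    return urlparse.urlunparse(("matrix", destination, path, params, query, ""))

url = create_url("remote.example.org:8448", "/_matrix/federation/v1/version", "", "")
parsed = urlparse.urlparse(url)
assert parsed.netloc == "remote.example.org:8448"
```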
@@ -21,8 +21,8 @@ from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
import synapse.metrics
import synapse.events

from syutil.jsonutil import (
    encode_canonical_json, encode_pretty_printed_json, encode_json
from canonicaljson import (
    encode_canonical_json, encode_pretty_printed_json
)

from twisted.internet import defer
@@ -33,6 +33,7 @@ from twisted.web.util import redirectTo
import collections
import logging
import urllib
import ujson

logger = logging.getLogger(__name__)

@@ -270,12 +271,11 @@ def respond_with_json(request, code, json_object, send_cors=False,
    if pretty_print:
        json_bytes = encode_pretty_printed_json(json_object) + "\n"
    else:
        if canonical_json:
        if canonical_json or synapse.events.USE_FROZEN_DICTS:
            json_bytes = encode_canonical_json(json_object)
        else:
            json_bytes = encode_json(
                json_object, using_frozen_dicts=synapse.events.USE_FROZEN_DICTS
            )
            # ujson doesn't like frozen_dicts.
            json_bytes = ujson.dumps(json_object, ensure_ascii=False)

    return respond_with_json_bytes(
        request, code, json_bytes,
@@ -17,9 +17,13 @@
from __future__ import absolute_import

import logging
from resource import getrusage, getpagesize, RUSAGE_SELF
from resource import getrusage, RUSAGE_SELF
import functools
import os
import stat
import time

from twisted.internet import reactor

from .metric import (
    CounterMetric, CallbackMetric, DistributionMetric, CacheMetric
@@ -96,7 +100,6 @@ def render_all():
# process resource usage

rusage = None
PAGE_SIZE = getpagesize()


def update_resource_metrics():
@@ -109,8 +112,8 @@ resource_metrics = get_metrics_for("process.resource")
resource_metrics.register_callback("utime", lambda: rusage.ru_utime * 1000)
resource_metrics.register_callback("stime", lambda: rusage.ru_stime * 1000)

# pages
resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * PAGE_SIZE)
# kilobytes
resource_metrics.register_callback("maxrss", lambda: rusage.ru_maxrss * 1024)

TYPES = {
    stat.S_IFSOCK: "SOCK",
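On Linux, `getrusage(...).ru_maxrss` is reported in kilobytes, not pages, so the old `* PAGE_SIZE` scaling overstated peak memory; multiplying by 1024 yields bytes. (On macOS `ru_maxrss` is already in bytes, which this code does not attempt to handle.) A quick standalone check:

```python
from resource import getrusage, RUSAGE_SELF

rusage = getrusage(RUSAGE_SELF)
# Linux: ru_maxrss is in kilobytes, so this is peak RSS in bytes.
peak_rss_bytes = rusage.ru_maxrss * 1024
print("peak RSS: %d MiB" % (peak_rss_bytes / (1024 * 1024)))
```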
@@ -127,6 +130,10 @@ def _process_fds():
    counts = {(k,): 0 for k in TYPES.values()}
    counts[("other",)] = 0

    # Not every OS will have a /proc/self/fd directory
    if not os.path.exists("/proc/self/fd"):
        return counts

    for fd in os.listdir("/proc/self/fd"):
        try:
            s = os.stat("/proc/self/fd/%s" % (fd))
@@ -144,3 +151,50 @@ def _process_fds():
    return counts

get_metrics_for("process").register_callback("fds", _process_fds, labels=["type"])

reactor_metrics = get_metrics_for("reactor")
tick_time = reactor_metrics.register_distribution("tick_time")
pending_calls_metric = reactor_metrics.register_distribution("pending_calls")


def runUntilCurrentTimer(func):

    @functools.wraps(func)
    def f(*args, **kwargs):
        now = reactor.seconds()
        num_pending = 0

        # _newTimedCalls is one long list of *all* pending calls. Below loop
        # is based off of impl of reactor.runUntilCurrent
        for delayed_call in reactor._newTimedCalls:
            if delayed_call.time > now:
                break

            if delayed_call.delayed_time > 0:
                continue

            num_pending += 1

        num_pending += len(reactor.threadCallQueue)

        start = time.time() * 1000
        ret = func(*args, **kwargs)
        end = time.time() * 1000
        tick_time.inc_by(end - start)
        pending_calls_metric.inc_by(num_pending)
        return ret

    return f


try:
    # Ensure the reactor has all the attributes we expect
    reactor.runUntilCurrent
    reactor._newTimedCalls
    reactor.threadCallQueue

    # runUntilCurrent is called when we have pending calls. It is called once
    # per iteration after fd polling.
    reactor.runUntilCurrent = runUntilCurrentTimer(reactor.runUntilCurrent)
except AttributeError:
    pass
@@ -328,10 +328,13 @@ class Notifier(object):
        defer.returnValue(result)

    @defer.inlineCallbacks
    def get_events_for(self, user, rooms, pagination_config, timeout):
    def get_events_for(self, user, rooms, pagination_config, timeout,
                       only_room_events=False):
        """ For the given user and rooms, return any new events for them. If
        there are no new events wait for up to `timeout` milliseconds for any
        new events to happen before returning.

        If `only_room_events` is `True` only room events will be returned.
        """
        from_token = pagination_config.from_token
        if not from_token:
@@ -352,6 +355,8 @@ class Notifier(object):
            after_id = getattr(after_token, keyname)
            if before_id == after_id:
                continue
            if only_room_events and name != "room":
                continue
            new_events, new_key = yield source.get_new_events_for_user(
                user, getattr(from_token, keyname), limit,
            )
@@ -249,7 +249,9 @@ class Pusher(object):
            # we fail to dispatch the push)
            config = PaginationConfig(from_token=None, limit='1')
            chunk = yield self.evStreamHandler.get_stream(
                self.user_name, config, timeout=0)
                self.user_name, config, timeout=0, affect_presence=False,
                only_room_events=True
            )
            self.last_token = chunk['end']
            self.store.update_pusher_last_token(
                self.app_id, self.pushkey, self.user_name, self.last_token
@@ -280,8 +282,8 @@ class Pusher(object):
        config = PaginationConfig(from_token=from_tok, limit='1')
        timeout = (300 + random.randint(-60, 60)) * 1000
        chunk = yield self.evStreamHandler.get_stream(
            self.user_name, config,
            timeout=timeout, affect_presence=False
            self.user_name, config, timeout=timeout, affect_presence=False,
            only_room_events=True
        )

        # limiting to 1 may get 1 event plus 1 presence event, so
@@ -294,6 +296,12 @@ class Pusher(object):
        if not single_event:
            self.last_token = chunk['end']
            logger.debug("Event stream timeout for pushkey %s", self.pushkey)
            yield self.store.update_pusher_last_token(
                self.app_id,
                self.pushkey,
                self.user_name,
                self.last_token
            )
            return

        if not self.alive:
@@ -345,7 +353,7 @@ class Pusher(object):
            if processed:
                self.backoff_delay = Pusher.INITIAL_BACKOFF
                self.last_token = chunk['end']
                self.store.update_pusher_last_token_and_success(
                yield self.store.update_pusher_last_token_and_success(
                    self.app_id,
                    self.pushkey,
                    self.user_name,
@@ -354,7 +362,7 @@ class Pusher(object):
                )
                if self.failing_since:
                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                    yield self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.user_name,
@@ -362,7 +370,7 @@ class Pusher(object):
            else:
                if not self.failing_since:
                    self.failing_since = self.clock.time_msec()
                    self.store.update_pusher_failing_since(
                    yield self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.user_name,
@@ -380,7 +388,7 @@ class Pusher(object):
                        self.user_name, self.pushkey)
                    self.backoff_delay = Pusher.INITIAL_BACKOFF
                    self.last_token = chunk['end']
                    self.store.update_pusher_last_token(
                    yield self.store.update_pusher_last_token(
                        self.app_id,
                        self.pushkey,
                        self.user_name,
@@ -388,7 +396,7 @@ class Pusher(object):

                    self.failing_since = None
                    self.store.update_pusher_failing_since(
                    yield self.store.update_pusher_failing_since(
                        self.app_id,
                        self.pushkey,
                        self.user_name,
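Adding `yield` in front of these store calls matters inside `@defer.inlineCallbacks`: an un-yielded call returns a `Deferred` that is simply dropped, so the write races with later code and any failure is silently discarded instead of propagating. A minimal contrast (the `store` here is an assumed stand-in returning Deferreds):

```python
from twisted.internet import defer

def fire_and_forget(store, token):
    # The returned Deferred is dropped: the write may still be in flight
    # when we return, and a failure vanishes without a trace.
    store.update(token)

@defer.inlineCallbacks
def awaited(store, token):
    # yield suspends until the write finishes and re-raises any failure
    # here, where it can be logged or retried.
    yield store.update(token)
    defer.returnValue(token)
```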
@@ -94,17 +94,14 @@ class PusherPool:
                self.remove_pusher(p['app_id'], p['pushkey'], p['user_name'])

    @defer.inlineCallbacks
    def remove_pushers_by_user_access_token(self, user_id, not_access_token_id):
    def remove_pushers_by_user(self, user_id):
        all = yield self.store.get_all_pushers()
        logger.info(
            "Removing all pushers for user %s except access token %s",
            user_id, not_access_token_id
            "Removing all pushers for user %s",
            user_id,
        )
        for p in all:
            if (
                p['user_name'] == user_id and
                p['access_token'] != not_access_token_id
            ):
            if p['user_name'] == user_id:
                logger.info(
                    "Removing pusher for app id %s, pushkey %s, user %s",
                    p['app_id'], p['pushkey'], p['user_name']
@@ -18,21 +18,24 @@ from distutils.version import LooseVersion
logger = logging.getLogger(__name__)

REQUIREMENTS = {
    "syutil>=0.0.7": ["syutil>=0.0.7"],
    "Twisted==14.0.2": ["twisted==14.0.2"],
    "frozendict>=0.4": ["frozendict"],
    "unpaddedbase64>=1.0.1": ["unpaddedbase64>=1.0.1"],
    "canonicaljson>=1.0.0": ["canonicaljson>=1.0.0"],
    "signedjson>=1.0.0": ["signedjson>=1.0.0"],
    "pynacl>=0.3.0": ["nacl>=0.3.0", "nacl.bindings"],
    "service_identity>=1.0.0": ["service_identity>=1.0.0"],
    "Twisted>=15.1.0": ["twisted>=15.1.0"],
    "pyopenssl>=0.14": ["OpenSSL>=0.14"],
    "pyyaml": ["yaml"],
    "pyasn1": ["pyasn1"],
    "pynacl>=0.0.3": ["nacl>=0.0.3"],
    "daemonize": ["daemonize"],
    "py-bcrypt": ["bcrypt"],
    "frozendict>=0.4": ["frozendict"],
    "pillow": ["PIL"],
    "pydenticon": ["pydenticon"],
    "ujson": ["ujson"],
    "blist": ["blist"],
    "pysaml2": ["saml2"],
    "pymacaroons-pynacl": ["pymacaroons"],
}
CONDITIONAL_REQUIREMENTS = {
    "web_client": {
@@ -43,8 +46,8 @@ CONDITIONAL_REQUIREMENTS = {

def requirements(config=None, include_conditional=False):
    reqs = REQUIREMENTS.copy()
    for key, req in CONDITIONAL_REQUIREMENTS.items():
        if (config and getattr(config, key)) or include_conditional:
    if include_conditional:
        for _, req in CONDITIONAL_REQUIREMENTS.items():
            reqs.update(req)
    return reqs

@@ -52,22 +55,15 @@ def requirements(config=None, include_conditional=False):
def github_link(project, version, egg):
    return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)

DEPENDENCY_LINKS = [
    github_link(
        project="matrix-org/syutil",
        version="v0.0.7",
        egg="syutil-0.0.7",
    ),
    github_link(
        project="matrix-org/matrix-angular-sdk",
        version="v0.6.6",
        egg="matrix_angular_sdk-0.6.6",
    ),
]
DEPENDENCY_LINKS = {
}


class MissingRequirementError(Exception):
    pass
    def __init__(self, message, module_name, dependency):
        super(MissingRequirementError, self).__init__(message)
        self.module_name = module_name
        self.dependency = dependency


def check_requirements(config=None):
@@ -95,7 +91,7 @@ def check_requirements(config=None):
            )
            raise MissingRequirementError(
                "Can't import %r which is part of %r"
                % (module_name, dependency)
                % (module_name, dependency), module_name, dependency
            )
        version = getattr(module, "__version__", None)
        file_path = getattr(module, "__file__", None)
@@ -108,30 +104,32 @@ def check_requirements(config=None):
            if version is None:
                raise MissingRequirementError(
                    "Version of %r isn't set as __version__ of module %r"
                    % (dependency, module_name)
                    % (dependency, module_name), module_name, dependency
                )
            if LooseVersion(version) < LooseVersion(required_version):
                raise MissingRequirementError(
                    "Version of %r in %r is too old. %r < %r"
                    % (dependency, file_path, version, required_version)
                    % (dependency, file_path, version, required_version),
                    module_name, dependency
                )
        elif version_test == "==":
            if version is None:
                raise MissingRequirementError(
                    "Version of %r isn't set as __version__ of module %r"
                    % (dependency, module_name)
                    % (dependency, module_name), module_name, dependency
                )
            if LooseVersion(version) != LooseVersion(required_version):
                raise MissingRequirementError(
                    "Unexpected version of %r in %r. %r != %r"
                    % (dependency, file_path, version, required_version)
                    % (dependency, file_path, version, required_version),
                    module_name, dependency
                )


def list_requirements():
    result = []
    linked = []
    for link in DEPENDENCY_LINKS:
    for link in DEPENDENCY_LINKS.values():
        egg = link.split("#egg=")[1]
        linked.append(egg.split('-')[0])
        result.append(link)
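With `module_name` and `dependency` now carried on the exception, callers can react to a missing dependency programmatically instead of parsing the message string. A plausible consumer (hypothetical, not code from this branch):

```python
try:
    check_requirements()
except MissingRequirementError as e:
    # The structured fields identify exactly what failed to import.
    print("missing module %s (from dependency %s): %s"
          % (e.module_name, e.dependency, e))
    raise SystemExit(1)
```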
@@ -31,7 +31,7 @@ class WhoisRestServlet(ClientV1RestServlet):
    @defer.inlineCallbacks
    def on_GET(self, request, user_id):
        target_user = UserID.from_string(user_id)
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        is_admin = yield self.auth.is_server_admin(auth_user)

        if not is_admin and target_user != auth_user:

@@ -69,7 +69,7 @@ class ClientDirectoryServer(ClientV1RestServlet):

        try:
            # try to auth as a user
            user, client = yield self.auth.get_user_by_req(request)
            user, _ = yield self.auth.get_user_by_req(request)
            try:
                user_id = user.to_string()
                yield dir_handler.create_association(
@@ -116,7 +116,7 @@ class ClientDirectoryServer(ClientV1RestServlet):
            # fallback to default user behaviour if they aren't an AS
            pass

        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)

        is_admin = yield self.auth.is_server_admin(user)
        if not is_admin:

@@ -34,7 +34,7 @@ class EventStreamRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        try:
            handler = self.handlers.event_stream_handler
            pagin_config = PaginationConfig.from_request(request)
@@ -71,7 +71,7 @@ class EventRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, event_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        handler = self.handlers.event_handler
        event = yield handler.get_event(auth_user, event_id)


@@ -25,15 +25,13 @@ class InitialSyncRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request):
        user, client = yield self.auth.get_user_by_req(request)
        with_feedback = "feedback" in request.args
        user, _ = yield self.auth.get_user_by_req(request)
        as_client_event = "raw" not in request.args
        pagination_config = PaginationConfig.from_request(request)
        handler = self.handlers.message_handler
        content = yield handler.snapshot_all_rooms(
            user_id=user.to_string(),
            pagin_config=pagination_config,
            feedback=with_feedback,
            as_client_event=as_client_event
        )
@@ -74,18 +74,27 @@ class LoginRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def do_password_login(self, login_submission):
        if not login_submission["user"].startswith('@'):
            login_submission["user"] = UserID.create(
                login_submission["user"], self.hs.hostname).to_string()
        if 'medium' in login_submission and 'address' in login_submission:
            user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
                login_submission['medium'], login_submission['address']
            )
        else:
            user_id = login_submission['user']

        handler = self.handlers.login_handler
        token = yield handler.login(
            user=login_submission["user"],
        if not user_id.startswith('@'):
            user_id = UserID.create(
                user_id, self.hs.hostname
            ).to_string()

        auth_handler = self.handlers.auth_handler
        user_id, access_token, refresh_token = yield auth_handler.login_with_password(
            user_id=user_id,
            password=login_submission["password"])

        result = {
            "user_id": login_submission["user"],  # may have changed
            "access_token": token,
            "user_id": user_id,  # may have changed
            "access_token": access_token,
            "refresh_token": refresh_token,
            "home_server": self.hs.hostname,
        }
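After this change a client can log in either by user ID or by a third-party identifier (the server resolves the 3PID to a user ID via `get_user_id_by_threepid` before checking the password), and a successful login now returns a refresh token alongside the access token. Illustrative request bodies (all values are placeholders):

```python
# Classic login by localpart or full user ID:
by_user = {
    "type": "m.login.password",
    "user": "@alice:example.org",
    "password": "s3cret",
}

# Login by third-party identifier:
by_threepid = {
    "type": "m.login.password",
    "medium": "email",
    "address": "alice@example.org",
    "password": "s3cret",
}
```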
@@ -32,7 +32,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        state = yield self.handlers.presence_handler.get_state(
@@ -42,7 +42,7 @@ class PresenceStatusRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_PUT(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        state = {}
@@ -77,7 +77,7 @@ class PresenceListRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        if not self.hs.is_mine(user):
@@ -97,7 +97,7 @@ class PresenceListRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        if not self.hs.is_mine(user):

@@ -37,7 +37,7 @@ class ProfileDisplaynameRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_PUT(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        try:
@@ -70,7 +70,7 @@ class ProfileAvatarURLRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_PUT(self, request, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user = UserID.from_string(user_id)

        try:

@@ -27,7 +27,7 @@ class PusherRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)

        content = _parse_json(request)

@@ -65,7 +65,7 @@ class PusherRestServlet(ClientV1RestServlet):
        try:
            yield pusher_pool.add_pusher(
                user_name=user.to_string(),
                access_token=client.token_id,
                access_token=token_id,
                profile_tag=content['profile_tag'],
                kind=content['kind'],
                app_id=content['app_id'],
@@ -62,7 +62,7 @@ class RoomCreateRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        room_config = self.get_room_config(request)
        info = yield self.make_room(room_config, auth_user, None)
@@ -125,7 +125,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, room_id, event_type, state_key):
        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)

        msg_handler = self.handlers.message_handler
        data = yield msg_handler.get_room_data(
@@ -143,7 +143,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_PUT(self, request, room_id, event_type, state_key, txn_id=None):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)

        content = _parse_json(request)

@@ -159,7 +159,7 @@ class RoomStateEventRestServlet(ClientV1RestServlet):

        msg_handler = self.handlers.message_handler
        yield msg_handler.create_and_send_event(
            event_dict, client=client, txn_id=txn_id,
            event_dict, token_id=token_id, txn_id=txn_id,
        )

        defer.returnValue((200, {}))
@@ -175,7 +175,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, room_id, event_type, txn_id=None):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)
        content = _parse_json(request)

        msg_handler = self.handlers.message_handler
@@ -186,7 +186,7 @@ class RoomSendEventRestServlet(ClientV1RestServlet):
                "room_id": room_id,
                "sender": user.to_string(),
            },
            client=client,
            token_id=token_id,
            txn_id=txn_id,
        )

@@ -220,7 +220,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, room_identifier, txn_id=None):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)

        # the identifier could be a room alias or a room id. Try one then the
        # other if it fails to parse, without swallowing other valid
@@ -250,7 +250,7 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
                "sender": user.to_string(),
                "state_key": user.to_string(),
            },
            client=client,
            token_id=token_id,
            txn_id=txn_id,
        )
@@ -289,13 +289,19 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
    @defer.inlineCallbacks
    def on_GET(self, request, room_id):
        # TODO support Pagination stream API (limit/tokens)
        user, client = yield self.auth.get_user_by_req(request)
        handler = self.handlers.room_member_handler
        members = yield handler.get_room_members_as_pagination_chunk(
        user, _ = yield self.auth.get_user_by_req(request)
        handler = self.handlers.message_handler
        events = yield handler.get_state_events(
            room_id=room_id,
            user_id=user.to_string())
            user_id=user.to_string(),
        )

        for event in members["chunk"]:
        chunk = []

        for event in events:
            if event["type"] != EventTypes.Member:
                continue
            chunk.append(event)
            # FIXME: should probably be state_key here, not user_id
            target_user = UserID.from_string(event["user_id"])
            # Presence is an optional cache; don't fail if we can't fetch it
@@ -308,7 +314,9 @@ class RoomMemberListRestServlet(ClientV1RestServlet):
        except:
            pass

        defer.returnValue((200, members))
        defer.returnValue((200, {
            "chunk": chunk
        }))
# TODO: Needs unit testing
@@ -317,18 +325,16 @@ class RoomMessageListRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, room_id):
        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)
        pagination_config = PaginationConfig.from_request(
            request, default_limit=10,
        )
        with_feedback = "feedback" in request.args
        as_client_event = "raw" not in request.args
        handler = self.handlers.message_handler
        msgs = yield handler.get_messages(
            room_id=room_id,
            user_id=user.to_string(),
            pagin_config=pagination_config,
            feedback=with_feedback,
            as_client_event=as_client_event
        )

@@ -341,7 +347,7 @@ class RoomStateRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, room_id):
        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)
        handler = self.handlers.message_handler
        # Get all the current state for this room
        events = yield handler.get_state_events(
@@ -357,7 +363,7 @@ class RoomInitialSyncRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, room_id):
        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)
        pagination_config = PaginationConfig.from_request(request)
        content = yield self.handlers.message_handler.room_initial_sync(
            room_id=room_id,
@@ -402,7 +408,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, room_id, membership_action, txn_id=None):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)

        content = _parse_json(request)

@@ -427,7 +433,7 @@ class RoomMembershipRestServlet(ClientV1RestServlet):
                "sender": user.to_string(),
                "state_key": state_key,
            },
            client=client,
            token_id=token_id,
            txn_id=txn_id,
        )

@@ -457,7 +463,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, room_id, event_id, txn_id=None):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)
        content = _parse_json(request)

        msg_handler = self.handlers.message_handler
@@ -469,7 +475,7 @@ class RoomRedactEventRestServlet(ClientV1RestServlet):
                "sender": user.to_string(),
                "redacts": event_id,
            },
            client=client,
            token_id=token_id,
            txn_id=txn_id,
        )

@@ -497,7 +503,7 @@ class RoomTypingRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_PUT(self, request, room_id, user_id):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        room_id = urllib.unquote(room_id)
        target_user = UserID.from_string(urllib.unquote(user_id))
@@ -28,7 +28,7 @@ class VoipRestServlet(ClientV1RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request):
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        turnUris = self.hs.config.turn_uris
        turnSecret = self.hs.config.turn_shared_secret
@@ -40,7 +40,7 @@ class VoipRestServlet(ClientV1RestServlet):
        username = "%d:%s" % (expiry, auth_user.to_string())

        mac = hmac.new(turnSecret, msg=username, digestmod=hashlib.sha1)
        # We need to use standard base64 encoding here, *not* syutil's
        # We need to use standard padded base64 encoding here
        # encode_base64 because we need to add the standard padding to get the
        # same result as the TURN server.
        password = base64.b64encode(mac.digest())
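The TURN REST credential is the HMAC-SHA1 of `expiry:user`, and the TURN server compares against *padded* base64, so the unpadded variant used for Matrix event signing would never match. The difference in a few lines:

```python
import base64

digest = b"\x12" * 20  # stand-in for a 20-byte HMAC-SHA1 digest
padded = base64.b64encode(digest)   # ends with "=" padding; what TURN computes
unpadded = padded.rstrip(b"=")      # what an unpadded encoder would produce
assert padded != unpadded
```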
@@ -21,6 +21,7 @@ from . import (
    auth,
    receipts,
    keys,
    tokenrefresh,
)

from synapse.http.server import JsonResource
@@ -42,3 +43,4 @@ class ClientV2AlphaRestResource(JsonResource):
        auth.register_servlets(hs, client_resource)
        receipts.register_servlets(hs, client_resource)
        keys.register_servlets(hs, client_resource)
        tokenrefresh.register_servlets(hs, client_resource)
@@ -36,7 +36,6 @@ class PasswordRestServlet(RestServlet):
        self.hs = hs
        self.auth = hs.get_auth()
        self.auth_handler = hs.get_handlers().auth_handler
        self.login_handler = hs.get_handlers().login_handler

    @defer.inlineCallbacks
    def on_POST(self, request):
@@ -47,7 +46,7 @@ class PasswordRestServlet(RestServlet):
        authed, result, params = yield self.auth_handler.check_auth([
            [LoginType.PASSWORD],
            [LoginType.EMAIL_IDENTITY]
        ], body)
        ], body, self.hs.get_ip_from_request(request))

        if not authed:
            defer.returnValue((401, result))
@@ -56,7 +55,7 @@ class PasswordRestServlet(RestServlet):

        if LoginType.PASSWORD in result:
            # if using password, they should also be logged in
            auth_user, client = yield self.auth.get_user_by_req(request)
            auth_user, _ = yield self.auth.get_user_by_req(request)
            if auth_user.to_string() != result[LoginType.PASSWORD]:
                raise LoginError(400, "", Codes.UNKNOWN)
            user_id = auth_user.to_string()
@@ -79,8 +78,8 @@ class PasswordRestServlet(RestServlet):
            raise SynapseError(400, "", Codes.MISSING_PARAM)
        new_password = params['new_password']

        yield self.login_handler.set_password(
            user_id, new_password, None
        yield self.auth_handler.set_password(
            user_id, new_password
        )

        defer.returnValue((200, {}))
@@ -95,9 +94,9 @@ class ThreepidRestServlet(RestServlet):
    def __init__(self, hs):
        super(ThreepidRestServlet, self).__init__()
        self.hs = hs
        self.login_handler = hs.get_handlers().login_handler
        self.identity_handler = hs.get_handlers().identity_handler
        self.auth = hs.get_auth()
        self.auth_handler = hs.get_handlers().auth_handler

    @defer.inlineCallbacks
    def on_GET(self, request):
@@ -121,7 +120,7 @@ class ThreepidRestServlet(RestServlet):
            raise SynapseError(400, "Missing param", Codes.MISSING_PARAM)
        threePidCreds = body['threePidCreds']

        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        threepid = yield self.identity_handler.threepid_from_creds(threePidCreds)

@@ -135,7 +134,7 @@ class ThreepidRestServlet(RestServlet):
            logger.warn("Couldn't add 3pid: invalid response from ID server")
            raise SynapseError(500, "Invalid response from ID Server")

        yield self.login_handler.add_threepid(
        yield self.auth_handler.add_threepid(
            auth_user.to_string(),
            threepid['medium'],
            threepid['address'],
@@ -40,7 +40,7 @@ class GetFilterRestServlet(RestServlet):
    @defer.inlineCallbacks
    def on_GET(self, request, user_id, filter_id):
        target_user = UserID.from_string(user_id)
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        if target_user != auth_user:
            raise AuthError(403, "Cannot get filters for other users")
@@ -76,7 +76,7 @@ class CreateFilterRestServlet(RestServlet):
    @defer.inlineCallbacks
    def on_POST(self, request, user_id):
        target_user = UserID.from_string(user_id)
        auth_user, client = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)

        if target_user != auth_user:
            raise AuthError(403, "Cannot create filters for other users")
@@ -17,7 +17,9 @@ from twisted.internet import defer

from synapse.api.errors import SynapseError
from synapse.http.servlet import RestServlet
from syutil.jsonutil import encode_canonical_json
from synapse.types import UserID

from canonicaljson import encode_canonical_json

from ._base import client_v2_pattern
@@ -62,7 +64,7 @@ class KeyUploadServlet(RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, device_id):
        auth_user, client_info = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user_id = auth_user.to_string()
        # TODO: Check that the device_id matches that in the authentication
        # or derive the device_id from the authentication instead.
@@ -107,7 +109,7 @@ class KeyUploadServlet(RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request, device_id):
        auth_user, client_info = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        user_id = auth_user.to_string()

        result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
@@ -164,45 +166,63 @@ class KeyQueryServlet(RestServlet):
        super(KeyQueryServlet, self).__init__()
        self.store = hs.get_datastore()
        self.auth = hs.get_auth()
        self.federation = hs.get_replication_layer()
        self.is_mine = hs.is_mine

    @defer.inlineCallbacks
    def on_POST(self, request, user_id, device_id):
        logger.debug("onPOST")
        yield self.auth.get_user_by_req(request)
        try:
            body = json.loads(request.content.read())
        except:
            raise SynapseError(400, "Invalid key JSON")
        query = []
        for user_id, device_ids in body.get("device_keys", {}).items():
            if not device_ids:
                query.append((user_id, None))
            else:
                for device_id in device_ids:
                    query.append((user_id, device_id))
        results = yield self.store.get_e2e_device_keys(query)
        defer.returnValue(self.json_result(request, results))
        result = yield self.handle_request(body)
        defer.returnValue(result)

    @defer.inlineCallbacks
    def on_GET(self, request, user_id, device_id):
        auth_user, client_info = yield self.auth.get_user_by_req(request)
        auth_user, _ = yield self.auth.get_user_by_req(request)
        auth_user_id = auth_user.to_string()
        if not user_id:
            user_id = auth_user_id
        if not device_id:
            device_id = None
        # Returns a map of user_id->device_id->json_bytes.
        results = yield self.store.get_e2e_device_keys([(user_id, device_id)])
        defer.returnValue(self.json_result(request, results))
        user_id = user_id if user_id else auth_user_id
        device_ids = [device_id] if device_id else []
        result = yield self.handle_request(
            {"device_keys": {user_id: device_ids}}
        )
        defer.returnValue(result)

    @defer.inlineCallbacks
    def handle_request(self, body):
        local_query = []
        remote_queries = {}
        for user_id, device_ids in body.get("device_keys", {}).items():
            user = UserID.from_string(user_id)
            if self.is_mine(user):
                if not device_ids:
                    local_query.append((user_id, None))
                else:
                    for device_id in device_ids:
                        local_query.append((user_id, device_id))
            else:
                remote_queries.setdefault(user.domain, {})[user_id] = list(
                    device_ids
                )
        results = yield self.store.get_e2e_device_keys(local_query)

    def json_result(self, request, results):
        json_result = {}
        for user_id, device_keys in results.items():
            for device_id, json_bytes in device_keys.items():
                json_result.setdefault(user_id, {})[device_id] = json.loads(
                    json_bytes
                )
        return (200, {"device_keys": json_result})

        for destination, device_keys in remote_queries.items():
            remote_result = yield self.federation.query_client_keys(
                destination, {"device_keys": device_keys}
            )
            for user_id, keys in remote_result["device_keys"].items():
                if user_id in device_keys:
                    json_result[user_id] = keys
        defer.returnValue((200, {"device_keys": json_result}))
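`handle_request` partitions one query across homeservers: device IDs belonging to local users go straight to the store, while everyone else is bucketed by server name so each remote homeserver receives a single federation query. A compact illustration of the bucketing step alone (all domains are made up):

```python
device_keys = {
    "@alice:local.example": ["DEV1"],
    "@bob:remote-a.example": ["DEV2"],
    "@carol:remote-a.example": [],
}

local_query, remote_queries = [], {}
for user_id, device_ids in device_keys.items():
    domain = user_id.split(":", 1)[1]
    if domain == "local.example":
        local_query.extend((user_id, d) for d in device_ids or [None])
    else:
        # One bucket per destination, so federation sends one request
        # per homeserver rather than one per user.
        remote_queries.setdefault(domain, {})[user_id] = list(device_ids)

assert sorted(remote_queries) == ["remote-a.example"]
assert len(remote_queries["remote-a.example"]) == 2
```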
class OneTimeKeyServlet(RestServlet):
@@ -236,14 +256,16 @@ class OneTimeKeyServlet(RestServlet):
        self.store = hs.get_datastore()
        self.auth = hs.get_auth()
        self.clock = hs.get_clock()
        self.federation = hs.get_replication_layer()
        self.is_mine = hs.is_mine

    @defer.inlineCallbacks
    def on_GET(self, request, user_id, device_id, algorithm):
        yield self.auth.get_user_by_req(request)
        results = yield self.store.claim_e2e_one_time_keys(
            [(user_id, device_id, algorithm)]
        result = yield self.handle_request(
            {"one_time_keys": {user_id: {device_id: algorithm}}}
        )
        defer.returnValue(self.json_result(request, results))
        defer.returnValue(result)

    @defer.inlineCallbacks
    def on_POST(self, request, user_id, device_id, algorithm):
@@ -252,14 +274,24 @@ class OneTimeKeyServlet(RestServlet):
            body = json.loads(request.content.read())
        except:
            raise SynapseError(400, "Invalid key JSON")
        query = []
        for user_id, device_keys in body.get("one_time_keys", {}).items():
            for device_id, algorithm in device_keys.items():
                query.append((user_id, device_id, algorithm))
        results = yield self.store.claim_e2e_one_time_keys(query)
        defer.returnValue(self.json_result(request, results))
        result = yield self.handle_request(body)
        defer.returnValue(result)

    @defer.inlineCallbacks
    def handle_request(self, body):
        local_query = []
        remote_queries = {}
        for user_id, device_keys in body.get("one_time_keys", {}).items():
            user = UserID.from_string(user_id)
            if self.is_mine(user):
                for device_id, algorithm in device_keys.items():
                    local_query.append((user_id, device_id, algorithm))
            else:
                remote_queries.setdefault(user.domain, {})[user_id] = (
                    device_keys
                )
        results = yield self.store.claim_e2e_one_time_keys(local_query)

    def json_result(self, request, results):
        json_result = {}
        for user_id, device_keys in results.items():
            for device_id, keys in device_keys.items():
@@ -267,7 +299,16 @@ class OneTimeKeyServlet(RestServlet):
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json.loads(json_bytes)
                }
        return (200, {"one_time_keys": json_result})

        for destination, device_keys in remote_queries.items():
            remote_result = yield self.federation.claim_client_keys(
                destination, {"one_time_keys": device_keys}
            )
            for user_id, keys in remote_result["one_time_keys"].items():
                if user_id in device_keys:
                    json_result[user_id] = keys

        defer.returnValue((200, {"one_time_keys": json_result}))


def register_servlets(hs, http_server):
@@ -39,7 +39,7 @@ class ReceiptRestServlet(RestServlet):

    @defer.inlineCallbacks
    def on_POST(self, request, room_id, receipt_type, event_id):
        user, client = yield self.auth.get_user_by_req(request)
        user, _ = yield self.auth.get_user_by_req(request)

        yield self.receipts_handler.received_client_receipt(
            room_id,
@@ -50,11 +50,15 @@ class RegisterRestServlet(RestServlet):
        self.auth_handler = hs.get_handlers().auth_handler
        self.registration_handler = hs.get_handlers().registration_handler
        self.identity_handler = hs.get_handlers().identity_handler
        self.login_handler = hs.get_handlers().login_handler

    @defer.inlineCallbacks
    def on_POST(self, request):
        yield run_on_reactor()

        if '/register/email/requestToken' in request.path:
            ret = yield self.onEmailTokenRequest(request)
            defer.returnValue(ret)

        body = parse_json_dict_from_request(request)

        # we do basic sanity checks here because the auth layer will store these
@@ -143,7 +147,7 @@ class RegisterRestServlet(RestServlet):
                if reqd not in threepid:
                    logger.info("Can't add incomplete 3pid")
                else:
                    yield self.login_handler.add_threepid(
                    yield self.auth_handler.add_threepid(
                        user_id,
                        threepid['medium'],
                        threepid['address'],
@@ -209,6 +213,29 @@ class RegisterRestServlet(RestServlet):
            "home_server": self.hs.hostname,
        }

    @defer.inlineCallbacks
    def onEmailTokenRequest(self, request):
        body = parse_json_dict_from_request(request)

        required = ['id_server', 'client_secret', 'email', 'send_attempt']
        absent = []
        for k in required:
            if k not in body:
                absent.append(k)

        if len(absent) > 0:
            raise SynapseError(400, "Missing params: %r" % absent, Codes.MISSING_PARAM)

        existingUid = yield self.hs.get_datastore().get_user_id_by_threepid(
            'email', body['email']
        )

        if existingUid is not None:
            raise SynapseError(400, "Email is already in use", Codes.THREEPID_IN_USE)

        ret = yield self.identity_handler.requestEmailToken(**body)
        defer.returnValue((200, ret))


def register_servlets(hs, http_server):
    RegisterRestServlet(hs).register(http_server)
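`onEmailTokenRequest` rejects a request unless all four parameters are present and the email is not already bound to an account. An illustrative request body (all values are placeholders):

```python
request_token_body = {
    "id_server": "id.example.org",      # identity server that sends the mail
    "client_secret": "d0nttellanyone",  # client-generated secret for this session
    "email": "alice@example.org",
    "send_attempt": 1,                  # bump to request a re-send
}
```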
@@ -87,7 +87,7 @@ class SyncRestServlet(RestServlet):

    @defer.inlineCallbacks
    def on_GET(self, request):
        user, client = yield self.auth.get_user_by_req(request)
        user, token_id = yield self.auth.get_user_by_req(request)

        timeout = parse_integer(request, "timeout", default=0)
        limit = parse_integer(request, "limit", required=True)
@@ -125,7 +125,6 @@ class SyncRestServlet(RestServlet):

        sync_config = SyncConfig(
            user=user,
            client_info=client,
            gap=gap,
            limit=limit,
            sort=sort,
@@ -152,7 +151,7 @@ class SyncRestServlet(RestServlet):
            sync_result.private_user_data, filter, time_now
        ),
        "rooms": self.encode_rooms(
            sync_result.rooms, filter, time_now, client.token_id
            sync_result.rooms, filter, time_now, token_id
        ),
        "next_batch": sync_result.next_batch.to_string(),
    }
synapse/rest/client/v2_alpha/tokenrefresh.py (new file, 56 lines)
@@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
from synapse.api.errors import AuthError, StoreError, SynapseError
|
||||
from synapse.http.servlet import RestServlet
|
||||
|
||||
from ._base import client_v2_pattern, parse_json_dict_from_request
|
||||
|
||||
|
||||
class TokenRefreshRestServlet(RestServlet):
|
||||
"""
|
||||
Exchanges refresh tokens for a pair of an access token and a new refresh
|
||||
token.
|
||||
"""
|
||||
PATTERN = client_v2_pattern("/tokenrefresh")
|
||||
|
||||
def __init__(self, hs):
|
||||
super(TokenRefreshRestServlet, self).__init__()
|
||||
self.hs = hs
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_POST(self, request):
|
||||
body = parse_json_dict_from_request(request)
|
||||
try:
|
||||
old_refresh_token = body["refresh_token"]
|
||||
auth_handler = self.hs.get_handlers().auth_handler
|
||||
(user_id, new_refresh_token) = yield self.store.exchange_refresh_token(
|
||||
old_refresh_token, auth_handler.generate_refresh_token)
|
||||
new_access_token = yield auth_handler.issue_access_token(user_id)
|
||||
defer.returnValue((200, {
|
||||
"access_token": new_access_token,
|
||||
"refresh_token": new_refresh_token,
|
||||
}))
|
||||
except KeyError:
|
||||
raise SynapseError(400, "Missing required key 'refresh_token'.")
|
||||
except StoreError:
|
||||
raise AuthError(403, "Did not recognize refresh token")
|
||||
|
||||
|
||||
def register_servlets(hs, http_server):
|
||||
TokenRefreshRestServlet(hs).register(http_server)
|
||||
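A minimal sketch of exercising the new servlet — the base URL and token value are assumptions; the request and response JSON keys are taken from the code above:

    # Sketch: exchanging a refresh token for a new access/refresh pair.
    # Base URL and token are invented; the JSON keys mirror the servlet.
    import json
    import urllib2

    req = urllib2.Request(
        "https://localhost:8448/_matrix/client/v2_alpha/tokenrefresh",
        data=json.dumps({"refresh_token": "old_refresh_token_here"}),
        headers={"Content-Type": "application/json"},
    )
    resp = json.loads(urllib2.urlopen(req).read())
    # Store both new tokens; an unrecognized token comes back as a 403.
    print resp["access_token"], resp["refresh_token"]

The exchange happens in the storage layer (exchange_refresh_token), which presumably retires the old token in the same step — hence StoreError mapping to a 403 AuthError above.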
@@ -16,9 +16,9 @@

 from twisted.web.resource import Resource
 from synapse.http.server import respond_with_json_bytes
-from syutil.crypto.jsonsign import sign_json
-from syutil.base64util import encode_base64
-from syutil.jsonutil import encode_canonical_json
+from signedjson.sign import sign_json
+from unpaddedbase64 import encode_base64
+from canonicaljson import encode_canonical_json
 from OpenSSL import crypto
 import logging

@@ -16,9 +16,9 @@

 from twisted.web.resource import Resource
 from synapse.http.server import respond_with_json_bytes
-from syutil.crypto.jsonsign import sign_json
-from syutil.base64util import encode_base64
-from syutil.jsonutil import encode_canonical_json
+from signedjson.sign import sign_json
+from unpaddedbase64 import encode_base64
+from canonicaljson import encode_canonical_json
 from hashlib import sha256
 from OpenSSL import crypto
 import logging

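Both key resources swap the monolithic syutil package for its split-out successors, which expose the same three helpers under new homes. A short sketch of the replacement libraries in use — the key version string and server name are invented for illustration:

    # Sketch: the replacement libraries doing what the syutil imports did:
    # canonical JSON encoding, JSON signing, and unpadded base64.
    from canonicaljson import encode_canonical_json
    from signedjson.key import generate_signing_key
    from signedjson.sign import sign_json
    from unpaddedbase64 import encode_base64

    signing_key = generate_signing_key("key_version_1")  # version invented
    signed = sign_json({"old_verify_keys": {}}, "example.com", signing_key)

    print encode_canonical_json(signed)
    print encode_base64(b"\x00\x01\x02")  # note: no trailing '=' padding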
@@ -66,7 +66,7 @@ class ContentRepoResource(resource.Resource):

     @defer.inlineCallbacks
     def map_request_to_name(self, request):
         # auth the user
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _ = yield self.auth.get_user_by_req(request)

         # namespace all file uploads on the user
         prefix = base64.urlsafe_b64encode(
@@ -33,6 +33,8 @@ import os

 import cgi
 import logging
+import urllib
+import urlparse

 logger = logging.getLogger(__name__)

@@ -42,10 +44,13 @@ def parse_media_id(request):
         # This allows users to append e.g. /test.png to the URL. Useful for
         # clients that parse the URL to see content type.
         server_name, media_id = request.postpath[:2]
-        if len(request.postpath) > 2 and is_ascii(request.postpath[-1]):
-            return server_name, media_id, request.postpath[-1]
-        else:
-            return server_name, media_id, None
+        file_name = None
+        if len(request.postpath) > 2:
+            try:
+                file_name = urlparse.unquote(request.postpath[-1]).decode("utf-8")
+            except UnicodeDecodeError:
+                pass
+        return server_name, media_id, file_name
     except:
         raise SynapseError(
             404,
@@ -69,6 +74,8 @@ class BaseMediaResource(Resource):
         self.filepaths = filepaths
         self.version_string = hs.version_string
         self.downloads = {}
+        self.dynamic_thumbnails = hs.config.dynamic_thumbnails
+        self.thumbnail_requirements = hs.config.thumbnail_requirements

     def _respond_404(self, request):
         respond_with_json(
@@ -138,9 +145,26 @@ class BaseMediaResource(Resource):
             content_disposition = headers.get("Content-Disposition", None)
             if content_disposition:
                 _, params = cgi.parse_header(content_disposition[0],)
-                upload_name = params.get("filename", None)
-                if upload_name and not is_ascii(upload_name):
-                    upload_name = None
+                upload_name = None
+
+                # First check if there is a valid UTF-8 filename
+                upload_name_utf8 = params.get("filename*", None)
+                if upload_name_utf8:
+                    if upload_name_utf8.lower().startswith("utf-8''"):
+                        upload_name = upload_name_utf8[7:]
+
+                # If there isn't check for an ascii name.
+                if not upload_name:
+                    upload_name_ascii = params.get("filename", None)
+                    if upload_name_ascii and is_ascii(upload_name_ascii):
+                        upload_name = upload_name_ascii
+
+                if upload_name:
+                    upload_name = urlparse.unquote(upload_name)
+                    try:
+                        upload_name = upload_name.decode("utf-8")
+                    except UnicodeDecodeError:
+                        upload_name = None
             else:
                 upload_name = None

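The filename* branch above follows the RFC 5987/6266 extended-parameter encoding. A quick standalone sketch of how cgi.parse_header splits the two Content-Disposition variants handled here (header values invented for illustration):

    # Sketch: cgi.parse_header on the two variants handled above.
    import cgi
    import urlparse

    _, params = cgi.parse_header("inline; filename*=utf-8''caf%C3%A9.png")
    name = params["filename*"]
    if name.lower().startswith("utf-8''"):
        # Strip the "utf-8''" prefix, percent-decode, then UTF-8 decode,
        # exactly as the new code does.
        print urlparse.unquote(name[7:]).decode("utf-8")

    _, params = cgi.parse_header("inline; filename=plain.png")
    print params["filename"]  # -> plain.png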
@@ -179,10 +203,20 @@ class BaseMediaResource(Resource):
         if os.path.isfile(file_path):
             request.setHeader(b"Content-Type", media_type.encode("UTF-8"))
             if upload_name:
-                request.setHeader(
-                    b"Content-Disposition",
-                    b"inline; filename=%s" % (upload_name.encode("utf-8"),),
-                )
+                if is_ascii(upload_name):
+                    request.setHeader(
+                        b"Content-Disposition",
+                        b"inline; filename=%s" % (
+                            urllib.quote(upload_name.encode("utf-8")),
+                        ),
+                    )
+                else:
+                    request.setHeader(
+                        b"Content-Disposition",
+                        b"inline; filename*=utf-8''%s" % (
+                            urllib.quote(upload_name.encode("utf-8")),
+                        ),
+                    )

             # cache for at least a day.
             # XXX: we might want to turn this off for data we don't want to
@@ -208,22 +242,74 @@ class BaseMediaResource(Resource):
             self._respond_404(request)

     def _get_thumbnail_requirements(self, media_type):
-        if media_type == "image/jpeg":
-            return (
-                (32, 32, "crop", "image/jpeg"),
-                (96, 96, "crop", "image/jpeg"),
-                (320, 240, "scale", "image/jpeg"),
-                (640, 480, "scale", "image/jpeg"),
-            )
-        elif (media_type == "image/png") or (media_type == "image/gif"):
-            return (
-                (32, 32, "crop", "image/png"),
-                (96, 96, "crop", "image/png"),
-                (320, 240, "scale", "image/png"),
-                (640, 480, "scale", "image/png"),
-            )
-        else:
-            return ()
+        return self.thumbnail_requirements.get(media_type, ())
+
+    def _generate_thumbnail(self, input_path, t_path, t_width, t_height,
+                            t_method, t_type):
+        thumbnailer = Thumbnailer(input_path)
+        m_width = thumbnailer.width
+        m_height = thumbnailer.height
+
+        if m_width * m_height >= self.max_image_pixels:
+            logger.info(
+                "Image too large to thumbnail %r x %r > %r",
+                m_width, m_height, self.max_image_pixels
+            )
+            return
+
+        if t_method == "crop":
+            t_len = thumbnailer.crop(t_path, t_width, t_height, t_type)
+        elif t_method == "scale":
+            t_len = thumbnailer.scale(t_path, t_width, t_height, t_type)
+        else:
+            t_len = None
+
+        return t_len
+
+    @defer.inlineCallbacks
+    def _generate_local_exact_thumbnail(self, media_id, t_width, t_height,
+                                        t_method, t_type):
+        input_path = self.filepaths.local_media_filepath(media_id)
+
+        t_path = self.filepaths.local_media_thumbnail(
+            media_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_local_thumbnail(
+                media_id, t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)
+
+    @defer.inlineCallbacks
+    def _generate_remote_exact_thumbnail(self, server_name, file_id, media_id,
+                                         t_width, t_height, t_method, t_type):
+        input_path = self.filepaths.remote_media_filepath(server_name, file_id)
+
+        t_path = self.filepaths.remote_media_thumbnail(
+            server_name, file_id, t_width, t_height, t_type, t_method
+        )
+        self._makedirs(t_path)
+
+        t_len = yield threads.deferToThread(
+            self._generate_thumbnail,
+            input_path, t_path, t_width, t_height, t_method, t_type
+        )
+
+        if t_len:
+            yield self.store.store_remote_media_thumbnail(
+                server_name, media_id, file_id,
+                t_width, t_height, t_type, t_method, t_len
+            )
+
+        defer.returnValue(t_path)

     @defer.inlineCallbacks
     def _generate_local_thumbnails(self, media_id, media_info):

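With the hardcoded tuples gone, the requirement list comes from homeserver config via the thumbnail_requirements attribute set in __init__ above. A sketch of the shape _get_thumbnail_requirements now reads — the literal dict below is hand-built for illustration; only the (width, height, method, type) tuple shape is taken from the removed code:

    # Sketch: the mapping keyed by source media type that
    # _get_thumbnail_requirements consults; in synapse it comes from
    # hs.config rather than a literal like this.
    thumbnail_requirements = {
        "image/jpeg": (
            (32, 32, "crop", "image/jpeg"),
            (96, 96, "crop", "image/jpeg"),
            (320, 240, "scale", "image/jpeg"),
        ),
        "image/png": (
            (32, 32, "crop", "image/png"),
            (640, 480, "scale", "image/png"),
        ),
    }

    # Unknown media types fall through to the empty tuple, as in the new code:
    print thumbnail_requirements.get("image/webp", ())  # -> ()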
@@ -43,14 +43,25 @@ class ThumbnailResource(BaseMediaResource):
         m_type = parse_string(request, "type", "image/png")

         if server_name == self.server_name:
-            yield self._respond_local_thumbnail(
-                request, media_id, width, height, method, m_type
-            )
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_local_thumbnail(
+                    request, media_id, width, height, method, m_type
+                )
+            else:
+                yield self._respond_local_thumbnail(
+                    request, media_id, width, height, method, m_type
+                )
         else:
-            yield self._respond_remote_thumbnail(
-                request, server_name, media_id,
-                width, height, method, m_type
-            )
+            if self.dynamic_thumbnails:
+                yield self._select_or_generate_remote_thumbnail(
+                    request, server_name, media_id,
+                    width, height, method, m_type
+                )
+            else:
+                yield self._respond_remote_thumbnail(
+                    request, server_name, media_id,
+                    width, height, method, m_type
+                )

     @defer.inlineCallbacks
     def _respond_local_thumbnail(self, request, media_id, width, height,
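The width, height, method, and type all come off the query string (parse_integer/parse_string earlier in this method). A sketch of the kind of request the dynamic path now serves — homeserver URL and media ID are invented; the parameter names come from the servlet:

    # Sketch: the query parameters the thumbnail servlet reads. With
    # dynamic_thumbnails enabled, an exact 100x100 crop is generated on
    # demand instead of snapping to a precalculated size.
    import urllib

    params = urllib.urlencode({
        "width": 100,
        "height": 100,
        "method": "crop",   # or "scale"
        "type": "image/png",
    })
    url = (
        "https://localhost:8448/_matrix/media/v1/thumbnail/"
        "example.com/someMediaId?" + params
    )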
@@ -82,6 +93,87 @@ class ThumbnailResource(BaseMediaResource):
                 request, media_info, width, height, method, m_type,
             )

+    @defer.inlineCallbacks
+    def _select_or_generate_local_thumbnail(self, request, media_id, desired_width,
+                                            desired_height, desired_method,
+                                            desired_type):
+        media_info = yield self.store.get_local_media(media_id)
+
+        if not media_info:
+            self._respond_404(request)
+            return
+
+        thumbnail_infos = yield self.store.get_local_media_thumbnails(media_id)
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.local_media_thumbnail(
+                    media_id, desired_width, desired_height, desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_local_exact_thumbnail(
+            media_id, desired_width, desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )
+
+    @defer.inlineCallbacks
+    def _select_or_generate_remote_thumbnail(self, request, server_name, media_id,
+                                             desired_width, desired_height,
+                                             desired_method, desired_type):
+        media_info = yield self._get_remote_media(server_name, media_id)
+
+        thumbnail_infos = yield self.store.get_remote_media_thumbnails(
+            server_name, media_id,
+        )
+
+        file_id = media_info["filesystem_id"]
+
+        for info in thumbnail_infos:
+            t_w = info["thumbnail_width"] == desired_width
+            t_h = info["thumbnail_height"] == desired_height
+            t_method = info["thumbnail_method"] == desired_method
+            t_type = info["thumbnail_type"] == desired_type
+
+            if t_w and t_h and t_method and t_type:
+                file_path = self.filepaths.remote_media_thumbnail(
+                    server_name, file_id, desired_width, desired_height,
+                    desired_type, desired_method,
+                )
+                yield self._respond_with_file(request, desired_type, file_path)
+                return
+
+        logger.debug("We don't have a local thumbnail of that size. Generating")
+
+        # Okay, so we generate one.
+        file_path = yield self._generate_remote_exact_thumbnail(
+            server_name, file_id, media_id, desired_width,
+            desired_height, desired_method, desired_type
+        )
+
+        if file_path:
+            yield self._respond_with_file(request, desired_type, file_path)
+        else:
+            yield self._respond_default_thumbnail(
+                request, media_info, desired_width, desired_height,
+                desired_method, desired_type,
+            )

     @defer.inlineCallbacks
     def _respond_remote_thumbnail(self, request, server_name, media_id, width,
                                   height, method, m_type):

@@ -15,7 +15,7 @@

 from synapse.http.server import respond_with_json, request_handler

-from synapse.util.stringutils import random_string, is_ascii
+from synapse.util.stringutils import random_string
 from synapse.api.errors import SynapseError

 from twisted.web.server import NOT_DONE_YET
@@ -70,7 +70,7 @@ class UploadResource(BaseMediaResource):
     @request_handler
     @defer.inlineCallbacks
     def _async_render_POST(self, request):
-        auth_user, client = yield self.auth.get_user_by_req(request)
+        auth_user, _ = yield self.auth.get_user_by_req(request)
         # TODO: The checks here are a bit late. The content will have
         # already been uploaded to a tmp file at this point
         content_length = request.getHeader("Content-Length")
@@ -86,9 +86,13 @@ class UploadResource(BaseMediaResource):

         upload_name = request.args.get("filename", None)
         if upload_name:
-            upload_name = upload_name[0]
-            if upload_name and not is_ascii(upload_name):
-                raise SynapseError(400, "filename must be ascii")
+            try:
+                upload_name = upload_name[0].decode('UTF-8')
+            except UnicodeDecodeError:
+                raise SynapseError(
+                    msg="Invalid UTF-8 filename parameter: %r" % (upload_name),
+                    code=400,
+                )

         headers = request.requestHeaders

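Uploads may now carry non-ASCII filenames, which the servlet decodes as UTF-8 instead of rejecting. A sketch of such an upload — the URL, access token, and file are invented for illustration; the filename query parameter is the one the code above reads:

    # Sketch: uploading with a percent-encoded UTF-8 filename, which the
    # new code decodes rather than rejecting as non-ascii.
    import urllib
    import urllib2

    filename = u"caf\xe9.png".encode("utf-8")
    req = urllib2.Request(
        "https://localhost:8448/_matrix/media/v1/upload?filename=" +
        urllib.quote(filename) + "&access_token=MDAxx",
        data=open("cafe.png", "rb").read(),
        headers={"Content-Type": "image/png"},
    )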
@@ -19,7 +19,9 @@
 # partial one for unit test mocking.

 # Imports required for the default HomeServer() implementation
+from twisted.web.client import BrowserLikePolicyForHTTPS
 from synapse.federation import initialize_http_replication
+from synapse.http.client import SimpleHttpClient, InsecureInterceptableContextFactory
 from synapse.notifier import Notifier
 from synapse.api.auth import Auth
 from synapse.handlers import Handlers
@@ -87,6 +89,8 @@ class BaseHomeServer(object):
         'pusherpool',
         'event_builder_factory',
         'filtering',
+        'http_client_context_factory',
+        'simple_http_client',
     ]

     def __init__(self, hostname, **kwargs):
@@ -174,6 +178,17 @@ class HomeServer(BaseHomeServer):
     def build_auth(self):
         return Auth(self)

+    def build_http_client_context_factory(self):
+        config = self.get_config()
+        return (
+            InsecureInterceptableContextFactory()
+            if config.use_insecure_ssl_client_just_for_testing_do_not_use
+            else BrowserLikePolicyForHTTPS()
+        )
+
+    def build_simple_http_client(self):
+        return SimpleHttpClient(self)
+
     def build_v1auth(self):
         orf = Auth(self)
         # Matrix spec makes no reference to what HTTP status code is returned,

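For readers unfamiliar with BaseHomeServer's wiring: each name in the DEPENDENCIES list gets a lazy get_<name>() accessor backed by a build_<name>() method, so the two new entries resolve to the build methods added above. A simplified sketch of that pattern — not the actual BaseHomeServer code, just its general shape:

    # Simplified sketch of the build_/get_ dependency pattern; cached on
    # first access. The stand-in return values are invented.
    class DepCache(object):
        DEPENDENCIES = ["simple_http_client", "http_client_context_factory"]

        def __init__(self):
            self._cache = {}

        def get(self, name):
            if name not in self._cache:
                # e.g. get("simple_http_client") -> build_simple_http_client()
                self._cache[name] = getattr(self, "build_" + name)()
            return self._cache[name]

        def build_simple_http_client(self):
            return object()  # stands in for SimpleHttpClient(self)

        def build_http_client_context_factory(self):
            return object()  # stands in for the TLS policy chosen from config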
@@ -17,8 +17,7 @@
 from twisted.internet import defer

 from synapse.util.logutils import log_function
-from synapse.util.async import run_on_reactor
-from synapse.util.expiringcache import ExpiringCache
+from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.api.constants import EventTypes
 from synapse.api.errors import AuthError
 from synapse.api.auth import AuthEventTypes
@@ -32,10 +31,6 @@ import hashlib
 logger = logging.getLogger(__name__)


-def _get_state_key_from_event(event):
-    return event.state_key
-
-
 KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key"))


@@ -96,7 +91,7 @@ class StateHandler(object):
             cache.ts = self.clock.time_msec()
             state = cache.state
         else:
-            res = yield self.resolve_state_groups(event_ids)
+            res = yield self.resolve_state_groups(room_id, event_ids)
             state = res[1]

         if event_type:
@@ -119,8 +114,6 @@ class StateHandler(object):
         Returns:
             an EventContext
         """
-        yield run_on_reactor()
-
         context = EventContext()

         if outlier:
@@ -155,13 +148,13 @@ class StateHandler(object):

         if event.is_state():
             ret = yield self.resolve_state_groups(
-                [e for e, _ in event.prev_events],
+                event.room_id, [e for e, _ in event.prev_events],
                 event_type=event.type,
                 state_key=event.state_key,
             )
         else:
             ret = yield self.resolve_state_groups(
-                [e for e, _ in event.prev_events],
+                event.room_id, [e for e, _ in event.prev_events],
             )

         group, curr_state, prev_state = ret
@@ -180,7 +173,7 @@ class StateHandler(object):

     @defer.inlineCallbacks
     @log_function
-    def resolve_state_groups(self, event_ids, event_type=None, state_key=""):
+    def resolve_state_groups(self, room_id, event_ids, event_type=None, state_key=""):
         """ Given a list of event_ids this method fetches the state at each
         event, resolves conflicts between them and returns them.

@@ -205,7 +198,7 @@ class StateHandler(object):
         )

         state_groups = yield self.store.get_state_groups(
-            event_ids
+            room_id, event_ids
        )

         logger.debug(

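The state cache import moves under the consolidated synapse.util.caches package; the StateHandler otherwise keeps using it as a timestamped map. A minimal illustration of that general shape — this is not synapse's ExpiringCache API, just a toy with the same contract (KeyError on miss, entries aging out):

    # Toy expiring cache for illustration only; synapse's ExpiringCache
    # has its own API and eviction policy.
    import time

    class TinyExpiringCache(object):
        def __init__(self, expiry_ms):
            self.expiry_ms = expiry_ms
            self._entries = {}  # key -> (stored_at_ms, value)

        def set(self, key, value):
            self._entries[key] = (int(time.time() * 1000), value)

        def get(self, key):
            ts, value = self._entries[key]  # KeyError on miss, like a dict
            if int(time.time() * 1000) - ts > self.expiry_ms:
                del self._entries[key]
                raise KeyError(key)
            return value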
@@ -54,7 +54,7 @@ logger = logging.getLogger(__name__)

 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 21
+SCHEMA_VERSION = 24

 dir_path = os.path.abspath(os.path.dirname(__file__))

@@ -94,12 +94,12 @@ class DataStore(RoomMemberStore, RoomStore,
         )

     @defer.inlineCallbacks
-    def insert_client_ip(self, user, access_token, device_id, ip, user_agent):
+    def insert_client_ip(self, user, access_token, ip, user_agent):
         now = int(self._clock.time_msec())
-        key = (user.to_string(), access_token, device_id, ip)
+        key = (user.to_string(), access_token, ip)

         try:
-            last_seen = self.client_ip_last_seen.get(*key)
+            last_seen = self.client_ip_last_seen.get(key)
         except KeyError:
             last_seen = None

@@ -107,7 +107,7 @@ class DataStore(RoomMemberStore, RoomStore,
         if last_seen is not None and (now - last_seen) < LAST_SEEN_GRANULARITY:
             defer.returnValue(None)

-        self.client_ip_last_seen.prefill(*key + (now,))
+        self.client_ip_last_seen.prefill(key, now)

         # It's safe not to lock here: a) no unique constraint,
         # b) LAST_SEEN_GRANULARITY makes concurrent updates incredibly unlikely
@@ -120,19 +120,39 @@ class DataStore(RoomMemberStore, RoomStore,
                 "user_agent": user_agent,
             },
             values={
-                "device_id": device_id,
                 "last_seen": now,
             },
             desc="insert_client_ip",
             lock=False,
         )

+    @defer.inlineCallbacks
+    def count_daily_users(self):
+        """
+        Counts the number of users who used this homeserver in the last 24 hours.
+        """
+        def _count_users(txn):
+            txn.execute(
+                "SELECT COUNT(DISTINCT user_id) AS users"
+                " FROM user_ips"
+                " WHERE last_seen > ?",
+                # This is close enough to a day for our purposes.
+                (int(self._clock.time_msec()) - (1000 * 60 * 60 * 24),)
+            )
+            rows = self.cursor_to_dict(txn)
+            if rows:
+                return rows[0]["users"]
+            return 0
+
+        ret = yield self.runInteraction("count_users", _count_users)
+        defer.returnValue(ret)
+
     def get_user_ip_and_agents(self, user):
         return self._simple_select_list(
             table="user_ips",
             keyvalues={"user_id": user.to_string()},
             retcols=[
-                "device_id", "access_token", "ip", "user_agent", "last_seen"
+                "access_token", "ip", "user_agent", "last_seen"
             ],
             desc="get_user_ip_and_agents",
         )
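The count_daily_users query is plain SQL over the user_ips table. A standalone sketch of it against an in-memory sqlite3 database — the table rows are invented; the query and 24-hour window come from the code above:

    # Sketch: the daily-active-user query from count_daily_users, run
    # against an in-memory table with invented rows.
    import sqlite3
    import time

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE user_ips (user_id TEXT, last_seen BIGINT)")
    now = int(time.time() * 1000)
    db.executemany(
        "INSERT INTO user_ips VALUES (?, ?)",
        [("@a:hs", now), ("@a:hs", now - 1000), ("@b:hs", now - 2 * 86400000)],
    )

    day_ago = now - (1000 * 60 * 60 * 24)
    row = db.execute(
        "SELECT COUNT(DISTINCT user_id) AS users FROM user_ips"
        " WHERE last_seen > ?", (day_ago,)
    ).fetchone()
    print row[0]  # -> 1: only @a:hs was seen in the last 24 hours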
@@ -354,6 +374,11 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
             )
             logger.debug("Running script %s", relative_path)
             module.run_upgrade(cur, database_engine)
+        elif ext == ".pyc":
+            # Sometimes .pyc files turn up anyway even though we've
+            # disabled their generation; e.g. from distribution package
+            # installers. Silently skip it
+            pass
         elif ext == ".sql":
            # A plain old .sql file, just read and execute it
             logger.debug("Applying schema %s", relative_path)

Some files were not shown because too many files have changed in this diff.