
Compare commits


167 Commits

Author SHA1 Message Date
Erik Johnston
894a89d99b Update CHANGES.rst with missing changes 2015-02-19 13:46:39 +00:00
Erik Johnston
8321e8a2e0 Merge branch 'release-v0.7.1' of github.com:matrix-org/synapse 2015-02-19 10:38:48 +00:00
Erik Johnston
63c1f4fa98 Update release date 2015-02-19 10:33:31 +00:00
David Baker
b457f1677c Send room ID in http notifications so clients know which room to go to if the user responds to the notification. 2015-02-19 10:06:17 +00:00
Erik Johnston
faf4f67847 Update CHANGES 2015-02-18 18:02:54 +00:00
Erik Johnston
7025781df8 Merge branch 'develop' of github.com:matrix-org/synapse into release-v0.7.1 2015-02-18 17:37:43 +00:00
Erik Johnston
142f1263f6 Merge pull request #82 from matrix-org/git_tag_version
Git tag version
2015-02-18 17:37:19 +00:00
Erik Johnston
6311ae8968 Conform to header spec take two 2015-02-18 17:34:26 +00:00
Erik Johnston
3f1871021e Make /keys/ return correct Server version 2015-02-18 17:32:12 +00:00
Erik Johnston
b6771037a6 Make version_string conform to User-Agent and Server spec 2015-02-18 17:31:50 +00:00
Erik Johnston
5b753d472b Bump matrix-angular-sdk version 2015-02-18 17:02:40 +00:00
Erik Johnston
1df8bad63e pyflakes 2015-02-18 16:54:25 +00:00
Erik Johnston
5358966a87 Use git aware version string in User-Agent and Server headers 2015-02-18 16:52:04 +00:00
Erik Johnston
aa577df064 When computing git version run git commands in same dir as source files 2015-02-18 16:52:04 +00:00
Erik Johnston
d122e215ff Generate a version string that includes git details if run from git checkout 2015-02-18 16:52:04 +00:00
Erik Johnston
a7925259a1 Merge branch 'develop' of github.com:matrix-org/synapse into release-v0.7.1 2015-02-18 13:57:55 +00:00
Erik Johnston
7d304ae11c Merge pull request #80 from matrix-org/restrict-destinations
Restrict the destinations that synapse can talk to
2015-02-18 13:56:48 +00:00
Erik Johnston
d4952e6849 Merge pull request #81 from matrix-org/bugs/SYN-282
SYN-282: Don't log tracebacks for client errors
2015-02-18 13:37:06 +00:00
Erik Johnston
446ef58992 Add errback to all deferreds in transaction_queue 2015-02-18 12:03:26 +00:00
Erik Johnston
cc3d3babb0 Remove unused import 2015-02-18 12:01:41 +00:00
Mark Haines
6375bd3e33 SYN-282: Don't log tracebacks for client errors 2015-02-18 12:01:37 +00:00
Mark Haines
2462aacd77 Restrict the destinations that synapse can talk to 2015-02-18 11:52:51 +00:00
Erik Johnston
b68e4a729f Discard destination 'localhost' 2015-02-18 11:32:39 +00:00
Erik Johnston
47d3ff4cf8 Don't send failure to self 2015-02-18 11:30:37 +00:00
Erik Johnston
36e144091b Remove spurious comma. Remove temp run_on_reactor 2015-02-18 11:25:20 +00:00
Erik Johnston
b17bd31da0 Temporarily add a run_on_reactor() call 2015-02-18 11:17:26 +00:00
Mark Haines
5806d52423 Fix syntax 2015-02-18 11:01:37 +00:00
Mark Haines
87e9aeb914 Move pynacl to the top of the dependency link list so that it is
installed before syutil
2015-02-18 11:00:13 +00:00
Erik Johnston
7e9d59f3b4 Don't convert DNSLookupError to a 4xx SynapseError 2015-02-18 10:58:13 +00:00
Erik Johnston
cedad8fbd6 Bump version 2015-02-18 10:54:34 +00:00
Erik Johnston
65ca713ff5 Add .__name__ after type(e) 2015-02-18 10:51:32 +00:00
Erik Johnston
5e24471469 Fix up ResponseNeverReceived to str 2015-02-18 10:50:10 +00:00
Erik Johnston
e482541e1d Fix pyflakes 2015-02-18 10:44:22 +00:00
Erik Johnston
0db52d43fa strings.join() expects iterable of strings 2015-02-18 10:41:46 +00:00
Erik Johnston
859fbd4423 s/self._clock/self.clock/ 2015-02-18 10:39:14 +00:00
Erik Johnston
1be67eca8a Merge branch 'keyclient_retry_scheme' of github.com:matrix-org/synapse into develop 2015-02-18 10:34:40 +00:00
Erik Johnston
2635d4e634 Merge branch 'develop' of github.com:matrix-org/synapse into develop 2015-02-18 10:29:54 +00:00
Erik Johnston
fe672a04f7 Merge pull request #77 from matrix-org/failures
Failures
2015-02-18 10:29:29 +00:00
Erik Johnston
08f804208b Merge pull request #79 from matrix-org/get_pdu_limiting
Get pdu limiting
2015-02-18 10:29:10 +00:00
Erik Johnston
ec847059f3 Rename _fail_fetch_pdu_cache to _get_pdu_cache 2015-02-18 10:14:10 +00:00
Erik Johnston
4fd176a41d More docs 2015-02-18 10:11:24 +00:00
Erik Johnston
d77912ff44 Docs. 2015-02-18 10:09:54 +00:00
Erik Johnston
9371019133 Try to only back off if we think we failed to connect to the remote 2015-02-17 18:13:34 +00:00
Erik Johnston
649dc8a7e2 Merge branch 'develop' of github.com:matrix-org/synapse into failures 2015-02-17 17:43:14 +00:00
Erik Johnston
c8436b38a0 Only update destination_retry_timings if we have succeeded when retrying 2015-02-17 17:38:38 +00:00
Erik Johnston
f91263b1e0 Remove spurious self 2015-02-17 17:37:51 +00:00
Erik Johnston
1177245e86 Merge branch 'hotfixes-v0.7.0g' of github.com:matrix-org/synapse into develop 2015-02-17 17:30:11 +00:00
Erik Johnston
20e3172f38 Merge pull request #75 from matrix-org/dont_write_bytecode
Don't write bytecode
2015-02-17 17:29:55 +00:00
Erik Johnston
58554fa658 Merge branch 'develop' of github.com:matrix-org/synapse into keyclient_retry_scheme 2015-02-17 17:26:46 +00:00
Erik Johnston
2c29ed3e84 Use absolute path when loading delta sql files 2015-02-17 17:22:24 +00:00
Erik Johnston
2b8f1a956c Add per-server retry limiting.
Factor out the per-destination retry logic from TransactionQueue so it
can be reused in both get_pdu and crypto.keyring
2015-02-17 17:20:56 +00:00
Erik Johnston
5025305fb2 Rate limit retries when fetching server keys. 2015-02-17 15:57:42 +00:00
Erik Johnston
1a989c436c Bump schema version 2015-02-17 15:45:55 +00:00
Erik Johnston
964bb43fbe Fix typo in function name 2015-02-17 15:44:41 +00:00
Erik Johnston
e7e20417ca ExpiringCache: purge every 1/2 interval 2015-02-17 15:44:26 +00:00
Erik Johnston
8b919c00f3 Start the get_pdu cache 2015-02-17 15:44:01 +00:00
Erik Johnston
676e8ee78a Remove debug raise 2015-02-17 15:22:45 +00:00
Erik Johnston
08e70231c9 Merge branch 'develop' of github.com:matrix-org/synapse into failures 2015-02-17 15:21:33 +00:00
Erik Johnston
0647e27a41 Remove unused import 2015-02-17 15:19:54 +00:00
Erik Johnston
fa6c93bd26 Merge branch 'consumeErrors' of github.com:matrix-org/synapse into develop 2015-02-17 15:18:17 +00:00
Erik Johnston
c02da58a9d Merge branch 'develop' of github.com:matrix-org/synapse into failures 2015-02-17 15:15:07 +00:00
Erik Johnston
472734a8cc Consume errors in time_bound_deferred 2015-02-17 15:13:50 +00:00
Erik Johnston
4de93001bf Make matrixfederationclient log more nicely 2015-02-17 15:12:06 +00:00
Erik Johnston
659ead082f Format the response of transaction request in a nicer way 2015-02-17 15:11:44 +00:00
Erik Johnston
c82e26ad4b Actually respond with JSON to incoming transaction 2015-02-17 13:24:13 +00:00
Erik Johnston
47281f8fa4 Change some debug logging to info 2015-02-17 13:14:11 +00:00
Erik Johnston
02bfa889de Handle receiving failures in transactions 2015-02-17 13:13:14 +00:00
Erik Johnston
c37e7e1774 Merge pull request #76 from matrix-org/consumeErrors
Consume errors
2015-02-17 13:02:04 +00:00
Erik Johnston
c2b1dbd84c We do want to consumeError 2015-02-17 11:11:11 +00:00
Erik Johnston
ea1d6c16cd Don't write bytecode 2015-02-17 10:54:06 +00:00
Erik Johnston
72a4de2ce6 Use consumeErrors=True on all DeferredLists.
This is so that the DeferredLists actually consume the error instead of
propagating down the non-existent errback chain. This should reduce the
number of unhandled errors we are seeing.
2015-02-17 10:07:01 +00:00
Erik Johnston
0194e71e99 Merge branch 'develop' of github.com:matrix-org/synapse into get_pdu_limiting 2015-02-17 09:48:23 +00:00
Erik Johnston
baa5b9a975 Cache results of get_pdu. 2015-02-16 18:02:39 +00:00
Erik Johnston
bfffd2e108 Merge pull request #74 from matrix-org/federation_min_depth_fix
Federation min depth fix
2015-02-16 17:12:46 +00:00
Erik Johnston
2674aeb96a Factor out ExpiringCache from StateHandler 2015-02-16 16:16:47 +00:00
Erik Johnston
91fc5eef1d Mark old events as outliers.
This is to fix the issue where if a remote server sends an event
that references a really "old" event, then the local server will pull
that in and send to all clients.

We consider an event to be old if its depth is less than the minimum depth
of the room.
2015-02-16 14:27:40 +00:00
Erik Johnston
6138584651 Don't return anything from _handle_new_pdu, since we ignore the return value anyway 2015-02-16 14:08:02 +00:00
Erik Johnston
a5ad6f862c Fix contrib/graph/graph2.py to handle FrozenDict 2015-02-16 13:15:41 +00:00
Erik Johnston
8a59915d7d Merge branch 'hotfixes-v0.7.0f' of github.com:matrix-org/synapse into develop 2015-02-16 09:47:22 +00:00
Erik Johnston
0421eb84ac Merge pull request #73 from matrix-org/hotfixes-v0.7.0f
Hotfixes v0.7.0f
2015-02-16 09:46:01 +00:00
Erik Johnston
6dd5c95841 Bump version 2015-02-15 20:38:52 +00:00
Erik Johnston
b99a33f283 resolve_events expect lists, not dicts 2015-02-15 20:20:51 +00:00
Matthew Hodgson
2b8903ce2f we federate on port 8448 nowadays... 2015-02-14 00:16:33 +00:00
Erik Johnston
5f68529036 Merge branch 'master' of github.com:matrix-org/synapse into develop 2015-02-13 16:21:30 +00:00
Erik Johnston
d502013c6e Merge branch 'hotfixes-0.7.0e' of github.com:matrix-org/synapse 2015-02-13 16:19:32 +00:00
David Baker
64def4f953 Merge branch 'hotfixes-0.7.0e' into develop 2015-02-13 16:18:34 +00:00
Erik Johnston
a78838c5ba Bump version 2015-02-13 16:17:54 +00:00
David Baker
8d5cce62ab Update pushers by app id and pushkey, not user id and pushkey 2015-02-13 16:16:16 +00:00
Erik Johnston
650dc7f0f9 Merge branch 'master' of github.com:matrix-org/synapse into develop 2015-02-13 15:46:42 +00:00
Erik Johnston
be26697b29 Bump version 2015-02-13 15:37:35 +00:00
Erik Johnston
b11a6e1c3c Fix wrong variable name 2015-02-13 15:37:18 +00:00
Mark Haines
0d872f5aa6 Merge pull request #50 from matrix-org/application-services
Application Services
2015-02-13 15:06:14 +00:00
Mark Haines
fa662b52d0 Merge pull request #72 from matrix-org/in_memory_sqlite_for_testing
Prepare the database whenever a connection is opened from the db_pool so...
2015-02-13 14:42:27 +00:00
Mark Haines
183b3d4e47 Prepare the database whenever a connection is opened from the db_pool so that in-memory databases will work 2015-02-13 14:38:24 +00:00
Erik Johnston
49eb11530c Merge branch 'develop' of github.com:matrix-org/synapse 2015-02-13 14:37:30 +00:00
Erik Johnston
0546126cc5 Bump version 2015-02-13 14:36:40 +00:00
Erik Johnston
2aa87305c0 Merge pull request #71 from matrix-org/auth-conflict-res
When we see a difference in current state, actually use state conflict resolution algorithm
2015-02-13 14:32:17 +00:00
Erik Johnston
e441c10a73 pyflakes 2015-02-13 14:23:39 +00:00
Erik Johnston
8c652a2b5f When we see a difference in current state, actually use state conflict resolution algorithm 2015-02-13 14:20:05 +00:00
Erik Johnston
6375abcdac Merge branch 'master' of github.com:matrix-org/synapse into develop 2015-02-13 13:34:25 +00:00
Erik Johnston
74626a8de4 Merge branch 'master' of github.com:matrix-org/synapse into develop 2015-02-13 10:09:55 +00:00
Paul "LeoNerd" Evans
93d07c87dc Reindent code to be less human-readable to keep pep8 from complaining 2015-02-12 19:19:37 +00:00
Paul "LeoNerd" Evans
5f6e6530d0 Appease pyflakes 2015-02-12 19:15:23 +00:00
Paul "LeoNerd" Evans
29805213d1 Can now remove the FIXME too 2015-02-12 19:13:21 +00:00
Paul "LeoNerd" Evans
860b1b4841 Only attempt to fetch presence state of JOINed members in room initialSync (SYN-202) 2015-02-12 19:13:21 +00:00
Kegan Dougal
cb43fbeeb4 Fix tests which broke when event caching was introduced. 2015-02-11 16:46:01 +00:00
Kegan Dougal
f2fdcb7c4b Merge branch 'develop' into application-services 2015-02-11 16:43:26 +00:00
Kegan Dougal
f518324426 Minor tweaks based on PR feedback. 2015-02-11 16:41:16 +00:00
Kegan Dougal
14d413752b Fix newline on __init__ 2015-02-11 10:53:47 +00:00
Kegan Dougal
fd40d992ad PEP8-ify 2015-02-11 10:41:33 +00:00
Kegan Dougal
8beb613916 Add newline to EOF 2015-02-11 10:36:48 +00:00
Kegan Dougal
c7783d6fee Notify ASes for events sent by other users in a room which an AS user is a part of. 2015-02-11 10:36:08 +00:00
Kegan Dougal
9978c5c103 Merge branch 'develop' into application-services 2015-02-11 10:03:24 +00:00
Kegan Dougal
53557fc532 Merge branch 'develop' into application-services 2015-02-09 15:20:56 +00:00
Kegan Dougal
f7cac2f7b6 Fix bugs so lazy room joining works as intended. 2015-02-09 15:01:28 +00:00
Kegan Dougal
ab3c897ce1 Remove unused imports. 2015-02-09 14:16:36 +00:00
Kegan Dougal
5a7dd05818 Modify auth.get_user_by_req for authing appservices directly.
Add logic to map the appservice token to the auto-generated appservice user ID.
Add unit tests for all forms of get_user_by_req (user/appservice,
valid/bad/missing tokens)
2015-02-09 14:14:15 +00:00
Kegan Dougal
ac3183caaa Register a user account for the AS when the AS registers. Add 'sender' column to AS table. 2015-02-09 12:03:37 +00:00
Kegan Dougal
73a680b2a8 Add errcodes for appservice registrations. 2015-02-06 17:10:04 +00:00
Kegan Dougal
0995810273 Pyflakes: unused variable. 2015-02-06 11:45:19 +00:00
Kegan Dougal
c3ae8def75 Grant ASes the ability to delete aliases in their own namespace. 2015-02-06 11:32:07 +00:00
Kegan Dougal
e426df8e10 Grant ASes the ability to create alias in their own namespace.
Add a new errcode type M_EXCLUSIVE when users try to create aliases inside
AS namespaces, and when ASes try to create aliases outside their own
namespace.
2015-02-06 10:57:14 +00:00
Kegan Dougal
0227618d3c Add m.login.application_service registration procedure.
This allows known application services to register any user ID under their
own user namespace(s).
2015-02-05 17:29:27 +00:00
Kegan Dougal
11e6b3d18b Dependency inject ApplicationServiceApi when creating ApplicationServicesHandler. 2015-02-05 17:04:59 +00:00
Kegan Dougal
a3c6010718 Add delta sql file. 2015-02-05 16:48:57 +00:00
Kegan Dougal
cab4c73088 Prevent user IDs in AS namespaces being created/deleted by humans. 2015-02-05 16:46:56 +00:00
Kegan Dougal
e9484d6a95 Prevent aliases in AS namespaces being created/deleted by users. Check with ASes when queried for room aliases via federation. 2015-02-05 16:29:56 +00:00
Kegan Dougal
c20281ee33 Merge branch 'develop' into application-services 2015-02-05 16:11:34 +00:00
Kegan Dougal
fc8bcc809d Merge branch 'develop' into application-services 2015-02-05 15:32:45 +00:00
Kegan Dougal
5b99b471b2 Fix unit tests. 2015-02-05 15:12:36 +00:00
Kegan Dougal
c163357f38 Add CS extension for masquerading as users within the namespaces specified by the AS. 2015-02-05 15:00:33 +00:00
Kegan Dougal
951690e54d Merge branch 'develop' into application-services 2015-02-05 14:28:03 +00:00
Kegan Dougal
c71456117d Fix user query checks. HS>AS pushing now works. 2015-02-05 14:17:08 +00:00
Kegan Dougal
0613666d9c Serialize events before sending to ASes 2015-02-05 13:42:35 +00:00
Kegan Dougal
131e036402 Fix unit tests. 2015-02-05 13:22:20 +00:00
Kegan Dougal
51d63ac329 Glue AS work to general event notifications. Add more exception handling when poking ASes. 2015-02-05 13:19:46 +00:00
Kegan Dougal
bc658907f0 Add unit test for appservice_handler.query_room_alias_exists 2015-02-05 11:54:36 +00:00
Kegan Dougal
b932600653 Add unknown room alias check. Call it from directory_handler.get_association 2015-02-05 11:47:11 +00:00
Kegan Dougal
f0c730252f Add unknown user ID check. Use store.get_aliases_for_room(room_id) when searching for services by alias. 2015-02-05 11:25:32 +00:00
Kegan Dougal
27091f146a Add hs_token column and generate a different token for each application service. 2015-02-05 10:08:12 +00:00
Kegan Dougal
a1a4960baf Impl push_bulk function 2015-02-05 09:43:22 +00:00
Kegan Dougal
543e84fe70 Add SimpleHttpClient.put_json with the same semantics as get_json. 2015-02-04 17:39:51 +00:00
Kegan Dougal
6d3e4f4d0a Update user/alias query APIs to use new format of SimpleHttpClient.get_json 2015-02-04 17:32:44 +00:00
Kegan Dougal
96d4bf9012 Modify API for SimpleHttpClient.get_json and update usages.
Previously, this would only return the HTTP body as JSON, and discard other
response information (e.g. the HTTP response code). This has now been changed
to throw a CodeMessageException on a non-2xx response, with the response code
and body, which can then be parsed as JSON.

Affected modules include:
 - Registration/Login (when using an email for IS auth)
2015-02-04 17:07:31 +00:00
Kegan Dougal
aa8cce58bf Add query_user/alias APIs. 2015-02-04 16:44:53 +00:00
Kegan Dougal
ce8bc642ae Merge branch 'develop' into application-services 2015-02-04 15:31:02 +00:00
Kegan Dougal
89f2e8fbdf Fix bug in store defer. Add more unit tests. 2015-02-04 15:21:03 +00:00
Kegan Dougal
525a218b2b Begin to add unit tests for appservice glue and regex testing. 2015-02-04 12:24:20 +00:00
Kegan Dougal
17753f0c20 Add stub ApplicationServiceApi and glue it with the handler. 2015-02-04 11:19:18 +00:00
Kegan Dougal
94a5db9f4d Add appservice package and move ApplicationService into it. 2015-02-03 14:44:16 +00:00
Kegan Dougal
f2c039bfb9 Implement restricted namespace checks. Begin fleshing out the main hook for notifying application services. 2015-02-03 13:29:27 +00:00
Kegan Dougal
a060b47b13 Add namespace constants. Add restrict_to option to limit namespace checks. 2015-02-03 13:17:28 +00:00
Kegan Dougal
3bd2841fdb Everyone loves SQL typos 2015-02-03 11:37:52 +00:00
Kegan Dougal
197f3ea4ba Implement regex checks for app services.
Expose handler.get_services_for_event which manages the checks for all
services.
2015-02-03 11:26:33 +00:00
Kegan Dougal
9ff349a3cb Add defers in the right places. 2015-02-02 17:42:49 +00:00
Kegan Dougal
1a2de0c5fe Implement txns for AS (un)registration. 2015-02-02 17:39:41 +00:00
Kegan Dougal
a006d168c5 Actually merge into develop. 2015-02-02 16:05:34 +00:00
Kegan Dougal
c059c9fea5 Merge branch 'develop' into application-services
Conflicts:
	synapse/handlers/__init__.py
	synapse/storage/__init__.py
2015-02-02 15:57:59 +00:00
Kegan Dougal
42876969b9 Add basic application_services SQL, and hook up parts of the appservice store to read from it. 2015-01-28 11:59:38 +00:00
Kegan Dougal
b46fa8603e Remove unused import 2015-01-28 09:17:48 +00:00
Kegan Dougal
fbeaeb8689 Log when ASes are registered/unregistered. 2015-01-27 17:34:40 +00:00
Kegan Dougal
ec3719b583 Use ApplicationService when registering. 2015-01-27 17:15:06 +00:00
Kegan Dougal
92171f9dd1 Add stub methods, TODOs and docstrings for application services. 2015-01-27 16:53:59 +00:00
Kegan Dougal
7331d34839 Add AS specific classes with docstrings. 2015-01-27 16:23:46 +00:00
Kegan Dougal
51449e0665 Add appservice handler and store. Glue together rest > handler > store. 2015-01-27 15:50:28 +00:00
Kegan Dougal
6efdc11cc8 Parse /register and /unregister request JSON. 2015-01-27 15:03:19 +00:00
Kegan Dougal
fa8e6ff900 Add stub application services REST API. 2015-01-27 14:01:51 +00:00
59 changed files with 2633 additions and 342 deletions

View File

@@ -1,3 +1,23 @@
Changes in synapse v0.7.1 (2015-02-19)
======================================
* Initial alpha implementation of parts of the Application Services API.
Including:
- AS Registration / Unregistration
- User Query API
- Room Alias Query API
- Push transport for receiving events.
- User/Alias namespace admin control
* Add cache when fetching events from remote servers to stop repeatedly
fetching events with bad signatures.
* Respect the per remote server retry scheme when fetching both events and
server keys to reduce the number of times we send requests to dead servers.
* Inform remote servers when the local server fails to handle a received event.
* Turn off python bytecode generation due to problems experienced when
upgrading from previous versions.
Changes in synapse v0.7.0 (2015-02-12)
======================================

View File

@@ -6,7 +6,7 @@ VoIP. The basics you need to know to get up and running are:
- Everything in Matrix happens in a room. Rooms are distributed and do not
exist on any single server. Rooms can be located using convenience aliases
like ``#matrix:matrix.org`` or ``#test:localhost:8008``.
like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
you will normally refer to yourself and others using a 3PID: email

View File

@@ -21,6 +21,7 @@ import datetime
import argparse
from synapse.events import FrozenEvent
from synapse.util.frozenutils import unfreeze
def make_graph(db_name, room_id, file_prefix, limit):
@@ -70,7 +71,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
float(event.origin_server_ts) / 1000
).strftime('%Y-%m-%d %H:%M:%S,%f')
content = json.dumps(event.get_dict()["content"])
content = json.dumps(unfreeze(event.get_dict()["content"]))
label = (
"<"

View File

@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
__version__ = "0.7.0b"
__version__ = "0.7.1"

View File

@@ -306,6 +306,34 @@ class Auth(object):
# Can optionally look elsewhere in the request (e.g. headers)
try:
access_token = request.args["access_token"][0]
# Check for application service tokens with a user_id override
try:
app_service = yield self.store.get_app_service_by_token(
access_token
)
if not app_service:
raise KeyError
user_id = app_service.sender
if "user_id" in request.args:
user_id = request.args["user_id"][0]
if not app_service.is_interested_in_user(user_id):
raise AuthError(
403,
"Application service cannot masquerade as this user."
)
if not user_id:
raise KeyError
defer.returnValue(
(UserID.from_string(user_id), ClientInfo("", ""))
)
return
except KeyError:
pass # normal users won't have this query parameter set
user_info = yield self.get_user_by_token(access_token)
user = user_info["user"]
device_id = user_info["device_id"]
@@ -344,8 +372,7 @@ class Auth(object):
try:
ret = yield self.store.get_user_by_token(token=token)
if not ret:
raise StoreError()
raise StoreError(400, "Unknown token")
user_info = {
"admin": bool(ret.get("admin", False)),
"device_id": ret.get("device_id"),
@@ -358,6 +385,18 @@ class Auth(object):
raise AuthError(403, "Unrecognised access token.",
errcode=Codes.UNKNOWN_TOKEN)
@defer.inlineCallbacks
def get_appservice_by_req(self, request):
try:
token = request.args["access_token"][0]
service = yield self.store.get_app_service_by_token(token)
if not service:
raise AuthError(403, "Unrecognised access token.",
errcode=Codes.UNKNOWN_TOKEN)
defer.returnValue(service)
except KeyError:
raise AuthError(403, "Missing access token.")
def is_server_admin(self, user):
return self.store.is_server_admin(user)
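
The new masquerading path above keys off two request parameters: the application service's own access_token and an optional user_id override, rejected with a 403 if the user falls outside the service's registered namespace. A minimal sketch of the query string an AS would send; the token and user ID here are hypothetical:

import urllib

as_token = "hypothetical_as_token"        # resolved by store.get_app_service_by_token
target_user = "@_irc_alice:example.org"   # must fall inside the service's user namespace

# e.g. appended to /_matrix/client/api/v1/rooms/{roomId}/send/m.room.message
query = urllib.urlencode({
    "access_token": as_token,
    "user_id": target_user,
})
print "?" + query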

View File

@@ -59,6 +59,7 @@ class LoginType(object):
EMAIL_URL = u"m.login.email.url"
EMAIL_IDENTITY = u"m.login.email.identity"
RECAPTCHA = u"m.login.recaptcha"
APPLICATION_SERVICE = u"m.login.application_service"
class EventTypes(object):

View File

@@ -36,7 +36,8 @@ class Codes(object):
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
MISSING_PARAM = "M_MISSING_PARAM",
TOO_LARGE = "M_TOO_LARGE"
TOO_LARGE = "M_TOO_LARGE",
EXCLUSIVE = "M_EXCLUSIVE"
class CodeMessageException(RuntimeError):

View File

@@ -22,3 +22,4 @@ WEB_CLIENT_PREFIX = "/_matrix/client"
CONTENT_REPO_PREFIX = "/_matrix/content"
SERVER_KEY_PREFIX = "/_matrix/key/v1"
MEDIA_PREFIX = "/_matrix/media/v1"
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"

View File

@@ -14,6 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.dont_write_bytecode = True
from synapse.storage import prepare_database, UpgradeDatabaseException
from synapse.server import HomeServer
@@ -26,13 +29,14 @@ from twisted.web.resource import Resource
from twisted.web.static import File
from twisted.web.server import Site
from synapse.http.server import JsonResource, RootRedirect
from synapse.rest.appservice.v1 import AppServiceRestResource
from synapse.rest.media.v0.content_repository import ContentRepoResource
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
from synapse.http.server_key_resource import LocalKey
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.api.urls import (
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX,
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX
)
from synapse.config.homeserver import HomeServerConfig
from synapse.crypto import context_factory
@@ -48,7 +52,7 @@ import synapse
import logging
import os
import re
import sys
import subprocess
import sqlite3
import syweb
@@ -69,6 +73,9 @@ class SynapseHomeServer(HomeServer):
def build_resource_for_federation(self):
return JsonResource(self)
def build_resource_for_app_services(self):
return AppServiceRestResource(self)
def build_resource_for_web_client(self):
syweb_path = os.path.dirname(syweb.__file__)
webclient_path = os.path.join(syweb_path, "webclient")
@@ -90,7 +97,9 @@ class SynapseHomeServer(HomeServer):
"sqlite3", self.get_db_name(),
check_same_thread=False,
cp_min=1,
cp_max=1
cp_max=1,
cp_openfun=prepare_database, # Prepare the database for each conn
# so that :memory: sqlite works
)
def create_resource_tree(self, web_client, redirect_root_to_web_client):
@@ -114,6 +123,7 @@ class SynapseHomeServer(HomeServer):
(CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
(SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
(MEDIA_PREFIX, self.get_resource_for_media_repository()),
(APP_SERVICE_PREFIX, self.get_resource_for_app_services()),
]
if web_client:
logger.info("Adding the web client.")
@@ -199,6 +209,66 @@ class SynapseHomeServer(HomeServer):
logger.info("Synapse now listening on port %d", unsecure_port)
def get_version_string():
null = open(os.devnull, 'w')
cwd = os.path.dirname(os.path.abspath(__file__))
try:
git_branch = subprocess.check_output(
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
stderr=null,
cwd=cwd,
).strip()
git_branch = "b=" + git_branch
except subprocess.CalledProcessError:
git_branch = ""
try:
git_tag = subprocess.check_output(
['git', 'describe', '--exact-match'],
stderr=null,
cwd=cwd,
).strip()
git_tag = "t=" + git_tag
except subprocess.CalledProcessError:
git_tag = ""
try:
git_commit = subprocess.check_output(
['git', 'rev-parse', '--short', 'HEAD'],
stderr=null,
cwd=cwd,
).strip()
except subprocess.CalledProcessError:
git_commit = ""
try:
dirty_string = "-this_is_a_dirty_checkout"
is_dirty = subprocess.check_output(
['git', 'describe', '--dirty=' + dirty_string],
stderr=null,
cwd=cwd,
).strip().endswith(dirty_string)
git_dirty = "dirty" if is_dirty else ""
except subprocess.CalledProcessError:
git_dirty = ""
if git_branch or git_tag or git_commit or git_dirty:
git_version = ",".join(
s for s in
(git_branch, git_tag, git_commit, git_dirty,)
if s
)
return (
"Synapse/%s (%s)" % (
synapse.__version__, git_version,
)
).encode("ascii")
return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
def setup():
config = HomeServerConfig.load_config(
"Synapse Homeserver",
@@ -210,8 +280,10 @@ def setup():
check_requirements()
version_string = get_version_string()
logger.info("Server hostname: %s", config.server_name)
logger.info("Server version: %s", synapse.__version__)
logger.info("Server version: %s", version_string)
if re.search(":[0-9]+$", config.server_name):
domain_with_port = config.server_name
@@ -228,6 +300,7 @@ def setup():
tls_context_factory=tls_context_factory,
config=config,
content_addr=config.content_addr,
version_string=version_string,
)
hs.create_resource_tree(
@@ -252,14 +325,6 @@ def setup():
logger.info("Database prepared in %s.", db_name)
db_pool = hs.get_db_pool()
if db_name == ":memory:":
# Memory databases will need to be setup each time they are opened.
reactor.callWhenRunning(
db_pool.runWithConnection, prepare_database
)
if config.manhole:
f = twisted.manhole.telnet.ShellFactory()
f.username = "matrix"
@@ -270,12 +335,13 @@ def setup():
bind_port = config.bind_port
if config.no_tls:
bind_port = None
hs.start_listening(bind_port, config.unsecure_port)
hs.get_pusherpool().start()
hs.get_state_handler().start_caching()
hs.get_datastore().start_profiling()
hs.get_replication_layer().start_get_pdu_cache()
if config.daemonize:
print config.pid_file

View File

@@ -19,7 +19,7 @@ import os
import subprocess
import signal
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
CONFIGFILE = "homeserver.yaml"
PIDFILE = "homeserver.pid"

View File

@@ -0,0 +1,147 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.api.constants import EventTypes
import logging
import re
logger = logging.getLogger(__name__)
class ApplicationService(object):
"""Defines an application service. This definition is mostly what is
provided to the /register AS API.
Provides methods to check if this service is "interested" in events.
"""
NS_USERS = "users"
NS_ALIASES = "aliases"
NS_ROOMS = "rooms"
# The ordering here is important as it is used to map database values (which
# are stored as ints representing the position in this list) to namespace
# values.
NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]
def __init__(self, token, url=None, namespaces=None, hs_token=None,
sender=None, txn_id=None):
self.token = token
self.url = url
self.hs_token = hs_token
self.sender = sender
self.namespaces = self._check_namespaces(namespaces)
self.txn_id = txn_id
def _check_namespaces(self, namespaces):
# Sanity check that it is of the form:
# {
# users: ["regex",...],
# aliases: ["regex",...],
# rooms: ["regex",...],
# }
if not namespaces:
return None
for ns in ApplicationService.NS_LIST:
if type(namespaces[ns]) != list:
raise ValueError("Bad namespace value for '%s'", ns)
for regex in namespaces[ns]:
if not isinstance(regex, basestring):
raise ValueError("Expected string regex for ns '%s'", ns)
return namespaces
def _matches_regex(self, test_string, namespace_key):
if not isinstance(test_string, basestring):
logger.error(
"Expected a string to test regex against, but got %s",
test_string
)
return False
for regex in self.namespaces[namespace_key]:
if re.match(regex, test_string):
return True
return False
def _matches_user(self, event, member_list):
if (hasattr(event, "sender") and
self.is_interested_in_user(event.sender)):
return True
# also check m.room.member state key
if (hasattr(event, "type") and event.type == EventTypes.Member
and hasattr(event, "state_key")
and self.is_interested_in_user(event.state_key)):
return True
# check joined member events
for member in member_list:
if self.is_interested_in_user(member.state_key):
return True
return False
def _matches_room_id(self, event):
if hasattr(event, "room_id"):
return self.is_interested_in_room(event.room_id)
return False
def _matches_aliases(self, event, alias_list):
for alias in alias_list:
if self.is_interested_in_alias(alias):
return True
return False
def is_interested(self, event, restrict_to=None, aliases_for_event=None,
member_list=None):
"""Check if this service is interested in this event.
Args:
event(Event): The event to check.
restrict_to(str): The namespace to restrict regex tests to.
aliases_for_event(list): A list of all the known room aliases for
this event.
member_list(list): A list of all joined room members in this room.
Returns:
bool: True if this service would like to know about this event.
"""
if aliases_for_event is None:
aliases_for_event = []
if member_list is None:
member_list = []
if restrict_to and restrict_to not in ApplicationService.NS_LIST:
# this is a programming error, so fail early and raise a general
# exception
raise Exception("Unexpected restrict_to value: %s". restrict_to)
if not restrict_to:
return (self._matches_user(event, member_list)
or self._matches_aliases(event, aliases_for_event)
or self._matches_room_id(event))
elif restrict_to == ApplicationService.NS_ALIASES:
return self._matches_aliases(event, aliases_for_event)
elif restrict_to == ApplicationService.NS_ROOMS:
return self._matches_room_id(event)
elif restrict_to == ApplicationService.NS_USERS:
return self._matches_user(event, member_list)
def is_interested_in_user(self, user_id):
return self._matches_regex(user_id, ApplicationService.NS_USERS)
def is_interested_in_alias(self, alias):
return self._matches_regex(alias, ApplicationService.NS_ALIASES)
def is_interested_in_room(self, room_id):
return self._matches_regex(room_id, ApplicationService.NS_ROOMS)
def __str__(self):
return "ApplicationService: %s" % (self.__dict__,)

synapse/appservice/api.py (new file, 108 lines)
View File

@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import CodeMessageException
from synapse.http.client import SimpleHttpClient
from synapse.events.utils import serialize_event
import logging
import urllib
logger = logging.getLogger(__name__)
class ApplicationServiceApi(SimpleHttpClient):
"""This class manages HS -> AS communications, including querying and
pushing.
"""
def __init__(self, hs):
super(ApplicationServiceApi, self).__init__(hs)
self.clock = hs.get_clock()
@defer.inlineCallbacks
def query_user(self, service, user_id):
uri = service.url + ("/users/%s" % urllib.quote(user_id))
response = None
try:
response = yield self.get_json(uri, {
"access_token": service.hs_token
})
if response is not None: # just an empty json object
defer.returnValue(True)
except CodeMessageException as e:
if e.code == 404:
defer.returnValue(False)
return
logger.warning("query_user to %s received %s", uri, e.code)
except Exception as ex:
logger.warning("query_user to %s threw exception %s", uri, ex)
defer.returnValue(False)
@defer.inlineCallbacks
def query_alias(self, service, alias):
uri = service.url + ("/rooms/%s" % urllib.quote(alias))
response = None
try:
response = yield self.get_json(uri, {
"access_token": service.hs_token
})
if response is not None: # just an empty json object
defer.returnValue(True)
except CodeMessageException as e:
logger.warning("query_alias to %s received %s", uri, e.code)
if e.code == 404:
defer.returnValue(False)
return
except Exception as ex:
logger.warning("query_alias to %s threw exception %s", uri, ex)
defer.returnValue(False)
@defer.inlineCallbacks
def push_bulk(self, service, events):
events = self._serialize(events)
uri = service.url + ("/transactions/%s" %
urllib.quote(str(0))) # TODO txn_ids
response = None
try:
response = yield self.put_json(
uri=uri,
json_body={
"events": events
},
args={
"access_token": service.hs_token
})
if response: # just an empty json object
# TODO: Mark txn as sent successfully
defer.returnValue(True)
except CodeMessageException as e:
logger.warning("push_bulk to %s received %s", uri, e.code)
except Exception as ex:
logger.warning("push_bulk to %s threw exception %s", uri, ex)
defer.returnValue(False)
@defer.inlineCallbacks
def push(self, service, event):
response = yield self.push_bulk(service, [event])
defer.returnValue(response)
def _serialize(self, events):
time_now = self.clock.time_msec()
return [
serialize_event(e, time_now, as_client_event=True) for e in events
]
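
A hedged sketch of how a caller might use this client to push a single event; push_one_event is a hypothetical helper, and hs, service, event and the module-level logger are assumed to be in scope:

from twisted.internet import defer

@defer.inlineCallbacks
def push_one_event(hs, service, event):
    api = ApplicationServiceApi(hs)
    sent = yield api.push(service, event)   # True when push_bulk reports success
    if not sent:
        logger.warning("AS at %s did not accept event %s", service.url, event.event_id)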

View File

@@ -22,6 +22,8 @@ from syutil.crypto.signing_key import (
from syutil.base64util import decode_base64, encode_base64
from synapse.api.errors import SynapseError, Codes
from synapse.util.retryutils import get_retry_limiter
from OpenSSL import crypto
import logging
@@ -87,12 +89,18 @@ class Keyring(object):
return
# Try to fetch the key from the remote server.
# TODO(markjh): Ratelimit requests to a given server.
(response, tls_certificate) = yield fetch_server_key(
server_name, self.hs.tls_context_factory
limiter = yield get_retry_limiter(
server_name,
self.clock,
self.store,
)
with limiter:
(response, tls_certificate) = yield fetch_server_key(
server_name, self.hs.tls_context_factory
)
# Check the response.
x509_certificate_bytes = crypto.dump_certificate(

View File

@@ -19,10 +19,13 @@ from twisted.internet import defer
from .federation_base import FederationBase
from .units import Edu
from synapse.api.errors import CodeMessageException
from synapse.api.errors import CodeMessageException, SynapseError
from synapse.util.expiringcache import ExpiringCache
from synapse.util.logutils import log_function
from synapse.events import FrozenEvent
from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination
import logging
@@ -30,6 +33,20 @@ logger = logging.getLogger(__name__)
class FederationClient(FederationBase):
def __init__(self):
self._get_pdu_cache = None
def start_get_pdu_cache(self):
self._get_pdu_cache = ExpiringCache(
cache_name="get_pdu_cache",
clock=self._clock,
max_len=1000,
expiry_ms=120*1000,
reset_expiry_on_get=False,
)
self._get_pdu_cache.start()
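
The cache above is a straightforward use of ExpiringCache; a small sketch of its dict-like interface with the same settings, assuming a synapse Clock instance named clock and a previously fetched pdu are in scope:

from synapse.util.expiringcache import ExpiringCache

cache = ExpiringCache(
    cache_name="example_get_pdu_cache",
    clock=clock,             # assumption: a synapse Clock instance
    max_len=1000,
    expiry_ms=120 * 1000,    # entries dropped roughly two minutes after insertion
    reset_expiry_on_get=False,
)
cache.start()

cache["$some_event_id:example.org"] = pdu   # assumption: pdu is a FrozenEvent
print cache.get("$some_event_id:example.org")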
@log_function
def send_pdu(self, pdu, destinations):
"""Informs the replication layer about a new PDU generated within the
@@ -160,29 +177,58 @@ class FederationClient(FederationBase):
# TODO: Rate limit the number of times we try and get the same event.
if self._get_pdu_cache:
e = self._get_pdu_cache.get(event_id)
if e:
defer.returnValue(e)
pdu = None
for destination in destinations:
try:
transaction_data = yield self.transport_layer.get_event(
destination, event_id
limiter = yield get_retry_limiter(
destination,
self._clock,
self.store,
)
logger.debug("transaction_data %r", transaction_data)
with limiter:
transaction_data = yield self.transport_layer.get_event(
destination, event_id
)
pdu_list = [
self.event_from_pdu_json(p, outlier=outlier)
for p in transaction_data["pdus"]
]
logger.debug("transaction_data %r", transaction_data)
if pdu_list:
pdu = pdu_list[0]
pdu_list = [
self.event_from_pdu_json(p, outlier=outlier)
for p in transaction_data["pdus"]
]
# Check signatures are correct.
pdu = yield self._check_sigs_and_hash(pdu)
if pdu_list:
pdu = pdu_list[0]
break
except CodeMessageException:
raise
# Check signatures are correct.
pdu = yield self._check_sigs_and_hash(pdu)
break
except SynapseError as e:
logger.info(
"Failed to get PDU %s from %s because %s",
event_id, destination, e,
)
continue
except CodeMessageException as e:
if 400 <= e.code < 500:
raise
logger.info(
"Failed to get PDU %s from %s because %s",
event_id, destination, e,
)
continue
except NotRetryingDestination as e:
logger.info(e.message)
continue
except Exception as e:
logger.info(
"Failed to get PDU %s from %s because %s",
@@ -190,6 +236,9 @@ class FederationClient(FederationBase):
)
continue
if self._get_pdu_cache is not None:
self._get_pdu_cache[event_id] = pdu
defer.returnValue(pdu)
@defer.inlineCallbacks

View File

@@ -114,7 +114,15 @@ class FederationServer(FederationBase):
with PreserveLoggingContext():
dl = []
for pdu in pdu_list:
dl.append(self._handle_new_pdu(transaction.origin, pdu))
d = self._handle_new_pdu(transaction.origin, pdu)
def handle_failure(failure):
failure.trap(FederationError)
self.send_failure(failure.value, transaction.origin)
d.addErrback(handle_failure)
dl.append(d)
if hasattr(transaction, "edus"):
for edu in [Edu(**x) for x in transaction.edus]:
@@ -124,7 +132,10 @@ class FederationServer(FederationBase):
edu.content
)
results = yield defer.DeferredList(dl)
for failure in getattr(transaction, "pdu_failures", []):
logger.info("Got failure %r", failure)
results = yield defer.DeferredList(dl, consumeErrors=True)
ret = []
for r in results:
@@ -132,10 +143,16 @@ class FederationServer(FederationBase):
ret.append({})
else:
logger.exception(r[1])
ret.append({"error": str(r[1])})
ret.append({"error": str(r[1].value)})
logger.debug("Returning: %s", str(ret))
response = {
"pdus": dict(zip(
(p.event_id for p in pdu_list), ret
)),
}
yield self.transaction_actions.set_response(
transaction,
200, response
@@ -331,7 +348,6 @@ class FederationServer(FederationBase):
)
if already_seen:
logger.debug("Already seen pdu %s", pdu.event_id)
defer.returnValue({})
return
# Check signature.
@@ -367,7 +383,13 @@ class FederationServer(FederationBase):
pdu.room_id, min_depth
)
if min_depth and pdu.depth > min_depth and max_recursion > 0:
if min_depth and pdu.depth < min_depth:
# This is so that we don't notify the user about this
# message, to work around the fact that some events will
# reference really really old events we really don't want to
# send to the clients.
pdu.internal_metadata.outlier = True
elif min_depth and pdu.depth > min_depth and max_recursion > 0:
for event_id, hashes in pdu.prev_events:
if event_id not in have_seen:
logger.debug(
@@ -418,7 +440,7 @@ class FederationServer(FederationBase):
except:
logger.warn("Failed to get state for event: %s", pdu.event_id)
ret = yield self.handler.on_receive_pdu(
yield self.handler.on_receive_pdu(
origin,
pdu,
backfilled=False,
@@ -426,8 +448,6 @@ class FederationServer(FederationBase):
auth_chain=auth_chain,
)
defer.returnValue(ret)
def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name

View File

@@ -22,6 +22,9 @@ from .units import Transaction
from synapse.api.errors import HttpResponseException
from synapse.util.logutils import log_function
from synapse.util.logcontext import PreserveLoggingContext
from synapse.util.retryutils import (
get_retry_limiter, NotRetryingDestination,
)
import logging
@@ -63,6 +66,26 @@ class TransactionQueue(object):
# HACK to get unique tx id
self._next_txn_id = int(self._clock.time_msec())
def can_send_to(self, destination):
"""Can we send messages to the given server?
We can't send messages to ourselves. If we are running on localhost
then we can only federate with other servers running on localhost.
Otherwise we only federate with servers on a public domain.
Args:
destination(str): The server we are possibly trying to send to.
Returns:
bool: True if we can send to the server.
"""
if destination == self.server_name:
return False
if self.server_name.startswith("localhost"):
return destination.startswith("localhost")
else:
return not destination.startswith("localhost")
@defer.inlineCallbacks
@log_function
def enqueue_pdu(self, pdu, destinations, order):
@@ -71,8 +94,9 @@ class TransactionQueue(object):
# table and we'll get back to it later.
destinations = set(destinations)
destinations.discard(self.server_name)
destinations.discard("localhost")
destinations = set(
dest for dest in destinations if self.can_send_to(dest)
)
logger.debug("Sending to: %s", str(destinations))
@@ -87,24 +111,27 @@ class TransactionQueue(object):
(pdu, deferred, order)
)
def eb(failure):
def chain(failure):
if not deferred.called:
deferred.errback(failure)
else:
logger.warn("Failed to send pdu", failure)
def log_failure(failure):
logger.warn("Failed to send pdu", failure.value)
deferred.addErrback(log_failure)
with PreserveLoggingContext():
self._attempt_new_transaction(destination).addErrback(eb)
self._attempt_new_transaction(destination).addErrback(chain)
deferreds.append(deferred)
yield defer.DeferredList(deferreds)
yield defer.DeferredList(deferreds, consumeErrors=True)
# NO inlineCallbacks
def enqueue_edu(self, edu):
destination = edu.destination
if destination == self.server_name:
if not self.can_send_to(destination):
return
deferred = defer.Deferred()
@@ -112,51 +139,53 @@ class TransactionQueue(object):
(edu, deferred)
)
def eb(failure):
def chain(failure):
if not deferred.called:
deferred.errback(failure)
else:
logger.warn("Failed to send edu", failure)
def log_failure(failure):
logger.warn("Failed to send pdu", failure.value)
deferred.addErrback(log_failure)
with PreserveLoggingContext():
self._attempt_new_transaction(destination).addErrback(eb)
self._attempt_new_transaction(destination).addErrback(chain)
return deferred
@defer.inlineCallbacks
def enqueue_failure(self, failure, destination):
if destination == self.server_name or destination == "localhost":
return
deferred = defer.Deferred()
if not self.can_send_to(destination):
return
self.pending_failures_by_dest.setdefault(
destination, []
).append(
(failure, deferred)
)
def chain(f):
if not deferred.called:
deferred.errback(f)
def log_failure(f):
logger.warn("Failed to send pdu", f.value)
deferred.addErrback(log_failure)
with PreserveLoggingContext():
self._attempt_new_transaction(destination).addErrback(chain)
yield deferred
@defer.inlineCallbacks
@log_function
def _attempt_new_transaction(self, destination):
(retry_last_ts, retry_interval) = (0, 0)
retry_timings = yield self.store.get_destination_retry_timings(
destination
)
if retry_timings:
(retry_last_ts, retry_interval) = (
retry_timings.retry_last_ts, retry_timings.retry_interval
)
if retry_last_ts + retry_interval > int(self._clock.time_msec()):
logger.info(
"TX [%s] not ready for retry yet - "
"dropping transaction for now",
destination,
)
return
else:
logger.info("TX [%s] is ready for retry", destination)
if destination in self.pending_transactions:
# XXX: pending_transactions can get stuck on by a never-ending
# request at which point pending_pdus_by_dest just keeps growing.
@@ -183,15 +212,6 @@ class TransactionQueue(object):
logger.info("TX [%s] Nothing to send", destination)
return
logger.debug(
"TX [%s] Attempting new transaction"
" (pdus: %d, edus: %d, failures: %d)",
destination,
len(pending_pdus),
len(pending_edus),
len(pending_failures)
)
# Sort based on the order field
pending_pdus.sort(key=lambda t: t[2])
@@ -204,6 +224,21 @@ class TransactionQueue(object):
]
try:
limiter = yield get_retry_limiter(
destination,
self._clock,
self.store,
)
logger.debug(
"TX [%s] Attempting new transaction"
" (pdus: %d, edus: %d, failures: %d)",
destination,
len(pending_pdus),
len(pending_edus),
len(pending_failures)
)
self.pending_transactions[destination] = 1
logger.debug("TX [%s] Persisting transaction...", destination)
@@ -229,52 +264,56 @@ class TransactionQueue(object):
transaction.transaction_id,
)
# Actually send the transaction
with limiter:
# Actually send the transaction
# FIXME (erikj): This is a bit of a hack to make the Pdu age
# keys work
def json_data_cb():
data = transaction.get_dict()
now = int(self._clock.time_msec())
if "pdus" in data:
for p in data["pdus"]:
if "age_ts" in p:
unsigned = p.setdefault("unsigned", {})
unsigned["age"] = now - int(p["age_ts"])
del p["age_ts"]
return data
# FIXME (erikj): This is a bit of a hack to make the Pdu age
# keys work
def json_data_cb():
data = transaction.get_dict()
now = int(self._clock.time_msec())
if "pdus" in data:
for p in data["pdus"]:
if "age_ts" in p:
unsigned = p.setdefault("unsigned", {})
unsigned["age"] = now - int(p["age_ts"])
del p["age_ts"]
return data
try:
response = yield self.transport_layer.send_transaction(
transaction, json_data_cb
)
code = 200
except HttpResponseException as e:
code = e.code
response = e.response
try:
response = yield self.transport_layer.send_transaction(
transaction, json_data_cb
)
code = 200
logger.info("TX [%s] got %d response", destination, code)
if response:
for e_id, r in getattr(response, "pdus", {}).items():
if "error" in r:
logger.warn(
"Transaction returned error for %s: %s",
e_id, r,
)
except HttpResponseException as e:
code = e.code
response = e.response
logger.debug("TX [%s] Sent transaction", destination)
logger.debug("TX [%s] Marking as delivered...", destination)
logger.info("TX [%s] got %d response", destination, code)
logger.debug("TX [%s] Sent transaction", destination)
logger.debug("TX [%s] Marking as delivered...", destination)
yield self.transaction_actions.delivered(
transaction, code, response
)
logger.debug("TX [%s] Marked as delivered", destination)
logger.debug("TX [%s] Yielding to callbacks...", destination)
for deferred in deferreds:
if code == 200:
if retry_last_ts:
# this host is alive! reset retry schedule
yield self.store.set_destination_retry_timings(
destination, 0, 0
)
deferred.callback(None)
else:
self.set_retrying(destination, retry_interval)
deferred.errback(RuntimeError("Got status %d" % code))
# Ensures we don't continue until all callbacks on that
@@ -285,6 +324,12 @@ class TransactionQueue(object):
pass
logger.debug("TX [%s] Yielded to callbacks", destination)
except NotRetryingDestination:
logger.info(
"TX [%s] not ready for retry yet - "
"dropping transaction for now",
destination,
)
except RuntimeError as e:
# We capture this here as there is nothing that actually listens
# for this finishing function's deferred.
@@ -296,14 +341,12 @@ class TransactionQueue(object):
except Exception as e:
# We capture this here as there is nothing that actually listens
# for this finishing function's deferred.
logger.exception(
logger.warn(
"TX [%s] Problem in _attempt_transaction: %s",
destination,
e,
)
self.set_retrying(destination, retry_interval)
for deferred in deferreds:
if not deferred.called:
deferred.errback(e)
@@ -314,22 +357,3 @@ class TransactionQueue(object):
# Check to see if there is anything else to send.
self._attempt_new_transaction(destination)
@defer.inlineCallbacks
def set_retrying(self, destination, retry_interval):
# track that this destination is having problems and we should
# give it a chance to recover before trying it again
if retry_interval:
retry_interval *= 2
# plateau at hourly retries for now
if retry_interval >= 60 * 60 * 1000:
retry_interval = 60 * 60 * 1000
else:
retry_interval = 2000 # try again at first after 2 seconds
yield self.store.set_destination_retry_timings(
destination,
int(self._clock.time_msec()),
retry_interval
)
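
The removed set_retrying helper is replaced throughout this branch by the shared retry limiter from synapse.util.retryutils; a hedged sketch of the pattern, where fetch_with_backoff is a hypothetical helper and transport_layer/event_id are assumed inputs:

import logging

from twisted.internet import defer

from synapse.util.retryutils import get_retry_limiter, NotRetryingDestination

logger = logging.getLogger(__name__)

@defer.inlineCallbacks
def fetch_with_backoff(destination, clock, store, transport_layer, event_id):
    try:
        limiter = yield get_retry_limiter(destination, clock, store)
        with limiter:
            # failures inside the block feed back into the destination's retry timings
            transaction_data = yield transport_layer.get_event(
                destination, event_id
            )
        defer.returnValue(transaction_data)
    except NotRetryingDestination as e:
        # still inside the back-off window: drop the request for now
        logger.info(e.message)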

View File

@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.appservice.api import ApplicationServiceApi
from .register import RegistrationHandler
from .room import (
RoomCreationHandler, RoomMemberHandler, RoomListHandler
@@ -26,6 +27,7 @@ from .presence import PresenceHandler
from .directory import DirectoryHandler
from .typing import TypingNotificationHandler
from .admin import AdminHandler
from .appservice import ApplicationServicesHandler
from .sync import SyncHandler
@@ -52,4 +54,7 @@ class Handlers(object):
self.directory_handler = DirectoryHandler(hs)
self.typing_notification_handler = TypingNotificationHandler(hs)
self.admin_handler = AdminHandler(hs)
self.appservice_handler = ApplicationServicesHandler(
hs, ApplicationServiceApi(hs)
)
self.sync_handler = SyncHandler(hs)

View File

@@ -0,0 +1,211 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.appservice import ApplicationService
from synapse.types import UserID
import synapse.util.stringutils as stringutils
import logging
logger = logging.getLogger(__name__)
# NB: Purposefully not inheriting BaseHandler since that contains way too much
# setup code which this handler does not need or use. This makes testing a lot
# easier.
class ApplicationServicesHandler(object):
def __init__(self, hs, appservice_api):
self.store = hs.get_datastore()
self.hs = hs
self.appservice_api = appservice_api
@defer.inlineCallbacks
def register(self, app_service):
logger.info("Register -> %s", app_service)
# check the token is recognised
try:
stored_service = yield self.store.get_app_service_by_token(
app_service.token
)
if not stored_service:
raise StoreError(404, "Application service not found")
except StoreError:
raise SynapseError(
403, "Unrecognised application services token. "
"Consult the home server admin.",
errcode=Codes.FORBIDDEN
)
app_service.hs_token = self._generate_hs_token()
# create a sender for this application service which is used when
# creating rooms, etc..
account = yield self.hs.get_handlers().registration_handler.register()
app_service.sender = account[0]
yield self.store.update_app_service(app_service)
defer.returnValue(app_service)
@defer.inlineCallbacks
def unregister(self, token):
logger.info("Unregister as_token=%s", token)
yield self.store.unregister_app_service(token)
@defer.inlineCallbacks
def notify_interested_services(self, event):
"""Notifies (pushes) all application services interested in this event.
Pushing is done asynchronously, so this method won't block for any
prolonged length of time.
Args:
event(Event): The event to push out to interested services.
"""
# Gather interested services
services = yield self._get_services_for_event(event)
if len(services) == 0:
return # no services need notifying
# Do we know this user exists? If not, poke the user query API for
# all services which match that user regex. This needs to block as these
# user queries need to be made BEFORE pushing the event.
yield self._check_user_exists(event.sender)
if event.type == EventTypes.Member:
yield self._check_user_exists(event.state_key)
# Fork off pushes to these services - XXX First cut, best effort
for service in services:
self.appservice_api.push(service, event)
@defer.inlineCallbacks
def query_user_exists(self, user_id):
"""Check if any application service knows this user_id exists.
Args:
user_id(str): The user to query if they exist on any AS.
Returns:
True if this user exists on at least one application service.
"""
user_query_services = yield self._get_services_for_user(
user_id=user_id
)
for user_service in user_query_services:
is_known_user = yield self.appservice_api.query_user(
user_service, user_id
)
if is_known_user:
defer.returnValue(True)
defer.returnValue(False)
@defer.inlineCallbacks
def query_room_alias_exists(self, room_alias):
"""Check if an application service knows this room alias exists.
Args:
room_alias(RoomAlias): The room alias to query.
Returns:
namedtuple: with keys "room_id" and "servers" or None if no
association can be found.
"""
room_alias_str = room_alias.to_string()
alias_query_services = yield self._get_services_for_event(
event=None,
restrict_to=ApplicationService.NS_ALIASES,
alias_list=[room_alias_str]
)
for alias_service in alias_query_services:
is_known_alias = yield self.appservice_api.query_alias(
alias_service, room_alias_str
)
if is_known_alias:
# the alias exists now so don't query more ASes.
result = yield self.store.get_association_from_room_alias(
room_alias
)
defer.returnValue(result)
@defer.inlineCallbacks
def _get_services_for_event(self, event, restrict_to="", alias_list=None):
"""Retrieve a list of application services interested in this event.
Args:
event(Event): The event to check. Can be None if alias_list is not.
restrict_to(str): The namespace to restrict regex tests to.
alias_list: A list of aliases to get services for. If None, this
list is obtained from the database.
Returns:
list<ApplicationService>: A list of services interested in this
event based on the service regex.
"""
member_list = None
if hasattr(event, "room_id"):
# We need to know the aliases associated with this event.room_id,
# if any.
if not alias_list:
alias_list = yield self.store.get_aliases_for_room(
event.room_id
)
# We need to know the members associated with this event.room_id,
# if any.
member_list = yield self.store.get_room_members(
room_id=event.room_id,
membership=Membership.JOIN
)
services = yield self.store.get_app_services()
interested_list = [
s for s in services if (
s.is_interested(event, restrict_to, alias_list, member_list)
)
]
defer.returnValue(interested_list)
@defer.inlineCallbacks
def _get_services_for_user(self, user_id):
services = yield self.store.get_app_services()
interested_list = [
s for s in services if (
s.is_interested_in_user(user_id)
)
]
defer.returnValue(interested_list)
@defer.inlineCallbacks
def _is_unknown_user(self, user_id):
user = UserID.from_string(user_id)
if not self.hs.is_mine(user):
# we don't know if they are unknown or not since it isn't one of our
# users. We can't poke ASes.
defer.returnValue(False)
return
user_info = yield self.store.get_user_by_id(user_id)
defer.returnValue(len(user_info) == 0)
@defer.inlineCallbacks
def _check_user_exists(self, user_id):
unknown_user = yield self._is_unknown_user(user_id)
if unknown_user:
exists = yield self.query_user_exists(user_id)
defer.returnValue(exists)
defer.returnValue(True)
def _generate_hs_token(self):
return stringutils.random_string(24)

View File

@@ -37,18 +37,15 @@ class DirectoryHandler(BaseHandler):
)
@defer.inlineCallbacks
def create_association(self, user_id, room_alias, room_id, servers=None):
# TODO(erikj): Do auth.
def _create_association(self, room_alias, room_id, servers=None):
# general association creation for both human users and app services
if not self.hs.is_mine(room_alias):
raise SynapseError(400, "Room alias must be local")
# TODO(erikj): Change this.
# TODO(erikj): Add transactions.
# TODO(erikj): Check if there is a current association.
if not servers:
servers = yield self.store.get_joined_hosts_for_room(room_id)
@@ -61,23 +58,78 @@ class DirectoryHandler(BaseHandler):
servers
)
@defer.inlineCallbacks
def create_association(self, user_id, room_alias, room_id, servers=None):
# association creation for human users
# TODO(erikj): Do user auth.
can_create = yield self.can_modify_alias(
room_alias,
user_id=user_id
)
if not can_create:
raise SynapseError(
400, "This alias is reserved by an application service.",
errcode=Codes.EXCLUSIVE
)
yield self._create_association(room_alias, room_id, servers)
@defer.inlineCallbacks
def create_appservice_association(self, service, room_alias, room_id,
servers=None):
if not service.is_interested_in_alias(room_alias.to_string()):
raise SynapseError(
400, "This application service has not reserved"
" this kind of alias.", errcode=Codes.EXCLUSIVE
)
# association creation for app services
yield self._create_association(room_alias, room_id, servers)
@defer.inlineCallbacks
def delete_association(self, user_id, room_alias):
# association deletion for human users
# TODO Check if server admin
can_delete = yield self.can_modify_alias(
room_alias,
user_id=user_id
)
if not can_delete:
raise SynapseError(
400, "This alias is reserved by an application service.",
errcode=Codes.EXCLUSIVE
)
yield self._delete_association(room_alias)
@defer.inlineCallbacks
def delete_appservice_association(self, service, room_alias):
if not service.is_interested_in_alias(room_alias.to_string()):
raise SynapseError(
400,
"This application service has not reserved this kind of alias",
errcode=Codes.EXCLUSIVE
)
yield self._delete_association(room_alias)
@defer.inlineCallbacks
def _delete_association(self, room_alias):
if not self.hs.is_mine(room_alias):
raise SynapseError(400, "Room alias must be local")
room_id = yield self.store.delete_room_alias(room_alias)
yield self.store.delete_room_alias(room_alias)
if room_id:
yield self._update_room_alias_events(user_id, room_id)
# TODO - Looks like _update_room_alias_event has never been implemented
# if room_id:
# yield self._update_room_alias_events(user_id, room_id)
@defer.inlineCallbacks
def get_association(self, room_alias):
room_id = None
if self.hs.is_mine(room_alias):
result = yield self.store.get_association_from_room_alias(
result = yield self.get_association_from_room_alias(
room_alias
)
@@ -138,7 +190,7 @@ class DirectoryHandler(BaseHandler):
400, "Room Alias is not hosted on this Home Server"
)
result = yield self.store.get_association_from_room_alias(
result = yield self.get_association_from_room_alias(
room_alias
)
@@ -166,3 +218,27 @@ class DirectoryHandler(BaseHandler):
"sender": user_id,
"content": {"aliases": aliases},
}, ratelimit=False)
@defer.inlineCallbacks
def get_association_from_room_alias(self, room_alias):
result = yield self.store.get_association_from_room_alias(
room_alias
)
if not result:
# Query AS to see if it exists
as_handler = self.hs.get_handlers().appservice_handler
result = yield as_handler.query_room_alias_exists(room_alias)
defer.returnValue(result)
@defer.inlineCallbacks
def can_modify_alias(self, alias, user_id=None):
services = yield self.store.get_app_services()
interested_services = [
s for s in services if s.is_interested_in_alias(alias.to_string())
]
for service in interested_services:
if user_id == service.sender:
# this user IS the app service
defer.returnValue(True)
return
defer.returnValue(len(interested_services) == 0)

View File

@@ -802,7 +802,7 @@ class FederationHandler(BaseHandler):
missing_auth = event_auth_events - seen_events
if missing_auth:
logger.debug("Missing auth: %s", missing_auth)
logger.info("Missing auth: %s", missing_auth)
# If we don't have all the auth events, we need to get them.
try:
remote_auth_chain = yield self.replication_layer.get_event_auth(
@@ -856,7 +856,43 @@ class FederationHandler(BaseHandler):
if different_auth and not event.internal_metadata.is_outlier():
# Do auth conflict res.
logger.debug("Different auth: %s", different_auth)
logger.info("Different auth: %s", different_auth)
different_events = yield defer.gatherResults(
[
self.store.get_event(
d,
allow_none=True,
allow_rejected=False,
)
for d in different_auth
if d in have_events and not have_events[d]
],
consumeErrors=True
)
if different_events:
local_view = dict(auth_events)
remote_view = dict(auth_events)
remote_view.update({
(d.type, d.state_key): d for d in different_events
})
new_state, prev_state = self.state_handler.resolve_events(
[local_view.values(), remote_view.values()],
event
)
auth_events.update(new_state)
current_state = set(e.event_id for e in auth_events.values())
different_auth = event_auth_events - current_state
context.current_state.update(auth_events)
context.state_group = None
if different_auth and not event.internal_metadata.is_outlier():
logger.info("Different auth after resolution: %s", different_auth)
# Only do auth resolution if we have something new to say.
# We can't prove an auth failure.

View File

@@ -16,12 +16,13 @@
from twisted.internet import defer
from ._base import BaseHandler
from synapse.api.errors import LoginError, Codes
from synapse.api.errors import LoginError, Codes, CodeMessageException
from synapse.http.client import SimpleHttpClient
from synapse.util.emailutils import EmailException
import synapse.util.emailutils as emailutils
import bcrypt
import json
import logging
logger = logging.getLogger(__name__)
@@ -96,16 +97,20 @@ class LoginHandler(BaseHandler):
@defer.inlineCallbacks
def _query_email(self, email):
httpCli = SimpleHttpClient(self.hs)
data = yield httpCli.get_json(
# TODO FIXME This should be configurable.
# XXX: ID servers need to use HTTPS
"http://%s%s" % (
"matrix.org:8090", "/_matrix/identity/api/v1/lookup"
),
{
'medium': 'email',
'address': email
}
)
defer.returnValue(data)
http_client = SimpleHttpClient(self.hs)
try:
data = yield http_client.get_json(
# TODO FIXME This should be configurable.
# XXX: ID servers need to use HTTPS
"http://%s%s" % (
"matrix.org:8090", "/_matrix/identity/api/v1/lookup"
),
{
'medium': 'email',
'address': email
}
)
defer.returnValue(data)
except CodeMessageException as e:
data = json.loads(e.msg)
defer.returnValue(data)

View File

@@ -385,17 +385,10 @@ class MessageHandler(BaseHandler):
as_event=True,
)
presence.append(member_presence)
except SynapseError as e:
if e.code == 404:
# FIXME: We are doing this as a warn since this gets hit a
# lot and spams the logs. Why is this happening?
logger.warn(
"Failed to get member presence of %r", m.user_id
)
else:
logger.exception(
"Failed to get member presence of %r", m.user_id
)
except SynapseError:
logger.exception(
"Failed to get member presence of %r", m.user_id
)
time_now = self.clock.time_msec()

View File

@@ -492,7 +492,7 @@ class PresenceHandler(BaseHandler):
user, domain, remoteusers
))
yield defer.DeferredList(deferreds)
yield defer.DeferredList(deferreds, consumeErrors=True)
def _start_polling_local(self, user, target_user):
target_localpart = target_user.localpart
@@ -548,7 +548,7 @@ class PresenceHandler(BaseHandler):
self._stop_polling_remote(user, domain, remoteusers)
)
return defer.DeferredList(deferreds)
return defer.DeferredList(deferreds, consumeErrors=True)
def _stop_polling_local(self, user, target_user):
for localpart in self._local_pushmap.keys():
@@ -729,7 +729,7 @@ class PresenceHandler(BaseHandler):
del self._remote_sendmap[user]
with PreserveLoggingContext():
yield defer.DeferredList(deferreds)
yield defer.DeferredList(deferreds, consumeErrors=True)
@defer.inlineCallbacks
def push_update_to_local_and_remote(self, observed_user, statuscache,
@@ -768,7 +768,7 @@ class PresenceHandler(BaseHandler):
)
)
yield defer.DeferredList(deferreds)
yield defer.DeferredList(deferreds, consumeErrors=True)
defer.returnValue((localusers, remote_domains))

View File

@@ -18,7 +18,8 @@ from twisted.internet import defer
from synapse.types import UserID
from synapse.api.errors import (
SynapseError, RegistrationError, InvalidCaptchaError
AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError,
CodeMessageException
)
from ._base import BaseHandler
import synapse.util.stringutils as stringutils
@@ -28,6 +29,7 @@ from synapse.http.client import CaptchaServerHttpClient
import base64
import bcrypt
import json
import logging
logger = logging.getLogger(__name__)
@@ -64,6 +66,8 @@ class RegistrationHandler(BaseHandler):
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()
yield self.check_user_id_is_valid(user_id)
token = self._generate_token(user_id)
yield self.store.register(
user_id=user_id,
@@ -82,6 +86,7 @@ class RegistrationHandler(BaseHandler):
localpart = self._generate_user_id()
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()
yield self.check_user_id_is_valid(user_id)
token = self._generate_token(user_id)
yield self.store.register(
@@ -121,6 +126,27 @@ class RegistrationHandler(BaseHandler):
defer.returnValue((user_id, token))
@defer.inlineCallbacks
def appservice_register(self, user_localpart, as_token):
user = UserID(user_localpart, self.hs.hostname)
user_id = user.to_string()
service = yield self.store.get_app_service_by_token(as_token)
if not service:
raise AuthError(403, "Invalid application service token.")
if not service.is_interested_in_user(user_id):
raise SynapseError(
400, "Invalid user localpart for this application service.",
errcode=Codes.EXCLUSIVE
)
token = self._generate_token(user_id)
yield self.store.register(
user_id=user_id,
token=token,
password_hash=""
)
self.distributor.fire("registered_user", user)
defer.returnValue((user_id, token))
@defer.inlineCallbacks
def check_recaptcha(self, ip, private_key, challenge, response):
"""Checks a recaptcha is correct."""
@@ -167,6 +193,20 @@ class RegistrationHandler(BaseHandler):
# XXX: This should be a deferred list, shouldn't it?
yield self._bind_threepid(c, user_id)
@defer.inlineCallbacks
def check_user_id_is_valid(self, user_id):
# valid user IDs must not clash with any user ID namespaces claimed by
# application services.
services = yield self.store.get_app_services()
interested_services = [
s for s in services if s.is_interested_in_user(user_id)
]
if len(interested_services) > 0:
raise SynapseError(
400, "This user ID is reserved by an application service.",
errcode=Codes.EXCLUSIVE
)
def _generate_token(self, user_id):
# urlsafe variant uses _ and - so use . as the separator and replace
# all =s with .s so http clients don't quote =s when it is used as
@@ -181,21 +221,26 @@ class RegistrationHandler(BaseHandler):
def _threepid_from_creds(self, creds):
# TODO: get this from the homeserver rather than creating a new one for
# each request
httpCli = SimpleHttpClient(self.hs)
http_client = SimpleHttpClient(self.hs)
# XXX: make this configurable!
trustedIdServers = ['matrix.org:8090', 'matrix.org']
if not creds['idServer'] in trustedIdServers:
logger.warn('%s is not a trusted ID server: rejecting 3pid ' +
'credentials', creds['idServer'])
defer.returnValue(None)
data = yield httpCli.get_json(
# XXX: This should be HTTPS
"http://%s%s" % (
creds['idServer'],
"/_matrix/identity/api/v1/3pid/getValidated3pid"
),
{'sid': creds['sid'], 'clientSecret': creds['clientSecret']}
)
data = {}
try:
data = yield http_client.get_json(
# XXX: This should be HTTPS
"http://%s%s" % (
creds['idServer'],
"/_matrix/identity/api/v1/3pid/getValidated3pid"
),
{'sid': creds['sid'], 'clientSecret': creds['clientSecret']}
)
except CodeMessageException as e:
data = json.loads(e.msg)
if 'medium' in data:
defer.returnValue(data)
@@ -205,19 +250,23 @@ class RegistrationHandler(BaseHandler):
def _bind_threepid(self, creds, mxid):
yield
logger.debug("binding threepid")
httpCli = SimpleHttpClient(self.hs)
data = yield httpCli.post_urlencoded_get_json(
# XXX: Change when ID servers are all HTTPS
"http://%s%s" % (
creds['idServer'], "/_matrix/identity/api/v1/3pid/bind"
),
{
'sid': creds['sid'],
'clientSecret': creds['clientSecret'],
'mxid': mxid,
}
)
logger.debug("bound threepid")
http_client = SimpleHttpClient(self.hs)
data = None
try:
data = yield http_client.post_urlencoded_get_json(
# XXX: Change when ID servers are all HTTPS
"http://%s%s" % (
creds['idServer'], "/_matrix/identity/api/v1/3pid/bind"
),
{
'sid': creds['sid'],
'clientSecret': creds['clientSecret'],
'mxid': mxid,
}
)
logger.debug("bound threepid")
except CodeMessageException as e:
data = json.loads(e.msg)
defer.returnValue(data)
@defer.inlineCallbacks

View File

@@ -181,7 +181,7 @@ class TypingNotificationHandler(BaseHandler):
},
))
yield defer.DeferredList(deferreds, consumeErrors=False)
yield defer.DeferredList(deferreds, consumeErrors=True)
@defer.inlineCallbacks
def _recv_edu(self, origin, content):

View File

@@ -13,8 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.http.agent_name import AGENT_NAME
from synapse.api.errors import CodeMessageException
from syutil.jsonutil import encode_canonical_json
from twisted.internet import defer, reactor
@@ -44,6 +43,7 @@ class SimpleHttpClient(object):
# BrowserLikePolicyForHTTPS which will do regular cert validation
# 'like a browser'
self.agent = Agent(reactor)
self.version_string = hs.version_string
@defer.inlineCallbacks
def post_urlencoded_get_json(self, uri, args={}):
@@ -55,7 +55,7 @@ class SimpleHttpClient(object):
uri.encode("ascii"),
headers=Headers({
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [AGENT_NAME],
b"User-Agent": [self.version_string],
}),
bodyProducer=FileBodyProducer(StringIO(query_bytes))
)
@@ -85,7 +85,7 @@ class SimpleHttpClient(object):
@defer.inlineCallbacks
def get_json(self, uri, args={}):
""" Get's some json from the given host and path
""" Gets some json from the given URI.
Args:
uri (str): The URI to request, not including query parameters
@@ -93,15 +93,13 @@ class SimpleHttpClient(object):
None.
**Note**: The value of each key is assumed to be an iterable
and *not* a string.
Returns:
Deferred: Succeeds when we get *any* HTTP response.
The result of the deferred is a tuple of `(code, response)`,
where `response` is a dict representing the decoded JSON body.
Deferred: Succeeds when we get *any* 2xx HTTP response, with the
HTTP body as JSON.
Raises:
On a non-2xx HTTP response. The response body will be used as the
error message.
"""
yield
if len(args):
query_bytes = urllib.urlencode(args, True)
uri = "%s?%s" % (uri, query_bytes)
@@ -110,13 +108,62 @@ class SimpleHttpClient(object):
"GET",
uri.encode("ascii"),
headers=Headers({
b"User-Agent": [AGENT_NAME],
b"User-Agent": [self.version_string],
})
)
body = yield readBody(response)
defer.returnValue(json.loads(body))
if 200 <= response.code < 300:
defer.returnValue(json.loads(body))
else:
# NB: This is explicitly not json.loads(body)'d because the contract
# of CodeMessageException is a *string* message. Callers can always
# load it into JSON if they want.
raise CodeMessageException(response.code, body)
@defer.inlineCallbacks
def put_json(self, uri, json_body, args={}):
""" Puts some json to the given URI.
Args:
uri (str): The URI to request, not including query parameters
json_body (dict): The JSON to put in the HTTP body,
args (dict): A dictionary used to create query strings, defaults to
None.
**Note**: The value of each key is assumed to be an iterable
and *not* a string.
Returns:
Deferred: Succeeds when we get *any* 2xx HTTP response, with the
HTTP body as JSON.
Raises:
On a non-2xx HTTP response.
"""
if len(args):
query_bytes = urllib.urlencode(args, True)
uri = "%s?%s" % (uri, query_bytes)
json_str = json.dumps(json_body)
response = yield self.agent.request(
"PUT",
uri.encode("ascii"),
headers=Headers({
b"User-Agent": [self.version_string],
"Content-Type": ["application/json"]
}),
bodyProducer=FileBodyProducer(StringIO(json_str))
)
body = yield readBody(response)
if 200 <= response.code < 300:
defer.returnValue(json.loads(body))
else:
# NB: This is explicitly not json.loads(body)'d because the contract
# of CodeMessageException is a *string* message. Callers can always
# load it into JSON if they want.
raise CodeMessageException(response.code, body)
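A hedged sketch of the calling pattern this contract implies, mirroring how the login and registration handlers above consume it (the URI is a placeholder):

from twisted.internet import defer
from synapse.api.errors import CodeMessageException
import json

@defer.inlineCallbacks
def get_json_or_error_body(http_client, uri):
    try:
        data = yield http_client.get_json(uri)
    except CodeMessageException as e:
        # e.msg is the raw HTTP body; decode it ourselves if we want JSON.
        data = json.loads(e.msg)
    defer.returnValue(data)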
class CaptchaServerHttpClient(SimpleHttpClient):
@@ -135,7 +182,7 @@ class CaptchaServerHttpClient(SimpleHttpClient):
bodyProducer=FileBodyProducer(StringIO(query_bytes)),
headers=Headers({
b"Content-Type": [b"application/x-www-form-urlencoded"],
b"User-Agent": [AGENT_NAME],
b"User-Agent": [self.version_string],
})
)

View File

@@ -20,7 +20,6 @@ from twisted.web.client import readBody, _AgentBase, _URI
from twisted.web.http_headers import Headers
from twisted.web._newclient import ResponseDone
from synapse.http.agent_name import AGENT_NAME
from synapse.http.endpoint import matrix_federation_endpoint
from synapse.util.async import sleep
from synapse.util.logcontext import PreserveLoggingContext
@@ -80,6 +79,7 @@ class MatrixFederationHttpClient(object):
self.server_name = hs.hostname
self.agent = MatrixFederationHttpAgent(reactor)
self.clock = hs.get_clock()
self.version_string = hs.version_string
@defer.inlineCallbacks
def _create_request(self, destination, method, path_bytes,
@@ -87,7 +87,7 @@ class MatrixFederationHttpClient(object):
query_bytes=b"", retry_on_dns_fail=True):
""" Creates and sends a request to the given url
"""
headers_dict[b"User-Agent"] = [AGENT_NAME]
headers_dict[b"User-Agent"] = [self.version_string]
headers_dict[b"Host"] = [destination]
url_bytes = urlparse.urlunparse(
@@ -144,16 +144,16 @@ class MatrixFederationHttpClient(object):
destination,
e
)
raise SynapseError(400, "Domain specified not found.")
raise
logger.warn(
"Sending request failed to %s: %s %s : %s",
"Sending request failed to %s: %s %s: %s - %s",
destination,
method,
url_bytes,
e
type(e).__name__,
_flatten_response_never_received(e),
)
_print_ex(e)
if retries_left:
yield sleep(2 ** (5 - retries_left))
@@ -447,14 +447,6 @@ def _readBodyToFile(response, stream, max_size):
return d
def _print_ex(e):
if hasattr(e, "reasons") and e.reasons:
for ex in e.reasons:
_print_ex(ex)
else:
logger.warn(e)
class _JsonProducer(object):
""" Used by the twisted http client to create the HTTP body from json
"""
@@ -474,3 +466,13 @@ class _JsonProducer(object):
def stopProducing(self):
pass
def _flatten_response_never_received(e):
if hasattr(e, "reasons"):
return ", ".join(
_flatten_response_never_received(f.value)
for f in e.reasons
)
else:
return "%s: %s" % (type(e).__name__, e.message,)

View File

@@ -14,7 +14,6 @@
# limitations under the License.
from synapse.http.agent_name import AGENT_NAME
from synapse.api.errors import (
cs_exception, SynapseError, CodeMessageException, UnrecognizedRequestError
)
@@ -74,6 +73,7 @@ class JsonResource(HttpServer, resource.Resource):
self.clock = hs.get_clock()
self.path_regexs = {}
self.version_string = hs.version_string
def register_path(self, method, path_pattern, callback):
self.path_regexs.setdefault(method, []).append(
@@ -189,9 +189,13 @@ class JsonResource(HttpServer, resource.Resource):
return
# TODO: Only enable CORS for the requests that need it.
respond_with_json(request, code, response_json_object, send_cors=True,
response_code_message=response_code_message,
pretty_print=self._request_user_agent_is_curl)
respond_with_json(
request, code, response_json_object,
send_cors=True,
response_code_message=response_code_message,
pretty_print=self._request_user_agent_is_curl,
version_string=self.version_string,
)
@staticmethod
def _request_user_agent_is_curl(request):
@@ -221,18 +225,23 @@ class RootRedirect(resource.Resource):
def respond_with_json(request, code, json_object, send_cors=False,
response_code_message=None, pretty_print=False):
response_code_message=None, pretty_print=False,
version_string=""):
if not pretty_print:
json_bytes = encode_pretty_printed_json(json_object)
else:
json_bytes = encode_canonical_json(json_object)
return respond_with_json_bytes(request, code, json_bytes, send_cors,
response_code_message=response_code_message)
return respond_with_json_bytes(
request, code, json_bytes,
send_cors=send_cors,
response_code_message=response_code_message,
version_string=version_string
)
def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
response_code_message=None):
version_string="", response_code_message=None):
"""Sends encoded JSON in response to the given request.
Args:
@@ -246,7 +255,7 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
request.setResponseCode(code, message=response_code_message)
request.setHeader(b"Content-Type", b"application/json")
request.setHeader(b"Server", AGENT_NAME)
request.setHeader(b"Server", version_string)
request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
if send_cors:

View File

@@ -50,6 +50,7 @@ class LocalKey(Resource):
def __init__(self, hs):
self.hs = hs
self.version_string = hs.version_string
self.response_body = encode_canonical_json(
self.response_json_object(hs.config)
)
@@ -82,7 +83,10 @@ class LocalKey(Resource):
return json_object
def render_GET(self, request):
return respond_with_json_bytes(request, 200, self.response_body)
return respond_with_json_bytes(
request, 200, self.response_body,
version_string=self.version_string
)
def getChild(self, name, request):
if name == '':

View File

@@ -99,6 +99,12 @@ class Notifier(object):
`extra_users` param.
"""
yield run_on_reactor()
# poke any interested application service.
self.hs.get_handlers().appservice_handler.notify_interested_services(
event
)
room_id = event.room_id
room_source = self.event_sources.sources["room"]
@@ -135,7 +141,8 @@ class Notifier(object):
with PreserveLoggingContext():
yield defer.DeferredList(
[notify(l).addErrback(eb) for l in listeners]
[notify(l).addErrback(eb) for l in listeners],
consumeErrors=True,
)
@defer.inlineCallbacks
@@ -203,7 +210,8 @@ class Notifier(object):
with PreserveLoggingContext():
yield defer.DeferredList(
[notify(l).addErrback(eb) for l in listeners]
[notify(l).addErrback(eb) for l in listeners],
consumeErrors=True,
)
@defer.inlineCallbacks

View File

@@ -237,7 +237,7 @@ class Pusher(object):
self.user_name, config, timeout=0)
self.last_token = chunk['end']
self.store.update_pusher_last_token(
self.user_name, self.pushkey, self.last_token)
self.app_id, self.pushkey, self.last_token)
logger.info("Pusher %s for user %s starting from token %s",
self.pushkey, self.user_name, self.last_token)
@@ -308,7 +308,7 @@ class Pusher(object):
self.backoff_delay = Pusher.INITIAL_BACKOFF
self.last_token = chunk['end']
self.store.update_pusher_last_token_and_success(
self.user_name,
self.app_id,
self.pushkey,
self.last_token,
self.clock.time_msec()
@@ -316,14 +316,14 @@ class Pusher(object):
if self.failing_since:
self.failing_since = None
self.store.update_pusher_failing_since(
self.user_name,
self.app_id,
self.pushkey,
self.failing_since)
else:
if not self.failing_since:
self.failing_since = self.clock.time_msec()
self.store.update_pusher_failing_since(
self.user_name,
self.app_id,
self.pushkey,
self.failing_since
)
@@ -340,14 +340,14 @@ class Pusher(object):
self.backoff_delay = Pusher.INITIAL_BACKOFF
self.last_token = chunk['end']
self.store.update_pusher_last_token(
self.user_name,
self.app_id,
self.pushkey,
self.last_token
)
self.failing_since = None
self.store.update_pusher_failing_since(
self.user_name,
self.app_id,
self.pushkey,
self.failing_since
)

View File

@@ -66,6 +66,7 @@ class HttpPusher(Pusher):
d = {
'notification': {
'id': event['event_id'],
'room_id': event['room_id'],
'type': event['type'],
'sender': event['user_id'],
'counts': { # -- we don't mark messages as read yet so

View File

@@ -5,7 +5,7 @@ logger = logging.getLogger(__name__)
REQUIREMENTS = {
"syutil>=0.0.3": ["syutil"],
"matrix_angular_sdk>=0.6.2": ["syweb>=0.6.2"],
"matrix_angular_sdk>=0.6.3": ["syweb>=0.6.3"],
"Twisted==14.0.2": ["twisted==14.0.2"],
"service_identity>=1.0.0": ["service_identity>=1.0.0"],
"pyopenssl>=0.14": ["OpenSSL>=0.14"],
@@ -24,6 +24,11 @@ def github_link(project, version, egg):
return "https://github.com/%s/tarball/%s/#egg=%s" % (project, version, egg)
DEPENDENCY_LINKS = [
github_link(
project="pyca/pynacl",
version="d4d3175589b892f6ea7c22f466e0e223853516fa",
egg="pynacl-0.3.0",
),
github_link(
project="matrix-org/syutil",
version="v0.0.3",
@@ -31,14 +36,9 @@ DEPENDENCY_LINKS = [
),
github_link(
project="matrix-org/matrix-angular-sdk",
version="v0.6.2",
egg="matrix_angular_sdk-0.6.2",
version="v0.6.3",
egg="matrix_angular_sdk-0.6.3",
),
github_link(
project="pyca/pynacl",
version="d4d3175589b892f6ea7c22f466e0e223853516fa",
egg="pynacl-0.3.0",
)
]
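For reference, github_link() above just expands a tarball URL that pip can follow as a dependency link; with the pynacl values it yields:

github_link(
    project="pyca/pynacl",
    version="d4d3175589b892f6ea7c22f466e0e223853516fa",
    egg="pynacl-0.3.0",
)
# -> "https://github.com/pyca/pynacl/tarball/d4d3175589b892f6ea7c22f466e0e223853516fa/#egg=pynacl-0.3.0"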

View File

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +12,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse import __version__
AGENT_NAME = ("Synapse/%s" % (__version__,)).encode("ascii")

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import register
from synapse.http.server import JsonResource
class AppServiceRestResource(JsonResource):
"""A resource for version 1 of the matrix application service API."""
def __init__(self, hs):
JsonResource.__init__(self, hs)
self.register_servlets(self, hs)
@staticmethod
def register_servlets(appservice_resource, hs):
register.register_servlets(hs, appservice_resource)

View File

@@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains base REST classes for constructing client v1 servlets.
"""
from synapse.http.servlet import RestServlet
from synapse.api.urls import APP_SERVICE_PREFIX
import re
import logging
logger = logging.getLogger(__name__)
def as_path_pattern(path_regex):
"""Creates a regex compiled appservice path with the correct path
prefix.
Args:
path_regex (str): The regex string to match. This should NOT have a ^
as this will be prefixed.
Returns:
SRE_Pattern
"""
return re.compile("^" + APP_SERVICE_PREFIX + path_regex)
class AppServiceRestServlet(RestServlet):
"""A base Synapse REST Servlet for the application services version 1 API.
"""
def __init__(self, hs):
self.hs = hs
self.handler = hs.get_handlers().appservice_handler

View File

@@ -0,0 +1,121 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains REST servlets to do with registration: /register"""
from twisted.internet import defer
from base import AppServiceRestServlet, as_path_pattern
from synapse.api.errors import CodeMessageException, SynapseError
from synapse.storage.appservice import ApplicationService
import json
import logging
logger = logging.getLogger(__name__)
class RegisterRestServlet(AppServiceRestServlet):
"""Handles AS registration with the home server.
"""
PATTERN = as_path_pattern("/register$")
@defer.inlineCallbacks
def on_POST(self, request):
params = _parse_json(request)
# sanity check required params
try:
as_token = params["as_token"]
as_url = params["url"]
if (not isinstance(as_token, basestring) or
not isinstance(as_url, basestring)):
raise ValueError
except (KeyError, ValueError):
raise SynapseError(
400, "Missed required keys: as_token(str) / url(str)."
)
namespaces = {
"users": [],
"rooms": [],
"aliases": []
}
if "namespaces" in params:
self._parse_namespace(namespaces, params["namespaces"], "users")
self._parse_namespace(namespaces, params["namespaces"], "rooms")
self._parse_namespace(namespaces, params["namespaces"], "aliases")
app_service = ApplicationService(as_token, as_url, namespaces)
app_service = yield self.handler.register(app_service)
hs_token = app_service.hs_token
defer.returnValue((200, {
"hs_token": hs_token
}))
def _parse_namespace(self, target_ns, origin_ns, ns):
if ns not in target_ns or ns not in origin_ns:
return # nothing to parse / map through to.
possible_regex_list = origin_ns[ns]
if not type(possible_regex_list) == list:
raise SynapseError(400, "Namespace %s isn't an array." % ns)
for regex in possible_regex_list:
if not isinstance(regex, basestring):
raise SynapseError(
400, "Regex '%s' isn't a string in namespace %s" %
(regex, ns)
)
target_ns[ns] = origin_ns[ns]
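An illustrative request body for RegisterRestServlet above, posted to the register path under whatever APP_SERVICE_PREFIX resolves to; the token, URL and regexes are made-up values, and only as_token and url are required:

# POST <APP_SERVICE_PREFIX>/register
example_registration = {
    "as_token": "some-secret-as-token",
    "url": "http://localhost:8009",
    "namespaces": {
        "users": ["@irc_.*:example\\.org"],
        "aliases": ["#irc_.*:example\\.org"],
        "rooms": [],
    },
}
# On success the response is (200, {"hs_token": "<generated hs token>"}).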
class UnregisterRestServlet(AppServiceRestServlet):
"""Handles AS registration with the home server.
"""
PATTERN = as_path_pattern("/unregister$")
def on_POST(self, request):
params = _parse_json(request)
try:
as_token = params["as_token"]
if not isinstance(as_token, basestring):
raise ValueError
except (KeyError, ValueError):
raise SynapseError(400, "Missing required key: as_token(str)")
yield self.handler.unregister(as_token)
raise CodeMessageException(500, "Not implemented")
def _parse_json(request):
try:
content = json.loads(request.content.read())
if type(content) != dict:
raise SynapseError(400, "Content must be a JSON object.")
return content
except ValueError:
raise SynapseError(400, "Content not JSON.")
def register_servlets(hs, http_server):
RegisterRestServlet(hs).register(http_server)
UnregisterRestServlet(hs).register(http_server)

View File

@@ -45,8 +45,6 @@ class ClientDirectoryServer(ClientV1RestServlet):
@defer.inlineCallbacks
def on_PUT(self, request, room_alias):
user, client = yield self.auth.get_user_by_req(request)
content = _parse_json(request)
if "room_id" not in content:
raise SynapseError(400, "Missing room_id key",
@@ -70,34 +68,70 @@ class ClientDirectoryServer(ClientV1RestServlet):
dir_handler = self.handlers.directory_handler
try:
user_id = user.to_string()
yield dir_handler.create_association(
user_id, room_alias, room_id, servers
# try to auth as a user
user, client = yield self.auth.get_user_by_req(request)
try:
user_id = user.to_string()
yield dir_handler.create_association(
user_id, room_alias, room_id, servers
)
yield dir_handler.send_room_alias_update_event(user_id, room_id)
except SynapseError as e:
raise e
except:
logger.exception("Failed to create association")
raise
except AuthError:
# try to auth as an application service
service = yield self.auth.get_appservice_by_req(request)
yield dir_handler.create_appservice_association(
service, room_alias, room_id, servers
)
logger.info(
"Application service at %s created alias %s pointing to %s",
service.url,
room_alias.to_string(),
room_id
)
yield dir_handler.send_room_alias_update_event(user_id, room_id)
except SynapseError as e:
raise e
except:
logger.exception("Failed to create association")
raise
defer.returnValue((200, {}))
@defer.inlineCallbacks
def on_DELETE(self, request, room_alias):
dir_handler = self.handlers.directory_handler
try:
service = yield self.auth.get_appservice_by_req(request)
room_alias = RoomAlias.from_string(room_alias)
yield dir_handler.delete_appservice_association(
service, room_alias
)
logger.info(
"Application service at %s deleted alias %s",
service.url,
room_alias.to_string()
)
defer.returnValue((200, {}))
except AuthError:
# fallback to default user behaviour if they aren't an AS
pass
user, client = yield self.auth.get_user_by_req(request)
is_admin = yield self.auth.is_server_admin(user)
if not is_admin:
raise AuthError(403, "You need to be a server admin")
dir_handler = self.handlers.directory_handler
room_alias = RoomAlias.from_string(room_alias)
yield dir_handler.delete_association(
user.to_string(), room_alias
)
logger.info(
"User %s deleted alias %s",
user.to_string(),
room_alias.to_string()
)
defer.returnValue((200, {}))

View File

@@ -110,7 +110,8 @@ class RegisterRestServlet(ClientV1RestServlet):
stages = {
LoginType.RECAPTCHA: self._do_recaptcha,
LoginType.PASSWORD: self._do_password,
LoginType.EMAIL_IDENTITY: self._do_email_identity
LoginType.EMAIL_IDENTITY: self._do_email_identity,
LoginType.APPLICATION_SERVICE: self._do_app_service
}
session_info = self._get_session_info(request, session)
@@ -276,6 +277,27 @@ class RegisterRestServlet(ClientV1RestServlet):
self._remove_session(session)
defer.returnValue(result)
@defer.inlineCallbacks
def _do_app_service(self, request, register_json, session):
if "access_token" not in request.args:
raise SynapseError(400, "Expected application service token.")
if "user" not in register_json:
raise SynapseError(400, "Expected 'user' key.")
as_token = request.args["access_token"][0]
user_localpart = register_json["user"].encode("utf-8")
handler = self.handlers.registration_handler
(user_id, token) = yield handler.appservice_register(
user_localpart, as_token
)
self._remove_session(session)
defer.returnValue({
"user_id": user_id,
"access_token": token,
"home_server": self.hs.hostname,
})
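A hedged illustration of the register call for this stage: the AS token goes in the access_token query parameter and the desired localpart in the JSON body. The login type string and hostname below are assumptions for the example.

# POST /_matrix/client/api/v1/register?access_token=some-secret-as-token
register_json = {
    "type": "m.login.application_service",  # assumed value of LoginType.APPLICATION_SERVICE
    "user": "irc_alice",
}
# Expected response shape:
# {"user_id": "@irc_alice:example.org",
#  "access_token": "<per-user token>",
#  "home_server": "example.org"}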
def _parse_json(request):
try:

View File

@@ -54,7 +54,7 @@ class BaseMediaResource(Resource):
try:
yield request_handler(self, request)
except CodeMessageException as e:
logger.exception(e)
logger.info("Responding with error: %r", e)
respond_with_json(
request, e.code, cs_exception(e), send_cors=True
)

View File

@@ -77,6 +77,7 @@ class BaseHomeServer(object):
'resource_for_content_repo',
'resource_for_server_key',
'resource_for_media_repository',
'resource_for_app_services',
'event_sources',
'ratelimiter',
'keyring',

View File

@@ -18,6 +18,7 @@ from twisted.internet import defer
from synapse.util.logutils import log_function
from synapse.util.async import run_on_reactor
from synapse.util.expiringcache import ExpiringCache
from synapse.api.constants import EventTypes
from synapse.api.errors import AuthError
from synapse.events.snapshot import EventContext
@@ -51,7 +52,6 @@ class _StateCacheEntry(object):
def __init__(self, state, state_group, ts):
self.state = state
self.state_group = state_group
self.ts = ts
class StateHandler(object):
@@ -69,12 +69,15 @@ class StateHandler(object):
def start_caching(self):
logger.debug("start_caching")
self._state_cache = {}
self._state_cache = ExpiringCache(
cache_name="state_cache",
clock=self.clock,
max_len=SIZE_OF_CACHE,
expiry_ms=EVICTION_TIMEOUT_SECONDS*1000,
reset_expiry_on_get=True,
)
def f():
self._prune_cache()
self.clock.looping_call(f, 5*1000)
self._state_cache.start()
@defer.inlineCallbacks
def get_current_state(self, room_id, event_type=None, state_key=""):
@@ -259,13 +262,37 @@ class StateHandler(object):
defer.returnValue((name, state, prev_states))
new_state, prev_states = self._resolve_events(
state_groups.values(), event_type, state_key
)
if self._state_cache is not None:
cache = _StateCacheEntry(
state=new_state,
state_group=None,
ts=self.clock.time_msec()
)
self._state_cache[frozenset(event_ids)] = cache
defer.returnValue((None, new_state, prev_states))
def resolve_events(self, state_sets, event):
if event.is_state():
return self._resolve_events(
state_sets, event.type, event.state_key
)
else:
return self._resolve_events(state_sets)
def _resolve_events(self, state_sets, event_type=None, state_key=""):
state = {}
for group, g_state in state_groups.items():
for s in g_state:
for st in state_sets:
for e in st:
state.setdefault(
(s.type, s.state_key),
(e.type, e.state_key),
{}
)[s.event_id] = s
)[e.event_id] = e
unconflicted_state = {
k: v.values()[0] for k, v in state.items()
@@ -302,16 +329,7 @@ class StateHandler(object):
new_state = unconflicted_state
new_state.update(resolved_state)
if self._state_cache is not None:
cache = _StateCacheEntry(
state=new_state,
state_group=None,
ts=self.clock.time_msec()
)
self._state_cache[frozenset(event_ids)] = cache
defer.returnValue((None, new_state, prev_states))
return new_state, prev_states
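To illustrate the shape of the state dict built above and the unconflicted/conflicted split that _resolve_events performs, a minimal fabricated example (event values are stand-in strings, not real event objects):

state = {
    ("m.room.name", ""): {"$ev1": "event1"},                            # single candidate
    ("m.room.power_levels", ""): {"$ev2": "event2", "$ev3": "event3"},  # conflict
}
unconflicted = {k: v.values()[0] for k, v in state.items() if len(v) == 1}
conflicted = {k: v.values() for k, v in state.items() if len(v) > 1}
# unconflicted == {("m.room.name", ""): "event1"}
# conflicted holds both power_levels candidates, to be settled by
# _resolve_state_events against the auth events.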
@log_function
def _resolve_state_events(self, conflicted_state, auth_events):
@@ -394,34 +412,3 @@ class StateHandler(object):
return -int(e.depth), hashlib.sha1(e.event_id).hexdigest()
return sorted(events, key=key_func)
def _prune_cache(self):
logger.debug(
"_prune_cache. before len: %d",
len(self._state_cache.keys())
)
now = self.clock.time_msec()
if len(self._state_cache.keys()) > SIZE_OF_CACHE:
sorted_entries = sorted(
self._state_cache.items(),
key=lambda (k, v): v.ts,
)
for k, _ in sorted_entries[SIZE_OF_CACHE:]:
self._state_cache.pop(k)
keys_to_delete = set()
for key, cache_entry in self._state_cache.items():
if now - cache_entry.ts > EVICTION_TIMEOUT_SECONDS*1000:
keys_to_delete.add(key)
for k in keys_to_delete:
self._state_cache.pop(k)
logger.debug(
"_prune_cache. after len: %d",
len(self._state_cache.keys())
)

View File

@@ -18,6 +18,7 @@ from twisted.internet import defer
from synapse.util.logutils import log_function
from synapse.api.constants import EventTypes
from .appservice import ApplicationServiceStore
from .directory import DirectoryStore
from .feedback import FeedbackStore
from .presence import PresenceStore
@@ -65,6 +66,7 @@ SCHEMAS = [
"event_signatures",
"pusher",
"media_repository",
"application_services",
"filtering",
"rejections",
]
@@ -72,7 +74,9 @@ SCHEMAS = [
# Remember to update this number every time an incompatible change is made to
# database schema files, so the users will be informed on server restarts.
SCHEMA_VERSION = 12
SCHEMA_VERSION = 13
dir_path = os.path.abspath(os.path.dirname(__file__))
class _RollbackButIsFineException(Exception):
@@ -86,6 +90,7 @@ class DataStore(RoomMemberStore, RoomStore,
RegistrationStore, StreamStore, ProfileStore, FeedbackStore,
PresenceStore, TransactionStore,
DirectoryStore, KeyStore, StateStore, SignatureStore,
ApplicationServiceStore,
EventFederationStore,
MediaRepositoryStore,
RejectionsStore,
@@ -580,7 +585,6 @@ def schema_path(schema):
A filesystem path pointing at a ".sql" file.
"""
dir_path = os.path.dirname(__file__)
schemaPath = os.path.join(dir_path, "schema", schema + ".sql")
return schemaPath
@@ -637,10 +641,13 @@ def prepare_database(db_conn):
c.executescript(sql_script)
db_conn.commit()
else:
logger.info("Database is at version %r", user_version)
else:
sql_script = "BEGIN TRANSACTION;\n"
for sql_loc in SCHEMAS:
logger.debug("Applying schema %r", sql_loc)
sql_script += read_schema(sql_loc)
sql_script += "\n"
sql_script += "COMMIT TRANSACTION;"

View File

@@ -0,0 +1,244 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from twisted.internet import defer
from synapse.api.errors import StoreError
from synapse.appservice import ApplicationService
from ._base import SQLBaseStore
logger = logging.getLogger(__name__)
class ApplicationServiceCache(object):
"""Caches ApplicationServices and provides utility functions on top.
This class is designed to be invoked on incoming events in order to avoid
hammering the database every time to extract a list of application service
regexes.
"""
def __init__(self):
self.services = []
class ApplicationServiceStore(SQLBaseStore):
def __init__(self, hs):
super(ApplicationServiceStore, self).__init__(hs)
self.cache = ApplicationServiceCache()
self.cache_defer = self._populate_cache()
@defer.inlineCallbacks
def unregister_app_service(self, token):
"""Unregisters this service.
This removes all AS-specific regexes and the base URL. The token is the
only thing preserved for future registration attempts.
"""
yield self.cache_defer # make sure the cache is ready
yield self.runInteraction(
"unregister_app_service",
self._unregister_app_service_txn,
token,
)
# update cache TODO: Should this be in the txn?
for service in self.cache.services:
if service.token == token:
service.url = None
service.namespaces = None
service.hs_token = None
def _unregister_app_service_txn(self, txn, token):
# kill the url to prevent pushes
txn.execute(
"UPDATE application_services SET url=NULL WHERE token=?",
(token,)
)
# cleanup regex
as_id = self._get_as_id_txn(txn, token)
if not as_id:
logger.warning(
"unregister_app_service_txn: Failed to find as_id for token=",
token
)
return False
txn.execute(
"DELETE FROM application_services_regex WHERE as_id=?",
(as_id,)
)
return True
@defer.inlineCallbacks
def update_app_service(self, service):
"""Update an application service, clobbering what was previously there.
Args:
service(ApplicationService): The updated service.
"""
yield self.cache_defer # make sure the cache is ready
# NB: There is no "insert" since we provide no public-facing API to
# allocate new ASes. It relies on the server admin inserting the AS
# token into the database manually.
if not service.token or not service.url:
raise StoreError(400, "Token and url must be specified.")
if not service.hs_token:
raise StoreError(500, "No HS token")
yield self.runInteraction(
"update_app_service",
self._update_app_service_txn,
service
)
# update cache TODO: Should this be in the txn?
for (index, cache_service) in enumerate(self.cache.services):
if service.token == cache_service.token:
self.cache.services[index] = service
logger.info("Updated: %s", service)
return
# new entry
self.cache.services.append(service)
logger.info("Updated(new): %s", service)
def _update_app_service_txn(self, txn, service):
as_id = self._get_as_id_txn(txn, service.token)
if not as_id:
logger.warning(
"update_app_service_txn: Failed to find as_id for token=",
service.token
)
return False
txn.execute(
"UPDATE application_services SET url=?, hs_token=?, sender=? "
"WHERE id=?",
(service.url, service.hs_token, service.sender, as_id,)
)
# cleanup regex
txn.execute(
"DELETE FROM application_services_regex WHERE as_id=?",
(as_id,)
)
for (ns_int, ns_str) in enumerate(ApplicationService.NS_LIST):
if ns_str in service.namespaces:
for regex in service.namespaces[ns_str]:
txn.execute(
"INSERT INTO application_services_regex("
"as_id, namespace, regex) values(?,?,?)",
(as_id, ns_int, regex)
)
return True
def _get_as_id_txn(self, txn, token):
cursor = txn.execute(
"SELECT id FROM application_services WHERE token=?",
(token,)
)
res = cursor.fetchone()
if res:
return res[0]
@defer.inlineCallbacks
def get_app_services(self):
yield self.cache_defer # make sure the cache is ready
defer.returnValue(self.cache.services)
@defer.inlineCallbacks
def get_app_service_by_token(self, token, from_cache=True):
"""Get the application service with the given token.
Args:
token (str): The application service token.
from_cache (bool): True to get this service from the cache, False to
check the database.
Raises:
StoreError if there was a problem retrieving this service.
"""
yield self.cache_defer # make sure the cache is ready
if from_cache:
for service in self.cache.services:
if service.token == token:
defer.returnValue(service)
return
defer.returnValue(None)
# TODO: The from_cache=False impl
# TODO: This should be JOINed with the application_services_regex table.
@defer.inlineCallbacks
def _populate_cache(self):
"""Populates the ApplicationServiceCache from the database."""
sql = ("SELECT * FROM application_services LEFT JOIN "
"application_services_regex ON application_services.id = "
"application_services_regex.as_id")
# SQL results in the form:
# [
# {
# 'regex': "something",
# 'url': "something",
# 'namespace': enum,
# 'as_id': 0,
# 'token': "something",
# 'hs_token': "otherthing",
# 'id': 0
# }
# ]
services = {}
results = yield self._execute_and_decode(sql)
for res in results:
as_token = res["token"]
if as_token not in services:
# add the service
services[as_token] = {
"url": res["url"],
"token": as_token,
"hs_token": res["hs_token"],
"sender": res["sender"],
"namespaces": {
ApplicationService.NS_USERS: [],
ApplicationService.NS_ALIASES: [],
ApplicationService.NS_ROOMS: []
}
}
# add the namespace regex if one exists
ns_int = res["namespace"]
if ns_int is None:
continue
try:
services[as_token]["namespaces"][
ApplicationService.NS_LIST[ns_int]].append(
res["regex"]
)
except IndexError:
logger.error("Bad namespace enum '%s'. %s", ns_int, res)
# TODO get last successful txn id f.e. service
for service in services.values():
logger.info("Found application service: %s", service)
self.cache.services.append(ApplicationService(
token=service["token"],
url=service["url"],
namespaces=service["namespaces"],
hs_token=service["hs_token"],
sender=service["sender"]
))

View File

@@ -126,27 +126,27 @@ class PusherStore(SQLBaseStore):
)
@defer.inlineCallbacks
def update_pusher_last_token(self, user_name, pushkey, last_token):
def update_pusher_last_token(self, app_id, pushkey, last_token):
yield self._simple_update_one(
PushersTable.table_name,
{'user_name': user_name, 'pushkey': pushkey},
{'app_id': app_id, 'pushkey': pushkey},
{'last_token': last_token}
)
@defer.inlineCallbacks
def update_pusher_last_token_and_success(self, user_name, pushkey,
def update_pusher_last_token_and_success(self, app_id, pushkey,
last_token, last_success):
yield self._simple_update_one(
PushersTable.table_name,
{'user_name': user_name, 'pushkey': pushkey},
{'app_id': app_id, 'pushkey': pushkey},
{'last_token': last_token, 'last_success': last_success}
)
@defer.inlineCallbacks
def update_pusher_failing_since(self, user_name, pushkey, failing_since):
def update_pusher_failing_since(self, app_id, pushkey, failing_since):
yield self._simple_update_one(
PushersTable.table_name,
{'user_name': user_name, 'pushkey': pushkey},
{'app_id': app_id, 'pushkey': pushkey},
{'failing_since': failing_since}
)

View File

@@ -288,7 +288,7 @@ class RoomMemberStore(SQLBaseStore):
deferreds = [self.get_rooms_for_user(u) for u in user_id_list]
results = yield defer.DeferredList(deferreds)
results = yield defer.DeferredList(deferreds, consumeErrors=True)
# A list of sets of strings giving room IDs for each user
room_id_lists = [set([r.room_id for r in result[1]]) for result in results]

View File

@@ -0,0 +1,34 @@
/* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
CREATE TABLE IF NOT EXISTS application_services(
id INTEGER PRIMARY KEY AUTOINCREMENT,
url TEXT,
token TEXT,
hs_token TEXT,
sender TEXT,
UNIQUE(token) ON CONFLICT ROLLBACK
);
CREATE TABLE IF NOT EXISTS application_services_regex(
id INTEGER PRIMARY KEY AUTOINCREMENT,
as_id INTEGER NOT NULL,
namespace INTEGER, /* enum[room_id|room_alias|user_id] */
regex TEXT,
FOREIGN KEY(as_id) REFERENCES application_services(id)
);

View File

@@ -0,0 +1,34 @@
/* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
CREATE TABLE IF NOT EXISTS application_services(
id INTEGER PRIMARY KEY AUTOINCREMENT,
url TEXT,
token TEXT,
hs_token TEXT,
sender TEXT,
UNIQUE(token) ON CONFLICT ROLLBACK
);
CREATE TABLE IF NOT EXISTS application_services_regex(
id INTEGER PRIMARY KEY AUTOINCREMENT,
as_id INTEGER NOT NULL,
namespace INTEGER, /* enum[room_id|room_alias|user_id] */
regex TEXT,
FOREIGN KEY(as_id) REFERENCES application_services(id)
);
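Since update_app_service above notes that AS rows are seeded by the server admin rather than through a public API, a hedged example of inserting a bare token against this schema (database path and token are made up):

import sqlite3

conn = sqlite3.connect("homeserver.db")  # assumed default sqlite path
conn.execute(
    "INSERT INTO application_services(url, token, hs_token, sender) "
    "VALUES (?, ?, ?, ?)",
    (None, "some-secret-as-token", None, None),
)
conn.commit()
# The application service then calls /register with this token, which fills
# in url, hs_token, sender and the namespace regex rows.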

View File

@@ -99,8 +99,6 @@ class Clock(object):
except:
pass
return res
given_deferred.addCallbacks(callback=sucess, errback=err)
timer = self.call_later(time_out, timed_out_fn)

View File

@@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
logger = logging.getLogger(__name__)
class ExpiringCache(object):
def __init__(self, cache_name, clock, max_len=0, expiry_ms=0,
reset_expiry_on_get=False):
"""
Args:
cache_name (str): Name of this cache, used for logging.
clock (Clock)
max_len (int): Max size of dict. If the dict grows larger than this
then the oldest items get automatically evicted. Default is 0,
which indicates there is no max limit.
expiry_ms (int): How long before an item is evicted from the cache
in milliseconds. Default is 0, indicating items never get
evicted based on time.
reset_expiry_on_get (bool): If true, will reset the expiry time for
an item on access. Defaults to False.
"""
self._cache_name = cache_name
self._clock = clock
self._max_len = max_len
self._expiry_ms = expiry_ms
self._reset_expiry_on_get = reset_expiry_on_get
self._cache = {}
def start(self):
if not self._expiry_ms:
# Don't bother starting the loop if things never expire
return
def f():
self._prune_cache()
self._clock.looping_call(f, self._expiry_ms/2)
def __setitem__(self, key, value):
now = self._clock.time_msec()
self._cache[key] = _CacheEntry(now, value)
# Evict if there are now too many items
if self._max_len and len(self._cache.keys()) > self._max_len:
sorted_entries = sorted(
self._cache.items(),
key=lambda (k, v): v.time,
)
for k, _ in sorted_entries[self._max_len:]:
self._cache.pop(k)
def __getitem__(self, key):
entry = self._cache[key]
if self._reset_expiry_on_get:
entry.time = self._clock.time_msec()
return entry.value
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def _prune_cache(self):
if not self._expiry_ms:
# zero expiry time means don't expire. This should never get called
# since we have this check in start too.
return
begin_length = len(self._cache)
now = self._clock.time_msec()
keys_to_delete = set()
for key, cache_entry in self._cache.items():
if now - cache_entry.time > self._expiry_ms:
keys_to_delete.add(key)
for k in keys_to_delete:
self._cache.pop(k)
logger.debug(
"[%s] _prune_cache before: %d, after len: %d",
self._cache_name, begin_length, len(self._cache.keys())
)
class _CacheEntry(object):
def __init__(self, time, value):
self.time = time
self.value = value
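A minimal usage sketch of ExpiringCache. The homeserver normally supplies its Clock; a tiny stub stands in here so the example is self-contained, and _prune_cache is invoked by hand where the looping call would normally fire.

class _StubClock(object):
    def __init__(self):
        self.now = 0

    def time_msec(self):
        return self.now

    def looping_call(self, f, interval_ms):
        pass  # the real Clock schedules f on the reactor

clock = _StubClock()
cache = ExpiringCache("example_cache", clock, max_len=100, expiry_ms=1000,
                      reset_expiry_on_get=True)
cache.start()

cache["group"] = {"state": "..."}
assert cache.get("group") == {"state": "..."}  # access also refreshes the expiry

clock.now += 2000        # move past expiry_ms
cache._prune_cache()     # normally run by the looping call started above
assert cache.get("group") is None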

synapse/util/retryutils.py Normal file
View File

@@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import CodeMessageException
import logging
logger = logging.getLogger(__name__)
class NotRetryingDestination(Exception):
def __init__(self, retry_last_ts, retry_interval, destination):
msg = "Not retrying server %s." % (destination,)
super(NotRetryingDestination, self).__init__(msg)
self.retry_last_ts = retry_last_ts
self.retry_interval = retry_interval
self.destination = destination
@defer.inlineCallbacks
def get_retry_limiter(destination, clock, store, **kwargs):
"""For a given destination check if we have previously failed to
send a request there and are waiting before retrying the destination.
If we are not ready to retry the destination, this will raise a
NotRetryingDestination exception. Otherwise, this will return a context manager
that will mark the destination as down if an exception is thrown (excluding
CodeMessageException with code < 500).
Example usage:
try:
limiter = yield get_retry_limiter(destination, clock, store)
with limiter:
response = yield do_request()
except NotRetryingDestination:
# We aren't ready to retry that destination.
raise
"""
retry_last_ts, retry_interval = (0, 0)
retry_timings = yield store.get_destination_retry_timings(
destination
)
if retry_timings:
retry_last_ts, retry_interval = (
retry_timings.retry_last_ts, retry_timings.retry_interval
)
now = int(clock.time_msec())
if retry_last_ts + retry_interval > now:
raise NotRetryingDestination(
retry_last_ts=retry_last_ts,
retry_interval=retry_interval,
destination=destination,
)
defer.returnValue(
RetryDestinationLimiter(
destination,
clock,
store,
retry_interval,
**kwargs
)
)
class RetryDestinationLimiter(object):
    def __init__(self, destination, clock, store, retry_interval,
                 min_retry_interval=5000, max_retry_interval=60 * 60 * 1000,
                 multiplier_retry_interval=2,):
        """Marks the destination as "down" if an exception is thrown in the
        context, except for CodeMessageException with code < 500.

        If no exception is raised, marks the destination as "up".

        Args:
            destination (str)
            clock (Clock)
            store (DataStore)
            retry_interval (int): The next retry interval taken from the
                database in milliseconds, or zero if the last request was
                successful.
            min_retry_interval (int): The minimum retry interval to use after
                a failed request, in milliseconds.
            max_retry_interval (int): The maximum retry interval to use after
                a failed request, in milliseconds.
            multiplier_retry_interval (int): The multiplier to use to increase
                the retry interval after a failed request.
        """
        self.clock = clock
        self.store = store
        self.destination = destination

        self.retry_interval = retry_interval
        self.min_retry_interval = min_retry_interval
        self.max_retry_interval = max_retry_interval
        self.multiplier_retry_interval = multiplier_retry_interval

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        def err(failure):
            logger.exception(
                "Failed to store set_destination_retry_timings: %s",
                failure.value
            )

        valid_err_code = False
        if exc_type is CodeMessageException:
            valid_err_code = 0 <= exc_val.code < 500

        if exc_type is None or valid_err_code:
            # We connected successfully.
            if not self.retry_interval:
                return

            retry_last_ts = 0
            self.retry_interval = 0
        else:
            # We couldn't connect.
            if self.retry_interval:
                self.retry_interval *= self.multiplier_retry_interval
                if self.retry_interval >= self.max_retry_interval:
                    self.retry_interval = self.max_retry_interval
            else:
                self.retry_interval = self.min_retry_interval

            retry_last_ts = int(self.clock.time_msec())

        self.store.set_destination_retry_timings(
            self.destination, retry_last_ts, self.retry_interval
        ).addErrback(err)
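As a rough illustration of the backoff schedule this limiter applies (a standalone sketch, not part of the diff; the helper function and the driver loop are assumptions), the retry interval starts at min_retry_interval after the first failure, is multiplied on each further failure, and is capped at max_retry_interval:

# Standalone sketch of the backoff schedule; the constants mirror the
# defaults above, but this helper is illustrative, not Synapse code.
MIN_RETRY_INTERVAL = 5000              # 5 seconds, in ms
MAX_RETRY_INTERVAL = 60 * 60 * 1000    # 1 hour, in ms
MULTIPLIER_RETRY_INTERVAL = 2


def next_retry_interval(current_interval_ms):
    """Interval to wait after one more failed request to a destination."""
    if current_interval_ms:
        return min(
            current_interval_ms * MULTIPLIER_RETRY_INTERVAL,
            MAX_RETRY_INTERVAL,
        )
    return MIN_RETRY_INTERVAL


if __name__ == "__main__":
    interval = 0  # zero means the last request succeeded
    for failure in range(1, 14):
        interval = next_retry_interval(interval)
        print("after failure %2d: wait %d ms" % (failure, interval))

A successful request resets the stored interval to zero, so the schedule starts again from the minimum after the destination recovers.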

tests/api/test_auth.py Normal file

@@ -0,0 +1,139 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
from twisted.internet import defer
from mock import Mock
from synapse.api.auth import Auth
from synapse.api.errors import AuthError
class AuthTestCase(unittest.TestCase):

    def setUp(self):
        self.state_handler = Mock()
        self.store = Mock()

        self.hs = Mock()
        self.hs.get_datastore = Mock(return_value=self.store)
        self.hs.get_state_handler = Mock(return_value=self.state_handler)
        self.auth = Auth(self.hs)

        self.test_user = "@foo:bar"
        self.test_token = "_test_token_"

    @defer.inlineCallbacks
    def test_get_user_by_req_user_valid_token(self):
        self.store.get_app_service_by_token = Mock(return_value=None)
        user_info = {
            "name": self.test_user,
            "device_id": "nothing",
            "token_id": "ditto",
            "admin": False
        }
        self.store.get_user_by_token = Mock(return_value=user_info)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        (user, info) = yield self.auth.get_user_by_req(request)
        self.assertEquals(user.to_string(), self.test_user)

    def test_get_user_by_req_user_bad_token(self):
        self.store.get_app_service_by_token = Mock(return_value=None)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        d = self.auth.get_user_by_req(request)
        self.failureResultOf(d, AuthError)

    def test_get_user_by_req_user_missing_token(self):
        self.store.get_app_service_by_token = Mock(return_value=None)
        user_info = {
            "name": self.test_user,
            "device_id": "nothing",
            "token_id": "ditto",
            "admin": False
        }
        self.store.get_user_by_token = Mock(return_value=user_info)

        request = Mock(args={})
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        d = self.auth.get_user_by_req(request)
        self.failureResultOf(d, AuthError)

    @defer.inlineCallbacks
    def test_get_user_by_req_appservice_valid_token(self):
        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
        self.store.get_app_service_by_token = Mock(return_value=app_service)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        (user, info) = yield self.auth.get_user_by_req(request)
        self.assertEquals(user.to_string(), self.test_user)

    def test_get_user_by_req_appservice_bad_token(self):
        self.store.get_app_service_by_token = Mock(return_value=None)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        d = self.auth.get_user_by_req(request)
        self.failureResultOf(d, AuthError)

    def test_get_user_by_req_appservice_missing_token(self):
        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
        self.store.get_app_service_by_token = Mock(return_value=app_service)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        d = self.auth.get_user_by_req(request)
        self.failureResultOf(d, AuthError)

    @defer.inlineCallbacks
    def test_get_user_by_req_appservice_valid_token_valid_user_id(self):
        masquerading_user_id = "@doppelganger:matrix.org"
        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
        app_service.is_interested_in_user = Mock(return_value=True)
        self.store.get_app_service_by_token = Mock(return_value=app_service)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.args["user_id"] = [masquerading_user_id]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        (user, info) = yield self.auth.get_user_by_req(request)
        self.assertEquals(user.to_string(), masquerading_user_id)

    def test_get_user_by_req_appservice_valid_token_bad_user_id(self):
        masquerading_user_id = "@doppelganger:matrix.org"
        app_service = Mock(token="foobar", url="a_url", sender=self.test_user)
        app_service.is_interested_in_user = Mock(return_value=False)
        self.store.get_app_service_by_token = Mock(return_value=app_service)
        self.store.get_user_by_token = Mock(return_value=None)

        request = Mock(args={})
        request.args["access_token"] = [self.test_token]
        request.args["user_id"] = [masquerading_user_id]
        request.requestHeaders.getRawHeaders = Mock(return_value=[""])
        d = self.auth.get_user_by_req(request)
        self.failureResultOf(d, AuthError)

View File

@@ -0,0 +1,14 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,170 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.appservice import ApplicationService
from mock import Mock, PropertyMock
from tests import unittest
class ApplicationServiceTestCase(unittest.TestCase):

    def setUp(self):
        self.service = ApplicationService(
            url="some_url",
            token="some_token",
            namespaces={
                ApplicationService.NS_USERS: [],
                ApplicationService.NS_ROOMS: [],
                ApplicationService.NS_ALIASES: []
            }
        )
        self.event = Mock(
            type="m.something", room_id="!foo:bar", sender="@someone:somewhere"
        )

    def test_regex_user_id_prefix_match(self):
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@irc_foobar:matrix.org"
        self.assertTrue(self.service.is_interested(self.event))

    def test_regex_user_id_prefix_no_match(self):
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@someone_else:matrix.org"
        self.assertFalse(self.service.is_interested(self.event))

    def test_regex_room_member_is_checked(self):
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@someone_else:matrix.org"
        self.event.type = "m.room.member"
        self.event.state_key = "@irc_foobar:matrix.org"
        self.assertTrue(self.service.is_interested(self.event))

    def test_regex_room_id_match(self):
        self.service.namespaces[ApplicationService.NS_ROOMS].append(
            "!some_prefix.*some_suffix:matrix.org"
        )
        self.event.room_id = "!some_prefixs0m3th1nGsome_suffix:matrix.org"
        self.assertTrue(self.service.is_interested(self.event))

    def test_regex_room_id_no_match(self):
        self.service.namespaces[ApplicationService.NS_ROOMS].append(
            "!some_prefix.*some_suffix:matrix.org"
        )
        self.event.room_id = "!XqBunHwQIXUiqCaoxq:matrix.org"
        self.assertFalse(self.service.is_interested(self.event))

    def test_regex_alias_match(self):
        self.service.namespaces[ApplicationService.NS_ALIASES].append(
            "#irc_.*:matrix.org"
        )
        self.assertTrue(self.service.is_interested(
            self.event,
            aliases_for_event=["#irc_foobar:matrix.org", "#athing:matrix.org"]
        ))

    def test_regex_alias_no_match(self):
        self.service.namespaces[ApplicationService.NS_ALIASES].append(
            "#irc_.*:matrix.org"
        )
        self.assertFalse(self.service.is_interested(
            self.event,
            aliases_for_event=["#xmpp_foobar:matrix.org", "#athing:matrix.org"]
        ))

    def test_regex_multiple_matches(self):
        self.service.namespaces[ApplicationService.NS_ALIASES].append(
            "#irc_.*:matrix.org"
        )
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@irc_foobar:matrix.org"
        self.assertTrue(self.service.is_interested(
            self.event,
            aliases_for_event=["#irc_barfoo:matrix.org"]
        ))

    def test_restrict_to_rooms(self):
        self.service.namespaces[ApplicationService.NS_ROOMS].append(
            "!flibble_.*:matrix.org"
        )
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@irc_foobar:matrix.org"
        self.event.room_id = "!wibblewoo:matrix.org"
        self.assertFalse(self.service.is_interested(
            self.event,
            restrict_to=ApplicationService.NS_ROOMS
        ))

    def test_restrict_to_aliases(self):
        self.service.namespaces[ApplicationService.NS_ALIASES].append(
            "#xmpp_.*:matrix.org"
        )
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@irc_foobar:matrix.org"
        self.assertFalse(self.service.is_interested(
            self.event,
            restrict_to=ApplicationService.NS_ALIASES,
            aliases_for_event=["#irc_barfoo:matrix.org"]
        ))

    def test_restrict_to_senders(self):
        self.service.namespaces[ApplicationService.NS_ALIASES].append(
            "#xmpp_.*:matrix.org"
        )
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        self.event.sender = "@xmpp_foobar:matrix.org"
        self.assertFalse(self.service.is_interested(
            self.event,
            restrict_to=ApplicationService.NS_USERS,
            aliases_for_event=["#xmpp_barfoo:matrix.org"]
        ))

    def test_member_list_match(self):
        self.service.namespaces[ApplicationService.NS_USERS].append(
            "@irc_.*"
        )
        join_list = [
            Mock(
                type="m.room.member", room_id="!foo:bar", sender="@alice:here",
                state_key="@alice:here"
            ),
            Mock(
                type="m.room.member", room_id="!foo:bar", sender="@irc_fo:here",
                state_key="@irc_fo:here"  # AS user
            ),
            Mock(
                type="m.room.member", room_id="!foo:bar", sender="@bob:here",
                state_key="@bob:here"
            )
        ]
        self.event.sender = "@xmpp_foobar:matrix.org"
        self.assertTrue(self.service.is_interested(
            event=self.event,
            member_list=join_list
        ))

View File

@@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from .. import unittest
from synapse.handlers.appservice import ApplicationServicesHandler
from mock import Mock
class AppServiceHandlerTestCase(unittest.TestCase):
    """ Tests the ApplicationServicesHandler. """

    def setUp(self):
        self.mock_store = Mock()
        self.mock_as_api = Mock()
        hs = Mock()
        hs.get_datastore = Mock(return_value=self.mock_store)
        self.handler = ApplicationServicesHandler(
            hs, self.mock_as_api
        )

    @defer.inlineCallbacks
    def test_notify_interested_services(self):
        interested_service = self._mkservice(is_interested=True)
        services = [
            self._mkservice(is_interested=False),
            interested_service,
            self._mkservice(is_interested=False)
        ]

        self.mock_store.get_app_services = Mock(return_value=services)
        self.mock_store.get_user_by_id = Mock(return_value=[])

        event = Mock(
            sender="@someone:anywhere",
            type="m.room.message",
            room_id="!foo:bar"
        )
        self.mock_as_api.push = Mock()
        yield self.handler.notify_interested_services(event)
        self.mock_as_api.push.assert_called_once_with(interested_service, event)

    @defer.inlineCallbacks
    def test_query_room_alias_exists(self):
        room_alias_str = "#foo:bar"
        room_alias = Mock()
        room_alias.to_string = Mock(return_value=room_alias_str)

        room_id = "!alpha:bet"
        servers = ["aperture"]
        interested_service = self._mkservice(is_interested=True)
        services = [
            self._mkservice(is_interested=False),
            interested_service,
            self._mkservice(is_interested=False)
        ]

        self.mock_store.get_app_services = Mock(return_value=services)
        self.mock_store.get_association_from_room_alias = Mock(
            return_value=Mock(room_id=room_id, servers=servers)
        )

        result = yield self.handler.query_room_alias_exists(room_alias)

        self.mock_as_api.query_alias.assert_called_once_with(
            interested_service,
            room_alias_str
        )
        self.assertEquals(result.room_id, room_id)
        self.assertEquals(result.servers, servers)

    def _mkservice(self, is_interested):
        service = Mock()
        service.is_interested = Mock(return_value=is_interested)
        service.token = "mock_service_token"
        service.url = "mock_service_url"
        return service

View File

@@ -61,6 +61,7 @@ class PresenceStateTestCase(unittest.TestCase):
         hs.handlers = JustPresenceHandlers(hs)

         self.datastore = hs.get_datastore()
+        self.datastore.get_app_service_by_token = Mock(return_value=None)

         def get_presence_list(*a, **kw):
             return defer.succeed([])
@@ -147,6 +148,7 @@ class PresenceListTestCase(unittest.TestCase):
         hs.handlers = JustPresenceHandlers(hs)

         self.datastore = hs.get_datastore()
+        self.datastore.get_app_service_by_token = Mock(return_value=None)

         def has_presence_state(user_localpart):
             return defer.succeed(
@@ -292,6 +294,7 @@ class PresenceEventStreamTestCase(unittest.TestCase):
         hs.handlers.room_member_handler.get_rooms_for_user = get_rooms_for_user

         self.mock_datastore = hs.get_datastore()
+        self.mock_datastore.get_app_service_by_token = Mock(return_value=None)

         def get_profile_displayname(user_id):
             return defer.succeed("Frank")

View File

@@ -56,6 +56,8 @@ class V2AlphaRestTestCase(unittest.TestCase):
             r.register_servlets(hs, self.mock_resource)

     def make_datastore_mock(self):
-        return Mock(spec=[
+        store = Mock(spec=[
             "insert_client_ip",
         ])
+        store.get_app_service_by_token = Mock(return_value=None)
+        return store

View File

@@ -0,0 +1,110 @@
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
from twisted.internet import defer
from synapse.appservice import ApplicationService
from synapse.server import HomeServer
from synapse.storage.appservice import ApplicationServiceStore
from mock import Mock
from tests.utils import SQLiteMemoryDbPool, MockClock
class ApplicationServiceStoreTestCase(unittest.TestCase):

    @defer.inlineCallbacks
    def setUp(self):
        db_pool = SQLiteMemoryDbPool()
        yield db_pool.prepare()
        hs = HomeServer(
            "test", db_pool=db_pool, clock=MockClock(), config=Mock()
        )
        self.as_token = "token1"
        db_pool.runQuery(
            "INSERT INTO application_services(token) VALUES(?)",
            (self.as_token,)
        )
        db_pool.runQuery(
            "INSERT INTO application_services(token) VALUES(?)", ("token2",)
        )
        db_pool.runQuery(
            "INSERT INTO application_services(token) VALUES(?)", ("token3",)
        )
        # must be done after inserts
        self.store = ApplicationServiceStore(hs)

    @defer.inlineCallbacks
    def test_update_and_retrieval_of_service(self):
        url = "https://matrix.org/appservices/foobar"
        hs_token = "hstok"
        user_regex = ["@foobar_.*:matrix.org"]
        alias_regex = ["#foobar_.*:matrix.org"]
        room_regex = []
        service = ApplicationService(
            url=url, hs_token=hs_token, token=self.as_token, namespaces={
                ApplicationService.NS_USERS: user_regex,
                ApplicationService.NS_ALIASES: alias_regex,
                ApplicationService.NS_ROOMS: room_regex
            })
        yield self.store.update_app_service(service)

        stored_service = yield self.store.get_app_service_by_token(
            self.as_token
        )
        self.assertEquals(stored_service.token, self.as_token)
        self.assertEquals(stored_service.url, url)
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_ALIASES],
            alias_regex
        )
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_ROOMS],
            room_regex
        )
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_USERS],
            user_regex
        )

    @defer.inlineCallbacks
    def test_retrieve_unknown_service_token(self):
        service = yield self.store.get_app_service_by_token("invalid_token")
        self.assertEquals(service, None)

    @defer.inlineCallbacks
    def test_retrieval_of_service(self):
        stored_service = yield self.store.get_app_service_by_token(
            self.as_token
        )
        self.assertEquals(stored_service.token, self.as_token)
        self.assertEquals(stored_service.url, None)
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_ALIASES],
            []
        )
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_ROOMS],
            []
        )
        self.assertEquals(
            stored_service.namespaces[ApplicationService.NS_USERS],
            []
        )

    @defer.inlineCallbacks
    def test_retrieval_of_all_services(self):
        services = yield self.store.get_app_services()
        self.assertEquals(len(services), 3)

View File

@@ -46,10 +46,16 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
     if datastore is None:
         db_pool = SQLiteMemoryDbPool()
         yield db_pool.prepare()
-        hs = HomeServer(name, db_pool=db_pool, config=config, **kargs)
+        hs = HomeServer(
+            name, db_pool=db_pool, config=config,
+            version_string="Synapse/tests",
+            **kargs
+        )
     else:
         hs = HomeServer(
-            name, db_pool=None, datastore=datastore, config=config, **kargs
+            name, db_pool=None, datastore=datastore, config=config,
+            version_string="Synapse/tests",
+            **kargs
         )

     defer.returnValue(hs)
defer.returnValue(hs)