Compare commits: experiment...travis/int (9 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | bc50cffa26 |  |
|  | 944d524f18 |  |
|  | a44c0a096f |  |
|  | 80527b568d |  |
|  | de8772a655 |  |
|  | e8690dec2e |  |
|  | 6c18cc4b50 |  |
|  | 6bcfbcea5c |  |
|  | a8452b8fb4 |  |
changelog.d/4190.misc (new file, 1 line)
@@ -0,0 +1 @@
+Add some diagnostics to the tests to detect logcontext problems
changelog.d/4209.misc (new file, 1 line)
@@ -0,0 +1 @@
+Fix logcontext leaks in EmailPusher and in tests
changelog.d/4214.misc (new file, 1 line)
@@ -0,0 +1 @@
+Ignore __pycache__ directories in the database schema folder
changelog.d/4220.feature (new file, 1 line)
@@ -0,0 +1 @@
+Rename login type m.login.cas to m.login.sso
changelog.d/4227.misc (new file, 1 line)
@@ -0,0 +1 @@
+Garbage-collect after each unit test to fix logcontext leaks
@@ -163,7 +163,7 @@ the logcontext was set, this will make things work out ok: provided
 It's all too easy to forget to ``yield``: for instance if we forgot that
 ``do_some_stuff`` returned a deferred, we might plough on regardless. This
 leads to a mess; it will probably work itself out eventually, but not before
-a load of stuff has been logged against the wrong content. (Normally, other
+a load of stuff has been logged against the wrong context. (Normally, other
 things will break, more obviously, if you forget to ``yield``, so this tends
 not to be a major problem in practice.)

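The hunk above is part of a passage describing what goes wrong when a Deferred-returning call is not ``yield``ed. Below is a minimal, standalone sketch of the broken and fixed patterns; it is not part of the changeset, and ``do_some_stuff`` is a hypothetical stand-in for real work:

```python
from twisted.internet import defer


def do_some_stuff():
    # Hypothetical stand-in for real work: returns a Deferred that some
    # other piece of code is expected to fire later.
    return defer.Deferred()


@defer.inlineCallbacks
def broken_handler():
    do_some_stuff()  # BUG: the returned Deferred is never yielded, so we
    yield None       # plough on while the work is still outstanding, and
                     # anything it logs later lands in whatever logcontext
                     # happens to be current at that point.


@defer.inlineCallbacks
def fixed_handler():
    yield do_some_stuff()  # wait for the work to finish before carrying on
```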
@@ -440,3 +440,59 @@ To conclude: I think this scheme would have worked equally well, with less
 danger of messing it up, and probably made some more esoteric code easier to
 write. But again — changing the conventions of the entire Synapse codebase is
 not a sensible option for the marginal improvement offered.
+
+
+A note on garbage-collection of Deferred chains
+-----------------------------------------------
+
+It turns out that our logcontext rules do not play nicely with Deferred
+chains which get orphaned and garbage-collected.
+
+Imagine we have some code that looks like this:
+
+.. code:: python
+
+    listener_queue = []
+
+    def on_something_interesting():
+        for d in listener_queue:
+            d.callback("foo")
+
+    @defer.inlineCallbacks
+    def await_something_interesting():
+        new_deferred = defer.Deferred()
+        listener_queue.append(new_deferred)
+
+        with PreserveLoggingContext():
+            yield new_deferred
+
+Obviously, the idea here is that we have a bunch of things which are waiting
+for an event. (It's just an example of the problem here, but a relatively
+common one.)
+
+Now let's imagine two further things happen. First of all, whatever was
+waiting for the interesting thing goes away. (Perhaps the request times out,
+or something *even more* interesting happens.)
+
+Secondly, let's suppose that we decide that the interesting thing is never
+going to happen, and we reset the listener queue:
+
+.. code:: python
+
+    def reset_listener_queue():
+        listener_queue.clear()
+
+So, both ends of the deferred chain have now dropped their references, and the
+deferred chain is now orphaned, and will be garbage-collected at some point.
+Note that ``await_something_interesting`` is a generator function, and when
+Python garbage-collects generator functions, it gives them a chance to clean
+up by making the ``yield`` raise a ``GeneratorExit`` exception. In our case,
+that means that the ``__exit__`` handler of ``PreserveLoggingContext`` will
+carefully restore the request context, but there is now nothing waiting for
+its return, so the request context is never cleared.
+
+To reiterate, this problem only arises when *both* ends of a deferred chain
+are dropped. Dropping the reference to a deferred you're supposed to be
+calling is probably bad practice, so this doesn't actually happen too much.
+Unfortunately, when it does happen, it will lead to leaked logcontexts which
+are incredibly hard to track down.
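The new doc section above hinges on the fact that garbage-collecting an orphaned generator raises ``GeneratorExit`` at the suspended ``yield``, so the ``__exit__`` of the surrounding context manager runs with nobody left to consume the result. Here is a minimal, plain-Python sketch of that mechanism; ``FakeContext`` is a stand-in and not Synapse's ``PreserveLoggingContext``:

```python
import gc


class FakeContext(object):
    # Stand-in for PreserveLoggingContext: just reports enter/exit.
    def __enter__(self):
        print("entered (context switched)")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Runs when GeneratorExit is raised at the suspended yield below.
        print("exited (context restored) due to %r" % (exc_type,))
        return False


def waiter():
    with FakeContext():
        yield  # suspended here, waiting for something that never happens


g = waiter()
next(g)       # run up to the yield; __enter__ has now been called
del g         # drop the only reference to the suspended generator...
gc.collect()  # ...and garbage collection closes it: GeneratorExit is raised
              # at the yield, so FakeContext.__exit__ runs even though nothing
              # is waiting for the generator any more.
```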
@@ -259,8 +259,10 @@ class DirectoryHandler(BaseHandler):
         servers = result["servers"]

         if not room_id:
-            raise NotFoundError(
+            raise SynapseError(
+                404,
                 "Room alias %s not found" % (room_alias.to_string(),),
+                Codes.NOT_FOUND
             )

         users = yield self.state.get_current_user_in_room(room_id)
@@ -300,8 +302,10 @@ class DirectoryHandler(BaseHandler):
                 "servers": result.servers,
             })
         else:
-            raise NotFoundError(
+            raise SynapseError(
+                404,
                 "Room alias %r not found" % (room_alias.to_string(),),
+                Codes.NOT_FOUND
             )

     @defer.inlineCallbacks
@@ -48,14 +48,13 @@ from synapse.crypto.event_signing import (
     compute_event_signature,
 )
 from synapse.events.validator import EventValidator
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.http.federation import (
     ReplicationCleanRoomRestServlet,
     ReplicationFederationSendEventsRestServlet,
 )
 from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import StateResolutionStore, resolve_events_with_store
-from synapse.types import UserID, create_requester, get_domain_from_id
+from synapse.types import UserID, get_domain_from_id
 from synapse.util import logcontext, unwrapFirstError
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
@@ -106,7 +105,6 @@ class FederationHandler(BaseHandler):

         self.hs = hs

-        self.clock = hs.get_clock()
         self.store = hs.get_datastore()  # type: synapse.storage.DataStore
         self.federation_client = hs.get_federation_client()
         self.state_handler = hs.get_state_handler()
@@ -1302,37 +1300,8 @@ class FederationHandler(BaseHandler):
         context = yield self.state_handler.compute_event_context(event)
         yield self.persist_events_and_notify([(event, context)])

-        sender = UserID.from_string(event.sender)
-        target = UserID.from_string(event.state_key)
-        if (sender.localpart == target.localpart):
-            run_as_background_process(
-                "_auto_accept_invite",
-                self._auto_accept_invite,
-                sender, target, event.room_id,
-            )
-
         defer.returnValue(event)

-    @defer.inlineCallbacks
-    def _auto_accept_invite(self, sender, target, room_id):
-        joined = False
-        for attempt in range(0, 10):
-            try:
-                yield self.hs.get_room_member_handler().update_membership(
-                    requester=create_requester(target.to_string()),
-                    target=target,
-                    room_id=room_id,
-                    action="join",
-                )
-                joined = True
-                break
-            except Exception:
-                # We're going to retry, but we should log the error
-                logger.exception("Error auto-accepting invite on attempt %d" % attempt)
-                yield self.clock.sleep(1)
-        if not joined:
-            logger.error("Giving up on trying to auto-accept invite: too many attempts")
-
     @defer.inlineCallbacks
     def do_remotely_reject_invite(self, target_hosts, room_id, user_id):
         origin, event = yield self._make_and_verify_event(
@@ -28,9 +28,8 @@ from twisted.internet import defer
 import synapse.server
 import synapse.types
 from synapse.api.constants import EventTypes, Membership
-from synapse.api.errors import AuthError, Codes, NotFoundError, SynapseError
-from synapse.types import RoomAlias, RoomID, UserID
-from synapse.util import logcontext
+from synapse.api.errors import AuthError, Codes, SynapseError
+from synapse.types import RoomID, UserID
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room, user_left_room

@@ -417,10 +416,6 @@ class RoomMemberHandler(object):
                 ret = yield self._remote_join(
                     requester, remote_room_hosts, room_id, target, content
                 )
-                logcontext.run_in_background(
-                    self._send_merged_user_invites,
-                    requester, room_id,
-                )
                 defer.returnValue(ret)

         elif effective_membership_state == Membership.LEAVE:
@@ -455,58 +450,8 @@ class RoomMemberHandler(object):
             prev_events_and_hashes=prev_events_and_hashes,
             content=content,
         )
-        if effective_membership_state == Membership.JOIN:
-            logcontext.run_in_background(
-                self._send_merged_user_invites,
-                requester, room_id,
-            )
         defer.returnValue(res)

-    @defer.inlineCallbacks
-    def _send_merged_user_invites(self, requester, room_id):
-        try:
-            profile_alias = "#_profile_%s:%s" % (
-                requester.user.localpart, self.hs.hostname,
-            )
-            profile_alias = RoomAlias.from_string(profile_alias)
-            try:
-                profile_room_id, remote_room_hosts = yield self.lookup_room_alias(
-                    profile_alias,
-                )
-            except NotFoundError:
-                logger.info(
-                    "Not sending merged invites as %s does not exists",
-                    profile_alias
-                )
-                return
-
-            linked_accounts = yield self.state_handler.get_current_state(
-                room_id=profile_room_id.to_string(),
-                event_type="m.linked_accounts",
-                state_key="",
-            )
-            if not linked_accounts or not linked_accounts.content['all_children']:
-                return
-            for child_id in linked_accounts.content['all_children']:
-                child = UserID.from_string(child_id)
-                if self.hs.is_mine(child) or child_id == requester.user.to_string():
-                    # TODO: Handle auto-invite for local users (not a priority)
-                    continue
-                try:
-                    yield self.update_membership(
-                        requester=requester,
-                        target=child,
-                        room_id=room_id,
-                        action="invite",
-                    )
-                except Exception:
-                    logger.exception("Failed to invite %s to %s", child_id, room_id)
-        except Exception:
-            logger.exception(
-                "Failed to send invites to children of %s in %s",
-                requester.user.to_string(), room_id,
-            )
-
     @defer.inlineCallbacks
     def send_membership_event(
         self,
@@ -633,7 +578,7 @@ class RoomMemberHandler(object):
         mapping = yield directory_handler.get_association(room_alias)

         if not mapping:
-            raise NotFoundError("No such room alias")
+            raise SynapseError(404, "No such room alias")

         room_id = mapping["room_id"]
         servers = mapping["servers"]
@@ -27,7 +27,7 @@ from twisted.web.client import PartialDownloadError

 from synapse.api.errors import Codes, LoginError, SynapseError
 from synapse.http.server import finish_request
-from synapse.http.servlet import parse_json_object_from_request
+from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.types import UserID
 from synapse.util.msisdn import phone_number_to_msisdn

@@ -83,6 +83,7 @@ class LoginRestServlet(ClientV1RestServlet):
     PATTERNS = client_path_patterns("/login$")
     SAML2_TYPE = "m.login.saml2"
     CAS_TYPE = "m.login.cas"
+    SSO_TYPE = "m.login.sso"
     TOKEN_TYPE = "m.login.token"
     JWT_TYPE = "m.login.jwt"

@@ -105,6 +106,10 @@ class LoginRestServlet(ClientV1RestServlet):
         if self.saml2_enabled:
             flows.append({"type": LoginRestServlet.SAML2_TYPE})
         if self.cas_enabled:
+            flows.append({"type": LoginRestServlet.SSO_TYPE})
+
+            # we advertise CAS for backwards compat, though MSC1721 renamed it
+            # to SSO.
             flows.append({"type": LoginRestServlet.CAS_TYPE})

             # While its valid for us to advertise this login type generally,
@@ -384,11 +389,11 @@ class SAML2RestServlet(ClientV1RestServlet):
         defer.returnValue((200, {"status": "not_authenticated"}))


-class CasRedirectServlet(ClientV1RestServlet):
-    PATTERNS = client_path_patterns("/login/cas/redirect", releases=())
+class CasRedirectServlet(RestServlet):
+    PATTERNS = client_path_patterns("/login/(cas|sso)/redirect")

     def __init__(self, hs):
-        super(CasRedirectServlet, self).__init__(hs)
+        super(CasRedirectServlet, self).__init__()
         self.cas_server_url = hs.config.cas_server_url.encode('ascii')
         self.cas_service_url = hs.config.cas_service_url.encode('ascii')

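The login servlet changes above advertise ``m.login.sso`` alongside the legacy ``m.login.cas`` flow, and the redirect servlet now matches both ``/login/sso/redirect`` and ``/login/cas/redirect``. The sketch below shows how a client might consume that; it is not part of the changeset, and the homeserver URL and the ``requests`` dependency are assumptions made for illustration:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # hypothetical homeserver


def pick_sso_redirect_path():
    """Prefer the MSC1721 m.login.sso flow, falling back to legacy m.login.cas."""
    flows = requests.get(HOMESERVER + "/_matrix/client/r0/login").json().get("flows", [])
    advertised = {flow.get("type") for flow in flows}

    if "m.login.sso" in advertised:
        return "/_matrix/client/r0/login/sso/redirect"
    if "m.login.cas" in advertised:
        return "/_matrix/client/r0/login/cas/redirect"
    return None  # no SSO-style login advertised


if __name__ == "__main__":
    print(pick_sso_redirect_path())
```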
@@ -16,6 +16,7 @@

 """ This module contains REST servlets to do with rooms: /rooms/<paths> """
 import logging
+import time

 from six.moves.urllib import parse as urlparse

@@ -246,6 +247,8 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
             allow_guest=True,
         )

+        time.sleep(240)
+
         try:
             content = parse_json_object_from_request(request)
         except Exception:
@@ -12,35 +12,30 @@
<h1>Log in with one of the following methods</h1>

<span id="feedback" style="color: #f00"></span>
<br/>
<br/>

<div id="loading">
<img src="spinner.gif" />
</div>

<div id="cas_flow" class="login_flow" style="display:none"
onclick="gotoCas(); return false;">
CAS Authentication: <button id="cas_button" style="margin: 10px">Log in</button>
<div id="sso_flow" class="login_flow" style="display:none">
Single-sign on:
<form id="sso_form" action="/_matrix/client/r0/login/sso/redirect" method="get">
<input id="sso_redirect_url" type="hidden" name="redirectUrl" value=""/>
<input type="submit" value="Log in"/>
</form>
</div>

<br/>
<div id="password_flow" class="login_flow" style="display:none">
Password Authentication:
<form onsubmit="matrixLogin.password_login(); return false;">
<input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
<br/>
<input id="password" size="32" type="password" placeholder="Password"/>
<br/>

<form id="password_form" class="login_flow" style="display:none"
onsubmit="matrixLogin.password_login(); return false;">
<div>
Password Authentication:<br/>

<div style="text-align: center">
<input id="user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
<br/>
<input id="password" size="32" type="password" placeholder="Password"/>
<br/>

<button type="submit" style="margin: 10px">Log in</button>
</div>
</div>
</form>
<input type="submit" value="Log in"/>
</form>
</div>

<div id="no_login_types" type="button" class="login_flow" style="display:none">
Log in currently unavailable.
@@ -1,7 +1,8 @@
 window.matrixLogin = {
-    endpoint: location.origin + "/_matrix/client/api/v1/login",
+    endpoint: location.origin + "/_matrix/client/r0/login",
     serverAcceptsPassword: false,
-    serverAcceptsCas: false
+    serverAcceptsCas: false,
+    serverAcceptsSso: false,
 };

 var submitPassword = function(user, pwd) {
@@ -40,12 +41,6 @@ var errorFunc = function(err) {
     }
 };

-var gotoCas = function() {
-    var this_page = window.location.origin + window.location.pathname;
-    var redirect_url = matrixLogin.endpoint + "/cas/redirect?redirectUrl=" + encodeURIComponent(this_page);
-    window.location.replace(redirect_url);
-}
-
 var setFeedbackString = function(text) {
     $("#feedback").text(text);
 };
@@ -53,12 +48,18 @@ var setFeedbackString = function(text) {
 var show_login = function() {
     $("#loading").hide();

+    var this_page = window.location.origin + window.location.pathname;
+    $("#sso_redirect_url").val(encodeURIComponent(this_page));
+
     if (matrixLogin.serverAcceptsPassword) {
-        $("#password_form").show();
+        $("#password_flow").show();
     }

-    if (matrixLogin.serverAcceptsCas) {
-        $("#cas_flow").show();
+    if (matrixLogin.serverAcceptsSso) {
+        $("#sso_flow").show();
+    } else if (matrixLogin.serverAcceptsCas) {
+        $("#sso_form").attr("action", "/_matrix/client/r0/login/cas/redirect");
+        $("#sso_flow").show();
     }

     if (!matrixLogin.serverAcceptsPassword && !matrixLogin.serverAcceptsCas) {
@@ -67,8 +68,8 @@ var show_login = function() {
 };

 var show_spinner = function() {
-    $("#password_form").hide();
-    $("#cas_flow").hide();
+    $("#password_flow").hide();
+    $("#sso_flow").hide();
     $("#no_login_types").hide();
     $("#loading").show();
 };
@@ -84,7 +85,10 @@ var fetch_info = function(cb) {
            matrixLogin.serverAcceptsCas = true;
            console.log("Server accepts CAS");
        }

        if ("m.login.sso" === flow.type) {
            matrixLogin.serverAcceptsSso = true;
            console.log("Server accepts SSO");
        }

        if ("m.login.password" === flow.type) {
            matrixLogin.serverAcceptsPassword = true;
            console.log("Server accepts password");
@@ -19,30 +19,23 @@ a:hover { color: #000; }
a:active { color: #000; }

input {
    width: 90%
}

textarea, input {
    font-family: inherit;
    font-size: inherit;
    margin: 5px;
}

.smallPrint {
    color: #888;
    font-size: 9pt ! important;
    font-style: italic ! important;
textbox, input[type="text"], input[type="password"] {
    width: 90%;
}

.g-recaptcha div {
    margin: auto;
form {
    text-align: center;
    margin: 10px 0 0 0;
}

.login_flow {
    width: 300px;
    text-align: left;
    padding: 10px;
    margin-bottom: 40px;
    display: inline-block;

    -webkit-border-radius: 10px;
    -moz-border-radius: 10px;
@@ -257,7 +257,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
                     module.run_create(cur, database_engine)
                     if not is_empty:
                         module.run_upgrade(cur, database_engine, config=config)
-                elif ext == ".pyc":
+                elif ext == ".pyc" or file_name == "__pycache__":
                     # Sometimes .pyc files turn up anyway even though we've
                     # disabled their generation; e.g. from distribution package
                     # installers. Silently skip it
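The schema-upgrade hunk above now skips ``__pycache__`` entries as well as stray ``.pyc`` files when walking the delta directories. A standalone sketch of that filtering follows; it is not Synapse's actual loader, and the directory path is hypothetical:

```python
import os


def applicable_delta_files(directory):
    """Yield schema delta files, ignoring __pycache__ directories and .pyc files."""
    for file_name in sorted(os.listdir(directory)):
        ext = os.path.splitext(file_name)[1]
        if file_name == "__pycache__" or ext == ".pyc":
            # byte-code caches can appear even when their generation is disabled,
            # e.g. when installed from a distribution package; skip them silently
            continue
        yield os.path.join(directory, file_name)


# Example usage with a hypothetical delta folder.
delta_dir = "schema/delta/52"
if os.path.isdir(delta_dir):
    for path in applicable_delta_files(delta_dir):
        print(path)
```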
@@ -63,6 +63,14 @@ class KeyringTestCase(unittest.TestCase):
         keys = self.mock_perspective_server.get_verify_keys()
         self.hs.config.perspectives = {self.mock_perspective_server.server_name: keys}

+    def assert_sentinel_context(self):
+        if LoggingContext.current_context() != LoggingContext.sentinel:
+            self.fail(
+                "Expected sentinel context but got %s" % (
+                    LoggingContext.current_context(),
+                )
+            )
+
     def check_context(self, _, expected):
         self.assertEquals(
             getattr(LoggingContext.current_context(), "request", None), expected
@@ -70,8 +78,6 @@ class KeyringTestCase(unittest.TestCase):

     @defer.inlineCallbacks
     def test_wait_for_previous_lookups(self):
-        sentinel_context = LoggingContext.current_context()
-
         kr = keyring.Keyring(self.hs)

         lookup_1_deferred = defer.Deferred()
@@ -99,8 +105,10 @@ class KeyringTestCase(unittest.TestCase):
             ["server1"], {"server1": lookup_2_deferred}
         )
         self.assertFalse(wait_2_deferred.called)
-        self.assertIs(LoggingContext.current_context(), sentinel_context)
+
+        # ... so we should have reset the LoggingContext.
+        self.assert_sentinel_context()

         wait_2_deferred.addBoth(self.check_context, "two")

         # let the first lookup complete (in the sentinel context)
@@ -198,8 +206,6 @@ class KeyringTestCase(unittest.TestCase):
         json1 = {}
         signedjson.sign.sign_json(json1, "server9", key1)

-        sentinel_context = LoggingContext.current_context()
-
         with LoggingContext("one") as context_one:
             context_one.request = "one"

@@ -213,7 +219,7 @@ class KeyringTestCase(unittest.TestCase):

             defer = kr.verify_json_for_server("server9", json1)
             self.assertFalse(defer.called)
-            self.assertIs(LoggingContext.current_context(), sentinel_context)
+            self.assert_sentinel_context()
             yield defer

             self.assertIs(LoggingContext.current_context(), context_one)
@@ -30,6 +30,7 @@ from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.filepath import MediaFilePaths
 from synapse.rest.media.v1.media_storage import MediaStorage
 from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
+from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.module_loader import load_module

 from tests import unittest
@@ -113,7 +114,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
             d = Deferred()
             d.addCallback(write_to)
             self.fetches.append((d, destination, path, args))
-            return d
+            return make_deferred_yieldable(d)

         client = Mock()
         client.get_file = get_file
@@ -13,7 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import gc
 import hashlib
 import hmac
 import logging
@@ -31,7 +31,7 @@ from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
 from synapse.server import HomeServer
 from synapse.types import UserID, create_requester
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.util.logcontext import LoggingContext, LoggingContextFilter

 from tests.server import get_clock, make_request, render, setup_test_homeserver
 from tests.utils import default_config
@@ -102,8 +102,16 @@ class TestCase(unittest.TestCase):
             # traceback when a unit test exits leaving things on the reactor.
             twisted.internet.base.DelayedCall.debug = True

-            old_level = logging.getLogger().level
+            # if we're not starting in the sentinel logcontext, then to be honest
+            # all future bets are off.
+            if LoggingContext.current_context() is not LoggingContext.sentinel:
+                self.fail(
+                    "Test starting with non-sentinel logging context %s" % (
+                        LoggingContext.current_context(),
+                    )
+                )
+
             old_level = logging.getLogger().level
             if old_level != level:

         @around(self)
@@ -115,6 +123,16 @@ class TestCase(unittest.TestCase):
                 logging.getLogger().setLevel(level)
             return orig()

+        @around(self)
+        def tearDown(orig):
+            ret = orig()
+            # force a GC to workaround problems with deferreds leaking logcontexts when
+            # they are GCed (see the logcontext docs)
+            gc.collect()
+            LoggingContext.set_current_context(LoggingContext.sentinel)
+
+            return ret
+
     def assertObjectHasAttributes(self, attrs, obj):
         """Asserts that the given object has each of the attributes given, and
         that the value of each matches according to assertEquals."""