From bff4a11b3ff2ea8784dd309fe0a9ef8925ef4202 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 2 Sep 2025 12:14:27 -0500 Subject: [PATCH 01/54] Re-introduce: Fix `LaterGauge` metrics to collect from all servers (#18791) Re-introduce: https://github.com/element-hq/synapse/pull/18751 that was reverted in https://github.com/element-hq/synapse/pull/18789 (explains why the PR was reverted in the first place). - Adds a `cleanup` pattern that cleans up metrics from each homeserver in the tests. Previously, the list of hooks built up until our CI machines couldn't operate properly, see https://github.com/element-hq/synapse/pull/18789 - Fix long-standing issue with `synapse_background_update_status` metrics only tracking the last database listed in the config (see https://github.com/element-hq/synapse/pull/18791#discussion_r2261706749) --- changelog.d/18791.misc | 1 + synapse/_scripts/generate_workers_map.py | 6 +- synapse/_scripts/synapse_port_db.py | 15 ++- synapse/federation/send_queue.py | 43 ++++--- synapse/federation/sender/__init__.py | 43 ++++--- synapse/handlers/presence.py | 28 ++-- synapse/http/request_metrics.py | 6 +- synapse/metrics/__init__.py | 128 ++++++++++++++----- synapse/notifier.py | 42 +++--- synapse/replication/tcp/handler.py | 28 ++-- synapse/replication/tcp/protocol.py | 20 ++- synapse/server.py | 36 +++++- synapse/storage/database.py | 8 +- synapse/storage/databases/__init__.py | 17 +++ synapse/storage/databases/main/roommember.py | 15 ++- synapse/util/ratelimitutils.py | 14 +- synapse/util/task_scheduler.py | 15 ++- tests/metrics/test_metrics.py | 100 ++++++++++++++- tests/replication/_base.py | 3 +- tests/server.py | 3 + 20 files changed, 435 insertions(+), 136 deletions(-) create mode 100644 changelog.d/18791.misc diff --git a/changelog.d/18791.misc b/changelog.d/18791.misc new file mode 100644 index 0000000000..6ecd498286 --- /dev/null +++ b/changelog.d/18791.misc @@ -0,0 +1 @@ +Fix `LaterGauge` metrics to collect from all servers. diff --git a/synapse/_scripts/generate_workers_map.py b/synapse/_scripts/generate_workers_map.py index 09feb8cf30..8878e364e2 100755 --- a/synapse/_scripts/generate_workers_map.py +++ b/synapse/_scripts/generate_workers_map.py @@ -153,9 +153,13 @@ def get_registered_paths_for_default( """ hs = MockHomeserver(base_config, worker_app) + # TODO We only do this to avoid an error, but don't need the database etc hs.setup() - return get_registered_paths_for_hs(hs) + registered_paths = get_registered_paths_for_hs(hs) + hs.cleanup() + + return registered_paths def elide_http_methods_if_unconflicting( diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 0f54cfc64a..a81db3cfbf 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -99,6 +99,7 @@ from synapse.storage.engines import create_engine from synapse.storage.prepare_database import prepare_database from synapse.types import ISynapseReactor from synapse.util import SYNAPSE_VERSION, Clock +from synapse.util.stringutils import random_string # Cast safety: Twisted does some naughty magic which replaces the # twisted.internet.reactor module with a Reactor instance at runtime. 
@@ -323,6 +324,7 @@ class MockHomeserver: self.config = config self.hostname = config.server.server_name self.version_string = SYNAPSE_VERSION + self.instance_id = random_string(5) def get_clock(self) -> Clock: return self.clock @@ -330,6 +332,9 @@ class MockHomeserver: def get_reactor(self) -> ISynapseReactor: return reactor + def get_instance_id(self) -> str: + return self.instance_id + def get_instance_name(self) -> str: return "master" @@ -685,7 +690,15 @@ class Porter: ) prepare_database(db_conn, engine, config=self.hs_config) # Type safety: ignore that we're using Mock homeservers here. - store = Store(DatabasePool(hs, db_config, engine), db_conn, hs) # type: ignore[arg-type] + store = Store( + DatabasePool( + hs, # type: ignore[arg-type] + db_config, + engine, + ), + db_conn, + hs, # type: ignore[arg-type] + ) db_conn.commit() return store diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 7f511d570c..2fdee9ac54 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -37,6 +37,7 @@ Events are replicated via a separate events stream. """ import logging +from enum import Enum from typing import ( TYPE_CHECKING, Dict, @@ -67,6 +68,25 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +class QueueNames(str, Enum): + PRESENCE_MAP = "presence_map" + KEYED_EDU = "keyed_edu" + KEYED_EDU_CHANGED = "keyed_edu_changed" + EDUS = "edus" + POS_TIME = "pos_time" + PRESENCE_DESTINATIONS = "presence_destinations" + + +queue_name_to_gauge_map: Dict[QueueNames, LaterGauge] = {} + +for queue_name in QueueNames: + queue_name_to_gauge_map[queue_name] = LaterGauge( + name=f"synapse_federation_send_queue_{queue_name.value}_size", + desc="", + labelnames=[SERVER_NAME_LABEL], + ) + + class FederationRemoteSendQueue(AbstractFederationSender): """A drop in replacement for FederationSender""" @@ -111,23 +131,16 @@ class FederationRemoteSendQueue(AbstractFederationSender): # we make a new function, so we need to make a new function so the inner # lambda binds to the queue rather than to the name of the queue which # changes. ARGH. 
- def register(name: str, queue: Sized) -> None: - LaterGauge( - name="synapse_federation_send_queue_%s_size" % (queue_name,), - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): len(queue)}, + def register(queue_name: QueueNames, queue: Sized) -> None: + queue_name_to_gauge_map[queue_name].register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): len(queue)}, ) - for queue_name in [ - "presence_map", - "keyed_edu", - "keyed_edu_changed", - "edus", - "pos_time", - "presence_destinations", - ]: - register(queue_name, getattr(self, queue_name)) + for queue_name in QueueNames: + queue = getattr(self, queue_name.value) + assert isinstance(queue, Sized) + register(queue_name, queue=queue) self.clock.looping_call(self._clear_queue, 30 * 1000) diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 8befbe3722..278a957331 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -199,6 +199,24 @@ sent_pdus_destination_dist_total = Counter( labelnames=[SERVER_NAME_LABEL], ) +transaction_queue_pending_destinations_gauge = LaterGauge( + name="synapse_federation_transaction_queue_pending_destinations", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + +transaction_queue_pending_pdus_gauge = LaterGauge( + name="synapse_federation_transaction_queue_pending_pdus", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + +transaction_queue_pending_edus_gauge = LaterGauge( + name="synapse_federation_transaction_queue_pending_edus", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + # Time (in s) to wait before trying to wake up destinations that have # catch-up outstanding. # Please note that rate limiting still applies, so while the loop is @@ -398,11 +416,9 @@ class FederationSender(AbstractFederationSender): # map from destination to PerDestinationQueue self._per_destination_queues: Dict[str, PerDestinationQueue] = {} - LaterGauge( - name="synapse_federation_transaction_queue_pending_destinations", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: { + transaction_queue_pending_destinations_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: { (self.server_name,): sum( 1 for d in self._per_destination_queues.values() @@ -410,22 +426,17 @@ class FederationSender(AbstractFederationSender): ) }, ) - - LaterGauge( - name="synapse_federation_transaction_queue_pending_pdus", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: { + transaction_queue_pending_pdus_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: { (self.server_name,): sum( d.pending_pdu_count() for d in self._per_destination_queues.values() ) }, ) - LaterGauge( - name="synapse_federation_transaction_queue_pending_edus", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: { + transaction_queue_pending_edus_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: { (self.server_name,): sum( d.pending_edu_count() for d in self._per_destination_queues.values() ) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index b253117498..d7de20f884 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -173,6 +173,18 @@ state_transition_counter = Counter( labelnames=["locality", "from", "to", SERVER_NAME_LABEL], ) +presence_user_to_current_state_size_gauge = LaterGauge( + name="synapse_handlers_presence_user_to_current_state_size", + desc="", + 
labelnames=[SERVER_NAME_LABEL], +) + +presence_wheel_timer_size_gauge = LaterGauge( + name="synapse_handlers_presence_wheel_timer_size", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + # If a user was last active in the last LAST_ACTIVE_GRANULARITY, consider them # "currently_active" LAST_ACTIVE_GRANULARITY = 60 * 1000 @@ -779,11 +791,9 @@ class PresenceHandler(BasePresenceHandler): EduTypes.PRESENCE, self.incoming_presence ) - LaterGauge( - name="synapse_handlers_presence_user_to_current_state_size", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): len(self.user_to_current_state)}, + presence_user_to_current_state_size_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): len(self.user_to_current_state)}, ) # The per-device presence state, maps user to devices to per-device presence state. @@ -882,11 +892,9 @@ class PresenceHandler(BasePresenceHandler): 60 * 1000, ) - LaterGauge( - name="synapse_handlers_presence_wheel_timer_size", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): len(self.wheel_timer)}, + presence_wheel_timer_size_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): len(self.wheel_timer)}, ) # Used to handle sending of presence to newly joined users/servers diff --git a/synapse/http/request_metrics.py b/synapse/http/request_metrics.py index a9b049f904..83f52edb7c 100644 --- a/synapse/http/request_metrics.py +++ b/synapse/http/request_metrics.py @@ -164,11 +164,13 @@ def _get_in_flight_counts() -> Mapping[Tuple[str, ...], int]: return counts -LaterGauge( +in_flight_requests = LaterGauge( name="synapse_http_server_in_flight_requests_count", desc="", labelnames=["method", "servlet", SERVER_NAME_LABEL], - caller=_get_in_flight_counts, +) +in_flight_requests.register_hook( + homeserver_instance_id=None, hook=_get_in_flight_counts ) diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 11e2551a16..5b291aa893 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -73,8 +73,6 @@ logger = logging.getLogger(__name__) METRICS_PREFIX = "/_synapse/metrics" -all_gauges: Dict[str, Collector] = {} - HAVE_PROC_SELF_STAT = os.path.exists("/proc/self/stat") SERVER_NAME_LABEL = "server_name" @@ -163,42 +161,110 @@ class LaterGauge(Collector): name: str desc: str labelnames: Optional[StrSequence] = attr.ib(hash=False) - # callback: should either return a value (if there are no labels for this metric), - # or dict mapping from a label tuple to a value - caller: Callable[ - [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] - ] + _instance_id_to_hook_map: Dict[ + Optional[str], # instance_id + Callable[ + [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]] + ], + ] = attr.ib(factory=dict, hash=False) + """ + Map from homeserver instance_id to a callback. Each callback should either return a + value (if there are no labels for this metric), or dict mapping from a label tuple + to a value. + + We use `instance_id` instead of `server_name` because it's possible to have multiple + workers running in the same process with the same `server_name`. + """ def collect(self) -> Iterable[Metric]: # The decision to add `SERVER_NAME_LABEL` is from the `LaterGauge` usage itself # (we don't enforce it here, one level up). 
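+        # We emit a single metric family; each homeserver's registered hook
+        # contributes its own (per-`server_name`) samples to it below.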
        g = GaugeMetricFamily(self.name, self.desc, labels=self.labelnames)  # type: ignore[missing-server-name-label]

-        try:
-            calls = self.caller()
-        except Exception:
-            logger.exception("Exception running callback for LaterGauge(%s)", self.name)
-            yield g
-            return
+        for homeserver_instance_id, hook in self._instance_id_to_hook_map.items():
+            try:
+                hook_result = hook()
+            except Exception:
+                logger.exception(
+                    "Exception running callback for LaterGauge(%s) for homeserver_instance_id=%s",
+                    self.name,
+                    homeserver_instance_id,
+                )
+                # Continue to return the rest of the metrics that aren't broken
+                continue

-        if isinstance(calls, (int, float)):
-            g.add_metric([], calls)
-        else:
-            for k, v in calls.items():
-                g.add_metric(k, v)
+            if isinstance(hook_result, (int, float)):
+                g.add_metric([], hook_result)
+            else:
+                for k, v in hook_result.items():
+                    g.add_metric(k, v)

         yield g

+    def register_hook(
+        self,
+        *,
+        homeserver_instance_id: Optional[str],
+        hook: Callable[
+            [], Union[Mapping[Tuple[str, ...], Union[int, float]], Union[int, float]]
+        ],
+    ) -> None:
+        """
+        Register a callback/hook that will be called to generate metric samples for
+        the gauge.
+
+        Args:
+            homeserver_instance_id: The unique ID for this Synapse process instance
+                (`hs.get_instance_id()`) that this hook is associated with. This can be
+                used later to look up all hooks associated with a given homeserver
+                instance in order to unregister them. This should only be omitted for
+                global hooks that work across all homeservers.
+            hook: A callback that should either return a value (if there are no
+                labels for this metric), or dict mapping from a label tuple to a value
+        """
+        # We shouldn't have multiple hooks registered for the same homeserver `instance_id`.
+        existing_hook = self._instance_id_to_hook_map.get(homeserver_instance_id)
+        assert existing_hook is None, (
+            f"LaterGauge(name={self.name}) hook already registered for homeserver_instance_id={homeserver_instance_id}. "
+            "This is likely a Synapse bug and you forgot to unregister the previous hooks for "
+            "the server (especially in tests)."
+        )
+
+        self._instance_id_to_hook_map[homeserver_instance_id] = hook
+
+    def unregister_hooks_for_homeserver_instance_id(
+        self, homeserver_instance_id: str
+    ) -> None:
+        """
+        Unregister all hooks associated with the given homeserver `instance_id`. This
+        should be called when a homeserver is shut down to avoid extra hooks sitting
+        around.
+
+        Args:
+            homeserver_instance_id: The unique ID for this Synapse process instance to
+                unregister hooks for (`hs.get_instance_id()`).
+        """
+        self._instance_id_to_hook_map.pop(homeserver_instance_id, None)
+
     def __attrs_post_init__(self) -> None:
-        self._register()
+        REGISTRY.register(self)
 
-    def _register(self) -> None:
-        if self.name in all_gauges.keys():
-            logger.warning("%s already registered, reregistering", self.name)
-            REGISTRY.unregister(all_gauges.pop(self.name))
-        REGISTRY.register(self)
-        all_gauges[self.name] = self
+        # We shouldn't have multiple metrics with the same name. Typically, metrics
+        # should be created globally, so you shouldn't be running into this; this check
+        # will catch any careless mistakes. The `REGISTRY.register(self)` call above
+        # will also raise an error if the metric already exists, but to make things
+        # explicit, we'll also check here.
+        existing_gauge = all_later_gauges_to_clean_up_on_shutdown.get(self.name)
+        assert existing_gauge is None, f"LaterGauge(name={self.name}) already exists."
+
+        # Keep track of the gauge so we can clean it up later.
+ all_later_gauges_to_clean_up_on_shutdown[self.name] = self + + +all_later_gauges_to_clean_up_on_shutdown: Dict[str, LaterGauge] = {} +""" +Track all `LaterGauge` instances so we can remove any associated hooks during homeserver +shutdown. +""" # `MetricsEntry` only makes sense when it is a `Protocol`, @@ -250,7 +316,7 @@ class InFlightGauge(Generic[MetricsEntry], Collector): # Protects access to _registrations self._lock = threading.Lock() - self._register_with_collector() + REGISTRY.register(self) def register( self, @@ -341,14 +407,6 @@ class InFlightGauge(Generic[MetricsEntry], Collector): gauge.add_metric(labels=key, value=getattr(metrics, name)) yield gauge - def _register_with_collector(self) -> None: - if self.name in all_gauges.keys(): - logger.warning("%s already registered, reregistering", self.name) - REGISTRY.unregister(all_gauges.pop(self.name)) - - REGISTRY.register(self) - all_gauges[self.name] = self - class GaugeHistogramMetricFamilyWithLabels(GaugeHistogramMetricFamily): """ diff --git a/synapse/notifier.py b/synapse/notifier.py index 448a715e2a..7782c9ca65 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -86,6 +86,24 @@ users_woken_by_stream_counter = Counter( labelnames=["stream", SERVER_NAME_LABEL], ) + +notifier_listeners_gauge = LaterGauge( + name="synapse_notifier_listeners", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + +notifier_rooms_gauge = LaterGauge( + name="synapse_notifier_rooms", + desc="", + labelnames=[SERVER_NAME_LABEL], +) +notifier_users_gauge = LaterGauge( + name="synapse_notifier_users", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + T = TypeVar("T") @@ -281,28 +299,20 @@ class Notifier: ) } - LaterGauge( - name="synapse_notifier_listeners", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=count_listeners, + notifier_listeners_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), hook=count_listeners ) - - LaterGauge( - name="synapse_notifier_rooms", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: { + notifier_rooms_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: { (self.server_name,): count( bool, list(self.room_to_user_streams.values()) ) }, ) - LaterGauge( - name="synapse_notifier_users", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): len(self.user_to_user_stream)}, + notifier_users_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): len(self.user_to_user_stream)}, ) def add_replication_callback(self, cb: Callable[[], None]) -> None: diff --git a/synapse/replication/tcp/handler.py b/synapse/replication/tcp/handler.py index 0f14c7e380..dd7e38dd78 100644 --- a/synapse/replication/tcp/handler.py +++ b/synapse/replication/tcp/handler.py @@ -106,6 +106,18 @@ user_ip_cache_counter = Counter( "synapse_replication_tcp_resource_user_ip_cache", "", labelnames=[SERVER_NAME_LABEL] ) +tcp_resource_total_connections_gauge = LaterGauge( + name="synapse_replication_tcp_resource_total_connections", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + +tcp_command_queue_gauge = LaterGauge( + name="synapse_replication_tcp_command_queue", + desc="Number of inbound RDATA/POSITION commands queued for processing", + labelnames=["stream_name", SERVER_NAME_LABEL], +) + # the type of the entries in _command_queues_by_stream _StreamCommandQueue = Deque[ @@ -243,11 +255,9 @@ class ReplicationCommandHandler: # outgoing replication commands to.) 
self._connections: List[IReplicationConnection] = [] - LaterGauge( - name="synapse_replication_tcp_resource_total_connections", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): len(self._connections)}, + tcp_resource_total_connections_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): len(self._connections)}, ) # When POSITION or RDATA commands arrive, we stick them in a queue and process @@ -266,11 +276,9 @@ class ReplicationCommandHandler: # from that connection. self._streams_by_connection: Dict[IReplicationConnection, Set[str]] = {} - LaterGauge( - name="synapse_replication_tcp_command_queue", - desc="Number of inbound RDATA/POSITION commands queued for processing", - labelnames=["stream_name", SERVER_NAME_LABEL], - caller=lambda: { + tcp_command_queue_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: { (stream_name, self.server_name): len(queue) for stream_name, queue in self._command_queues_by_stream.items() }, diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 969f0303e0..2ec25bf43d 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -527,7 +527,10 @@ pending_commands = LaterGauge( name="synapse_replication_tcp_protocol_pending_commands", desc="", labelnames=["name", SERVER_NAME_LABEL], - caller=lambda: { +) +pending_commands.register_hook( + homeserver_instance_id=None, + hook=lambda: { (p.name, p.server_name): len(p.pending_commands) for p in connected_connections }, ) @@ -544,7 +547,10 @@ transport_send_buffer = LaterGauge( name="synapse_replication_tcp_protocol_transport_send_buffer", desc="", labelnames=["name", SERVER_NAME_LABEL], - caller=lambda: { +) +transport_send_buffer.register_hook( + homeserver_instance_id=None, + hook=lambda: { (p.name, p.server_name): transport_buffer_size(p) for p in connected_connections }, ) @@ -571,7 +577,10 @@ tcp_transport_kernel_send_buffer = LaterGauge( name="synapse_replication_tcp_protocol_transport_kernel_send_buffer", desc="", labelnames=["name", SERVER_NAME_LABEL], - caller=lambda: { +) +tcp_transport_kernel_send_buffer.register_hook( + homeserver_instance_id=None, + hook=lambda: { (p.name, p.server_name): transport_kernel_read_buffer_size(p, False) for p in connected_connections }, @@ -582,7 +591,10 @@ tcp_transport_kernel_read_buffer = LaterGauge( name="synapse_replication_tcp_protocol_transport_kernel_read_buffer", desc="", labelnames=["name", SERVER_NAME_LABEL], - caller=lambda: { +) +tcp_transport_kernel_read_buffer.register_hook( + homeserver_instance_id=None, + hook=lambda: { (p.name, p.server_name): transport_kernel_read_buffer_size(p, True) for p in connected_connections }, diff --git a/synapse/server.py b/synapse/server.py index bf82f79bec..3eac271c90 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -129,7 +129,10 @@ from synapse.http.client import ( ) from synapse.http.matrixfederationclient import MatrixFederationHttpClient from synapse.media.media_repository import MediaRepository -from synapse.metrics import register_threadpool +from synapse.metrics import ( + all_later_gauges_to_clean_up_on_shutdown, + register_threadpool, +) from synapse.metrics.common_usage_metrics import CommonUsageMetricsManager from synapse.module_api import ModuleApi from synapse.module_api.callbacks import ModuleApiCallbacks @@ -369,6 +372,37 @@ class HomeServer(metaclass=abc.ABCMeta): if self.config.worker.run_background_tasks: 
            self.setup_background_tasks()
 
+    def __del__(self) -> None:
+        """
+        Called when the homeserver is garbage collected.
+
+        Make sure we actually do some clean-up, rather than leak data.
+        """
+        self.cleanup()
+
+    def cleanup(self) -> None:
+        """
+        WIP: Clean up any references to the homeserver and stop any related running
+        processes, timers, loops, replication streams, etc.
+
+        This should be called wherever you care about the HomeServer being completely
+        garbage collected, like in tests. It's not necessary to call if you plan to just
+        shut down the whole Python process anyway.
+
+        Can be called multiple times.
+        """
+        logger.info("Received cleanup request for %s.", self.hostname)
+
+        # TODO: Stop background processes, timers, loops, replication stream, etc.
+
+        # Clean up metrics associated with the homeserver
+        for later_gauge in all_later_gauges_to_clean_up_on_shutdown.values():
+            later_gauge.unregister_hooks_for_homeserver_instance_id(
+                self.get_instance_id()
+            )
+
+        logger.info("Cleanup complete for %s.", self.hostname)
+
     def start_listening(self) -> None:  # noqa: B027 (no-op by design)
         """Start the HTTP, manhole, metrics, etc listeners
 
diff --git a/synapse/storage/database.py b/synapse/storage/database.py
index f7aec16c96..cfec36e0fa 100644
--- a/synapse/storage/database.py
+++ b/synapse/storage/database.py
@@ -61,7 +61,7 @@ from synapse.logging.context import (
     current_context,
     make_deferred_yieldable,
 )
-from synapse.metrics import SERVER_NAME_LABEL, LaterGauge, register_threadpool
+from synapse.metrics import SERVER_NAME_LABEL, register_threadpool
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.background_updates import BackgroundUpdater
 from synapse.storage.engines import BaseDatabaseEngine, PostgresEngine, Sqlite3Engine
@@ -611,12 +611,6 @@ class DatabasePool:
         )
 
         self.updates = BackgroundUpdater(hs, self)
-        LaterGauge(
-            name="synapse_background_update_status",
-            desc="Background update status",
-            labelnames=[SERVER_NAME_LABEL],
-            caller=lambda: {(self.server_name,): self.updates.get_status()},
-        )
 
         self._previous_txn_total_time = 0.0
         self._current_txn_total_time = 0.0
diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py
index 6442ab6c7a..a4aba96686 100644
--- a/synapse/storage/databases/__init__.py
+++ b/synapse/storage/databases/__init__.py
@@ -22,6 +22,7 @@ import logging
 
 from typing import TYPE_CHECKING, Generic, List, Optional, Type, TypeVar
 
+from synapse.metrics import SERVER_NAME_LABEL, LaterGauge
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import DatabasePool, make_conn
 from synapse.storage.databases.main.events import PersistEventsStore
@@ -40,6 +41,13 @@ logger = logging.getLogger(__name__)
 
 DataStoreT = TypeVar("DataStoreT", bound=SQLBaseStore, covariant=True)
 
+background_update_status = LaterGauge(
+    name="synapse_background_update_status",
+    desc="Background update status",
+    labelnames=["database_name", SERVER_NAME_LABEL],
+)
+
+
 class Databases(Generic[DataStoreT]):
     """The various databases.
 
@@ -143,6 +151,15 @@ class Databases(Generic[DataStoreT]):
 
             db_conn.close()
 
+        # Track the background update status for each database
+        background_update_status.register_hook(
+            homeserver_instance_id=hs.get_instance_id(),
+            hook=lambda: {
+                (database.name(), server_name): database.updates.get_status()
+                for database in self.databases
+            },
+        )
+
         # Sanity check that we have actually configured all the required stores.
if not main: raise Exception("No 'main' database configured") diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index 67e7e99baa..9db2e14a06 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -84,6 +84,13 @@ _CURRENT_STATE_MEMBERSHIP_UPDATE_NAME = "current_state_events_membership" _POPULATE_PARTICIPANT_BG_UPDATE_BATCH_SIZE = 1000 +federation_known_servers_gauge = LaterGauge( + name="synapse_federation_known_servers", + desc="", + labelnames=[SERVER_NAME_LABEL], +) + + @attr.s(frozen=True, slots=True, auto_attribs=True) class EventIdMembership: """Returned by `get_membership_from_event_ids`""" @@ -116,11 +123,9 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): 1, self._count_known_servers, ) - LaterGauge( - name="synapse_federation_known_servers", - desc="", - labelnames=[SERVER_NAME_LABEL], - caller=lambda: {(self.server_name,): self._known_servers_count}, + federation_known_servers_gauge.register_hook( + homeserver_instance_id=hs.get_instance_id(), + hook=lambda: {(self.server_name,): self._known_servers_count}, ) @wrap_as_background_process("_count_known_servers") diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index f5e592d80e..88edc07161 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -131,22 +131,28 @@ def _get_counts_from_rate_limiter_instance( # We track the number of affected hosts per time-period so we can # differentiate one really noisy homeserver from a general # ratelimit tuning problem across the federation. -LaterGauge( +sleep_affected_hosts_gauge = LaterGauge( name="synapse_rate_limit_sleep_affected_hosts", desc="Number of hosts that had requests put to sleep", labelnames=["rate_limiter_name", SERVER_NAME_LABEL], - caller=lambda: _get_counts_from_rate_limiter_instance( +) +sleep_affected_hosts_gauge.register_hook( + homeserver_instance_id=None, + hook=lambda: _get_counts_from_rate_limiter_instance( lambda rate_limiter_instance: sum( ratelimiter.should_sleep() for ratelimiter in rate_limiter_instance.ratelimiters.values() ) ), ) -LaterGauge( +reject_affected_hosts_gauge = LaterGauge( name="synapse_rate_limit_reject_affected_hosts", desc="Number of hosts that had requests rejected", labelnames=["rate_limiter_name", SERVER_NAME_LABEL], - caller=lambda: _get_counts_from_rate_limiter_instance( +) +reject_affected_hosts_gauge.register_hook( + homeserver_instance_id=None, + hook=lambda: _get_counts_from_rate_limiter_instance( lambda rate_limiter_instance: sum( ratelimiter.should_reject() for ratelimiter in rate_limiter_instance.ratelimiters.values() diff --git a/synapse/util/task_scheduler.py b/synapse/util/task_scheduler.py index fdcacdf128..0539989320 100644 --- a/synapse/util/task_scheduler.py +++ b/synapse/util/task_scheduler.py @@ -44,6 +44,13 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) +running_tasks_gauge = LaterGauge( + name="synapse_scheduler_running_tasks", + desc="The number of concurrent running tasks handled by the TaskScheduler", + labelnames=[SERVER_NAME_LABEL], +) + + class TaskScheduler: """ This is a simple task scheduler designed for resumable tasks. 
 Normally,
@@ -130,11 +137,9 @@ class TaskScheduler:
                 TaskScheduler.SCHEDULE_INTERVAL_MS,
             )
 
-        LaterGauge(
-            name="synapse_scheduler_running_tasks",
-            desc="The number of concurrent running tasks handled by the TaskScheduler",
-            labelnames=[SERVER_NAME_LABEL],
-            caller=lambda: {(self.server_name,): len(self._running_tasks)},
+        running_tasks_gauge.register_hook(
+            homeserver_instance_id=hs.get_instance_id(),
+            hook=lambda: {(self.server_name,): len(self._running_tasks)},
         )
 
     def register_action(
diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py
index 61874564a6..832e991730 100644
--- a/tests/metrics/test_metrics.py
+++ b/tests/metrics/test_metrics.py
@@ -18,11 +18,18 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-from typing import Dict, Protocol, Tuple
+from typing import Dict, NoReturn, Protocol, Tuple
 
 from prometheus_client.core import Sample
 
-from synapse.metrics import REGISTRY, InFlightGauge, generate_latest
+from synapse.metrics import (
+    REGISTRY,
+    SERVER_NAME_LABEL,
+    InFlightGauge,
+    LaterGauge,
+    all_later_gauges_to_clean_up_on_shutdown,
+    generate_latest,
+)
 from synapse.util.caches.deferred_cache import DeferredCache
 
 from tests import unittest
@@ -285,6 +292,95 @@ class CacheMetricsTests(unittest.HomeserverTestCase):
         self.assertEqual(hs2_cache_max_size_metric_value, "777.0")
 
 
+class LaterGaugeTests(unittest.HomeserverTestCase):
+    def setUp(self) -> None:
+        super().setUp()
+        self.later_gauge = LaterGauge(
+            name="foo",
+            desc="",
+            labelnames=[SERVER_NAME_LABEL],
+        )
+
+    def tearDown(self) -> None:
+        super().tearDown()
+
+        REGISTRY.unregister(self.later_gauge)
+        all_later_gauges_to_clean_up_on_shutdown.pop(self.later_gauge.name, None)
+
+    def test_later_gauge_multiple_servers(self) -> None:
+        """
+        Test that LaterGauge metrics are reported correctly across multiple servers. We
+        will have a metrics entry for each homeserver, labeled with the `server_name`
+        label.
+        """
+        self.later_gauge.register_hook(
+            homeserver_instance_id="123", hook=lambda: {("hs1",): 1}
+        )
+        self.later_gauge.register_hook(
+            homeserver_instance_id="456", hook=lambda: {("hs2",): 2}
+        )
+
+        metrics_map = get_latest_metrics()
+
+        # Find the metrics from both homeservers
+        hs1_metric = 'foo{server_name="hs1"}'
+        hs1_metric_value = metrics_map.get(hs1_metric)
+        self.assertIsNotNone(
+            hs1_metric_value,
+            f"Missing metric {hs1_metric} in metrics {metrics_map}",
+        )
+        self.assertEqual(hs1_metric_value, "1.0")
+
+        hs2_metric = 'foo{server_name="hs2"}'
+        hs2_metric_value = metrics_map.get(hs2_metric)
+        self.assertIsNotNone(
+            hs2_metric_value,
+            f"Missing metric {hs2_metric} in metrics {metrics_map}",
+        )
+        self.assertEqual(hs2_metric_value, "2.0")
+
+    def test_later_gauge_hook_exception(self) -> None:
+        """
+        Test that LaterGauge metrics are collected across multiple servers even if one
+        of the hooks throws an exception.
+        """
+
+        def raise_exception() -> NoReturn:
+            raise Exception("fake error generating data")
+
+        # Make the hook for hs1 throw an exception
+        self.later_gauge.register_hook(
+            homeserver_instance_id="123", hook=raise_exception
+        )
+        # Metrics from hs2 still work fine
+        self.later_gauge.register_hook(
+            homeserver_instance_id="456", hook=lambda: {("hs2",): 2}
+        )
+
+        metrics_map = get_latest_metrics()
+
+        # Since we encountered an exception while trying to collect metrics from hs1, we
+        # don't expect to see it here.
+        hs1_metric = 'foo{server_name="hs1"}'
+        hs1_metric_value = metrics_map.get(hs1_metric)
+        self.assertIsNone(
+            hs1_metric_value,
+            (
+                "Since we encountered an exception while trying to collect metrics from hs1, "
+                f"we don't expect to see it in the metrics_map {metrics_map}"
+            ),
+        )
+
+        # We should still see metrics from hs2 though
+        hs2_metric = 'foo{server_name="hs2"}'
+        hs2_metric_value = metrics_map.get(hs2_metric)
+        self.assertIsNotNone(
+            hs2_metric_value,
+            f"Missing metric {hs2_metric} in metrics {metrics_map}",
+        )
+        self.assertEqual(hs2_metric_value, "2.0")
+
+
 def get_latest_metrics() -> Dict[str, str]:
     """
     Collect the latest metrics from the registry and parse them into an easy to use
     map.
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index 453eb7750b..e756021937 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -32,7 +32,6 @@ from synapse.config.workers import InstanceTcpLocationConfig, InstanceUnixLocati
 from synapse.http.site import SynapseRequest, SynapseSite
 from synapse.replication.http import ReplicationRestResource
 from synapse.replication.tcp.client import ReplicationDataHandler
-from synapse.replication.tcp.handler import ReplicationCommandHandler
 from synapse.replication.tcp.protocol import (
     ClientReplicationStreamProtocol,
     ServerReplicationStreamProtocol,
@@ -97,7 +96,7 @@ class BaseStreamTestCase(unittest.HomeserverTestCase):
         self.test_handler = self._build_replication_data_handler()
         self.worker_hs._replication_data_handler = self.test_handler  # type: ignore[attr-defined]
 
-        repl_handler = ReplicationCommandHandler(self.worker_hs)
+        repl_handler = self.worker_hs.get_replication_command_handler()
         self.client = ClientReplicationStreamProtocol(
             self.worker_hs,
             "client",
diff --git a/tests/server.py b/tests/server.py
index 3a81a4c6d9..ebff8b04b3 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -1145,6 +1145,9 @@ def setup_test_homeserver(
         reactor=reactor,
     )
 
+    # Register the cleanup hook
+    cleanup_func(hs.cleanup)
+
     # Install @cache_in_self attributes
     for key, val in kwargs.items():
         setattr(hs, "_" + key, val)

From b2997a8f20d1999ec9f73c3d4a5fb210d4294176 Mon Sep 17 00:00:00 2001
From: Eric Eastwood
Date: Tue, 2 Sep 2025 13:34:47 -0500
Subject: [PATCH 02/54] Suppress bulk "Applying schema" log noise when running
 Complement tests (#18878)

If Synapse is under test (`SYNAPSE_LOG_TESTING` is set), we don't care
about seeing the "Applying schema" log lines at the INFO level every
time we run the tests (it's 100 lines of bulk for each homeserver).
```
synapse_main | 2025-08-29 22:34:03,453 - synapse.storage.prepare_database - 433 - INFO - main - Applying schema deltas for v73
synapse_main | 2025-08-29 22:34:03,454 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/01event_failed_pull_attempts.sql
synapse_main | 2025-08-29 22:34:03,463 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/02add_pusher_enabled.sql
synapse_main | 2025-08-29 22:34:03,473 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/02room_id_indexes_for_purging.sql
synapse_main | 2025-08-29 22:34:03,482 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/03pusher_device_id.sql
synapse_main | 2025-08-29 22:34:03,492 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/03users_approved_column.sql
synapse_main | 2025-08-29 22:34:03,502 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/04partial_join_details.sql
synapse_main | 2025-08-29 22:34:03,513 - synapse.storage.prepare_database - 541 - INFO - main - Applying schema 73/04pending_device_list_updates.sql
...
```

The Synapse logs are visible when a Complement test fails or you use
`COMPLEMENT_ALWAYS_PRINT_SERVER_LOGS=1`.

This stems from a Complement test with three homeservers and a desire for less
log noise to scroll through.

---
 changelog.d/18878.docker | 1 +
 docker/conf/log.config   | 7 +++++++
 2 files changed, 8 insertions(+)
 create mode 100644 changelog.d/18878.docker

diff --git a/changelog.d/18878.docker b/changelog.d/18878.docker
new file mode 100644
index 0000000000..cf74f67cc8
--- /dev/null
+++ b/changelog.d/18878.docker
@@ -0,0 +1 @@
+Suppress bulk "Applying schema" log noise when `SYNAPSE_LOG_TESTING` is set.
diff --git a/docker/conf/log.config b/docker/conf/log.config
index 5772321202..6fe7db66da 100644
--- a/docker/conf/log.config
+++ b/docker/conf/log.config
@@ -77,6 +77,13 @@ loggers:
     #}
     synapse.visibility.filtered_event_debug:
         level: DEBUG
+
+    {#
+      If Synapse is under test, we don't care about seeing the "Applying schema" log
+      lines at the INFO level every time we run the tests (it's 100 lines of bulk)
+    #}
+    synapse.storage.prepare_database:
+        level: WARN
 {% endif %}
 
 root:

From 4b43e6fe0254bbed6f7da1cbe4e251df07f0fc71 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 8 Sep 2025 10:55:48 +0100
Subject: [PATCH 03/54] Handle rescinding invites over federation (#18823)

We should send events that rescind invites over federation. Similarly, we
should handle receiving such events.

Unfortunately, the protocol doesn't make it possible to fully auth such
events, and so we can only handle the case where the original inviter
rescinded the invite (rather than a room admin).

Complement test: https://github.com/matrix-org/complement/pull/797
---
 changelog.d/18823.bugfix              |  1 +
 synapse/federation/sender/__init__.py | 26 ++++++++++++++++
 synapse/handlers/federation_event.py  | 44 +++++++++++++++++++++++++--
 3 files changed, 68 insertions(+), 3 deletions(-)
 create mode 100644 changelog.d/18823.bugfix
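As a reading aid, the check both the sending and receiving sides perform can be
boiled down to the following minimal sketch (it uses a simplified, hypothetical
`Event` stand-in and plain strings instead of Synapse's `EventBase` and
constants, and synchronous code instead of the real async storage lookups):

```python
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Event:
    """Hypothetical, simplified stand-in for Synapse's EventBase."""

    event_id: str
    type: str
    sender: str
    state_key: Optional[str] = None
    membership: Optional[str] = None
    auth_event_ids: List[str] = field(default_factory=list)


def rescinds_invite(leave: Event, auth_events: List[Event]) -> bool:
    """Decide whether `leave` rescinds an invite for `leave.state_key`.

    A leave sent by someone other than the target user only counts as a
    rescission if the target's invite is among its auth events and the leave's
    sender matches the original inviter (we can't tell who is a room admin
    from outside the room).
    """
    if leave.type != "m.room.member" or leave.membership != "leave":
        return False
    if leave.sender == leave.state_key:
        # The user left of their own accord; nothing was rescinded.
        return False
    for auth_event in auth_events:
        if (
            auth_event.event_id in leave.auth_event_ids
            and auth_event.type == "m.room.member"
            and auth_event.state_key == leave.state_key
            and auth_event.membership == "invite"
            and auth_event.sender == leave.sender
        ):
            return True
    return False
```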
diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py
index 278a957331..6baa233143 100644
--- a/synapse/federation/sender/__init__.py
+++ b/synapse/federation/sender/__init__.py
@@ -150,6 +150,7 @@ from prometheus_client import Counter
 from twisted.internet import defer
 
 import synapse.metrics
+from synapse.api.constants import EventTypes, Membership
 from synapse.api.presence import UserPresenceState
 from synapse.events import EventBase
 from synapse.federation.sender.per_destination_queue import (
@@ -655,6 +656,31 @@ class FederationSender(AbstractFederationSender):
                 )
                 return
 
+            # If we've rescinded an invite then we want to tell the
+            # other server.
+            if (
+                event.type == EventTypes.Member
+                and event.membership == Membership.LEAVE
+                and event.sender != event.state_key
+            ):
+                # We check if this leave event is rescinding an invite by
+                # checking whether there is an invite event for the user in
+                # the auth events. It could otherwise be a kick or unban,
+                # which we don't want to send (if the user wasn't already in
+                # the room).
+                auth_events = await self.store.get_events_as_list(
+                    event.auth_event_ids()
+                )
+                for auth_event in auth_events:
+                    if (
+                        auth_event.type == EventTypes.Member
+                        and auth_event.state_key == event.state_key
+                        and auth_event.membership == Membership.INVITE
+                    ):
+                        destinations = set(destinations)
+                        destinations.add(get_domain_from_id(event.state_key))
+                        break
+
             sharded_destinations = {
                 d
                 for d in destinations
diff --git a/synapse/handlers/federation_event.py b/synapse/handlers/federation_event.py
index 04ee774aa3..1e47b4ef4f 100644
--- a/synapse/handlers/federation_event.py
+++ b/synapse/handlers/federation_event.py
@@ -248,9 +248,10 @@ class FederationEventHandler:
             self.room_queues[room_id].append((pdu, origin))
             return
 
-        # If we're not in the room just ditch the event entirely. This is
-        # probably an old server that has come back and thinks we're still in
-        # the room (or we've been rejoined to the room by a state reset).
+        # If we're not in the room (and not invited), just ditch the event
+        # entirely. This is probably an old server that has come back and thinks
+        # we're still in the room (or we've been rejoined to the room by a state
+        # reset).
         #
         # Note that if we were never in the room then we would have already
         # dropped the event, since we wouldn't know the room version.
@@ -258,6 +259,43 @@ class FederationEventHandler:
             room_id, self.server_name
         )
         if not is_in_room:
+            # Check if this is a leave event rescinding an invite
+            if (
+                pdu.type == EventTypes.Member
+                and pdu.membership == Membership.LEAVE
+                and pdu.state_key != pdu.sender
+                and self._is_mine_id(pdu.state_key)
+            ):
+                (
+                    membership,
+                    membership_event_id,
+                ) = await self._store.get_local_current_membership_for_user_in_room(
+                    pdu.state_key, pdu.room_id
+                )
+                if (
+                    membership == Membership.INVITE
+                    and membership_event_id
+                    and membership_event_id
+                    in pdu.auth_event_ids()  # The invite should be in the auth events of the rescission.
+                ):
+                    invite_event = await self._store.get_event(
+                        membership_event_id, allow_none=True
+                    )
+
+                    # We cannot fully auth the rescission event, but we can
+                    # check that the sender of the leave event is the same as
+                    # the sender of the invite.
+                    #
+                    # Technically, a room admin could rescind the invite, but we
+                    # have no way of knowing who is and isn't a room admin.
+ if invite_event and pdu.sender == invite_event.sender: + # Handle the rescission event + pdu.internal_metadata.outlier = True + pdu.internal_metadata.out_of_band_membership = True + context = EventContext.for_outlier(self._storage_controllers) + await self.persist_events_and_notify(room_id, [(pdu, context)]) + return + logger.info( "Ignoring PDU from %s as we're not in the room", origin, From dfccde9f605a0d6e3bda9c05cd45a7aabcbd6473 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Tue, 9 Sep 2025 09:28:45 +0100 Subject: [PATCH 04/54] Remove obsolete and experimental `/sync/e2ee` endpoint. (#18583) Introduced in: https://github.com/element-hq/synapse/pull/17167 The endpoint was part of experiments for MSC3575 but does not feature in that MSC. Signed-off-by: Olivier 'reivilibre --- changelog.d/18583.removal | 1 + synapse/handlers/sync.py | 285 ++--------------------- synapse/rest/client/sync.py | 174 -------------- tests/events/test_auto_accept_invites.py | 3 +- tests/events/test_presence_router.py | 3 +- tests/handlers/test_sync.py | 30 --- tests/rest/client/test_sendtodevice.py | 42 +--- tests/rest/client/test_sync.py | 93 +------- 8 files changed, 38 insertions(+), 593 deletions(-) create mode 100644 changelog.d/18583.removal diff --git a/changelog.d/18583.removal b/changelog.d/18583.removal new file mode 100644 index 0000000000..d7baa85147 --- /dev/null +++ b/changelog.d/18583.removal @@ -0,0 +1 @@ +Remove obsolete and experimental `/sync/e2ee` endpoint. \ No newline at end of file diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 7bfe4e8760..4a68fdcc76 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -20,7 +20,6 @@ # import itertools import logging -from enum import Enum from typing import ( TYPE_CHECKING, AbstractSet, @@ -28,14 +27,11 @@ from typing import ( Dict, FrozenSet, List, - Literal, Mapping, Optional, Sequence, Set, Tuple, - Union, - overload, ) import attr @@ -120,25 +116,6 @@ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100 SyncRequestKey = Tuple[Any, ...] -class SyncVersion(Enum): - """ - Enum for specifying the version of sync request. This is used to key which type of - sync response that we are generating. - - This is different than the `sync_type` you might see used in other code below; which - specifies the sub-type sync request (e.g. initial_sync, full_state_sync, - incremental_sync) and is really only relevant for the `/sync` v2 endpoint. - """ - - # These string values are semantically significant because they are used in the the - # metrics - - # Traditional `/sync` endpoint - SYNC_V2 = "sync_v2" - # Part of MSC3575 Sliding Sync - E2EE_SYNC = "e2ee_sync" - - @attr.s(slots=True, frozen=True, auto_attribs=True) class SyncConfig: user: UserID @@ -308,26 +285,6 @@ class SyncResult: ) -@attr.s(slots=True, frozen=True, auto_attribs=True) -class E2eeSyncResult: - """ - Attributes: - next_batch: Token for the next sync - to_device: List of direct messages for the device. 
- device_lists: List of user_ids whose devices have changed - device_one_time_keys_count: Dict of algorithm to count for one time keys - for this device - device_unused_fallback_key_types: List of key types that have an unused fallback - key - """ - - next_batch: StreamToken - to_device: List[JsonDict] - device_lists: DeviceListUpdates - device_one_time_keys_count: JsonMapping - device_unused_fallback_key_types: List[str] - - class SyncHandler: def __init__(self, hs: "HomeServer"): self.server_name = hs.hostname @@ -373,52 +330,15 @@ class SyncHandler: self.rooms_to_exclude_globally = hs.config.server.rooms_to_exclude_from_sync - @overload async def wait_for_sync_for_user( self, requester: Requester, sync_config: SyncConfig, - sync_version: Literal[SyncVersion.SYNC_V2], request_key: SyncRequestKey, since_token: Optional[StreamToken] = None, timeout: int = 0, full_state: bool = False, - ) -> SyncResult: ... - - @overload - async def wait_for_sync_for_user( - self, - requester: Requester, - sync_config: SyncConfig, - sync_version: Literal[SyncVersion.E2EE_SYNC], - request_key: SyncRequestKey, - since_token: Optional[StreamToken] = None, - timeout: int = 0, - full_state: bool = False, - ) -> E2eeSyncResult: ... - - @overload - async def wait_for_sync_for_user( - self, - requester: Requester, - sync_config: SyncConfig, - sync_version: SyncVersion, - request_key: SyncRequestKey, - since_token: Optional[StreamToken] = None, - timeout: int = 0, - full_state: bool = False, - ) -> Union[SyncResult, E2eeSyncResult]: ... - - async def wait_for_sync_for_user( - self, - requester: Requester, - sync_config: SyncConfig, - sync_version: SyncVersion, - request_key: SyncRequestKey, - since_token: Optional[StreamToken] = None, - timeout: int = 0, - full_state: bool = False, - ) -> Union[SyncResult, E2eeSyncResult]: + ) -> SyncResult: """Get the sync for a client if we have new data for it now. Otherwise wait for new data to arrive on the server. If the timeout expires, then return an empty sync result. @@ -433,8 +353,7 @@ class SyncHandler: full_state: Whether to return the full state for each room. Returns: - When `SyncVersion.SYNC_V2`, returns a full `SyncResult`. - When `SyncVersion.E2EE_SYNC`, returns a `E2eeSyncResult`. + returns a full `SyncResult`. """ # If the user is not part of the mau group, then check that limits have # not been exceeded (if not part of the group by this point, almost certain @@ -446,7 +365,6 @@ class SyncHandler: request_key, self._wait_for_sync_for_user, sync_config, - sync_version, since_token, timeout, full_state, @@ -455,48 +373,14 @@ class SyncHandler: logger.debug("Returning sync response for %s", user_id) return res - @overload async def _wait_for_sync_for_user( self, sync_config: SyncConfig, - sync_version: Literal[SyncVersion.SYNC_V2], since_token: Optional[StreamToken], timeout: int, full_state: bool, cache_context: ResponseCacheContext[SyncRequestKey], - ) -> SyncResult: ... - - @overload - async def _wait_for_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: Literal[SyncVersion.E2EE_SYNC], - since_token: Optional[StreamToken], - timeout: int, - full_state: bool, - cache_context: ResponseCacheContext[SyncRequestKey], - ) -> E2eeSyncResult: ... - - @overload - async def _wait_for_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: SyncVersion, - since_token: Optional[StreamToken], - timeout: int, - full_state: bool, - cache_context: ResponseCacheContext[SyncRequestKey], - ) -> Union[SyncResult, E2eeSyncResult]: ... 
- - async def _wait_for_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: SyncVersion, - since_token: Optional[StreamToken], - timeout: int, - full_state: bool, - cache_context: ResponseCacheContext[SyncRequestKey], - ) -> Union[SyncResult, E2eeSyncResult]: + ) -> SyncResult: """The start of the machinery that produces a /sync response. See https://spec.matrix.org/v1.1/client-server-api/#syncing for full details. @@ -517,7 +401,7 @@ class SyncHandler: else: sync_type = "incremental_sync" - sync_label = f"{sync_version}:{sync_type}" + sync_label = f"sync_v2:{sync_type}" context = current_context() if context: @@ -578,19 +462,15 @@ class SyncHandler: if timeout == 0 or since_token is None or full_state: # we are going to return immediately, so don't bother calling # notifier.wait_for_events. - result: Union[ - SyncResult, E2eeSyncResult - ] = await self.current_sync_for_user( - sync_config, sync_version, since_token, full_state=full_state + result = await self.current_sync_for_user( + sync_config, since_token, full_state=full_state ) else: # Otherwise, we wait for something to happen and report it to the user. async def current_sync_callback( before_token: StreamToken, after_token: StreamToken - ) -> Union[SyncResult, E2eeSyncResult]: - return await self.current_sync_for_user( - sync_config, sync_version, since_token - ) + ) -> SyncResult: + return await self.current_sync_for_user(sync_config, since_token) result = await self.notifier.wait_for_events( sync_config.user.to_string(), @@ -623,43 +503,15 @@ class SyncHandler: return result - @overload async def current_sync_for_user( self, sync_config: SyncConfig, - sync_version: Literal[SyncVersion.SYNC_V2], since_token: Optional[StreamToken] = None, full_state: bool = False, - ) -> SyncResult: ... - - @overload - async def current_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: Literal[SyncVersion.E2EE_SYNC], - since_token: Optional[StreamToken] = None, - full_state: bool = False, - ) -> E2eeSyncResult: ... - - @overload - async def current_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: SyncVersion, - since_token: Optional[StreamToken] = None, - full_state: bool = False, - ) -> Union[SyncResult, E2eeSyncResult]: ... - - async def current_sync_for_user( - self, - sync_config: SyncConfig, - sync_version: SyncVersion, - since_token: Optional[StreamToken] = None, - full_state: bool = False, - ) -> Union[SyncResult, E2eeSyncResult]: + ) -> SyncResult: """ Generates the response body of a sync result, represented as a - `SyncResult`/`E2eeSyncResult`. + `SyncResult`. This is a wrapper around `generate_sync_result` which starts an open tracing span to track the sync. See `generate_sync_result` for the next part of your @@ -672,28 +524,15 @@ class SyncHandler: full_state: Whether to return the full state for each room. Returns: - When `SyncVersion.SYNC_V2`, returns a full `SyncResult`. - When `SyncVersion.E2EE_SYNC`, returns a `E2eeSyncResult`. + returns a full `SyncResult`. 
""" with start_active_span("sync.current_sync_for_user"): log_kv({"since_token": since_token}) # Go through the `/sync` v2 path - if sync_version == SyncVersion.SYNC_V2: - sync_result: Union[ - SyncResult, E2eeSyncResult - ] = await self.generate_sync_result( - sync_config, since_token, full_state - ) - # Go through the MSC3575 Sliding Sync `/sync/e2ee` path - elif sync_version == SyncVersion.E2EE_SYNC: - sync_result = await self.generate_e2ee_sync_result( - sync_config, since_token - ) - else: - raise Exception( - f"Unknown sync_version (this is a Synapse problem): {sync_version}" - ) + sync_result = await self.generate_sync_result( + sync_config, since_token, full_state + ) set_tag(SynapseTags.SYNC_RESULT, bool(sync_result)) return sync_result @@ -1968,102 +1807,6 @@ class SyncHandler: next_batch=sync_result_builder.now_token, ) - async def generate_e2ee_sync_result( - self, - sync_config: SyncConfig, - since_token: Optional[StreamToken] = None, - ) -> E2eeSyncResult: - """ - Generates the response body of a MSC3575 Sliding Sync `/sync/e2ee` result. - - This is represented by a `E2eeSyncResult` struct, which is built from small - pieces using a `SyncResultBuilder`. The `sync_result_builder` is passed as a - mutable ("inout") parameter to various helper functions. These retrieve and - process the data which forms the sync body, often writing to the - `sync_result_builder` to store their output. - - At the end, we transfer data from the `sync_result_builder` to a new `E2eeSyncResult` - instance to signify that the sync calculation is complete. - """ - user_id = sync_config.user.to_string() - app_service = self.store.get_app_service_by_user_id(user_id) - if app_service: - # We no longer support AS users using /sync directly. - # See https://github.com/matrix-org/matrix-doc/issues/1144 - raise NotImplementedError() - - sync_result_builder = await self.get_sync_result_builder( - sync_config, - since_token, - full_state=False, - ) - - # 1. Calculate `to_device` events - await self._generate_sync_entry_for_to_device(sync_result_builder) - - # 2. Calculate `device_lists` - # Device list updates are sent if a since token is provided. - device_lists = DeviceListUpdates() - include_device_list_updates = bool(since_token and since_token.device_list_key) - if include_device_list_updates: - # Note that _generate_sync_entry_for_rooms sets sync_result_builder.joined, which - # is used in calculate_user_changes below. - # - # TODO: Running `_generate_sync_entry_for_rooms()` is a lot of work just to - # figure out the membership changes/derived info needed for - # `_generate_sync_entry_for_device_list()`. In the future, we should try to - # refactor this away. - ( - newly_joined_rooms, - newly_left_rooms, - ) = await self._generate_sync_entry_for_rooms(sync_result_builder) - - # This uses the sync_result_builder.joined which is set in - # `_generate_sync_entry_for_rooms`, if that didn't find any joined - # rooms for some reason it is a no-op. - ( - newly_joined_or_invited_or_knocked_users, - newly_left_users, - ) = sync_result_builder.calculate_user_changes() - - # include_device_list_updates can only be True if we have a - # since token. 
- assert since_token is not None - device_lists = await self._device_handler.generate_sync_entry_for_device_list( - user_id=user_id, - since_token=since_token, - now_token=sync_result_builder.now_token, - joined_room_ids=sync_result_builder.joined_room_ids, - newly_joined_rooms=newly_joined_rooms, - newly_joined_or_invited_or_knocked_users=newly_joined_or_invited_or_knocked_users, - newly_left_rooms=newly_left_rooms, - newly_left_users=newly_left_users, - ) - - # 3. Calculate `device_one_time_keys_count` and `device_unused_fallback_key_types` - device_id = sync_config.device_id - one_time_keys_count: JsonMapping = {} - unused_fallback_key_types: List[str] = [] - if device_id: - # TODO: We should have a way to let clients differentiate between the states of: - # * no change in OTK count since the provided since token - # * the server has zero OTKs left for this device - # Spec issue: https://github.com/matrix-org/matrix-doc/issues/3298 - one_time_keys_count = await self.store.count_e2e_one_time_keys( - user_id, device_id - ) - unused_fallback_key_types = list( - await self.store.get_e2e_unused_fallback_key_types(user_id, device_id) - ) - - return E2eeSyncResult( - to_device=sync_result_builder.to_device, - device_lists=device_lists, - device_one_time_keys_count=one_time_keys_count, - device_unused_fallback_key_types=unused_fallback_key_types, - next_batch=sync_result_builder.now_token, - ) - async def get_sync_result_builder( self, sync_config: SyncConfig, diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 5e0596d1bc..6f2f6642be 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -42,7 +42,6 @@ from synapse.handlers.sync import ( KnockedSyncResult, SyncConfig, SyncResult, - SyncVersion, ) from synapse.http.server import HttpServer from synapse.http.servlet import ( @@ -267,7 +266,6 @@ class SyncRestServlet(RestServlet): sync_result = await self.sync_handler.wait_for_sync_for_user( requester, sync_config, - SyncVersion.SYNC_V2, request_key, since_token=since_token, timeout=timeout, @@ -632,177 +630,6 @@ class SyncRestServlet(RestServlet): return result -class SlidingSyncE2eeRestServlet(RestServlet): - """ - API endpoint for MSC3575 Sliding Sync `/sync/e2ee`. This is being introduced as part - of Sliding Sync but doesn't have any sliding window component. It's just a way to - get E2EE events without having to sit through a big initial sync (`/sync` v2). And - we can avoid encryption events being backed up by the main sync response. - - Having To-Device messages split out to this sync endpoint also helps when clients - need to have 2 or more sync streams open at a time, e.g a push notification process - and a main process. This can cause the two processes to race to fetch the To-Device - events, resulting in the need for complex synchronisation rules to ensure the token - is correctly and atomically exchanged between processes. - - GET parameters:: - timeout(int): How long to wait for new events in milliseconds. - since(batch_token): Batch token when asking for incremental deltas. - - Response JSON:: - { - "next_batch": // batch token for the next /sync - "to_device": { - // list of to-device events - "events": [ - { - "content: { "algorithm": "m.olm.v1.curve25519-aes-sha2", "ciphertext": { ... }, "org.matrix.msgid": "abcd", "session_id": "abcd" }, - "type": "m.room.encrypted", - "sender": "@alice:example.com", - } - // ... 
- ] - }, - "device_lists": { - "changed": ["@alice:example.com"], - "left": ["@bob:example.com"] - }, - "device_one_time_keys_count": { - "signed_curve25519": 50 - }, - "device_unused_fallback_key_types": [ - "signed_curve25519" - ] - } - """ - - PATTERNS = client_patterns( - "/org.matrix.msc3575/sync/e2ee$", releases=[], v1=False, unstable=True - ) - - def __init__(self, hs: "HomeServer"): - super().__init__() - self.hs = hs - self.auth = hs.get_auth() - self.store = hs.get_datastores().main - self.sync_handler = hs.get_sync_handler() - - # Filtering only matters for the `device_lists` because it requires a bunch of - # derived information from rooms (see how `_generate_sync_entry_for_rooms()` - # prepares a bunch of data for `_generate_sync_entry_for_device_list()`). - self.only_member_events_filter_collection = FilterCollection( - self.hs, - { - "room": { - # We only care about membership events for the `device_lists`. - # Membership will tell us whether a user has joined/left a room and - # if there are new devices to encrypt for. - "timeline": { - "types": ["m.room.member"], - }, - "state": { - "types": ["m.room.member"], - }, - # We don't want any extra account_data generated because it's not - # returned by this endpoint. This helps us avoid work in - # `_generate_sync_entry_for_rooms()` - "account_data": { - "not_types": ["*"], - }, - # We don't want any extra ephemeral data generated because it's not - # returned by this endpoint. This helps us avoid work in - # `_generate_sync_entry_for_rooms()` - "ephemeral": { - "not_types": ["*"], - }, - }, - # We don't want any extra account_data generated because it's not - # returned by this endpoint. (This is just here for good measure) - "account_data": { - "not_types": ["*"], - }, - # We don't want any extra presence data generated because it's not - # returned by this endpoint. (This is just here for good measure) - "presence": { - "not_types": ["*"], - }, - }, - ) - - async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: - requester = await self.auth.get_user_by_req_experimental_feature( - request, allow_guest=True, feature=ExperimentalFeature.MSC3575 - ) - user = requester.user - device_id = requester.device_id - - timeout = parse_integer(request, "timeout", default=0) - since = parse_string(request, "since") - - sync_config = SyncConfig( - user=user, - filter_collection=self.only_member_events_filter_collection, - is_guest=requester.is_guest, - device_id=device_id, - use_state_after=False, # We don't return any rooms so this flag is a no-op - ) - - since_token = None - if since is not None: - since_token = await StreamToken.from_string(self.store, since) - - # Request cache key - request_key = ( - SyncVersion.E2EE_SYNC, - user, - timeout, - since, - ) - - # Gather data for the response - sync_result = await self.sync_handler.wait_for_sync_for_user( - requester, - sync_config, - SyncVersion.E2EE_SYNC, - request_key, - since_token=since_token, - timeout=timeout, - full_state=False, - ) - - # The client may have disconnected by now; don't bother to serialize the - # response if so. 
- if request._disconnected: - logger.info("Client has disconnected; not serializing response.") - return 200, {} - - response: JsonDict = defaultdict(dict) - response["next_batch"] = await sync_result.next_batch.to_string(self.store) - - if sync_result.to_device: - response["to_device"] = {"events": sync_result.to_device} - - if sync_result.device_lists.changed: - response["device_lists"]["changed"] = list(sync_result.device_lists.changed) - if sync_result.device_lists.left: - response["device_lists"]["left"] = list(sync_result.device_lists.left) - - # We always include this because https://github.com/vector-im/element-android/issues/3725 - # The spec isn't terribly clear on when this can be omitted and how a client would tell - # the difference between "no keys present" and "nothing changed" in terms of whole field - # absent / individual key type entry absent - # Corresponding synapse issue: https://github.com/matrix-org/synapse/issues/10456 - response["device_one_time_keys_count"] = sync_result.device_one_time_keys_count - - # https://github.com/matrix-org/matrix-doc/blob/54255851f642f84a4f1aaf7bc063eebe3d76752b/proposals/2732-olm-fallback-keys.md - # states that this field should always be included, as long as the server supports the feature. - response["device_unused_fallback_key_types"] = ( - sync_result.device_unused_fallback_key_types - ) - - return 200, response - - class SlidingSyncRestServlet(RestServlet): """ API endpoint for MSC3575 Sliding Sync `/sync`. Allows for clients to request a @@ -1254,4 +1081,3 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: SyncRestServlet(hs).register(http_server) SlidingSyncRestServlet(hs).register(http_server) - SlidingSyncE2eeRestServlet(hs).register(http_server) diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py index ab183f8106..8f1dc86984 100644 --- a/tests/events/test_auto_accept_invites.py +++ b/tests/events/test_auto_accept_invites.py @@ -35,7 +35,7 @@ from synapse.config._base import RootConfig from synapse.config.auto_accept_invites import AutoAcceptInvitesConfig from synapse.events.auto_accept_invites import InviteAutoAccepter from synapse.federation.federation_base import event_from_pdu_json -from synapse.handlers.sync import JoinedSyncResult, SyncRequestKey, SyncVersion +from synapse.handlers.sync import JoinedSyncResult, SyncRequestKey from synapse.module_api import ModuleApi from synapse.rest import admin from synapse.rest.client import login, room @@ -548,7 +548,6 @@ def sync_join( testcase.hs.get_sync_handler().wait_for_sync_for_user( requester, sync_config, - SyncVersion.SYNC_V2, generate_request_key(), since_token, ) diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index a61f1369f4..f7d55223b1 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -36,7 +36,7 @@ from synapse.server import HomeServer from synapse.types import JsonDict, StreamToken, create_requester from synapse.util import Clock -from tests.handlers.test_sync import SyncRequestKey, SyncVersion, generate_sync_config +from tests.handlers.test_sync import SyncRequestKey, generate_sync_config from tests.unittest import ( FederatingHomeserverTestCase, HomeserverTestCase, @@ -532,7 +532,6 @@ def sync_presence( testcase.hs.get_sync_handler().wait_for_sync_for_user( requester, sync_config, - SyncVersion.SYNC_V2, generate_request_key(), since_token, ) diff --git a/tests/handlers/test_sync.py 
b/tests/handlers/test_sync.py index cea61bed6a..9d3e88c126 100644 --- a/tests/handlers/test_sync.py +++ b/tests/handlers/test_sync.py @@ -37,7 +37,6 @@ from synapse.handlers.sync import ( SyncConfig, SyncRequestKey, SyncResult, - SyncVersion, TimelineBatch, ) from synapse.rest import admin @@ -113,7 +112,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( requester, sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -124,7 +122,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( requester, sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ), ResourceLimitError, @@ -142,7 +139,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( requester, sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ), ResourceLimitError, @@ -167,7 +163,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): sync_config=generate_sync_config( user, device_id="dev", use_state_after=self.use_state_after ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -203,7 +198,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): sync_config=generate_sync_config( user, use_state_after=self.use_state_after ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -218,7 +212,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): sync_config=generate_sync_config( user, device_id="dev", use_state_after=self.use_state_after ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_result.next_batch, ) @@ -252,7 +245,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): sync_config=generate_sync_config( user, use_state_after=self.use_state_after ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -267,7 +259,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): sync_config=generate_sync_config( user, device_id="dev", use_state_after=self.use_state_after ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_result.next_batch, ) @@ -310,7 +301,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( create_requester(owner), generate_sync_config(owner, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -336,7 +326,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( eve_requester, eve_sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -363,7 +352,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( eve_requester, eve_sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=eve_sync_after_ban.next_batch, ) @@ -376,7 +364,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( eve_requester, eve_sync_config, - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=None, ) @@ -411,7 +398,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) 
@@ -441,7 +427,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) @@ -487,7 +472,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -527,7 +511,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) @@ -576,7 +559,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -603,7 +585,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) @@ -643,7 +624,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=incremental_sync.next_batch, ) @@ -717,7 +697,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -743,7 +722,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): ), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -769,7 +747,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( alice_requester, generate_sync_config(alice, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=initial_sync_result.next_batch, ) @@ -833,7 +810,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( bob_requester, generate_sync_config(bob, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -867,7 +843,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): filter_collection=FilterCollection(self.hs, filter_dict), use_state_after=self.use_state_after, ), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=None if initial_sync else initial_sync_result.next_batch, ) @@ -967,7 +942,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( create_requester(user), generate_sync_config(user, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -1016,7 +990,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( create_requester(user2), generate_sync_config(user2, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -1042,7 +1015,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): 
self.sync_handler.wait_for_sync_for_user( create_requester(user), generate_sync_config(user, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), ) ) @@ -1079,7 +1051,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( create_requester(user), generate_sync_config(user, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=since_token, timeout=0, @@ -1134,7 +1105,6 @@ class SyncTestCase(tests.unittest.HomeserverTestCase): self.sync_handler.wait_for_sync_for_user( create_requester(user), generate_sync_config(user, use_state_after=self.use_state_after), - sync_version=SyncVersion.SYNC_V2, request_key=generate_request_key(), since_token=since_token, timeout=0, diff --git a/tests/rest/client/test_sendtodevice.py b/tests/rest/client/test_sendtodevice.py index 5ef501c6d5..56533d85f5 100644 --- a/tests/rest/client/test_sendtodevice.py +++ b/tests/rest/client/test_sendtodevice.py @@ -18,27 +18,13 @@ # [This file includes modifications made by New Vector Limited] # # -from parameterized import parameterized_class - from synapse.api.constants import EduTypes from synapse.rest import admin from synapse.rest.client import login, sendtodevice, sync -from synapse.types import JsonDict from tests.unittest import HomeserverTestCase, override_config -@parameterized_class( - ("sync_endpoint", "experimental_features"), - [ - ("/sync", {}), - ( - "/_matrix/client/unstable/org.matrix.msc3575/sync/e2ee", - # Enable sliding sync - {"msc3575_enabled": True}, - ), - ], -) class SendToDeviceTestCase(HomeserverTestCase): """ Test `/sendToDevice` will deliver messages across to people receiving them over `/sync`. @@ -48,9 +34,6 @@ class SendToDeviceTestCase(HomeserverTestCase): experimental_features: The experimental features homeserver config to use. """ - sync_endpoint: str - experimental_features: JsonDict - servlets = [ admin.register_servlets, login.register_servlets, @@ -58,11 +41,6 @@ class SendToDeviceTestCase(HomeserverTestCase): sync.register_servlets, ] - def default_config(self) -> JsonDict: - config = super().default_config() - config["experimental_features"] = self.experimental_features - return config - def test_user_to_user(self) -> None: """A to-device message from one user to another should get delivered""" @@ -83,7 +61,7 @@ class SendToDeviceTestCase(HomeserverTestCase): self.assertEqual(chan.code, 200, chan.result) # check it appears - channel = self.make_request("GET", self.sync_endpoint, access_token=user2_tok) + channel = self.make_request("GET", "/sync", access_token=user2_tok) self.assertEqual(channel.code, 200, channel.result) expected_result = { "events": [ @@ -99,7 +77,7 @@ class SendToDeviceTestCase(HomeserverTestCase): # it should re-appear if we do another sync because the to-device message is not # deleted until we acknowledge it by sending a `?since=...` parameter in the # next sync request corresponding to the `next_batch` value from the response. 
- channel = self.make_request("GET", self.sync_endpoint, access_token=user2_tok) + channel = self.make_request("GET", "/sync", access_token=user2_tok) self.assertEqual(channel.code, 200, channel.result) self.assertEqual(channel.json_body["to_device"], expected_result) @@ -107,7 +85,7 @@ class SendToDeviceTestCase(HomeserverTestCase): sync_token = channel.json_body["next_batch"] channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={sync_token}", + f"/sync?since={sync_token}", access_token=user2_tok, ) self.assertEqual(channel.code, 200, channel.result) @@ -133,7 +111,7 @@ class SendToDeviceTestCase(HomeserverTestCase): self.assertEqual(chan.code, 200, chan.result) # now sync: we should get two of the three (because burst_count=2) - channel = self.make_request("GET", self.sync_endpoint, access_token=user2_tok) + channel = self.make_request("GET", "/sync", access_token=user2_tok) self.assertEqual(channel.code, 200, channel.result) msgs = channel.json_body["to_device"]["events"] self.assertEqual(len(msgs), 2) @@ -163,7 +141,7 @@ class SendToDeviceTestCase(HomeserverTestCase): # ... which should arrive channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={sync_token}", + f"/sync?since={sync_token}", access_token=user2_tok, ) self.assertEqual(channel.code, 200, channel.result) @@ -198,7 +176,7 @@ class SendToDeviceTestCase(HomeserverTestCase): ) # now sync: we should get two of the three - channel = self.make_request("GET", self.sync_endpoint, access_token=user2_tok) + channel = self.make_request("GET", "/sync", access_token=user2_tok) self.assertEqual(channel.code, 200, channel.result) msgs = channel.json_body["to_device"]["events"] self.assertEqual(len(msgs), 2) @@ -233,7 +211,7 @@ class SendToDeviceTestCase(HomeserverTestCase): # ... 
which should arrive channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={sync_token}", + f"/sync?since={sync_token}", access_token=user2_tok, ) self.assertEqual(channel.code, 200, channel.result) @@ -258,7 +236,7 @@ class SendToDeviceTestCase(HomeserverTestCase): user2_tok = self.login("u2", "pass", "d2") # Do an initial sync - channel = self.make_request("GET", self.sync_endpoint, access_token=user2_tok) + channel = self.make_request("GET", "/sync", access_token=user2_tok) self.assertEqual(channel.code, 200, channel.result) sync_token = channel.json_body["next_batch"] @@ -275,7 +253,7 @@ class SendToDeviceTestCase(HomeserverTestCase): channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={sync_token}&timeout=300000", + f"/sync?since={sync_token}&timeout=300000", access_token=user2_tok, ) self.assertEqual(channel.code, 200, channel.result) @@ -285,7 +263,7 @@ class SendToDeviceTestCase(HomeserverTestCase): channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={sync_token}&timeout=300000", + f"/sync?since={sync_token}&timeout=300000", access_token=user2_tok, ) self.assertEqual(channel.code, 200, channel.result) diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py index e612df3be9..7f3cf5affb 100644 --- a/tests/rest/client/test_sync.py +++ b/tests/rest/client/test_sync.py @@ -22,7 +22,7 @@ import json import logging from typing import List -from parameterized import parameterized, parameterized_class +from parameterized import parameterized from twisted.internet.testing import MemoryReactor @@ -702,29 +702,11 @@ class SyncCacheTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200, channel.json_body) -@parameterized_class( - ("sync_endpoint", "experimental_features"), - [ - ("/sync", {}), - ( - "/_matrix/client/unstable/org.matrix.msc3575/sync/e2ee", - # Enable sliding sync - {"msc3575_enabled": True}, - ), - ], -) class DeviceListSyncTestCase(unittest.HomeserverTestCase): """ Tests regarding device list (`device_lists`) changes. - - Attributes: - sync_endpoint: The endpoint under test to use for syncing. - experimental_features: The experimental features homeserver config to use. """ - sync_endpoint: str - experimental_features: JsonDict - servlets = [ synapse.rest.admin.register_servlets, login.register_servlets, @@ -733,11 +715,6 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): devices.register_servlets, ] - def default_config(self) -> JsonDict: - config = super().default_config() - config["experimental_features"] = self.experimental_features - return config - def test_receiving_local_device_list_changes(self) -> None: """Tests that a local users that share a room receive each other's device list changes. @@ -767,7 +744,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): # Now have Bob initiate an initial sync (in order to get a since token) channel = self.make_request( "GET", - self.sync_endpoint, + "/sync", access_token=bob_access_token, ) self.assertEqual(channel.code, 200, channel.json_body) @@ -777,7 +754,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): # which we hope will happen as a result of Alice updating their device list. bob_sync_channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={next_batch_token}&timeout=30000", + f"/sync?since={next_batch_token}&timeout=30000", access_token=bob_access_token, # Start the request, then continue on. 
await_result=False, @@ -824,7 +801,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): # Have Bob initiate an initial sync (in order to get a since token) channel = self.make_request( "GET", - self.sync_endpoint, + "/sync", access_token=bob_access_token, ) self.assertEqual(channel.code, 200, channel.json_body) @@ -834,7 +811,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): # which we hope will happen as a result of Alice updating their device list. bob_sync_channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={next_batch_token}&timeout=1000", + f"/sync?since={next_batch_token}&timeout=1000", access_token=bob_access_token, # Start the request, then continue on. await_result=False, @@ -873,9 +850,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): ) # Request an initial sync - channel = self.make_request( - "GET", self.sync_endpoint, access_token=alice_access_token - ) + channel = self.make_request("GET", "/sync", access_token=alice_access_token) self.assertEqual(channel.code, 200, channel.json_body) next_batch = channel.json_body["next_batch"] @@ -883,7 +858,7 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): # It won't return until something has happened incremental_sync_channel = self.make_request( "GET", - f"{self.sync_endpoint}?since={next_batch}&timeout=30000", + f"/sync?since={next_batch}&timeout=30000", access_token=alice_access_token, await_result=False, ) @@ -913,17 +888,6 @@ class DeviceListSyncTestCase(unittest.HomeserverTestCase): ) -@parameterized_class( - ("sync_endpoint", "experimental_features"), - [ - ("/sync", {}), - ( - "/_matrix/client/unstable/org.matrix.msc3575/sync/e2ee", - # Enable sliding sync - {"msc3575_enabled": True}, - ), - ], -) class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): """ Tests regarding device one time keys (`device_one_time_keys_count`) changes. @@ -933,9 +897,6 @@ class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): experimental_features: The experimental features homeserver config to use. 
""" - sync_endpoint: str - experimental_features: JsonDict - servlets = [ synapse.rest.admin.register_servlets, login.register_servlets, @@ -943,11 +904,6 @@ class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): devices.register_servlets, ] - def default_config(self) -> JsonDict: - config = super().default_config() - config["experimental_features"] = self.experimental_features - return config - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.e2e_keys_handler = hs.get_e2e_keys_handler() @@ -964,9 +920,7 @@ class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): ) # Request an initial sync - channel = self.make_request( - "GET", self.sync_endpoint, access_token=alice_access_token - ) + channel = self.make_request("GET", "/sync", access_token=alice_access_token) self.assertEqual(channel.code, 200, channel.json_body) # Check for those one time key counts @@ -1011,9 +965,7 @@ class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): ) # Request an initial sync - channel = self.make_request( - "GET", self.sync_endpoint, access_token=alice_access_token - ) + channel = self.make_request("GET", "/sync", access_token=alice_access_token) self.assertEqual(channel.code, 200, channel.json_body) # Check for those one time key counts @@ -1024,17 +976,6 @@ class DeviceOneTimeKeysSyncTestCase(unittest.HomeserverTestCase): ) -@parameterized_class( - ("sync_endpoint", "experimental_features"), - [ - ("/sync", {}), - ( - "/_matrix/client/unstable/org.matrix.msc3575/sync/e2ee", - # Enable sliding sync - {"msc3575_enabled": True}, - ), - ], -) class DeviceUnusedFallbackKeySyncTestCase(unittest.HomeserverTestCase): """ Tests regarding device one time keys (`device_unused_fallback_key_types`) changes. @@ -1044,9 +985,6 @@ class DeviceUnusedFallbackKeySyncTestCase(unittest.HomeserverTestCase): experimental_features: The experimental features homeserver config to use. 
""" - sync_endpoint: str - experimental_features: JsonDict - servlets = [ synapse.rest.admin.register_servlets, login.register_servlets, @@ -1054,11 +992,6 @@ class DeviceUnusedFallbackKeySyncTestCase(unittest.HomeserverTestCase): devices.register_servlets, ] - def default_config(self) -> JsonDict: - config = super().default_config() - config["experimental_features"] = self.experimental_features - return config - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = self.hs.get_datastores().main self.e2e_keys_handler = hs.get_e2e_keys_handler() @@ -1078,9 +1011,7 @@ class DeviceUnusedFallbackKeySyncTestCase(unittest.HomeserverTestCase): ) # Request an initial sync - channel = self.make_request( - "GET", self.sync_endpoint, access_token=alice_access_token - ) + channel = self.make_request("GET", "/sync", access_token=alice_access_token) self.assertEqual(channel.code, 200, channel.json_body) # Check for those one time key counts @@ -1122,9 +1053,7 @@ class DeviceUnusedFallbackKeySyncTestCase(unittest.HomeserverTestCase): self.assertEqual(fallback_res, ["alg1"], fallback_res) # Request an initial sync - channel = self.make_request( - "GET", self.sync_endpoint, access_token=alice_access_token - ) + channel = self.make_request("GET", "/sync", access_token=alice_access_token) self.assertEqual(channel.code, 200, channel.json_body) # Check for the unused fallback key types From 4d55f2f3017f5307d50808634ac1d563313fa8da Mon Sep 17 00:00:00 2001 From: Jason Little Date: Tue, 9 Sep 2025 03:50:09 -0500 Subject: [PATCH 05/54] fix: Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features (#18874) While exploring bring up of using `orjson`, exposed an interesting flaw. The stdlib `json` encoder seems to be ok with coercing a `str` from an `Enum`(specifically, a `Class[str, Enum]`). The `orjson` encoder does not like that this is a class and not a proper `str` per spec. Using the `.value` of the enum as the key for the dict produced while answering a `GET` admin request for experimental features seems to fix this. --- changelog.d/18874.misc | 1 + synapse/rest/admin/experimental_features.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18874.misc diff --git a/changelog.d/18874.misc b/changelog.d/18874.misc new file mode 100644 index 0000000000..729befb5e8 --- /dev/null +++ b/changelog.d/18874.misc @@ -0,0 +1 @@ +Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. 
diff --git a/synapse/rest/admin/experimental_features.py b/synapse/rest/admin/experimental_features.py index afb71f4a0f..3d3015cef7 100644 --- a/synapse/rest/admin/experimental_features.py +++ b/synapse/rest/admin/experimental_features.py @@ -92,9 +92,9 @@ class ExperimentalFeaturesRestServlet(RestServlet): user_features = {} for feature in ExperimentalFeature: if feature in enabled_features: - user_features[feature] = True + user_features[feature.value] = True else: - user_features[feature] = False + user_features[feature.value] = False return HTTPStatus.OK, {"features": user_features} async def on_PUT( From 74fdbc7b759856431bb951af2bbb0cbc08048c05 Mon Sep 17 00:00:00 2001 From: Amin Farjadi <31803062+amin-farjadi@users.noreply.github.com> Date: Tue, 9 Sep 2025 09:51:36 +0100 Subject: [PATCH 06/54] Fix typo in structured_logging.md for file handler config (#18872) --- docs/structured_logging.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/structured_logging.md b/docs/structured_logging.md index 002565b223..761d6466dd 100644 --- a/docs/structured_logging.md +++ b/docs/structured_logging.md @@ -35,7 +35,7 @@ handlers: loggers: synapse: level: INFO - handlers: [remote] + handlers: [file] synapse.storage.SQL: level: WARNING ``` From d48e69ad4caaa30c19cddbe5b3fcf8f8c7de31ea Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 9 Sep 2025 09:53:08 +0100 Subject: [PATCH 07/54] Fix prefixed support for MSC4133 (#18875) This fixes two bugs that affect the availability of MSC4133 until the next spec release. 1. The servlet didn't recognise the unstable endpoint even when the homeserver advertised it. 2. The HS didn't advertise support for the stable prefixed version. This would only have been a problem until the next spec release, but it's nice to have it work before then. --- changelog.d/18875.bugfix | 1 + synapse/rest/client/profile.py | 6 ++++++ synapse/rest/client/versions.py | 1 + 3 files changed, 8 insertions(+) create mode 100644 changelog.d/18875.bugfix diff --git a/changelog.d/18875.bugfix b/changelog.d/18875.bugfix new file mode 100644 index 0000000000..3bda7a1d18 --- /dev/null +++ b/changelog.d/18875.bugfix @@ -0,0 +1 @@ +Fix prefixed support for MSC4133. diff --git a/synapse/rest/client/profile.py b/synapse/rest/client/profile.py index 243245f739..8bc532c811 100644 --- a/synapse/rest/client/profile.py +++ b/synapse/rest/client/profile.py @@ -109,6 +109,12 @@ class ProfileFieldRestServlet(RestServlet): self.hs = hs self.profile_handler = hs.get_profile_handler() self.auth = hs.get_auth() + if hs.config.experimental.msc4133_enabled: + self.PATTERNS.append( + re.compile( + r"^/_matrix/client/unstable/uk\.tcpip\.msc4133/profile/(?P<user_id>[^/]*)/(?P<field_name>[^/]*)" + ) + ) async def on_GET( self, request: SynapseRequest, user_id: str, field_name: str diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 7f78379534..1b8efd98cd 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -175,6 +175,7 @@ class VersionsRestServlet(RestServlet): "org.matrix.simplified_msc3575": msc3575_enabled, # Arbitrary key-value profile fields.
"uk.tcpip.msc4133": self.config.experimental.msc4133_enabled, + "uk.tcpip.msc4133.stable": True, # MSC4155: Invite filtering "org.matrix.msc4155": self.config.experimental.msc4155_enabled, # MSC4306: Support for thread subscriptions From 6fe8137a4a1cd7479c25982cbafaff2882f6c308 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Tue, 9 Sep 2025 11:40:10 +0100 Subject: [PATCH 08/54] Configure Synapse to run MSC4306: Thread Subscriptions Complement tests. (#18819) Pairs with: https://github.com/matrix-org/complement/pull/795 Signed-off-by: Olivier 'reivilibre --- changelog.d/18819.misc | 1 + changelog.d/18846.feature | 1 + .../conf/workers-shared-extra.yaml.j2 | 2 ++ rust/src/push/base_rules.rs | 14 +++++++----- rust/src/push/mod.rs | 1 + scripts-dev/complement.sh | 1 + synapse/push/clientformat.py | 2 +- synapse/push/rulekinds.py | 4 ++++ synapse/rest/client/push_rule.py | 11 ++++++++++ tests/rest/client/test_push_rule_attrs.py | 22 +++++++++++++++++++ 10 files changed, 53 insertions(+), 6 deletions(-) create mode 100644 changelog.d/18819.misc create mode 100644 changelog.d/18846.feature diff --git a/changelog.d/18819.misc b/changelog.d/18819.misc new file mode 100644 index 0000000000..c76e050e79 --- /dev/null +++ b/changelog.d/18819.misc @@ -0,0 +1 @@ +Configure Synapse to run MSC4306: Thread Subscriptions Complement tests. \ No newline at end of file diff --git a/changelog.d/18846.feature b/changelog.d/18846.feature new file mode 100644 index 0000000000..4a873d4446 --- /dev/null +++ b/changelog.d/18846.feature @@ -0,0 +1 @@ +Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow newer draft. \ No newline at end of file diff --git a/docker/complement/conf/workers-shared-extra.yaml.j2 b/docker/complement/conf/workers-shared-extra.yaml.j2 index 168c385191..94e74df9d1 100644 --- a/docker/complement/conf/workers-shared-extra.yaml.j2 +++ b/docker/complement/conf/workers-shared-extra.yaml.j2 @@ -133,6 +133,8 @@ experimental_features: msc3984_appservice_key_query: true # Invite filtering msc4155_enabled: true + # Thread Subscriptions + msc4306_enabled: true server_notices: system_mxid_localpart: _server diff --git a/rust/src/push/base_rules.rs b/rust/src/push/base_rules.rs index ec027ca251..47d5289006 100644 --- a/rust/src/push/base_rules.rs +++ b/rust/src/push/base_rules.rs @@ -289,10 +289,10 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule { default_enabled: true, }]; -pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[ +pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[ PushRule { - rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.unsubscribed_thread"), - priority_class: 1, + rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.unsubscribed_thread"), + priority_class: 6, conditions: Cow::Borrowed(&[Condition::Known( KnownCondition::Msc4306ThreadSubscription { subscribed: false }, )]), @@ -301,8 +301,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[ default_enabled: true, }, PushRule { - rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.subscribed_thread"), - priority_class: 1, + rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.subscribed_thread"), + priority_class: 6, conditions: Cow::Borrowed(&[Condition::Known( KnownCondition::Msc4306ThreadSubscription { subscribed: true }, )]), @@ -310,6 +310,9 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[ default: true, default_enabled: true, }, +]; + +pub const 
BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[ PushRule { rule_id: Cow::Borrowed("global/underride/.m.rule.call"), priority_class: 1, @@ -726,6 +729,7 @@ lazy_static! { .iter() .chain(BASE_APPEND_OVERRIDE_RULES.iter()) .chain(BASE_APPEND_CONTENT_RULES.iter()) + .chain(BASE_APPEND_POSTCONTENT_RULES.iter()) .chain(BASE_APPEND_UNDERRIDE_RULES.iter()) .map(|rule| { (&*rule.rule_id, rule) }) .collect(); diff --git a/rust/src/push/mod.rs b/rust/src/push/mod.rs index b07a12e5cc..b0cedd758c 100644 --- a/rust/src/push/mod.rs +++ b/rust/src/push/mod.rs @@ -527,6 +527,7 @@ impl PushRules { .chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter()) .chain(self.content.iter()) .chain(base_rules::BASE_APPEND_CONTENT_RULES.iter()) + .chain(base_rules::BASE_APPEND_POSTCONTENT_RULES.iter()) .chain(self.room.iter()) .chain(self.sender.iter()) .chain(self.underride.iter()) diff --git a/scripts-dev/complement.sh b/scripts-dev/complement.sh index 08b500ecd6..c4d678b142 100755 --- a/scripts-dev/complement.sh +++ b/scripts-dev/complement.sh @@ -230,6 +230,7 @@ test_packages=( ./tests/msc3967 ./tests/msc4140 ./tests/msc4155 + ./tests/msc4306 ) # Enable dirty runs, so tests will reuse the same container where possible. diff --git a/synapse/push/clientformat.py b/synapse/push/clientformat.py index b4afcfd85b..4f647491f1 100644 --- a/synapse/push/clientformat.py +++ b/synapse/push/clientformat.py @@ -91,7 +91,7 @@ def _rule_to_template(rule: PushRule) -> Optional[Dict[str, Any]]: unscoped_rule_id = _rule_id_from_namespaced(rule.rule_id) template_name = _priority_class_to_template_name(rule.priority_class) - if template_name in ["override", "underride"]: + if template_name in ["override", "underride", "postcontent"]: templaterule = {"conditions": rule.conditions, "actions": rule.actions} elif template_name in ["sender", "room"]: templaterule = {"actions": rule.actions} diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py index 781ecc7fae..2eff626f92 100644 --- a/synapse/push/rulekinds.py +++ b/synapse/push/rulekinds.py @@ -19,10 +19,14 @@ # # +# Integer literals for push rule `kind`s +# This is used to store them in the database. PRIORITY_CLASS_MAP = { "underride": 1, "sender": 2, "room": 3, + # MSC4306 + "postcontent": 6, "content": 4, "override": 5, } diff --git a/synapse/rest/client/push_rule.py b/synapse/rest/client/push_rule.py index af042504c9..c20de89bf7 100644 --- a/synapse/rest/client/push_rule.py +++ b/synapse/rest/client/push_rule.py @@ -19,9 +19,11 @@ # # +from http import HTTPStatus from typing import TYPE_CHECKING, List, Tuple, Union from synapse.api.errors import ( + Codes, NotFoundError, StoreError, SynapseError, @@ -239,6 +241,15 @@ def _rule_spec_from_path(path: List[str]) -> RuleSpec: def _rule_tuple_from_request_object( rule_template: str, rule_id: str, req_obj: JsonDict ) -> Tuple[List[JsonDict], List[Union[str, JsonDict]]]: + if rule_template == "postcontent": + # postcontent is from MSC4306, which says that clients + # cannot create their own postcontent rules right now. 
+ raise SynapseError( + HTTPStatus.BAD_REQUEST, + "user-defined rules using `postcontent` are not accepted", + errcode=Codes.INVALID_PARAM, + ) + if rule_template in ["override", "underride"]: if "conditions" not in req_obj: raise InvalidRuleException("Missing 'conditions'") diff --git a/tests/rest/client/test_push_rule_attrs.py b/tests/rest/client/test_push_rule_attrs.py index 9da0e7982f..53c36b7a9c 100644 --- a/tests/rest/client/test_push_rule_attrs.py +++ b/tests/rest/client/test_push_rule_attrs.py @@ -18,6 +18,8 @@ # [This file includes modifications made by New Vector Limited] # # +from http import HTTPStatus + import synapse from synapse.api.errors import Codes from synapse.rest.client import login, push_rule, room @@ -486,3 +488,23 @@ class PushRuleAttributesTestCase(HomeserverTestCase): }, channel.json_body, ) + + def test_no_user_defined_postcontent_rules(self) -> None: + """ + Tests that clients are not permitted to create MSC4306 `postcontent` rules. + """ + self.register_user("bob", "pass") + token = self.login("bob", "pass") + + channel = self.make_request( + "PUT", + "/pushrules/global/postcontent/some.user.rule", + {}, + access_token=token, + ) + + self.assertEqual(channel.code, HTTPStatus.BAD_REQUEST) + self.assertEqual( + Codes.INVALID_PARAM, + channel.json_body["errcode"], + ) From 35e7e659f6ccf8835ab21eb10aac9a5126c32b5f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:49:22 +0100 Subject: [PATCH 09/54] Bump actions/setup-python from 5.6.0 to 6.0.0 (#18890) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs-pr.yaml | 2 +- .github/workflows/docs.yaml | 2 +- .github/workflows/latest_deps.yml | 2 +- .github/workflows/poetry_lockfile.yaml | 2 +- .github/workflows/release-artifacts.yml | 8 ++++---- .github/workflows/schema.yaml | 4 ++-- .github/workflows/tests.yml | 12 ++++++------ 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index e4b8a4c9f2..a0af38a6c5 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -24,7 +24,7 @@ jobs: mdbook-version: '0.4.17' - name: Setup python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 0ca45a39cf..f260a4f804 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -64,7 +64,7 @@ jobs: run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js - name: Setup python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index e97109d5fa..2bda13fd65 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -93,7 +93,7 @@ jobs: -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \ postgres:${{ matrix.postgres-version }} - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - run: pip install 
.[all,test] diff --git a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml index 5c8a0d7117..19468c2d92 100644 --- a/.github/workflows/poetry_lockfile.yaml +++ b/.github/workflows/poetry_lockfile.yaml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: '3.x' - run: pip install tomli diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 8a43f696dc..1217171b5a 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -28,7 +28,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - id: set-distros @@ -74,7 +74,7 @@ jobs: ${{ runner.os }}-buildx- - name: Set up python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" @@ -134,7 +134,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: # setup-python@v4 doesn't impose a default python version. Need to use 3.x # here, because `python` on osx points to Python 2.7. 
@@ -166,7 +166,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.10" diff --git a/.github/workflows/schema.yaml b/.github/workflows/schema.yaml index 53d6bace2c..6c416e762d 100644 --- a/.github/workflows/schema.yaml +++ b/.github/workflows/schema.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - name: Install check-jsonschema @@ -41,7 +41,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - name: Install PyYAML diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ae75369809..7dc3e5b4a1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -107,7 +107,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'" @@ -117,7 +117,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - run: .ci/scripts/check_lockfile.py @@ -199,7 +199,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - run: "pip install 'towncrier>=18.6.0rc1'" @@ -327,7 +327,7 @@ jobs: if: ${{ needs.changes.outputs.linting_readme == 'true' }} steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - run: "pip install rstcheck" @@ -377,7 +377,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: "3.x" - id: get-matrix @@ -468,7 +468,7 @@ jobs: sudo apt-get -qq install build-essential libffi-dev python3-dev \ libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: '3.9' From 3e865e403b773a28fe85d4c91a0f312c33575d8e Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:52:05 +0100 Subject: [PATCH 10/54] Bump actions/setup-go from 5.5.0 to 6.0.0 (#18891) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/latest_deps.yml | 2 +- .github/workflows/tests.yml | 2 +- .github/workflows/twisted_trunk.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 2bda13fd65..c1c3d8199c 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -209,7 +209,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7dc3e5b4a1..216c7da6be 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -727,7 +727,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index fbe1270767..edb3c44090 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -182,7 +182,7 @@ jobs: - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod From e235099ab9219e0af995d4ad58151f192279f79c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:52:59 +0100 Subject: [PATCH 11/54] Bump log from 0.4.27 to 0.4.28 (#18892) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 095fb38ce1..eff363de80 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -753,9 +753,9 @@ checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = "lru-slab" From 7951d41b4e435398d8d0abc39f9a2bbeb0c55473 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 15:53:24 +0100 Subject: [PATCH 12/54] Bump phonenumbers from 9.0.12 to 9.0.13 (#18893) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 866d52bc1e..8a72bdf2bc 100644 --- a/poetry.lock +++ 
b/poetry.lock @@ -1531,14 +1531,14 @@ files = [ [[package]] name = "phonenumbers" -version = "9.0.12" +version = "9.0.13" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." optional = false python-versions = "*" groups = ["main"] files = [ - {file = "phonenumbers-9.0.12-py2.py3-none-any.whl", hash = "sha256:900633afc3e12191458d710262df5efc117838bd1e2e613b64fa254a86bb20a1"}, - {file = "phonenumbers-9.0.12.tar.gz", hash = "sha256:ccadff6b949494bd606836d8c9678bee5b55cb1cbad1e98bf7adae108e6fd0be"}, + {file = "phonenumbers-9.0.13-py2.py3-none-any.whl", hash = "sha256:b97661e177773e7509c6d503e0f537cd0af22aa3746231654590876eb9430915"}, + {file = "phonenumbers-9.0.13.tar.gz", hash = "sha256:eca06e01382412c45316868f86a44bb217c02f9ee7196589041556a2f54a7639"}, ] [[package]] From ca655e4020f7970a525fe76d95f7aef259dfc1a9 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 9 Sep 2025 10:10:34 -0500 Subject: [PATCH 13/54] Start background tasks after we fork the process (daemonize) (#18886) Spawning from https://github.com/element-hq/synapse/pull/18871 [This change](https://github.com/element-hq/synapse/pull/18871/commits/6ce2f3e59d6dcd8a798552a087414cf137d0ddad) was originally used to fix CPU time going backwards when we `daemonize`. While we don't seem to run into this problem on `develop`, I still think this is a good change to make. We don't need background tasks running on a process that will soon be forcefully exited and where the reactor isn't even running yet. We now kick off the background tasks (`run_as_background_process`) after we have forked the process and started the reactor. Also, as a simple note, we don't need background tasks running in both halves of a fork. --- changelog.d/18886.misc | 1 + synapse/_scripts/update_synapse_database.py | 7 +++++++ synapse/app/_base.py | 22 +++++++++++++++++++-- synapse/server.py | 8 +------- tests/server.py | 10 ++++++++++ 5 files changed, 39 insertions(+), 9 deletions(-) create mode 100644 changelog.d/18886.misc diff --git a/changelog.d/18886.misc b/changelog.d/18886.misc new file mode 100644 index 0000000000..d0d32e59ab --- /dev/null +++ b/changelog.d/18886.misc @@ -0,0 +1 @@ +Start background tasks after we fork the process (daemonize). diff --git a/synapse/_scripts/update_synapse_database.py b/synapse/_scripts/update_synapse_database.py index 70e5598418..3624db3544 100644 --- a/synapse/_scripts/update_synapse_database.py +++ b/synapse/_scripts/update_synapse_database.py @@ -120,6 +120,13 @@ def main() -> None: # DB. hs.setup() + # This will cause all of the relevant storage classes to be instantiated and call + # `register_background_update_handler(...)`, + # `register_background_index_update(...)`, + # `register_background_validate_constraint(...)`, etc. so they are available to use + # if we are asked to run those background updates. + hs.get_storage_controllers() + if args.run_background_updates: run_background_updates(hs) diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 48989540bb..bce6f4d82f 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -609,10 +609,28 @@ async def start(hs: "HomeServer") -> None: setup_sentry(hs) setup_sdnotify(hs) - # If background tasks are running on the main process or this is the worker in - # charge of them, start collecting the phone home stats and shared usage metrics. + # Register background tasks required by this server.
This must be done + # somewhat manually due to the background tasks not being registered + # unless handlers are instantiated. + # + # While we could "start" these before the reactor runs, nothing will happen until + # the reactor is running, so we may as well do it here in `start`. + # + # Additionally, this means we also start them after we daemonize and fork the + # process, which means we can avoid any potential problems with cputime metrics + # getting confused about the per-thread resource usage appearing to go backwards + # because we're comparing the resource usage (`rusage`) from the original process to + # the forked process. if hs.config.worker.run_background_tasks: + hs.start_background_tasks() + + # TODO: This should be moved to the same pattern we use for other background tasks: + # Add to `REQUIRED_ON_BACKGROUND_TASK_STARTUP` and rely on + # `start_background_tasks` to start it. await hs.get_common_usage_metrics_manager().setup() + + # TODO: This feels like another pattern that should be refactored as one of the + # `REQUIRED_ON_BACKGROUND_TASK_STARTUP` start_phone_stats_home(hs) # We now freeze all allocated objects in the hopes that (almost) diff --git a/synapse/server.py b/synapse/server.py index 3eac271c90..3fb29a7817 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -366,12 +366,6 @@ class HomeServer(metaclass=abc.ABCMeta): self.datastores = Databases(self.DATASTORE_CLASS, self) logger.info("Finished setting up.") - # Register background tasks required by this server. This must be done - # somewhat manually due to the background tasks not being registered - # unless handlers are instantiated. - if self.config.worker.run_background_tasks: - self.setup_background_tasks() - def __del__(self) -> None: """ Called when the homeserver is garbage collected. @@ -410,7 +404,7 @@ class HomeServer(metaclass=abc.ABCMeta): appropriate listeners. """ - def setup_background_tasks(self) -> None: + def start_background_tasks(self) -> None: """ Some handlers have side effects on instantiation (like registering background updates). This function causes them to be fetched, and diff --git a/tests/server.py b/tests/server.py index ebff8b04b3..7432db1ac8 100644 --- a/tests/server.py +++ b/tests/server.py @@ -1160,6 +1160,16 @@ def setup_test_homeserver( with patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool): hs.setup() + # Register background tasks required by this server. This must be done + # somewhat manually due to the background tasks not being registered + # unless handlers are instantiated. + # + # Since we don't have to worry about `daemonize` (forking the process) in tests, we + # can just start the background tasks straight away after `hs.setup`. (Compare this + # with where we call `hs.start_background_tasks()` outside of the test environment). + if hs.config.worker.run_background_tasks: + hs.start_background_tasks() + # Since we've changed the databases to run DB transactions on the same # thread, we need to stop the event fetcher hogging that one thread.
hs.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False From 6146dbad3e834c7efafc20770c5754571704f1d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 16:39:17 +0100 Subject: [PATCH 14/54] Bump towncrier from 24.8.0 to 25.8.0 (#18894) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8a72bdf2bc..083824d076 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2747,14 +2747,14 @@ files = [ [[package]] name = "towncrier" -version = "24.8.0" +version = "25.8.0" description = "Building newsfiles for your project." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "towncrier-24.8.0-py3-none-any.whl", hash = "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d"}, - {file = "towncrier-24.8.0.tar.gz", hash = "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3"}, + {file = "towncrier-25.8.0-py3-none-any.whl", hash = "sha256:b953d133d98f9aeae9084b56a3563fd2519dfc6ec33f61c9cd2c61ff243fb513"}, + {file = "towncrier-25.8.0.tar.gz", hash = "sha256:eef16d29f831ad57abb3ae32a0565739866219f1ebfbdd297d32894eb9940eb1"}, ] [package.dependencies] From e43bf101879a781ec5faada3158027521b00e9a1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 16:39:44 +0100 Subject: [PATCH 15/54] Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809 (#18895) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 083824d076..2159950116 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3011,14 +3011,14 @@ files = [ [[package]] name = "types-requests" -version = "2.32.4.20250611" +version = "2.32.4.20250809" description = "Typing stubs for requests" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_requests-2.32.4.20250611-py3-none-any.whl", hash = "sha256:ad2fe5d3b0cb3c2c902c8815a70e7fb2302c4b8c1f77bdcd738192cdb3878072"}, - {file = "types_requests-2.32.4.20250611.tar.gz", hash = "sha256:741c8777ed6425830bf51e54d6abe245f79b4dcb9019f1622b773463946bf826"}, + {file = "types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163"}, + {file = "types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3"}, ] [package.dependencies] From 92bdf77c3f06171874c41ebfd0849adf29d1d93c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 16:41:19 +0100 Subject: [PATCH 16/54] Bump jsonschema from 4.25.0 to 4.25.1 (#18897) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2159950116..016be0eb6b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -919,14 +919,14 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.25.0" +version = "4.25.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = 
"jsonschema-4.25.0-py3-none-any.whl", hash = "sha256:24c2e8da302de79c8b9382fee3e76b355e44d2a4364bb207159ce10b517bd716"}, - {file = "jsonschema-4.25.0.tar.gz", hash = "sha256:e63acf5c11762c0e6672ffb61482bdf57f0876684d8d249c0fe2d730d48bc55f"}, + {file = "jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63"}, + {file = "jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85"}, ] [package.dependencies] From c68c5dd07bcba848e3278ef953e14a9472f30b1f Mon Sep 17 00:00:00 2001 From: reivilibre Date: Tue, 9 Sep 2025 18:37:04 +0100 Subject: [PATCH 17/54] Update push rules for experimental MSC4306: Thread Subscriptions to follow newer draft. (#18846) Follows: #18762 Implements: MSC4306 Closes: #18431 Closes: #18437 Move the MSC4306 push rules to a new kind `postcontent` Prevent users from creating user-defined `postcontent` rules --------- Signed-off-by: Olivier 'reivilibre --- tests/push/test_bulk_push_rule_evaluator.py | 133 +++++++++++++++++++- 1 file changed, 132 insertions(+), 1 deletion(-) diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py index fad5c7affb..7342a72dff 100644 --- a/tests/push/test_bulk_push_rule_evaluator.py +++ b/tests/push/test_bulk_push_rule_evaluator.py @@ -19,6 +19,7 @@ # # +from http import HTTPStatus from typing import Any, Optional from unittest.mock import AsyncMock, patch @@ -30,7 +31,7 @@ from synapse.api.constants import EventContentFields, EventTypes, RelationTypes from synapse.api.room_versions import RoomVersions from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator from synapse.rest import admin -from synapse.rest.client import login, register, room +from synapse.rest.client import login, push_rule, register, room from synapse.server import HomeServer from synapse.types import JsonDict, create_requester from synapse.util import Clock @@ -44,6 +45,7 @@ class TestBulkPushRuleEvaluator(HomeserverTestCase): room.register_servlets, login.register_servlets, register.register_servlets, + push_rule.register_servlets, ] def prepare( @@ -494,6 +496,135 @@ class TestBulkPushRuleEvaluator(HomeserverTestCase): ) ) + @override_config({"experimental_features": {"msc4306_enabled": True}}) + def test_thread_subscriptions_suppression_after_keyword_mention_overrides( + self, + ) -> None: + """ + Tests one of the purposes of the `postcontent` push rule section: + When a keyword mention is configured (in the `content` section), + it does not get suppressed by the thread being unsubscribed. 
+ """ + # add a keyword mention to alice's push rules + channel = self.make_request( + "PUT", + "/_matrix/client/v3/pushrules/global/content/biscuits", + {"pattern": "biscuits", "actions": ["notify"]}, + access_token=self.token, + ) + self.assertEqual(channel.code, HTTPStatus.OK) + + bulk_evaluator = BulkPushRuleEvaluator(self.hs) + (thread_root_id,) = self.helper.send_messages(self.room_id, 1, tok=self.token) + + self.assertFalse( + self._create_and_process( + bulk_evaluator, + { + "msgtype": "m.text", + "body": "do you want some cookies?", + "m.relates_to": { + "rel_type": RelationTypes.THREAD, + "event_id": thread_root_id, + }, + }, + type="m.room.message", + ), + "alice is not subscribed to thread and does not have a mention on 'cookies' so should not be notified", + ) + + self.assertTrue( + self._create_and_process( + bulk_evaluator, + { + "msgtype": "m.text", + "body": "biscuits are available in the kitchen", + "m.relates_to": { + "rel_type": RelationTypes.THREAD, + "event_id": thread_root_id, + }, + }, + type="m.room.message", + ), + "alice is not subscribed to thread but DOES have a mention on 'biscuits' so should be notified", + ) + + @override_config({"experimental_features": {"msc4306_enabled": True}}) + def test_thread_subscriptions_notification_before_keywords_and_mentions( + self, + ) -> None: + """ + Tests one of the purposes of the `postcontent` push rule section: + When a room is set to (what is commonly known as) 'keywords & mentions', we still receive notifications + for messages in threads that we are subscribed to. + Effectively making this 'keywords, mentions & subscriptions' + """ + # add a 'keywords & mentions' setting to the room alice's push rules + # In case this rule isn't clear: by adding a rule in the `room` section that does nothing, + # it stops execution of the push rules before we fall through to the `underride` section, + # where intuitively many kinds of messages will ambiently generate notifications. + # Mentions and keywords are triggered before the `room` block, so this doesn't suppress those. 
+ channel = self.make_request( + "PUT", + f"/_matrix/client/v3/pushrules/global/room/{self.room_id}", + {"actions": []}, + access_token=self.token, + ) + self.assertEqual(channel.code, HTTPStatus.OK) + + bulk_evaluator = BulkPushRuleEvaluator(self.hs) + (thread_root_id,) = self.helper.send_messages(self.room_id, 1, tok=self.token) + + # sanity check that our mentions still work + self.assertFalse( + self._create_and_process( + bulk_evaluator, + { + "msgtype": "m.text", + "body": "this is a plain message with no mention", + }, + type="m.room.message", + ), + "alice should not be notified (mentions & keywords room setting)", + ) + self.assertTrue( + self._create_and_process( + bulk_evaluator, + { + "msgtype": "m.text", + "body": "this is a message that mentions alice", + }, + type="m.room.message", + ), + "alice should be notified (mentioned)", + ) + + # let's have alice subscribe to the thread + self.get_success( + self.hs.get_datastores().main.subscribe_user_to_thread( + self.alice, + self.room_id, + thread_root_id, + automatic_event_orderings=None, + ) + ) + + self.assertTrue( + self._create_and_process( + bulk_evaluator, + { + "msgtype": "m.text", + "body": "some message in the thread", + "m.relates_to": { + "rel_type": RelationTypes.THREAD, + "event_id": thread_root_id, + }, + }, + type="m.room.message", + ), + "alice is subscribed to thread so should be notified", + ) + def test_with_disabled_thread_subscriptions(self) -> None: """ Test what happens with threaded events when MSC4306 is disabled. From 9cc400177822805e2a08d4d934daad6f3bc2a4df Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 10 Sep 2025 10:22:53 -0500 Subject: [PATCH 18/54] Better explain logcontext in `run_in_background(...)` and `run_as_background_process(...)` (#18900) Also adds a section in the docs explaining the `sentinel` logcontext. Spawning from https://github.com/element-hq/synapse/pull/18870 ### Testing strategy 1. Run Synapse normally and with `daemonize: true`: `poetry run synapse_homeserver --config-path homeserver.yaml` 1. Execute some requests 1. Shutdown the server 1. Look for any bad log entries in your homeserver logs: - `Expected logging context sentinel but found main` - `Expected logging context main was lost` - `Expected previous context` - `utime went backwards!`/`stime went backwards!` - `Called stop on logcontext POST-0 without recording a start rusage` - `Background process re-entered without a proc` Twisted trial tests: 1. Run full Twisted trial test suite. 1. Check the logs for `Test starting with non-sentinel logging context ...` --- changelog.d/18900.misc | 1 + docs/log_contexts.md | 22 +++ synapse/logging/context.py | 142 +++++++++++++----- synapse/metrics/background_process_metrics.py | 17 +++ 4 files changed, 146 insertions(+), 36 deletions(-) create mode 100644 changelog.d/18900.misc diff --git a/changelog.d/18900.misc b/changelog.d/18900.misc new file mode 100644 index 0000000000..d7d8b47eb0 --- /dev/null +++ b/changelog.d/18900.misc @@ -0,0 +1 @@ +Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. diff --git a/docs/log_contexts.md b/docs/log_contexts.md index 9d087d11ef..4e0c0e64f7 100644 --- a/docs/log_contexts.md +++ b/docs/log_contexts.md @@ -59,6 +59,28 @@ def do_request_handling(): logger.debug("phew") ``` +### The `sentinel` context + +The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty +sentinel value to represent the root logcontext. 
This is what is used when there is no +other logcontext set. The phrase "clear/reset the logcontext" means to set the current +logcontext to the `sentinel` logcontext. + +No CPU/database usage metrics are recorded against the `sentinel` logcontext. + +Ideally, nothing from the Synapse homeserver would be logged against the `sentinel` +logcontext as we want to know which server the logs came from. In practice, this is not +always the case yet especially outside of request handling. + +Global things outside of Synapse (e.g. Twisted reactor code) should run in the +`sentinel` logcontext. It's only when it calls into application code that a logcontext +gets activated. This means the reactor should be started in the `sentinel` logcontext, +and any time an awaitable yields control back to the reactor, it should reset the +logcontext to be the `sentinel` logcontext. This is important to avoid leaking the +current logcontext to the reactor (which would then get picked up and associated with +the next thing the reactor does). + + ## Using logcontexts with awaitables Awaitables break the linear flow of code so that there is no longer a single entry point diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 7ea3f3d726..6eaa19d2f6 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -227,7 +227,16 @@ LoggingContextOrSentinel = Union["LoggingContext", "_Sentinel"] class _Sentinel: - """Sentinel to represent the root context""" + """ + Sentinel to represent the root context + + This should only be used for tasks outside of Synapse like when we yield control + back to the Twisted reactor (event loop) so we don't leak the current logging + context to other tasks that are scheduled next in the event loop. + + Nothing from the Synapse homeserver should be logged with the sentinel context. i.e. + we should always know which server the logs are coming from. + """ __slots__ = ["previous_context", "finished", "request", "tag"] @@ -616,9 +625,17 @@ class LoggingContextFilter(logging.Filter): class PreserveLoggingContext: - """Context manager which replaces the logging context + """ + Context manager which replaces the logging context - The previous logging context is restored on exit.""" + The previous logging context is restored on exit. + + `make_deferred_yieldable` is pretty equivalent to using `with + PreserveLoggingContext():` (using the default sentinel context), i.e. it clears the + logcontext before awaiting (and so before execution passes back to the reactor) and + restores the old context once the awaitable completes (execution passes from the + reactor back to the code). + """ __slots__ = ["_old_context", "_new_context"] @@ -784,6 +801,14 @@ def run_in_background( return from the function, and that the sentinel context is set once the deferred returned by the function completes. + To explain how the log contexts work here: + - When this function is called, the current context is stored ("original"), we kick + off the background task, and we restore that original context before returning + - When the background task finishes, we don't want to leak our context into the + reactor which would erroneously get attached to the next operation picked up by + the event loop. We add a callback to the deferred which will clear the logging + context after it finishes and yields control back to the reactor. 
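+
+    A rough usage sketch (`fetch_thing` is a made-up async function here, not a
+    real Synapse helper):
+
+    ```python
+    d = run_in_background(fetch_thing, thing_id)
+    # ... the caller's logcontext is still active here ...
+    result = await make_deferred_yieldable(d)
+    ```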
+ Useful for wrapping functions that return a deferred or coroutine, which you don't yield or await on (for instance because you want to pass it to deferred.gatherResults()). @@ -795,8 +820,13 @@ def run_in_background( `f` doesn't raise any deferred exceptions, otherwise a scary-looking CRITICAL error about an unhandled error will be logged without much indication about where it came from. + + Returns: + Deferred which returns the result of func, or `None` if func raises. + Note that the returned Deferred does not follow the synapse logcontext + rules. """ - current = current_context() + calling_context = current_context() try: res = f(*args, **kwargs) except Exception: @@ -806,6 +836,9 @@ def run_in_background( # `res` may be a coroutine, `Deferred`, some other kind of awaitable, or a plain # value. Convert it to a `Deferred`. + # + # Wrapping the value in a deferred has the side effect of executing the coroutine, + # if it is one. If it's already a deferred, then we can just use that. d: "defer.Deferred[R]" if isinstance(res, typing.Coroutine): # Wrap the coroutine in a `Deferred`. @@ -820,20 +853,24 @@ def run_in_background( # `res` is a plain value. Wrap it in a `Deferred`. d = defer.succeed(res) + # The deferred has already completed if d.called and not d.paused: # The function should have maintained the logcontext, so we can # optimise out the messing about return d - # The function may have reset the context before returning, so - # we need to restore it now. - ctx = set_current_context(current) + # The function may have reset the context before returning, so we need to restore it + # now. + # + # Our goal is to have the caller logcontext unchanged after firing off the + # background task and returning. + set_current_context(calling_context) - # The original context will be restored when the deferred - # completes, but there is nothing waiting for it, so it will - # get leaked into the reactor or some other function which - # wasn't expecting it. We therefore need to reset the context - # here. + # The original logcontext will be restored when the deferred completes, but + # there is nothing waiting for it, so it will get leaked into the reactor (which + # would then get picked up by the next thing the reactor does). We therefore + # need to reset the logcontext here (set the `sentinel` logcontext) before + # yielding control back to the reactor. # # (If this feels asymmetric, consider it this way: we are # effectively forking a new thread of execution. We are @@ -841,7 +878,7 @@ def run_in_background( # which is supposed to have a single entry and exit point. But # by spawning off another deferred, we are effectively # adding a new exit point.) - d.addBoth(_set_context_cb, ctx) + d.addBoth(_set_context_cb, SENTINEL_CONTEXT) return d @@ -859,20 +896,34 @@ def run_coroutine_in_background( coroutine directly rather than a function. We can do this because coroutines do not run until called, and so calling an async function without awaiting cannot change the log contexts. - """ - current = current_context() + This is an ergonomic helper so we can do this: + ```python + run_coroutine_in_background(func1(arg1)) + ``` + Rather than having to do this: + ```python + run_in_background(lambda: func1(arg1)) + ``` + """ + calling_context = current_context() + + # Wrap the coroutine in a deferred, which will have the side effect of executing the + # coroutine in the background. 
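+    # (`ensureDeferred` starts iterating the coroutine straight away, running it
+    # synchronously up to its first suspension point; the resulting deferred
+    # fires once the coroutine completes.)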
d = defer.ensureDeferred(coroutine) - # The function may have reset the context before returning, so - # we need to restore it now. - ctx = set_current_context(current) + # The function may have reset the context before returning, so we need to restore it + # now. + # + # Our goal is to have the caller logcontext unchanged after firing off the + # background task and returning. + set_current_context(calling_context) - # The original context will be restored when the deferred - # completes, but there is nothing waiting for it, so it will - # get leaked into the reactor or some other function which - # wasn't expecting it. We therefore need to reset the context - # here. + # The original logcontext will be restored when the deferred completes, but + # there is nothing waiting for it, so it will get leaked into the reactor (which + # would then get picked up by the next thing the reactor does). We therefore + # need to reset the logcontext here (set the `sentinel` logcontext) before + # yielding control back to the reactor. # # (If this feels asymmetric, consider it this way: we are # effectively forking a new thread of execution. We are @@ -880,7 +931,7 @@ def run_coroutine_in_background( # which is supposed to have a single entry and exit point. But # by spawning off another deferred, we are effectively # adding a new exit point.) - d.addBoth(_set_context_cb, ctx) + d.addBoth(_set_context_cb, SENTINEL_CONTEXT) return d @@ -888,24 +939,43 @@ T = TypeVar("T") def make_deferred_yieldable(deferred: "defer.Deferred[T]") -> "defer.Deferred[T]": - """Given a deferred, make it follow the Synapse logcontext rules: - - If the deferred has completed, essentially does nothing (just returns another - completed deferred with the result/failure). - - If the deferred has not yet completed, resets the logcontext before - returning a deferred. Then, when the deferred completes, restores the - current logcontext before running callbacks/errbacks. - - (This is more-or-less the opposite operation to run_in_background.) """ + Given a deferred, make it follow the Synapse logcontext rules: + + - If the deferred has completed, essentially does nothing (just returns another + completed deferred with the result/failure). + - If the deferred has not yet completed, resets the logcontext before returning a + incomplete deferred. Then, when the deferred completes, restores the current + logcontext before running callbacks/errbacks. + + This means the resultant deferred can be awaited without leaking the current + logcontext to the reactor (which would then get erroneously picked up by the next + thing the reactor does), and also means that the logcontext is preserved when the + deferred completes. + + (This is more-or-less the opposite operation to run_in_background in terms of how it + handles log contexts.) + + Pretty much equivalent to using `with PreserveLoggingContext():`, i.e. it clears the + logcontext before awaiting (and so before execution passes back to the reactor) and + restores the old context once the awaitable completes (execution passes from the + reactor back to the code). + """ + # The deferred has already completed if deferred.called and not deferred.paused: # it looks like this deferred is ready to run any callbacks we give it # immediately. We may as well optimise out the logcontext faffery. return deferred - # ok, we can't be sure that a yield won't block, so let's reset the - # logcontext, and add a callback to the deferred to restore it. 
+ # Our goal is to have the caller logcontext unchanged after they yield/await the + # returned deferred. + # + # When the caller yield/await's the returned deferred, it may yield + # control back to the reactor. To avoid leaking the current logcontext to the + # reactor (which would then get erroneously picked up by the next thing the reactor + # does) while the deferred runs in the reactor event loop, we reset the logcontext + # and add a callback to the deferred to restore it so the caller's logcontext is + # active when the deferred completes. prev_context = set_current_context(SENTINEL_CONTEXT) deferred.addBoth(_set_context_cb, prev_context) return deferred diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index f7f2d88885..c6ee21d42a 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -228,6 +228,11 @@ def run_as_background_process( clock.looping_call and friends (or for firing-and-forgetting in the middle of a normal synapse async function). + Because the returned Deferred does not follow the synapse logcontext rules, awaiting + the result of this function will result in the log context being cleared (bad). In + order to properly await the result of this function and maintain the current log + context, use `make_deferred_yieldable`. + Args: desc: a description for this background process type server_name: The homeserver name that this background process is being run for @@ -280,6 +285,18 @@ def run_as_background_process( name=desc, **{SERVER_NAME_LABEL: server_name} ).dec() + # To explain how the log contexts work here: + # - When this function is called, the current context is stored (using + # `PreserveLoggingContext`), we kick off the background task, and we restore the + # original context before returning (also part of `PreserveLoggingContext`). + # - When the background task finishes, we don't want to leak our background context + # into the reactor which would erroneously get attached to the next operation + # picked up by the event loop. We use `PreserveLoggingContext` to set the + # `sentinel` context and means the new `BackgroundProcessLoggingContext` will + # remember the `sentinel` context as its previous context to return to when it + # exits and yields control back to the reactor. + # + # TODO: Why can't we simplify to using `return run_in_background(run)`? with PreserveLoggingContext(): # Note that we return a Deferred here so that it can be used in a # looping_call and other places that expect a Deferred. From ada3a3b2b3d0471a78860782fc02afe7d2fb5aeb Mon Sep 17 00:00:00 2001 From: reivilibre Date: Thu, 11 Sep 2025 14:45:04 +0100 Subject: [PATCH 19/54] Add experimental support for MSC4308: Thread Subscriptions extension to Sliding Sync when MSC4306 and MSC4186 are enabled. 
(#18695) Closes: #18436 Implements: https://github.com/matrix-org/matrix-spec-proposals/pull/4308 Follows: #18674 Adds an extension to Sliding Sync and a companion endpoint needed for backpaginating missed thread subscription changes, as described in MSC4308 --------- Signed-off-by: Olivier 'reivilibre Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/18695.feature | 1 + synapse/config/experimental.py | 2 +- .../federation/transport/server/__init__.py | 2 +- synapse/handlers/sliding_sync/__init__.py | 2 +- synapse/handlers/sliding_sync/extensions.py | 97 +++- synapse/handlers/thread_subscriptions.py | 25 +- synapse/http/servlet.py | 10 + synapse/notifier.py | 1 + synapse/replication/tcp/client.py | 7 + synapse/rest/client/sync.py | 52 +- synapse/rest/client/thread_subscriptions.py | 146 ++++- synapse/storage/databases/main/relations.py | 15 +- .../storage/databases/main/sliding_sync.py | 2 +- .../databases/main/thread_subscriptions.py | 41 +- ...hread_subscriptions_seq_fixup.sql.postgres | 19 + synapse/storage/util/id_generators.py | 8 +- synapse/streams/events.py | 19 +- synapse/types/__init__.py | 34 +- synapse/types/handlers/sliding_sync.py | 41 ++ synapse/types/rest/client/__init__.py | 15 + synapse/util/async_helpers.py | 18 + tests/rest/admin/test_room.py | 4 +- .../test_extension_thread_subscriptions.py | 497 ++++++++++++++++++ tests/rest/client/test_rooms.py | 4 +- tests/storage/test_thread_subscriptions.py | 20 +- 25 files changed, 1019 insertions(+), 63 deletions(-) create mode 100644 changelog.d/18695.feature create mode 100644 synapse/storage/schema/main/delta/92/08_thread_subscriptions_seq_fixup.sql.postgres create mode 100644 tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py diff --git a/changelog.d/18695.feature b/changelog.d/18695.feature new file mode 100644 index 0000000000..1481a27f23 --- /dev/null +++ b/changelog.d/18695.feature @@ -0,0 +1 @@ +Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. 
\ No newline at end of file diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index c1631f39e3..d086deab3f 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -590,5 +590,5 @@ class ExperimentalConfig(Config): self.msc4293_enabled: bool = experimental.get("msc4293_enabled", False) # MSC4306: Thread Subscriptions - # (and MSC4308: sliding sync extension for thread subscriptions) + # (and MSC4308: Thread Subscriptions extension to Sliding Sync) self.msc4306_enabled: bool = experimental.get("msc4306_enabled", False) diff --git a/synapse/federation/transport/server/__init__.py b/synapse/federation/transport/server/__init__.py index 174d02ab6b..c4905e63dd 100644 --- a/synapse/federation/transport/server/__init__.py +++ b/synapse/federation/transport/server/__init__.py @@ -135,7 +135,7 @@ class PublicRoomList(BaseFederationServlet): if not self.allow_access: raise FederationDeniedError(origin) - limit = parse_integer_from_args(query, "limit", 0) + limit: Optional[int] = parse_integer_from_args(query, "limit", 0) since_token = parse_string_from_args(query, "since", None) include_all_networks = parse_boolean_from_args( query, "include_all_networks", default=False diff --git a/synapse/handlers/sliding_sync/__init__.py b/synapse/handlers/sliding_sync/__init__.py index 071a271ab7..255a041d0e 100644 --- a/synapse/handlers/sliding_sync/__init__.py +++ b/synapse/handlers/sliding_sync/__init__.py @@ -211,7 +211,7 @@ class SlidingSyncHandler: Args: sync_config: Sync configuration - to_token: The point in the stream to sync up to. + to_token: The latest point in the stream to sync up to. from_token: The point in the stream to sync from. Token of the end of the previous batch. May be `None` if this is the initial sync request. """ diff --git a/synapse/handlers/sliding_sync/extensions.py b/synapse/handlers/sliding_sync/extensions.py index 077887ec32..25ee954b7f 100644 --- a/synapse/handlers/sliding_sync/extensions.py +++ b/synapse/handlers/sliding_sync/extensions.py @@ -27,7 +27,7 @@ from typing import ( cast, ) -from typing_extensions import assert_never +from typing_extensions import TypeAlias, assert_never from synapse.api.constants import AccountDataTypes, EduTypes from synapse.handlers.receipts import ReceiptEventSource @@ -40,6 +40,7 @@ from synapse.types import ( SlidingSyncStreamToken, StrCollection, StreamToken, + ThreadSubscriptionsToken, ) from synapse.types.handlers.sliding_sync import ( HaveSentRoomFlag, @@ -54,6 +55,13 @@ from synapse.util.async_helpers import ( gather_optional_coroutines, ) +_ThreadSubscription: TypeAlias = ( + SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadSubscription +) +_ThreadUnsubscription: TypeAlias = ( + SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadUnsubscription +) + if TYPE_CHECKING: from synapse.server import HomeServer @@ -68,6 +76,7 @@ class SlidingSyncExtensionHandler: self.event_sources = hs.get_event_sources() self.device_handler = hs.get_device_handler() self.push_rules_handler = hs.get_push_rules_handler() + self._enable_thread_subscriptions = hs.config.experimental.msc4306_enabled @trace async def get_extensions_response( @@ -93,7 +102,7 @@ class SlidingSyncExtensionHandler: actual_room_ids: The actual room IDs in the the Sliding Sync response. actual_room_response_map: A map of room ID to room results in the the Sliding Sync response. - to_token: The point in the stream to sync up to. + to_token: The latest point in the stream to sync up to. 
from_token: The point in the stream to sync from. """ @@ -156,18 +165,32 @@ class SlidingSyncExtensionHandler: from_token=from_token, ) + thread_subs_coro = None + if ( + sync_config.extensions.thread_subscriptions is not None + and self._enable_thread_subscriptions + ): + thread_subs_coro = self.get_thread_subscriptions_extension_response( + sync_config=sync_config, + thread_subscriptions_request=sync_config.extensions.thread_subscriptions, + to_token=to_token, + from_token=from_token, + ) + ( to_device_response, e2ee_response, account_data_response, receipts_response, typing_response, + thread_subs_response, ) = await gather_optional_coroutines( to_device_coro, e2ee_coro, account_data_coro, receipts_coro, typing_coro, + thread_subs_coro, ) return SlidingSyncResult.Extensions( @@ -176,6 +199,7 @@ class SlidingSyncExtensionHandler: account_data=account_data_response, receipts=receipts_response, typing=typing_response, + thread_subscriptions=thread_subs_response, ) def find_relevant_room_ids_for_extension( @@ -877,3 +901,72 @@ class SlidingSyncExtensionHandler: return SlidingSyncResult.Extensions.TypingExtension( room_id_to_typing_map=room_id_to_typing_map, ) + + async def get_thread_subscriptions_extension_response( + self, + sync_config: SlidingSyncConfig, + thread_subscriptions_request: SlidingSyncConfig.Extensions.ThreadSubscriptionsExtension, + to_token: StreamToken, + from_token: Optional[SlidingSyncStreamToken], + ) -> Optional[SlidingSyncResult.Extensions.ThreadSubscriptionsExtension]: + """Handle Thread Subscriptions extension (MSC4308) + + Args: + sync_config: Sync configuration + thread_subscriptions_request: The thread_subscriptions extension from the request + to_token: The point in the stream to sync up to. + from_token: The point in the stream to sync from. + + Returns: + the response (None if empty or thread subscriptions are disabled) + """ + if not thread_subscriptions_request.enabled: + return None + + limit = thread_subscriptions_request.limit + + if from_token: + from_stream_id = from_token.stream_token.thread_subscriptions_key + else: + from_stream_id = StreamToken.START.thread_subscriptions_key + + to_stream_id = to_token.thread_subscriptions_key + + updates = await self.store.get_latest_updated_thread_subscriptions_for_user( + user_id=sync_config.user.to_string(), + from_id=from_stream_id, + to_id=to_stream_id, + limit=limit, + ) + + if len(updates) == 0: + return None + + subscribed_threads: Dict[str, Dict[str, _ThreadSubscription]] = {} + unsubscribed_threads: Dict[str, Dict[str, _ThreadUnsubscription]] = {} + for stream_id, room_id, thread_root_id, subscribed, automatic in updates: + if subscribed: + subscribed_threads.setdefault(room_id, {})[thread_root_id] = ( + _ThreadSubscription( + automatic=automatic, + bump_stamp=stream_id, + ) + ) + else: + unsubscribed_threads.setdefault(room_id, {})[thread_root_id] = ( + _ThreadUnsubscription(bump_stamp=stream_id) + ) + + prev_batch = None + if len(updates) == limit: + # Tell the client about a potential gap where there may be more + # thread subscriptions for it to backpaginate. + # We subtract one because the 'later in the stream' bound is inclusive, + # and we already saw the element at index 0. 
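+            # Worked example: with `limit` = 3 and returned stream_ids
+            # [5, 6, 7], `updates[0][0]` is 5, so `prev_batch` becomes the
+            # token `ts4`, i.e. "entries with stream_id <= 4 may still be
+            # waiting to be fetched".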
+ prev_batch = ThreadSubscriptionsToken(updates[0][0] - 1) + + return SlidingSyncResult.Extensions.ThreadSubscriptionsExtension( + subscribed=subscribed_threads, + unsubscribed=unsubscribed_threads, + prev_batch=prev_batch, + ) diff --git a/synapse/handlers/thread_subscriptions.py b/synapse/handlers/thread_subscriptions.py index bda4342949..d56c915e0a 100644 --- a/synapse/handlers/thread_subscriptions.py +++ b/synapse/handlers/thread_subscriptions.py @@ -9,7 +9,7 @@ from synapse.storage.databases.main.thread_subscriptions import ( AutomaticSubscriptionConflicted, ThreadSubscription, ) -from synapse.types import EventOrderings, UserID +from synapse.types import EventOrderings, StreamKeyType, UserID if TYPE_CHECKING: from synapse.server import HomeServer @@ -22,6 +22,7 @@ class ThreadSubscriptionsHandler: self.store = hs.get_datastores().main self.event_handler = hs.get_event_handler() self.auth = hs.get_auth() + self._notifier = hs.get_notifier() async def get_thread_subscription_settings( self, @@ -132,6 +133,15 @@ class ThreadSubscriptionsHandler: errcode=Codes.MSC4306_CONFLICTING_UNSUBSCRIPTION, ) + if outcome is not None: + # wake up user streams (e.g. sliding sync) on the same worker + self._notifier.on_new_event( + StreamKeyType.THREAD_SUBSCRIPTIONS, + # outcome is a stream_id + outcome, + users=[user_id.to_string()], + ) + return outcome async def unsubscribe_user_from_thread( @@ -162,8 +172,19 @@ class ThreadSubscriptionsHandler: logger.info("rejecting thread subscriptions change (thread not accessible)") raise NotFoundError("No such thread root") - return await self.store.unsubscribe_user_from_thread( + outcome = await self.store.unsubscribe_user_from_thread( user_id.to_string(), event.room_id, thread_root_event_id, ) + + if outcome is not None: + # wake up user streams (e.g. sliding sync) on the same worker + self._notifier.on_new_event( + StreamKeyType.THREAD_SUBSCRIPTIONS, + # outcome is a stream_id + outcome, + users=[user_id.to_string()], + ) + + return outcome diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 47d8bd5eaf..69bdce2b83 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -130,6 +130,16 @@ def parse_integer( return parse_integer_from_args(args, name, default, required, negative) +@overload +def parse_integer_from_args( + args: Mapping[bytes, Sequence[bytes]], + name: str, + default: int, + required: Literal[False] = False, + negative: bool = False, +) -> int: ... 
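+# (This overload means a caller passing a plain `int` default gets `int` back
+# rather than `Optional[int]`, so no None check is needed.)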
+ + @overload def parse_integer_from_args( args: Mapping[bytes, Sequence[bytes]], diff --git a/synapse/notifier.py b/synapse/notifier.py index 7782c9ca65..e684df4866 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -532,6 +532,7 @@ class Notifier: StreamKeyType.TO_DEVICE, StreamKeyType.TYPING, StreamKeyType.UN_PARTIAL_STATED_ROOMS, + StreamKeyType.THREAD_SUBSCRIPTIONS, ], new_token: int, users: Optional[Collection[Union[str, UserID]]] = None, diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index ee9250cf7d..7a86b2e65e 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -44,6 +44,7 @@ from synapse.replication.tcp.streams import ( UnPartialStatedEventStream, UnPartialStatedRoomStream, ) +from synapse.replication.tcp.streams._base import ThreadSubscriptionsStream from synapse.replication.tcp.streams.events import ( EventsStream, EventsStreamEventRow, @@ -255,6 +256,12 @@ class ReplicationDataHandler: self._state_storage_controller.notify_event_un_partial_stated( row.event_id ) + elif stream_name == ThreadSubscriptionsStream.NAME: + self.notifier.on_new_event( + StreamKeyType.THREAD_SUBSCRIPTIONS, + token, + users=[row.user_id for row in rows], + ) await self._presence_handler.process_replication_rows( stream_name, instance_name, token, rows diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 6f2f6642be..c424ca5325 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -23,6 +23,8 @@ import logging from collections import defaultdict from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union +import attr + from synapse.api.constants import AccountDataTypes, EduTypes, Membership, PresenceState from synapse.api.errors import Codes, StoreError, SynapseError from synapse.api.filtering import FilterCollection @@ -632,12 +634,21 @@ class SyncRestServlet(RestServlet): class SlidingSyncRestServlet(RestServlet): """ - API endpoint for MSC3575 Sliding Sync `/sync`. Allows for clients to request a + API endpoint for MSC4186 Simplified Sliding Sync `/sync`, which was historically derived + from MSC3575 (Sliding Sync; now abandoned). Allows for clients to request a subset (sliding window) of rooms, state, and timeline events (just what they need) in order to bootstrap quickly and subscribe to only what the client cares about. Because the client can specify what it cares about, we can respond quickly and skip all of the work we would normally have to do with a sync v2 response. + Extensions of various features are defined in: + - to-device messaging (MSC3885) + - end-to-end encryption (MSC3884) + - typing notifications (MSC3961) + - receipts (MSC3960) + - account data (MSC3959) + - thread subscriptions (MSC4308) + Request query parameters: timeout: How long to wait for new events in milliseconds. pos: Stream position token when asking for incremental deltas. 
@@ -1074,9 +1085,48 @@ class SlidingSyncRestServlet(RestServlet): "rooms": extensions.typing.room_id_to_typing_map, } + # excludes both None and falsy `thread_subscriptions` + if extensions.thread_subscriptions: + serialized_extensions["io.element.msc4308.thread_subscriptions"] = ( + _serialise_thread_subscriptions(extensions.thread_subscriptions) + ) + return serialized_extensions +def _serialise_thread_subscriptions( + thread_subscriptions: SlidingSyncResult.Extensions.ThreadSubscriptionsExtension, +) -> JsonDict: + out: JsonDict = {} + + if thread_subscriptions.subscribed: + out["subscribed"] = { + room_id: { + thread_root_id: attr.asdict( + change, filter=lambda _attr, v: v is not None + ) + for thread_root_id, change in room_threads.items() + } + for room_id, room_threads in thread_subscriptions.subscribed.items() + } + + if thread_subscriptions.unsubscribed: + out["unsubscribed"] = { + room_id: { + thread_root_id: attr.asdict( + change, filter=lambda _attr, v: v is not None + ) + for thread_root_id, change in room_threads.items() + } + for room_id, room_threads in thread_subscriptions.unsubscribed.items() + } + + if thread_subscriptions.prev_batch: + out["prev_batch"] = thread_subscriptions.prev_batch.to_string() + + return out + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: SyncRestServlet(hs).register(http_server) diff --git a/synapse/rest/client/thread_subscriptions.py b/synapse/rest/client/thread_subscriptions.py index 4e7b5d06db..039aba1721 100644 --- a/synapse/rest/client/thread_subscriptions.py +++ b/synapse/rest/client/thread_subscriptions.py @@ -1,21 +1,39 @@ from http import HTTPStatus -from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Dict, Optional, Tuple + +import attr +from typing_extensions import TypeAlias from synapse.api.errors import Codes, NotFoundError, SynapseError from synapse.http.server import HttpServer from synapse.http.servlet import ( RestServlet, parse_and_validate_json_object_from_request, + parse_integer, + parse_string, ) from synapse.http.site import SynapseRequest from synapse.rest.client._base import client_patterns -from synapse.types import JsonDict, RoomID +from synapse.types import ( + JsonDict, + RoomID, + SlidingSyncStreamToken, + ThreadSubscriptionsToken, +) +from synapse.types.handlers.sliding_sync import SlidingSyncResult from synapse.types.rest import RequestBodyModel from synapse.util.pydantic_models import AnyEventId if TYPE_CHECKING: from synapse.server import HomeServer +_ThreadSubscription: TypeAlias = ( + SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadSubscription +) +_ThreadUnsubscription: TypeAlias = ( + SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadUnsubscription +) + class ThreadSubscriptionsRestServlet(RestServlet): PATTERNS = client_patterns( @@ -100,6 +118,130 @@ class ThreadSubscriptionsRestServlet(RestServlet): return HTTPStatus.OK, {} +class ThreadSubscriptionsPaginationRestServlet(RestServlet): + PATTERNS = client_patterns( + "/io.element.msc4308/thread_subscriptions$", + unstable=True, + releases=(), + ) + CATEGORY = "Thread Subscriptions requests (unstable)" + + # Maximum number of thread subscriptions to return in one request. 
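+    # Larger client-supplied `limit` values are clamped down to this; see the
+    # `min(...)` call in `on_GET` below.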
+ MAX_LIMIT = 512 + + def __init__(self, hs: "HomeServer"): + self.auth = hs.get_auth() + self.is_mine = hs.is_mine + self.store = hs.get_datastores().main + + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + requester = await self.auth.get_user_by_req(request) + + limit = min( + parse_integer(request, "limit", default=100, negative=False), + ThreadSubscriptionsPaginationRestServlet.MAX_LIMIT, + ) + from_end_opt = parse_string(request, "from", required=False) + to_start_opt = parse_string(request, "to", required=False) + _direction = parse_string(request, "dir", required=True, allowed_values=("b",)) + + if limit <= 0: + # condition needed because `negative=False` still allows 0 + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "limit must be greater than 0", + errcode=Codes.INVALID_PARAM, + ) + + if from_end_opt is not None: + try: + # because of backwards pagination, the `from` token is actually the + # bound closest to the end of the stream + end_stream_id = ThreadSubscriptionsToken.from_string( + from_end_opt + ).stream_id + except ValueError: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "`from` is not a valid token", + errcode=Codes.INVALID_PARAM, + ) + else: + end_stream_id = self.store.get_max_thread_subscriptions_stream_id() + + if to_start_opt is not None: + # because of backwards pagination, the `to` token is actually the + # bound closest to the start of the stream + try: + start_stream_id = ThreadSubscriptionsToken.from_string( + to_start_opt + ).stream_id + except ValueError: + # we also accept sliding sync `pos` tokens on this parameter + try: + sliding_sync_pos = await SlidingSyncStreamToken.from_string( + self.store, to_start_opt + ) + start_stream_id = ( + sliding_sync_pos.stream_token.thread_subscriptions_key + ) + except ValueError: + raise SynapseError( + HTTPStatus.BAD_REQUEST, + "`to` is not a valid token", + errcode=Codes.INVALID_PARAM, + ) + else: + # the start of time is ID 1; the lower bound is exclusive though + start_stream_id = 0 + + subscriptions = ( + await self.store.get_latest_updated_thread_subscriptions_for_user( + requester.user.to_string(), + from_id=start_stream_id, + to_id=end_stream_id, + limit=limit, + ) + ) + + subscribed_threads: Dict[str, Dict[str, JsonDict]] = {} + unsubscribed_threads: Dict[str, Dict[str, JsonDict]] = {} + for stream_id, room_id, thread_root_id, subscribed, automatic in subscriptions: + if subscribed: + subscribed_threads.setdefault(room_id, {})[thread_root_id] = ( + attr.asdict( + _ThreadSubscription( + automatic=automatic, + bump_stamp=stream_id, + ) + ) + ) + else: + unsubscribed_threads.setdefault(room_id, {})[thread_root_id] = ( + attr.asdict(_ThreadUnsubscription(bump_stamp=stream_id)) + ) + + result: JsonDict = {} + if subscribed_threads: + result["subscribed"] = subscribed_threads + if unsubscribed_threads: + result["unsubscribed"] = unsubscribed_threads + + if len(subscriptions) == limit: + # We hit the limit, so there might be more entries to return. + # Generate a new token that has moved backwards, ready for the next + # request. + min_returned_stream_id, _, _, _, _ = subscriptions[0] + result["end"] = ThreadSubscriptionsToken( + # We subtract one because the 'later in the stream' bound is inclusive, + # and we already saw the element at index 0. 
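+                # (The client passes the resulting `end` token back as `from`
+                # on its next request to continue paginating backwards.)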
+ stream_id=min_returned_stream_id - 1 + ).to_string() + + return HTTPStatus.OK, result + + def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: if hs.config.experimental.msc4306_enabled: ThreadSubscriptionsRestServlet(hs).register(http_server) + ThreadSubscriptionsPaginationRestServlet(hs).register(http_server) diff --git a/synapse/storage/databases/main/relations.py b/synapse/storage/databases/main/relations.py index 5edac56ec3..ea746e0511 100644 --- a/synapse/storage/databases/main/relations.py +++ b/synapse/storage/databases/main/relations.py @@ -53,7 +53,7 @@ from synapse.storage.databases.main.stream import ( generate_pagination_where_clause, ) from synapse.storage.engines import PostgresEngine -from synapse.types import JsonDict, MultiWriterStreamToken, StreamKeyType, StreamToken +from synapse.types import JsonDict, StreamKeyType, StreamToken from synapse.util.caches.descriptors import cached, cachedList if TYPE_CHECKING: @@ -316,17 +316,8 @@ class RelationsWorkerStore(SQLBaseStore): StreamKeyType.ROOM, next_key ) else: - next_token = StreamToken( - room_key=next_key, - presence_key=0, - typing_key=0, - receipt_key=MultiWriterStreamToken(stream=0), - account_data_key=0, - push_rules_key=0, - to_device_key=0, - device_list_key=MultiWriterStreamToken(stream=0), - groups_key=0, - un_partial_stated_rooms_key=0, + next_token = StreamToken.START.copy_and_replace( + StreamKeyType.ROOM, next_key ) return events[:limit], next_token diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py index 6a62b11d1e..72ec8e6b90 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -492,7 +492,7 @@ class PerConnectionStateDB: """An equivalent to `PerConnectionState` that holds data in a format stored in the DB. - The principle difference is that the tokens for the different streams are + The principal difference is that the tokens for the different streams are serialized to strings. When persisting this *only* contains updates to the state. diff --git a/synapse/storage/databases/main/thread_subscriptions.py b/synapse/storage/databases/main/thread_subscriptions.py index 24a99cf449..50084887a4 100644 --- a/synapse/storage/databases/main/thread_subscriptions.py +++ b/synapse/storage/databases/main/thread_subscriptions.py @@ -505,6 +505,9 @@ class ThreadSubscriptionsWorkerStore(CacheInvalidationWorkerStore): """ return self._thread_subscriptions_id_gen.get_current_token() + def get_thread_subscriptions_stream_id_generator(self) -> MultiWriterIdGenerator: + return self._thread_subscriptions_id_gen + async def get_updated_thread_subscriptions( self, *, from_id: int, to_id: int, limit: int ) -> List[Tuple[int, str, str, str]]: @@ -538,34 +541,52 @@ class ThreadSubscriptionsWorkerStore(CacheInvalidationWorkerStore): get_updated_thread_subscriptions_txn, ) - async def get_updated_thread_subscriptions_for_user( + async def get_latest_updated_thread_subscriptions_for_user( self, user_id: str, *, from_id: int, to_id: int, limit: int - ) -> List[Tuple[int, str, str]]: - """Get updates to thread subscriptions for a specific user. + ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: + """Get the latest updates to thread subscriptions for a specific user. 
Args: user_id: The ID of the user from_id: The starting stream ID (exclusive) to_id: The ending stream ID (inclusive) limit: The maximum number of rows to return + If there are too many rows to return, rows from the start (closer to `from_id`) + will be omitted. Returns: - A list of (stream_id, room_id, thread_root_event_id) tuples. + A list of (stream_id, room_id, thread_root_event_id, subscribed, automatic) tuples. + The row with lowest `stream_id` is the first row. """ def get_updated_thread_subscriptions_for_user_txn( txn: LoggingTransaction, - ) -> List[Tuple[int, str, str]]: + ) -> List[Tuple[int, str, str, bool, Optional[bool]]]: sql = """ - SELECT stream_id, room_id, event_id - FROM thread_subscriptions - WHERE user_id = ? AND ? < stream_id AND stream_id <= ? + WITH the_updates AS ( + SELECT stream_id, room_id, event_id, subscribed, automatic + FROM thread_subscriptions + WHERE user_id = ? AND ? < stream_id AND stream_id <= ? + ORDER BY stream_id DESC + LIMIT ? + ) + SELECT stream_id, room_id, event_id, subscribed, automatic + FROM the_updates ORDER BY stream_id ASC - LIMIT ? """ txn.execute(sql, (user_id, from_id, to_id, limit)) - return [(row[0], row[1], row[2]) for row in txn] + return [ + ( + stream_id, + room_id, + event_id, + # SQLite integer to boolean conversions + bool(subscribed), + bool(automatic) if subscribed else None, + ) + for (stream_id, room_id, event_id, subscribed, automatic) in txn + ] return await self.db_pool.runInteraction( "get_updated_thread_subscriptions_for_user", diff --git a/synapse/storage/schema/main/delta/92/08_thread_subscriptions_seq_fixup.sql.postgres b/synapse/storage/schema/main/delta/92/08_thread_subscriptions_seq_fixup.sql.postgres new file mode 100644 index 0000000000..d327d1e165 --- /dev/null +++ b/synapse/storage/schema/main/delta/92/08_thread_subscriptions_seq_fixup.sql.postgres @@ -0,0 +1,19 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2025 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + +-- Work around https://github.com/element-hq/synapse/issues/18712 by advancing the +-- stream sequence. +-- This makes last_value of the sequence point to a position that will not get later +-- returned by nextval. +-- (For blank thread subscription streams, this means last_value = 2, nextval() = 3 after this line.) +SELECT nextval('thread_subscriptions_sequence'); diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py index a15a161ce8..1b7c5dac7a 100644 --- a/synapse/storage/util/id_generators.py +++ b/synapse/storage/util/id_generators.py @@ -187,8 +187,12 @@ class MultiWriterIdGenerator(AbstractStreamIdGenerator): Warning: Streams using this generator start at ID 2, because ID 1 is always assumed to have been 'seen as persisted'. Unclear if this extant behaviour is desirable for some reason. - When creating a new sequence for a new stream, - it will be necessary to use `START WITH 2`. + When creating a new sequence for a new stream, it will be necessary to advance it + so that position 1 is consumed. 
+ DO NOT USE `START WITH 2` FOR THIS PURPOSE: + see https://github.com/element-hq/synapse/issues/18712 + Instead, use `SELECT nextval('sequence_name');` immediately after the + `CREATE SEQUENCE` statement. Args: db_conn diff --git a/synapse/streams/events.py b/synapse/streams/events.py index 4534068e7c..1e4bebe46d 100644 --- a/synapse/streams/events.py +++ b/synapse/streams/events.py @@ -33,7 +33,6 @@ from synapse.logging.opentracing import trace from synapse.streams import EventSource from synapse.types import ( AbstractMultiWriterStreamToken, - MultiWriterStreamToken, StreamKeyType, StreamToken, ) @@ -84,6 +83,7 @@ class EventSources: un_partial_stated_rooms_key = self.store.get_un_partial_stated_rooms_token( self._instance_name ) + thread_subscriptions_key = self.store.get_max_thread_subscriptions_stream_id() token = StreamToken( room_key=self.sources.room.get_current_key(), @@ -97,6 +97,7 @@ class EventSources: # Groups key is unused. groups_key=0, un_partial_stated_rooms_key=un_partial_stated_rooms_key, + thread_subscriptions_key=thread_subscriptions_key, ) return token @@ -123,6 +124,7 @@ class EventSources: StreamKeyType.TO_DEVICE: self.store.get_to_device_id_generator(), StreamKeyType.DEVICE_LIST: self.store.get_device_stream_id_generator(), StreamKeyType.UN_PARTIAL_STATED_ROOMS: self.store.get_un_partial_stated_rooms_id_generator(), + StreamKeyType.THREAD_SUBSCRIPTIONS: self.store.get_thread_subscriptions_stream_id_generator(), } for _, key in StreamKeyType.__members__.items(): @@ -195,16 +197,7 @@ class EventSources: Returns: The current token for pagination. """ - token = StreamToken( - room_key=await self.sources.room.get_current_key_for_room(room_id), - presence_key=0, - typing_key=0, - receipt_key=MultiWriterStreamToken(stream=0), - account_data_key=0, - push_rules_key=0, - to_device_key=0, - device_list_key=MultiWriterStreamToken(stream=0), - groups_key=0, - un_partial_stated_rooms_key=0, + return StreamToken.START.copy_and_replace( + StreamKeyType.ROOM, + await self.sources.room.get_current_key_for_room(room_id), ) - return token diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 943f211b11..2d5b07ab8f 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -996,6 +996,7 @@ class StreamKeyType(Enum): TO_DEVICE = "to_device_key" DEVICE_LIST = "device_list_key" UN_PARTIAL_STATED_ROOMS = "un_partial_stated_rooms_key" + THREAD_SUBSCRIPTIONS = "thread_subscriptions_key" @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -1003,7 +1004,7 @@ class StreamToken: """A collection of keys joined together by underscores in the following order and which represent the position in their respective streams. - ex. `s2633508_17_338_6732159_1082514_541479_274711_265584_1_379` + ex. `s2633508_17_338_6732159_1082514_541479_274711_265584_1_379_4242` 1. `room_key`: `s2633508` which is a `RoomStreamToken` - `RoomStreamToken`'s can also look like `t426-2633508` or `m56~2.58~3.59` - See the docstring for `RoomStreamToken` for more details. @@ -1016,6 +1017,7 @@ class StreamToken: 8. `device_list_key`: `265584` 9. `groups_key`: `1` (note that this key is now unused) 10. `un_partial_stated_rooms_key`: `379` + 11. `thread_subscriptions_key`: 4242 You can see how many of these keys correspond to the various fields in a "/sync" response: @@ -1074,6 +1076,7 @@ class StreamToken: # Note that the groups key is no longer used and may have bogus values. 
groups_key: int un_partial_stated_rooms_key: int + thread_subscriptions_key: int _SEPARATOR = "_" START: ClassVar["StreamToken"] @@ -1101,6 +1104,7 @@ class StreamToken: device_list_key, groups_key, un_partial_stated_rooms_key, + thread_subscriptions_key, ) = keys return cls( @@ -1116,6 +1120,7 @@ class StreamToken: ), groups_key=int(groups_key), un_partial_stated_rooms_key=int(un_partial_stated_rooms_key), + thread_subscriptions_key=int(thread_subscriptions_key), ) except CancelledError: raise @@ -1138,6 +1143,7 @@ class StreamToken: # if additional tokens are added. str(self.groups_key), str(self.un_partial_stated_rooms_key), + str(self.thread_subscriptions_key), ] ) @@ -1202,6 +1208,7 @@ class StreamToken: StreamKeyType.TO_DEVICE, StreamKeyType.TYPING, StreamKeyType.UN_PARTIAL_STATED_ROOMS, + StreamKeyType.THREAD_SUBSCRIPTIONS, ], ) -> int: ... @@ -1257,7 +1264,8 @@ class StreamToken: f"typing: {self.typing_key}, receipt: {self.receipt_key}, " f"account_data: {self.account_data_key}, push_rules: {self.push_rules_key}, " f"to_device: {self.to_device_key}, device_list: {self.device_list_key}, " - f"groups: {self.groups_key}, un_partial_stated_rooms: {self.un_partial_stated_rooms_key})" + f"groups: {self.groups_key}, un_partial_stated_rooms: {self.un_partial_stated_rooms_key}," + f"thread_subscriptions: {self.thread_subscriptions_key})" ) @@ -1272,6 +1280,7 @@ StreamToken.START = StreamToken( device_list_key=MultiWriterStreamToken(stream=0), groups_key=0, un_partial_stated_rooms_key=0, + thread_subscriptions_key=0, ) @@ -1318,6 +1327,27 @@ class SlidingSyncStreamToken: return f"{self.connection_position}/{stream_token_str}" +@attr.s(slots=True, frozen=True, auto_attribs=True) +class ThreadSubscriptionsToken: + """ + Token for a position in the thread subscriptions stream. 
+ + Format: `ts` + """ + + stream_id: int + + @staticmethod + def from_string(s: str) -> "ThreadSubscriptionsToken": + if not s.startswith("ts"): + raise ValueError("thread subscription token must start with `ts`") + + return ThreadSubscriptionsToken(stream_id=int(s[2:])) + + def to_string(self) -> str: + return f"ts{self.stream_id}" + + @attr.s(slots=True, frozen=True, auto_attribs=True) class PersistedPosition: """Position of a newly persisted row with instance that persisted it.""" diff --git a/synapse/types/handlers/sliding_sync.py b/synapse/types/handlers/sliding_sync.py index 3ebd334a6d..b7bc565464 100644 --- a/synapse/types/handlers/sliding_sync.py +++ b/synapse/types/handlers/sliding_sync.py @@ -50,6 +50,7 @@ from synapse.types import ( SlidingSyncStreamToken, StrCollection, StreamToken, + ThreadSubscriptionsToken, UserID, ) from synapse.types.rest.client import SlidingSyncBody @@ -357,11 +358,50 @@ class SlidingSyncResult: def __bool__(self) -> bool: return bool(self.room_id_to_typing_map) + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ThreadSubscriptionsExtension: + """The Thread Subscriptions extension (MSC4308) + + Attributes: + subscribed: map (room_id -> thread_root_id -> info) of new or changed subscriptions + unsubscribed: map (room_id -> thread_root_id -> info) of new unsubscriptions + prev_batch: if present, there is a gap and the client can use this token to backpaginate + """ + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ThreadSubscription: + # always present when `subscribed` + automatic: Optional[bool] + + # the same as our stream_id; useful for clients to resolve + # race conditions locally + bump_stamp: int + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ThreadUnsubscription: + # the same as our stream_id; useful for clients to resolve + # race conditions locally + bump_stamp: int + + # room_id -> event_id (of thread root) -> the subscription change + subscribed: Optional[Mapping[str, Mapping[str, ThreadSubscription]]] + # room_id -> event_id (of thread root) -> the unsubscription + unsubscribed: Optional[Mapping[str, Mapping[str, ThreadUnsubscription]]] + prev_batch: Optional[ThreadSubscriptionsToken] + + def __bool__(self) -> bool: + return ( + bool(self.subscribed) + or bool(self.unsubscribed) + or bool(self.prev_batch) + ) + to_device: Optional[ToDeviceExtension] = None e2ee: Optional[E2eeExtension] = None account_data: Optional[AccountDataExtension] = None receipts: Optional[ReceiptsExtension] = None typing: Optional[TypingExtension] = None + thread_subscriptions: Optional[ThreadSubscriptionsExtension] = None def __bool__(self) -> bool: return bool( @@ -370,6 +410,7 @@ class SlidingSyncResult: or self.account_data or self.receipts or self.typing + or self.thread_subscriptions ) next_pos: SlidingSyncStreamToken diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py index c739bd16b0..11d7e59b43 100644 --- a/synapse/types/rest/client/__init__.py +++ b/synapse/types/rest/client/__init__.py @@ -22,6 +22,7 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union from synapse._pydantic_compat import ( Extra, + Field, StrictBool, StrictInt, StrictStr, @@ -364,11 +365,25 @@ class SlidingSyncBody(RequestBodyModel): # Process all room subscriptions defined in the Room Subscription API. (This is the default.) 
rooms: Optional[List[StrictStr]] = ["*"] + class ThreadSubscriptionsExtension(RequestBodyModel): + """The Thread Subscriptions extension (MSC4308) + + Attributes: + enabled + limit: maximum number of subscription changes to return (default 100) + """ + + enabled: Optional[StrictBool] = False + limit: StrictInt = 100 + to_device: Optional[ToDeviceExtension] = None e2ee: Optional[E2eeExtension] = None account_data: Optional[AccountDataExtension] = None receipts: Optional[ReceiptsExtension] = None typing: Optional[TypingExtension] = None + thread_subscriptions: Optional[ThreadSubscriptionsExtension] = Field( + alias="io.element.msc4308.thread_subscriptions" + ) conn_id: Optional[StrictStr] diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index e596e1ed20..c21b7887f9 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -347,6 +347,7 @@ T2 = TypeVar("T2") T3 = TypeVar("T3") T4 = TypeVar("T4") T5 = TypeVar("T5") +T6 = TypeVar("T6") @overload @@ -461,6 +462,23 @@ async def gather_optional_coroutines( ) -> Tuple[Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5]]: ... +@overload +async def gather_optional_coroutines( + *coroutines: Unpack[ + Tuple[ + Optional[Coroutine[Any, Any, T1]], + Optional[Coroutine[Any, Any, T2]], + Optional[Coroutine[Any, Any, T3]], + Optional[Coroutine[Any, Any, T4]], + Optional[Coroutine[Any, Any, T5]], + Optional[Coroutine[Any, Any, T6]], + ] + ], +) -> Tuple[ + Optional[T1], Optional[T2], Optional[T3], Optional[T4], Optional[T5], Optional[T6] +]: ... + + async def gather_optional_coroutines( *coroutines: Unpack[Tuple[Optional[Coroutine[Any, Any, T1]], ...]], ) -> Tuple[Optional[T1], ...]: diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py index b98c53891c..ee5d0419ab 100644 --- a/tests/rest/admin/test_room.py +++ b/tests/rest/admin/test_room.py @@ -2244,7 +2244,7 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase): def test_topo_token_is_accepted(self) -> None: """Test Topo Token is accepted.""" - token = "t1-0_0_0_0_0_0_0_0_0_0" + token = "t1-0_0_0_0_0_0_0_0_0_0_0" channel = self.make_request( "GET", "/_synapse/admin/v1/rooms/%s/messages?from=%s" % (self.room_id, token), @@ -2258,7 +2258,7 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase): def test_stream_token_is_accepted_for_fwd_pagianation(self) -> None: """Test that stream token is accepted for forward pagination.""" - token = "s0_0_0_0_0_0_0_0_0_0" + token = "s0_0_0_0_0_0_0_0_0_0_0" channel = self.make_request( "GET", "/_synapse/admin/v1/rooms/%s/messages?from=%s" % (self.room_id, token), diff --git a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py new file mode 100644 index 0000000000..775c4f96c9 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py @@ -0,0 +1,497 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+# +import logging +from http import HTTPStatus +from typing import List, Optional, Tuple, cast + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.rest.client import login, room, sync, thread_subscriptions +from synapse.server import HomeServer +from synapse.types import JsonDict +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase + +logger = logging.getLogger(__name__) + + +# The name of the extension. Currently unstable-prefixed. +EXT_NAME = "io.element.msc4308.thread_subscriptions" + + +class SlidingSyncThreadSubscriptionsExtensionTestCase(SlidingSyncBase): + """ + Test the thread subscriptions extension in the Sliding Sync API. + """ + + maxDiff = None + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + thread_subscriptions.register_servlets, + ] + + def default_config(self) -> JsonDict: + config = super().default_config() + config["experimental_features"] = {"msc4306_enabled": True} + return config + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + super().prepare(reactor, clock, hs) + + def test_no_data_initial_sync(self) -> None: + """ + Test enabling thread subscriptions extension during initial sync with no data. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + + # Sync + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Assert + self.assertNotIn(EXT_NAME, response_body["extensions"]) + + def test_no_data_incremental_sync(self) -> None: + """ + Test enabling thread subscriptions extension during incremental sync with no data. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + initial_sync_body: JsonDict = { + "lists": {}, + } + + # Initial sync + response_body, sync_pos = self.do_sync(initial_sync_body, tok=user1_tok) + + # Incremental sync with extension enabled + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok, since=sync_pos) + + # Assert + self.assertNotIn( + EXT_NAME, + response_body["extensions"], + response_body, + ) + + def test_thread_subscription_initial_sync(self) -> None: + """ + Test thread subscriptions appear in initial sync response. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + thread_root_resp = self.helper.send(room_id, body="Thread root", tok=user1_tok) + thread_root_id = thread_root_resp["event_id"] + + # get the baseline stream_id of the thread_subscriptions stream + # before we write any data. + # Required because the initial value differs between SQLite and Postgres. 
+ base = self.store.get_max_thread_subscriptions_stream_id() + + self._subscribe_to_thread(user1_id, room_id, thread_root_id) + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + + # Sync + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Assert + self.assertEqual( + response_body["extensions"][EXT_NAME], + { + "subscribed": { + room_id: { + thread_root_id: { + "automatic": False, + "bump_stamp": base + 1, + } + } + } + }, + ) + + def test_thread_subscription_incremental_sync(self) -> None: + """ + Test new thread subscriptions appear in incremental sync response. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + thread_root_resp = self.helper.send(room_id, body="Thread root", tok=user1_tok) + thread_root_id = thread_root_resp["event_id"] + + # get the baseline stream_id of the thread_subscriptions stream + # before we write any data. + # Required because the initial value differs between SQLite and Postgres. + base = self.store.get_max_thread_subscriptions_stream_id() + + # Initial sync + _, sync_pos = self.do_sync(sync_body, tok=user1_tok) + logger.info("Synced to: %r, now subscribing to thread", sync_pos) + + # Subscribe + self._subscribe_to_thread(user1_id, room_id, thread_root_id) + + # Incremental sync + response_body, sync_pos = self.do_sync(sync_body, tok=user1_tok, since=sync_pos) + logger.info("Synced to: %r", sync_pos) + + # Assert + self.assertEqual( + response_body["extensions"][EXT_NAME], + { + "subscribed": { + room_id: { + thread_root_id: { + "automatic": False, + "bump_stamp": base + 1, + } + } + } + }, + ) + + def test_unsubscribe_from_thread(self) -> None: + """ + Test unsubscribing from a thread. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + thread_root_resp = self.helper.send(room_id, body="Thread root", tok=user1_tok) + thread_root_id = thread_root_resp["event_id"] + + # get the baseline stream_id of the thread_subscriptions stream + # before we write any data. + # Required because the initial value differs between SQLite and Postgres. + base = self.store.get_max_thread_subscriptions_stream_id() + + self._subscribe_to_thread(user1_id, room_id, thread_root_id) + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + + response_body, sync_pos = self.do_sync(sync_body, tok=user1_tok) + + # Assert: Subscription present + self.assertIn(EXT_NAME, response_body["extensions"]) + self.assertEqual( + response_body["extensions"][EXT_NAME], + { + "subscribed": { + room_id: { + thread_root_id: {"automatic": False, "bump_stamp": base + 1} + } + } + }, + ) + + # Unsubscribe + self._unsubscribe_from_thread(user1_id, room_id, thread_root_id) + + # Incremental sync + response_body, sync_pos = self.do_sync(sync_body, tok=user1_tok, since=sync_pos) + + # Assert: Unsubscription present + self.assertEqual( + response_body["extensions"][EXT_NAME], + {"unsubscribed": {room_id: {thread_root_id: {"bump_stamp": base + 2}}}}, + ) + + def test_multiple_thread_subscriptions(self) -> None: + """ + Test handling of multiple thread subscriptions. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create thread roots + thread_root_resp1 = self.helper.send( + room_id, body="Thread root 1", tok=user1_tok + ) + thread_root_id1 = thread_root_resp1["event_id"] + thread_root_resp2 = self.helper.send( + room_id, body="Thread root 2", tok=user1_tok + ) + thread_root_id2 = thread_root_resp2["event_id"] + thread_root_resp3 = self.helper.send( + room_id, body="Thread root 3", tok=user1_tok + ) + thread_root_id3 = thread_root_resp3["event_id"] + + # get the baseline stream_id of the thread_subscriptions stream + # before we write any data. + # Required because the initial value differs between SQLite and Postgres. + base = self.store.get_max_thread_subscriptions_stream_id() + + # Subscribe to threads + self._subscribe_to_thread(user1_id, room_id, thread_root_id1) + self._subscribe_to_thread(user1_id, room_id, thread_root_id2) + self._subscribe_to_thread(user1_id, room_id, thread_root_id3) + + sync_body = { + "lists": {}, + "extensions": { + EXT_NAME: { + "enabled": True, + } + }, + } + + # Sync + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Assert + self.assertEqual( + response_body["extensions"][EXT_NAME], + { + "subscribed": { + room_id: { + thread_root_id1: { + "automatic": False, + "bump_stamp": base + 1, + }, + thread_root_id2: { + "automatic": False, + "bump_stamp": base + 2, + }, + thread_root_id3: { + "automatic": False, + "bump_stamp": base + 3, + }, + } + } + }, + ) + + def test_limit_parameter(self) -> None: + """ + Test limit parameter in thread subscriptions extension. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create 5 thread roots and subscribe to each + thread_root_ids = [] + for i in range(5): + thread_root_resp = self.helper.send( + room_id, body=f"Thread root {i}", tok=user1_tok + ) + thread_root_ids.append(thread_root_resp["event_id"]) + self._subscribe_to_thread(user1_id, room_id, thread_root_ids[-1]) + + sync_body = { + "lists": {}, + "extensions": {EXT_NAME: {"enabled": True, "limit": 3}}, + } + + # Sync + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Assert + thread_subscriptions = response_body["extensions"][EXT_NAME] + self.assertEqual( + len(thread_subscriptions["subscribed"][room_id]), 3, thread_subscriptions + ) + + def test_limit_and_companion_backpagination(self) -> None: + """ + Create 1 thread subscription, do a sync, create 4 more, + then sync with a limit of 2 and fill in the gap + using the companion /thread_subscriptions endpoint. + """ + + thread_root_ids: List[str] = [] + + def make_subscription() -> None: + thread_root_resp = self.helper.send( + room_id, body="Some thread root", tok=user1_tok + ) + thread_root_ids.append(thread_root_resp["event_id"]) + self._subscribe_to_thread(user1_id, room_id, thread_root_ids[-1]) + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # get the baseline stream_id of the thread_subscriptions stream + # before we write any data. + # Required because the initial value differs between SQLite and Postgres. 
+ base = self.store.get_max_thread_subscriptions_stream_id() + + # Make our first subscription + make_subscription() + + # Sync for the first time + sync_body = { + "lists": {}, + "extensions": {EXT_NAME: {"enabled": True, "limit": 2}}, + } + + sync_resp, first_sync_pos = self.do_sync(sync_body, tok=user1_tok) + + thread_subscriptions = sync_resp["extensions"][EXT_NAME] + self.assertEqual( + thread_subscriptions["subscribed"], + { + room_id: { + thread_root_ids[0]: {"automatic": False, "bump_stamp": base + 1}, + } + }, + ) + + # Get our pos for the next sync + first_sync_pos = sync_resp["pos"] + + # Create 5 more thread subscriptions and subscribe to each + for _ in range(5): + make_subscription() + + # Now sync again. Our limit is 2, + # so we should get the latest 2 subscriptions, + # with a gap of 3 more subscriptions in the middle + sync_resp, _pos = self.do_sync(sync_body, tok=user1_tok, since=first_sync_pos) + + thread_subscriptions = sync_resp["extensions"][EXT_NAME] + self.assertEqual( + thread_subscriptions["subscribed"], + { + room_id: { + thread_root_ids[4]: {"automatic": False, "bump_stamp": base + 5}, + thread_root_ids[5]: {"automatic": False, "bump_stamp": base + 6}, + } + }, + ) + # 1st backpagination: expecting a page with 2 subscriptions + page, end_tok = self._do_backpaginate( + from_tok=thread_subscriptions["prev_batch"], + to_tok=first_sync_pos, + limit=2, + access_token=user1_tok, + ) + self.assertIsNotNone(end_tok, "backpagination should continue") + self.assertEqual( + page["subscribed"], + { + room_id: { + thread_root_ids[2]: {"automatic": False, "bump_stamp": base + 3}, + thread_root_ids[3]: {"automatic": False, "bump_stamp": base + 4}, + } + }, + ) + + # 2nd backpagination: expecting a page with only 1 subscription + # and no other token for further backpagination + assert end_tok is not None + page, end_tok = self._do_backpaginate( + from_tok=end_tok, to_tok=first_sync_pos, limit=2, access_token=user1_tok + ) + self.assertIsNone(end_tok, "backpagination should have finished") + self.assertEqual( + page["subscribed"], + { + room_id: { + thread_root_ids[1]: {"automatic": False, "bump_stamp": base + 2}, + } + }, + ) + + def _do_backpaginate( + self, *, from_tok: str, to_tok: str, limit: int, access_token: str + ) -> Tuple[JsonDict, Optional[str]]: + channel = self.make_request( + "GET", + "/_matrix/client/unstable/io.element.msc4308/thread_subscriptions" + f"?from={from_tok}&to={to_tok}&limit={limit}&dir=b", + access_token=access_token, + ) + + self.assertEqual(channel.code, HTTPStatus.OK, channel.json_body) + body = channel.json_body + return body, cast(Optional[str], body.get("end")) + + def _subscribe_to_thread( + self, user_id: str, room_id: str, thread_root_id: str + ) -> None: + """ + Helper method to subscribe a user to a thread. + """ + self.get_success( + self.store.subscribe_user_to_thread( + user_id=user_id, + room_id=room_id, + thread_root_event_id=thread_root_id, + automatic_event_orderings=None, + ) + ) + + def _unsubscribe_from_thread( + self, user_id: str, room_id: str, thread_root_id: str + ) -> None: + """ + Helper method to unsubscribe a user from a thread. 
+ """ + self.get_success( + self.store.unsubscribe_user_from_thread( + user_id=user_id, + room_id=room_id, + thread_root_event_id=thread_root_id, + ) + ) diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py index 24a28fbdd2..d3b5e26132 100644 --- a/tests/rest/client/test_rooms.py +++ b/tests/rest/client/test_rooms.py @@ -2245,7 +2245,7 @@ class RoomMessageListTestCase(RoomBase): self.room_id = self.helper.create_room_as(self.user_id) def test_topo_token_is_accepted(self) -> None: - token = "t1-0_0_0_0_0_0_0_0_0_0" + token = "t1-0_0_0_0_0_0_0_0_0_0_0" channel = self.make_request( "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) ) @@ -2256,7 +2256,7 @@ class RoomMessageListTestCase(RoomBase): self.assertTrue("end" in channel.json_body) def test_stream_token_is_accepted_for_fwd_pagianation(self) -> None: - token = "s0_0_0_0_0_0_0_0_0_0" + token = "s0_0_0_0_0_0_0_0_0_0_0" channel = self.make_request( "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) ) diff --git a/tests/storage/test_thread_subscriptions.py b/tests/storage/test_thread_subscriptions.py index 2a5c440cf4..2ce369247f 100644 --- a/tests/storage/test_thread_subscriptions.py +++ b/tests/storage/test_thread_subscriptions.py @@ -189,19 +189,19 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase): self._subscribe(self.other_thread_root_id, automatic_event_orderings=None) subscriptions = self.get_success( - self.store.get_updated_thread_subscriptions_for_user( + self.store.get_latest_updated_thread_subscriptions_for_user( self.user_id, from_id=0, to_id=50, limit=50, ) ) - min_id = min(id for (id, _, _) in subscriptions) + min_id = min(id for (id, _, _, _, _) in subscriptions) self.assertEqual( subscriptions, [ - (min_id, self.room_id, self.thread_root_id), - (min_id + 1, self.room_id, self.other_thread_root_id), + (min_id, self.room_id, self.thread_root_id, True, True), + (min_id + 1, self.room_id, self.other_thread_root_id, True, False), ], ) @@ -212,7 +212,7 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase): # Check user has no subscriptions subscriptions = self.get_success( - self.store.get_updated_thread_subscriptions_for_user( + self.store.get_latest_updated_thread_subscriptions_for_user( self.user_id, from_id=0, to_id=50, @@ -280,20 +280,22 @@ class ThreadSubscriptionsTestCase(unittest.HomeserverTestCase): # Get updates for main user updates = self.get_success( - self.store.get_updated_thread_subscriptions_for_user( + self.store.get_latest_updated_thread_subscriptions_for_user( self.user_id, from_id=0, to_id=stream_id2, limit=10 ) ) - self.assertEqual(updates, [(stream_id1, self.room_id, self.thread_root_id)]) + self.assertEqual( + updates, [(stream_id1, self.room_id, self.thread_root_id, True, True)] + ) # Get updates for other user updates = self.get_success( - self.store.get_updated_thread_subscriptions_for_user( + self.store.get_latest_updated_thread_subscriptions_for_user( other_user_id, from_id=0, to_id=max(stream_id1, stream_id2), limit=10 ) ) self.assertEqual( - updates, [(stream_id2, self.room_id, self.other_thread_root_id)] + updates, [(stream_id2, self.room_id, self.other_thread_root_id, True, True)] ) def test_should_skip_autosubscription_after_unsubscription(self) -> None: From ec64c3e88d14e9b1d14c093b91b82817c7ede424 Mon Sep 17 00:00:00 2001 From: Kegan Dougal <7190048+kegsay@users.noreply.github.com> Date: Fri, 12 Sep 2025 09:54:20 +0100 Subject: [PATCH 20/54] Ensure we `/send` PDUs which pass canonical JSON 
checks (#18641) ### Pull Request Checklist Fixes https://github.com/element-hq/synapse/issues/18554 Looks like this was missed when it was [implemented](https://github.com/element-hq/synapse/commit/2277df2a1eb685f85040ef98fa21d41aa4cdd389). * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: reivilibre --- changelog.d/18641.bugfix | 1 + synapse/federation/sender/transaction_manager.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18641.bugfix diff --git a/changelog.d/18641.bugfix b/changelog.d/18641.bugfix new file mode 100644 index 0000000000..8f2a2e3d8b --- /dev/null +++ b/changelog.d/18641.bugfix @@ -0,0 +1 @@ +Ensure all PDUs sent via `/send` pass canonical JSON checks. diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index 63ed13c6fa..050982c499 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -26,7 +26,7 @@ from synapse.api.constants import EduTypes from synapse.api.errors import HttpResponseException from synapse.events import EventBase from synapse.federation.persistence import TransactionActions -from synapse.federation.units import Edu, Transaction +from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus from synapse.logging.opentracing import ( extract_text_map, set_tag, @@ -119,7 +119,7 @@ class TransactionManager: transaction_id=txn_id, origin=self.server_name, destination=destination, - pdus=[p.get_pdu_json() for p in pdus], + pdus=serialize_and_filter_pdus(pdus), edus=[edu.get_dict() for edu in edus], ) From 8c98cf7e5514707948c3461638ed006239889f5b Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 12 Sep 2025 11:57:04 +0200 Subject: [PATCH 21/54] Remove usage of deprecated `pkg_resources` interface (#18910) --- changelog.d/18910.misc | 1 + synapse/config/_base.py | 6 +++--- synapse/config/oembed.py | 9 +++++++-- synapse/metrics/__init__.py | 2 +- tests/push/test_email.py | 9 +++++---- tests/rest/client/test_account.py | 17 +++++++++-------- tests/rest/client/test_register.py | 10 +++++----- 7 files changed, 31 insertions(+), 23 deletions(-) create mode 100644 changelog.d/18910.misc diff --git a/changelog.d/18910.misc b/changelog.d/18910.misc new file mode 100644 index 0000000000..d5bd3ef314 --- /dev/null +++ b/changelog.d/18910.misc @@ -0,0 +1 @@ +Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon. 
\ No newline at end of file diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 0a01fb5582..191253ddda 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -22,6 +22,7 @@ import argparse import errno +import importlib.resources as importlib_resources import logging import os import re @@ -46,7 +47,6 @@ from typing import ( import attr import jinja2 -import pkg_resources import yaml from synapse.types import StrSequence @@ -174,8 +174,8 @@ class Config: self.root = root_config # Get the path to the default Synapse template directory - self.default_template_dir = pkg_resources.resource_filename( - "synapse", "res/templates" + self.default_template_dir = str( + importlib_resources.files("synapse").joinpath("res").joinpath("templates") ) @staticmethod diff --git a/synapse/config/oembed.py b/synapse/config/oembed.py index b177a75cf6..1b6c521087 100644 --- a/synapse/config/oembed.py +++ b/synapse/config/oembed.py @@ -18,13 +18,13 @@ # [This file includes modifications made by New Vector Limited] # # +import importlib.resources as importlib_resources import json import re from typing import Any, Dict, Iterable, List, Optional, Pattern from urllib import parse as urlparse import attr -import pkg_resources from synapse.types import JsonDict, StrSequence @@ -64,7 +64,12 @@ class OembedConfig(Config): """ # Whether to use the packaged providers.json file. if not oembed_config.get("disable_default_providers") or False: - with pkg_resources.resource_stream("synapse", "res/providers.json") as s: + path = ( + importlib_resources.files("synapse") + .joinpath("res") + .joinpath("providers.json") + ) + with path.open("r", encoding="utf-8") as s: providers = json.load(s) yield from self._parse_and_validate_provider( diff --git a/synapse/metrics/__init__.py b/synapse/metrics/__init__.py index 5b291aa893..2ffb14070b 100644 --- a/synapse/metrics/__init__.py +++ b/synapse/metrics/__init__.py @@ -43,7 +43,7 @@ from typing import ( ) import attr -from pkg_resources import parse_version +from packaging.version import parse as parse_version from prometheus_client import ( CollectorRegistry, Counter, diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 4d885c78eb..4d9e42ac2c 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -18,12 +18,12 @@ # # import email.message +import importlib.resources as importlib_resources import os from http import HTTPStatus from typing import Any, Dict, List, Sequence, Tuple import attr -import pkg_resources from parameterized import parameterized from twisted.internet.defer import Deferred @@ -59,11 +59,12 @@ class EmailPusherTests(HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: config = self.default_config() + templates = ( + importlib_resources.files("synapse").joinpath("res").joinpath("templates") + ) config["email"] = { "enable_notifs": True, - "template_dir": os.path.abspath( - pkg_resources.resource_filename("synapse", "res/templates") - ), + "template_dir": os.path.abspath(str(templates)), "expiry_template_html": "notice_expiry.html", "expiry_template_text": "notice_expiry.txt", "notif_template_html": "notif_mail.html", diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py index 02d02ae78e..9a3202bd93 100644 --- a/tests/rest/client/test_account.py +++ b/tests/rest/client/test_account.py @@ -18,6 +18,7 @@ # [This file includes modifications made by New Vector Limited] # # +import importlib.resources as importlib_resources import 
os import re from email.parser import Parser @@ -25,8 +26,6 @@ from http import HTTPStatus from typing import Any, Dict, List, Optional, Union from unittest.mock import Mock -import pkg_resources - from twisted.internet.interfaces import IReactorTCP from twisted.internet.testing import MemoryReactor @@ -59,11 +58,12 @@ class PasswordResetTestCase(unittest.HomeserverTestCase): config = self.default_config() # Email config. + templates = ( + importlib_resources.files("synapse").joinpath("res").joinpath("templates") + ) config["email"] = { "enable_notifs": False, - "template_dir": os.path.abspath( - pkg_resources.resource_filename("synapse", "res/templates") - ), + "template_dir": os.path.abspath(str(templates)), "smtp_host": "127.0.0.1", "smtp_port": 20, "require_transport_security": False, @@ -798,11 +798,12 @@ class ThreepidEmailRestTestCase(unittest.HomeserverTestCase): config = self.default_config() # Email config. + templates = ( + importlib_resources.files("synapse").joinpath("res").joinpath("templates") + ) config["email"] = { "enable_notifs": False, - "template_dir": os.path.abspath( - pkg_resources.resource_filename("synapse", "res/templates") - ), + "template_dir": os.path.abspath(str(templates)), "smtp_host": "127.0.0.1", "smtp_port": 20, "require_transport_security": False, diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index f0745cf298..70e005caf4 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -20,12 +20,11 @@ # # import datetime +import importlib.resources as importlib_resources import os from typing import Any, Dict, List, Tuple from unittest.mock import AsyncMock -import pkg_resources - from twisted.internet.testing import MemoryReactor import synapse.rest.admin @@ -981,11 +980,12 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase): # Email config. + templates = ( + importlib_resources.files("synapse").joinpath("res").joinpath("templates") + ) config["email"] = { "enable_notifs": True, - "template_dir": os.path.abspath( - pkg_resources.resource_filename("synapse", "res/templates") - ), + "template_dir": os.path.abspath(str(templates)), "expiry_template_html": "notice_expiry.html", "expiry_template_text": "notice_expiry.txt", "notif_template_html": "notif_mail.html", From e1036ffa48f9bb9f123b87ffa01fd51b03ab666c Mon Sep 17 00:00:00 2001 From: Hugh Nimmo-Smith Date: Fri, 12 Sep 2025 12:26:19 +0100 Subject: [PATCH 22/54] Add get_media_upload_limits_for_user and on_media_upload_limit_exceeded callbacks to module API (#18848) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/18848.feature | 1 + docs/modules/media_repository_callbacks.md | 65 ++++++ .../configuration/config_documentation.md | 3 + schema/synapse-config.schema.yaml | 7 + synapse/config/repository.py | 12 +- synapse/media/media_repository.py | 31 ++- synapse/module_api/__init__.py | 12 ++ .../callbacks/media_repository_callbacks.py | 75 +++++++ tests/rest/client/test_media.py | 191 ++++++++++++++++++ 9 files changed, 389 insertions(+), 8 deletions(-) create mode 100644 changelog.d/18848.feature diff --git a/changelog.d/18848.feature b/changelog.d/18848.feature new file mode 100644 index 0000000000..302a6e7b66 --- /dev/null +++ b/changelog.d/18848.feature @@ -0,0 +1 @@ +Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks for media repository. 
diff --git a/docs/modules/media_repository_callbacks.md b/docs/modules/media_repository_callbacks.md index fc37130439..7c724038a7 100644 --- a/docs/modules/media_repository_callbacks.md +++ b/docs/modules/media_repository_callbacks.md @@ -64,3 +64,68 @@ If multiple modules implement this callback, they will be considered in order. I returns `True`, Synapse falls through to the next one. The value of the first callback that returns `False` will be used. If this happens, Synapse will not call any of the subsequent implementations of this callback. + +### `get_media_upload_limits_for_user` + +_First introduced in Synapse v1.139.0_ + +```python +async def get_media_upload_limits_for_user(user_id: str) -> Optional[List[synapse.module_api.MediaUploadLimit]] +``` + +** +Caution: This callback is currently experimental. The method signature or behaviour +may change without notice. +** + +Called when processing a request to store content in the media repository. This can be used to dynamically override +the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits). + +The arguments passed to this callback are: + +* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request. + +If the callback returns a list then it will be used as the limits instead of those in the configuration (if any). + +If an empty list is returned then no limits are applied (**warning:** users will be able +to upload as much data as they desire). + +If multiple modules implement this callback, they will be considered in order. If a +callback returns `None`, Synapse falls through to the next one. The value of the first +callback that does not return `None` will be used. If this happens, Synapse will not call +any of the subsequent implementations of this callback. + +If there are no registered modules, or if all modules return `None`, then +the default +[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits) +will be used. + +### `on_media_upload_limit_exceeded` + +_First introduced in Synapse v1.139.0_ + +```python +async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None +``` + +** +Caution: This callback is currently experimental. The method signature or behaviour +may change without notice. +** + +Called when a user attempts to upload media that would exceed a +[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits). + +This callback will only be called on workers which handle +[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload) +requests. + +This could be used to inform the user that they have reached a media upload limit through +some external method. + +The arguments passed to this callback are: + +* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request. +* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached. +* `sent_bytes`: The number of bytes already sent during the period of the limit. +* `attempted_bytes`: The number of bytes that the user attempted to send.
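
Taken together, these two callbacks let a module implement per-user upload quotas. The following is a minimal sketch of such a module (not part of this patch); the `@bot-` localpart prefix and the concrete byte/period values are invented placeholders for whatever policy a deployment actually wants:

```python
import logging
from typing import List, Optional

from synapse.module_api import MediaUploadLimit, ModuleApi

logger = logging.getLogger(__name__)


class CustomMediaLimits:
    def __init__(self, config: dict, api: ModuleApi):
        # Register both hooks described above.
        api.register_media_repository_callbacks(
            get_media_upload_limits_for_user=self.get_media_upload_limits_for_user,
            on_media_upload_limit_exceeded=self.on_media_upload_limit_exceeded,
        )

    async def get_media_upload_limits_for_user(
        self, user_id: str
    ) -> Optional[List[MediaUploadLimit]]:
        # Hypothetical policy: give bot users a tight 1 MB/day cap, spotting
        # them by a (made-up) localpart prefix.
        if user_id.startswith("@bot-"):
            return [
                MediaUploadLimit(max_bytes=1_000_000, time_period_ms=24 * 3600 * 1000)
            ]
        # `[]` would disable limits entirely for this user; `None` falls
        # through to the next callback and ultimately to the config.
        return None

    async def on_media_upload_limit_exceeded(
        self,
        user_id: str,
        limit: MediaUploadLimit,
        sent_bytes: int,
        attempted_bytes: int,
    ) -> None:
        # Synapse rejects the upload with `M_RESOURCE_LIMIT_EXCEEDED` right
        # after this callback returns; this hook is purely informational.
        logger.info(
            "%s hit upload limit (%d bytes per %d ms): sent=%d, attempted=%d",
            user_id,
            limit.max_bytes,
            limit.time_period_ms,
            sent_bytes,
            attempted_bytes,
        )
```

Note that the `None`/`[]` distinction carries the control flow here: `None` defers to later callbacks and finally to the configured `media_upload_limits`, while an empty list switches limits off for that user entirely.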
diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 68303308cd..3c401d569b 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -2168,9 +2168,12 @@ max_upload_size: 60M ### `media_upload_limits` *(array)* A list of media upload limits defining how much data a given user can upload in a given time period. +These limits are applied in addition to the `max_upload_size` limit above (which applies to individual uploads). An empty list means no limits are applied. +These settings can be overridden using the `get_media_upload_limits_for_user` module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user). + Defaults to `[]`. Example configuration: diff --git a/schema/synapse-config.schema.yaml b/schema/synapse-config.schema.yaml index 83e16de397..fdce4219ae 100644 --- a/schema/synapse-config.schema.yaml +++ b/schema/synapse-config.schema.yaml @@ -2415,8 +2415,15 @@ properties: A list of media upload limits defining how much data a given user can upload in a given time period. + These limits are applied in addition to the `max_upload_size` limit above + (which applies to individual uploads). + An empty list means no limits are applied. + + + These settings can be overridden using the `get_media_upload_limits_for_user` + module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user). default: [] items: time_period: diff --git a/synapse/config/repository.py b/synapse/config/repository.py index efdc505659..e7d23740f9 100644 --- a/synapse/config/repository.py +++ b/synapse/config/repository.py @@ -120,11 +120,19 @@ def parse_thumbnail_requirements( @attr.s(auto_attribs=True, slots=True, frozen=True) class MediaUploadLimit: - """A limit on the amount of data a user can upload in a given time - period.""" + """ + Represents a limit on the amount of data a user can upload in a given time + period. + + These can be configured through the `media_upload_limits` [config option](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#media_upload_limits) + or via the `get_media_upload_limits_for_user` module API [callback](https://element-hq.github.io/synapse/latest/modules/media_repository_callbacks.html#get_media_upload_limits_for_user). + """ max_bytes: int + """The maximum number of bytes that can be uploaded in the given time period.""" + time_period_ms: int + """The time period in milliseconds.""" class ContentRepositoryConfig(Config): diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index aae88d25c9..54791f43a7 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -179,11 +179,13 @@ class MediaRepository: # We get the media upload limits and sort them in descending order of # time period, so that we can apply some optimizations. 
- self.media_upload_limits = hs.config.media.media_upload_limits - self.media_upload_limits.sort( + self.default_media_upload_limits = hs.config.media.media_upload_limits + self.default_media_upload_limits.sort( key=lambda limit: limit.time_period_ms, reverse=True ) + self.media_repository_callbacks = hs.get_module_api_callbacks().media_repository + def _start_update_recently_accessed(self) -> Deferred: return run_as_background_process( "update_recently_accessed_media", @@ -340,16 +342,27 @@ class MediaRepository: # Check that the user has not exceeded any of the media upload limits. + # Use limits from module API if provided + media_upload_limits = ( + await self.media_repository_callbacks.get_media_upload_limits_for_user( + auth_user.to_string() + ) + ) + + # Otherwise use the default limits from config + if media_upload_limits is None: + # Note: the media upload limits are sorted so larger time periods are + # first. + media_upload_limits = self.default_media_upload_limits + # This is the total size of media uploaded by the user in the last # `time_period_ms` milliseconds, or None if we haven't checked yet. uploaded_media_size: Optional[int] = None - # Note: the media upload limits are sorted so larger time periods are - # first. - for limit in self.media_upload_limits: + for limit in media_upload_limits: # We only need to check the amount of media uploaded by the user in # this latest (smaller) time period if the amount of media uploaded - # in a previous (larger) time period is above the limit. + # in a previous (larger) time period is below the limit. # # This optimization means that in the common case where the user # hasn't uploaded much media, we only need to query the database @@ -363,6 +376,12 @@ class MediaRepository: ) if uploaded_media_size + content_length > limit.max_bytes: + await self.media_repository_callbacks.on_media_upload_limit_exceeded( + user_id=auth_user.to_string(), + limit=limit, + sent_bytes=uploaded_media_size, + attempted_bytes=content_length, + ) raise SynapseError( 400, "Media upload limit exceeded", Codes.RESOURCE_LIMIT_EXCEEDED ) diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 9309aa9394..6218135513 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -50,6 +50,7 @@ from synapse.api.constants import ProfileFields from synapse.api.errors import SynapseError from synapse.api.presence import UserPresenceState from synapse.config import ConfigError +from synapse.config.repository import MediaUploadLimit from synapse.events import EventBase from synapse.events.presence_router import ( GET_INTERESTED_USERS_CALLBACK, @@ -94,7 +95,9 @@ from synapse.module_api.callbacks.account_validity_callbacks import ( ) from synapse.module_api.callbacks.media_repository_callbacks import ( GET_MEDIA_CONFIG_FOR_USER_CALLBACK, + GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK, IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK, + ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK, ) from synapse.module_api.callbacks.ratelimit_callbacks import ( GET_RATELIMIT_OVERRIDE_FOR_USER_CALLBACK, @@ -205,6 +208,7 @@ __all__ = [ "RoomAlias", "UserProfile", "RatelimitOverride", + "MediaUploadLimit", ] logger = logging.getLogger(__name__) @@ -462,6 +466,12 @@ class ModuleApi: is_user_allowed_to_upload_media_of_size: Optional[ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK ] = None, + get_media_upload_limits_for_user: Optional[ + GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK + ] = None, + on_media_upload_limit_exceeded: Optional[ + 
ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK + ] = None, ) -> None: """Registers callbacks for media repository capabilities. Added in Synapse v1.132.0. @@ -469,6 +479,8 @@ class ModuleApi: return self._callbacks.media_repository.register_callbacks( get_media_config_for_user=get_media_config_for_user, is_user_allowed_to_upload_media_of_size=is_user_allowed_to_upload_media_of_size, + get_media_upload_limits_for_user=get_media_upload_limits_for_user, + on_media_upload_limit_exceeded=on_media_upload_limit_exceeded, ) def register_third_party_rules_callbacks( diff --git a/synapse/module_api/callbacks/media_repository_callbacks.py b/synapse/module_api/callbacks/media_repository_callbacks.py index 2ab65f9fd6..7d3aed9d66 100644 --- a/synapse/module_api/callbacks/media_repository_callbacks.py +++ b/synapse/module_api/callbacks/media_repository_callbacks.py @@ -15,6 +15,7 @@ import logging from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional +from synapse.config.repository import MediaUploadLimit from synapse.types import JsonDict from synapse.util.async_helpers import delay_cancellation from synapse.util.metrics import Measure @@ -28,6 +29,14 @@ GET_MEDIA_CONFIG_FOR_USER_CALLBACK = Callable[[str], Awaitable[Optional[JsonDict IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK = Callable[[str, int], Awaitable[bool]] +GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK = Callable[ + [str], Awaitable[Optional[List[MediaUploadLimit]]] +] + +ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK = Callable[ + [str, MediaUploadLimit, int, int], Awaitable[None] +] + class MediaRepositoryModuleApiCallbacks: def __init__(self, hs: "HomeServer") -> None: @@ -39,6 +48,12 @@ class MediaRepositoryModuleApiCallbacks: self._is_user_allowed_to_upload_media_of_size_callbacks: List[ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK ] = [] + self._get_media_upload_limits_for_user_callbacks: List[ + GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK + ] = [] + self._on_media_upload_limit_exceeded_callbacks: List[ + ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK + ] = [] def register_callbacks( self, @@ -46,6 +61,12 @@ class MediaRepositoryModuleApiCallbacks: is_user_allowed_to_upload_media_of_size: Optional[ IS_USER_ALLOWED_TO_UPLOAD_MEDIA_OF_SIZE_CALLBACK ] = None, + get_media_upload_limits_for_user: Optional[ + GET_MEDIA_UPLOAD_LIMITS_FOR_USER_CALLBACK + ] = None, + on_media_upload_limit_exceeded: Optional[ + ON_MEDIA_UPLOAD_LIMIT_EXCEEDED_CALLBACK + ] = None, ) -> None: """Register callbacks from module for each hook.""" if get_media_config_for_user is not None: @@ -56,6 +77,16 @@ class MediaRepositoryModuleApiCallbacks: is_user_allowed_to_upload_media_of_size ) + if get_media_upload_limits_for_user is not None: + self._get_media_upload_limits_for_user_callbacks.append( + get_media_upload_limits_for_user + ) + + if on_media_upload_limit_exceeded is not None: + self._on_media_upload_limit_exceeded_callbacks.append( + on_media_upload_limit_exceeded + ) + async def get_media_config_for_user(self, user_id: str) -> Optional[JsonDict]: for callback in self._get_media_config_for_user_callbacks: with Measure( @@ -83,3 +114,47 @@ class MediaRepositoryModuleApiCallbacks: return res return True + + async def get_media_upload_limits_for_user( + self, user_id: str + ) -> Optional[List[MediaUploadLimit]]: + """ + Get the first non-None list of MediaUploadLimits for the user from the registered callbacks. + If a list is returned it will be sorted in descending order of duration. 
+ """ + for callback in self._get_media_upload_limits_for_user_callbacks: + with Measure( + self.clock, + name=f"{callback.__module__}.{callback.__qualname__}", + server_name=self.server_name, + ): + res: Optional[List[MediaUploadLimit]] = await delay_cancellation( + callback(user_id) + ) + if res is not None: # to allow [] to be returned meaning no limit + # We sort them in descending order of time period + res.sort(key=lambda limit: limit.time_period_ms, reverse=True) + return res + + return None + + async def on_media_upload_limit_exceeded( + self, + user_id: str, + limit: MediaUploadLimit, + sent_bytes: int, + attempted_bytes: int, + ) -> None: + for callback in self._on_media_upload_limit_exceeded_callbacks: + with Measure( + self.clock, + name=f"{callback.__module__}.{callback.__qualname__}", + server_name=self.server_name, + ): + # Use a copy of the data in case the module modifies it + limit_copy = MediaUploadLimit( + max_bytes=limit.max_bytes, time_period_ms=limit.time_period_ms + ) + await delay_cancellation( + callback(user_id, limit_copy, sent_bytes, attempted_bytes) + ) diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py index e6ed47f83a..ec6760feea 100644 --- a/tests/rest/client/test_media.py +++ b/tests/rest/client/test_media.py @@ -46,6 +46,7 @@ from twisted.web.resource import Resource from synapse.api.errors import HttpResponseException from synapse.api.ratelimiting import Ratelimiter +from synapse.config._base import Config from synapse.config.oembed import OEmbedEndpointConfig from synapse.http.client import MultipartResponse from synapse.http.types import QueryParams @@ -53,6 +54,7 @@ from synapse.logging.context import make_deferred_yieldable from synapse.media._base import FileInfo, ThumbnailInfo from synapse.media.thumbnailer import ThumbnailProvider from synapse.media.url_previewer import IMAGE_CACHE_EXPIRY_MS +from synapse.module_api import MediaUploadLimit from synapse.rest import admin from synapse.rest.client import login, media from synapse.server import HomeServer @@ -2967,3 +2969,192 @@ class MediaUploadLimits(unittest.HomeserverTestCase): # This will succeed as the weekly limit has reset channel = self.upload_media(900) self.assertEqual(channel.code, 200) + + +class MediaUploadLimitsModuleOverrides(unittest.HomeserverTestCase): + """ + This test case simulates a homeserver with media upload limits being overridden by the module API. + """ + + servlets = [ + media.register_servlets, + login.register_servlets, + admin.register_servlets, + ] + + def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: + config = self.default_config() + + self.storage_path = self.mktemp() + self.media_store_path = self.mktemp() + os.mkdir(self.storage_path) + os.mkdir(self.media_store_path) + config["media_store_path"] = self.media_store_path + + provider_config = { + "module": "synapse.media.storage_provider.FileStorageProviderBackend", + "store_local": True, + "store_synchronous": False, + "store_remote": True, + "config": {"directory": self.storage_path}, + } + + config["media_storage_providers"] = [provider_config] + + # default limits to use + config["media_upload_limits"] = [ + {"time_period": "1d", "max_size": "1K"}, + {"time_period": "1w", "max_size": "3K"}, + ] + + return self.setup_test_homeserver(config=config) + + async def _get_media_upload_limits_for_user( + self, + user_id: str, + ) -> Optional[List[MediaUploadLimit]]: + # user1 has custom limits + if user_id == self.user1: + # n.b. 
we return these in increasing duration order and Synapse will need to sort them correctly + return [ + MediaUploadLimit( + time_period_ms=Config.parse_duration("1d"), max_bytes=5000 + ), + MediaUploadLimit( + time_period_ms=Config.parse_duration("1w"), max_bytes=15000 + ), + ] + # user2 has no limits + if user_id == self.user2: + return [] + # otherwise use default + return None + + async def _on_media_upload_limit_exceeded( + self, + user_id: str, + limit: MediaUploadLimit, + sent_bytes: int, + attempted_bytes: int, + ) -> None: + self.last_media_upload_limit_exceeded: Optional[dict[str, object]] = { + "user_id": user_id, + "limit": limit, + "sent_bytes": sent_bytes, + "attempted_bytes": attempted_bytes, + } + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.repo = hs.get_media_repository() + self.client = hs.get_federation_http_client() + self.store = hs.get_datastores().main + self.user1 = self.register_user("user1", "pass") + self.tok1 = self.login("user1", "pass") + self.user2 = self.register_user("user2", "pass") + self.tok2 = self.login("user2", "pass") + self.user3 = self.register_user("user3", "pass") + self.tok3 = self.login("user3", "pass") + self.last_media_upload_limit_exceeded = None + self.hs.get_module_api().register_media_repository_callbacks( + get_media_upload_limits_for_user=self._get_media_upload_limits_for_user, + on_media_upload_limit_exceeded=self._on_media_upload_limit_exceeded, + ) + + def create_resource_dict(self) -> Dict[str, Resource]: + resources = super().create_resource_dict() + resources["/_matrix/media"] = self.hs.get_media_repository_resource() + return resources + + def upload_media(self, size: int, tok: str) -> FakeChannel: + """Helper to upload media of a given size with a given token.""" + return self.make_request( + "POST", + "/_matrix/media/v3/upload", + content=b"0" * size, + access_token=tok, + shorthand=False, + content_type=b"text/plain", + custom_headers=[("Content-Length", str(size))], + ) + + def test_upload_under_limit(self) -> None: + """Test that uploading media under the limit works.""" + + # User 1 uploads 100 bytes + channel = self.upload_media(100, self.tok1) + self.assertEqual(channel.code, 200) + + # User 2 (unlimited) uploads 100 bytes + channel = self.upload_media(100, self.tok2) + self.assertEqual(channel.code, 200) + + # User 3 (default) uploads 100 bytes + channel = self.upload_media(100, self.tok3) + self.assertEqual(channel.code, 200) + + self.assertEqual(self.last_media_upload_limit_exceeded, None) + + def test_uses_custom_limit(self) -> None: + """Test that uploading media over the module provided daily limit fails.""" + + # User 1 uploads 3000 bytes + channel = self.upload_media(3000, self.tok1) + self.assertEqual(channel.code, 200) + + # User 1 attempts to upload 4000 bytes taking it over the limit + channel = self.upload_media(4000, self.tok1) + self.assertEqual(channel.code, 400) + assert self.last_media_upload_limit_exceeded is not None + self.assertEqual(self.last_media_upload_limit_exceeded["user_id"], self.user1) + self.assertEqual( + self.last_media_upload_limit_exceeded["limit"], + MediaUploadLimit( + max_bytes=5000, time_period_ms=Config.parse_duration("1d") + ), + ) + self.assertEqual(self.last_media_upload_limit_exceeded["sent_bytes"], 3000) + self.assertEqual(self.last_media_upload_limit_exceeded["attempted_bytes"], 4000) + + # User 1 attempts to upload 20000 bytes which is over the weekly limit + # This tests that the limits have been sorted as expected + channel = 
self.upload_media(20000, self.tok1) + self.assertEqual(channel.code, 400) + assert self.last_media_upload_limit_exceeded is not None + self.assertEqual(self.last_media_upload_limit_exceeded["user_id"], self.user1) + self.assertEqual( + self.last_media_upload_limit_exceeded["limit"], + MediaUploadLimit( + max_bytes=15000, time_period_ms=Config.parse_duration("1w") + ), + ) + self.assertEqual(self.last_media_upload_limit_exceeded["sent_bytes"], 3000) + self.assertEqual( + self.last_media_upload_limit_exceeded["attempted_bytes"], 20000 + ) + + def test_uses_unlimited(self) -> None: + """Test that unlimited user is not limited when module returns [].""" + # User 2 uploads 10000 bytes which is over the default limit + channel = self.upload_media(10000, self.tok2) + self.assertEqual(channel.code, 200) + self.assertEqual(self.last_media_upload_limit_exceeded, None) + + def test_uses_defaults(self) -> None: + """Test that the default limits are applied when module returned None.""" + # User 3 uploads 500 bytes + channel = self.upload_media(500, self.tok3) + self.assertEqual(channel.code, 200) + + # User 3 uploads 800 bytes which is over the limit + channel = self.upload_media(800, self.tok3) + self.assertEqual(channel.code, 400) + assert self.last_media_upload_limit_exceeded is not None + self.assertEqual(self.last_media_upload_limit_exceeded["user_id"], self.user3) + self.assertEqual( + self.last_media_upload_limit_exceeded["limit"], + MediaUploadLimit( + max_bytes=1024, time_period_ms=Config.parse_duration("1d") + ), + ) + self.assertEqual(self.last_media_upload_limit_exceeded["sent_bytes"], 500) + self.assertEqual(self.last_media_upload_limit_exceeded["attempted_bytes"], 800) From 7ecfe8b1a86d1f6a11dba283ee1468d516b0a6c2 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Fri, 12 Sep 2025 09:29:35 -0500 Subject: [PATCH 23/54] Better explain which context the task is run in when using `run_in_background(...)` or `run_as_background_process(...)` (#18906) Follow-up to https://github.com/element-hq/synapse/pull/18900 --- changelog.d/18906.misc | 1 + synapse/logging/context.py | 6 ++++-- synapse/metrics/background_process_metrics.py | 8 +++++--- 3 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 changelog.d/18906.misc diff --git a/changelog.d/18906.misc b/changelog.d/18906.misc new file mode 100644 index 0000000000..d7d8b47eb0 --- /dev/null +++ b/changelog.d/18906.misc @@ -0,0 +1 @@ +Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. diff --git a/synapse/logging/context.py b/synapse/logging/context.py index 6eaa19d2f6..aa4b98e7c7 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -802,8 +802,9 @@ def run_in_background( deferred returned by the function completes. To explain how the log contexts work here: - - When this function is called, the current context is stored ("original"), we kick - off the background task, and we restore that original context before returning + - When `run_in_background` is called, the current context is stored ("original"), + we kick off the background task in the current context, and we restore that + original context before returning - When the background task finishes, we don't want to leak our context into the reactor which would erroneously get attached to the next operation picked up by the event loop. 
We add a callback to the deferred which will clear the logging @@ -828,6 +829,7 @@ def run_in_background( """ calling_context = current_context() try: + # (kick off the task in the current context) res = f(*args, **kwargs) except Exception: # the assumption here is that the caller doesn't want to be disturbed diff --git a/synapse/metrics/background_process_metrics.py b/synapse/metrics/background_process_metrics.py index c6ee21d42a..633705b02a 100644 --- a/synapse/metrics/background_process_metrics.py +++ b/synapse/metrics/background_process_metrics.py @@ -286,9 +286,11 @@ def run_as_background_process( ).dec() # To explain how the log contexts work here: - # - When this function is called, the current context is stored (using - # `PreserveLoggingContext`), we kick off the background task, and we restore the - # original context before returning (also part of `PreserveLoggingContext`). + # - When `run_as_background_process` is called, the current context is stored + # (using `PreserveLoggingContext`), we kick off the background task, and we + # restore the original context before returning (also part of + # `PreserveLoggingContext`). + # - The background task runs in its own new logcontext named after `desc` + # - When the background task finishes, we don't want to leak our background context # into the reactor which would erroneously get attached to the next operation # picked up by the event loop. We use `PreserveLoggingContext` to set the From 769d30a247926dd90d9afe7f6afd65bdc605d465 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 15 Sep 2025 09:45:41 -0500 Subject: [PATCH 24/54] Clarify Python dependency constraints (#18856) Clarify Python dependency constraints Spawning from https://github.com/element-hq/synapse/pull/18852#issuecomment-3212003675 as I don't actually know the exact rule of thumb. It's unclear to me what we care about exactly. Our [deprecation policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html) mentions Debian oldstable support at least for the version of SQLite. But then we only refer to Debian stable for the Twisted dependency. --- changelog.d/18856.doc | 1 + docs/deprecation_policy.md | 58 ++++++++++++++++++++++++++++++-------- 2 files changed, 48 insertions(+), 11 deletions(-) create mode 100644 changelog.d/18856.doc diff --git a/changelog.d/18856.doc b/changelog.d/18856.doc new file mode 100644 index 0000000000..0e5e55377f --- /dev/null +++ b/changelog.d/18856.doc @@ -0,0 +1 @@ +Clarify Python dependency constraints in our deprecation policy. diff --git a/docs/deprecation_policy.md b/docs/deprecation_policy.md index 8403664850..2f3a09723e 100644 --- a/docs/deprecation_policy.md +++ b/docs/deprecation_policy.md @@ -1,13 +1,11 @@ -Deprecation Policy for Platform Dependencies -============================================ +# Deprecation Policy -Synapse has a number of platform dependencies, including Python, Rust, -PostgreSQL and SQLite. This document outlines the policy towards which versions -we support, and when we drop support for versions in the future. +Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite) +and **application dependencies** (Python and Rust packages). This document outlines the +policy towards which versions we support, and when we drop support for versions in the +future. - -Policy ------ +## Platform Dependencies Synapse follows the upstream support life cycles for Python and PostgreSQL, i.e.
when a version reaches End of Life Synapse will withdraw support for that @@ -26,8 +24,8 @@ The oldest supported version of SQLite is the version [provided](https://packages.debian.org/bullseye/libsqlite3-0) by [Debian oldstable](https://wiki.debian.org/DebianOldStable). -Context -------- + +### Context It is important for system admins to have a clear understanding of the platform requirements of Synapse and its deprecation policies so that they can @@ -50,4 +48,42 @@ the ecosystem. On a similar note, SQLite does not generally have a concept of "supported release"; bugfixes are published for the latest minor release only. We chose to track Debian's oldstable as this is relatively conservative, predictably updated -and is consistent with the `.deb` packages released by Matrix.org. \ No newline at end of file +and is consistent with the `.deb` packages released by Matrix.org. + + +## Application dependencies + +For application-level Python dependencies, we often specify loose version constraints +(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (` Date: Mon, 15 Sep 2025 17:26:04 +0100 Subject: [PATCH 25/54] Bump sigstore/cosign-installer from 3.9.2 to 3.10.0 (#18917) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 0deb5052d5..dc65625c6f 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -120,7 +120,7 @@ jobs: uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1 - name: Install Cosign - uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2 + uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0 - name: Calculate docker image tag uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0 From 4f80fa4b0af64ebec3afda9d1d1be671db01111b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:29:49 +0100 Subject: [PATCH 26/54] Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915 (#18918) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 016be0eb6b..540db8d926 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2971,14 +2971,14 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.20250809" +version = "2.9.21.20250915" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_psycopg2-2.9.21.20250809-py3-none-any.whl", hash = "sha256:59b7b0ed56dcae9efae62b8373497274fc1a0484bdc5135cdacbe5a8f44e1d7b"}, - {file = "types_psycopg2-2.9.21.20250809.tar.gz", hash = "sha256:b7c2cbdcf7c0bd16240f59ba694347329b0463e43398de69784ea4dee45f3c6d"}, + {file = "types_psycopg2-2.9.21.20250915-py3-none-any.whl", hash = "sha256:eefe5ccdc693fc086146e84c9ba437bb278efe1ef330b299a0cb71169dc6c55f"}, + {file = "types_psycopg2-2.9.21.20250915.tar.gz", hash = "sha256:bfeb8f54c32490e7b5edc46215ab4163693192bc90407b4a023822de9239f5c8"}, ] [[package]] From 8f7bd946de529521860c57cbb5c80c08872c00b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:31:12 +0100 Subject: [PATCH 27/54] Bump 
serde_json from 1.0.143 to 1.0.145 (#18919) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eff363de80..07fc7831e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1250,18 +1250,28 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.223" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "a505d71960adde88e293da5cb5eda57093379f64e61cf77bf0e6a63af07a7bac" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.223" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20f57cbd357666aa7b3ac84a90b4ea328f1d4ddb6772b430caa5d9e1309bb9e9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.223" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "3d428d07faf17e306e699ec1e91996e5a165ba5d6bce5b5155173e91a8a01a56" dependencies = [ "proc-macro2", "quote", @@ -1270,14 +1280,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.143" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] From f7b547e2d86bd6714846b378f49048fe40175031 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:35:11 +0100 Subject: [PATCH 28/54] Bump authlib from 1.6.1 to 1.6.3 (#18921) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 540db8d926..bdbf9ba123 100644 --- a/poetry.lock +++ b/poetry.lock @@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a [[package]] name = "authlib" -version = "1.6.1" +version = "1.6.3" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" files = [ - {file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"}, - {file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"}, + {file = "authlib-1.6.3-py2.py3-none-any.whl", hash = "sha256:7ea0f082edd95a03b7b72edac65ec7f8f68d703017d7e37573aee4fc603f2a48"}, + {file = "authlib-1.6.3.tar.gz", hash = "sha256:9f7a982cc395de719e4c2215c5707e7ea690ecf84f1ab126f28c053f4219e610"}, ] [package.dependencies] From 6358afff8d3263de560a1e834ad795998672b13a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:37:24 +0100 Subject: [PATCH 29/54] Bump pydantic from 2.11.7 to 2.11.9 (#18922) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index bdbf9ba123..38ca740fb5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1774,14 +1774,14 @@ files = [ [[package]] name = "pydantic" -version = "2.11.7" +version = "2.11.9" description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, - {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, + {file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"}, + {file = "pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"}, ] [package.dependencies] From 2c60b67a9518a810d7b2f0a51032785e5ec9bf98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 17:37:43 +0100 Subject: [PATCH 30/54] Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822 (#18924) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 38ca740fb5..4eedeea4e7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3026,14 +3026,14 @@ urllib3 = ">=2" [[package]] name = "types-setuptools" -version = "80.9.0.20250809" +version = "80.9.0.20250822" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_setuptools-80.9.0.20250809-py3-none-any.whl", hash = "sha256:7c6539b4c7ac7b4ab4db2be66d8a58fb1e28affa3ee3834be48acafd94f5976a"}, - {file = "types_setuptools-80.9.0.20250809.tar.gz", hash = "sha256:e986ba37ffde364073d76189e1d79d9928fb6f5278c7d07589cde353d0218864"}, + {file = "types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3"}, + {file = "types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965"}, ] [[package]] From 2bed3fb5666c77d727f7faf36e3b9743f6a6a65f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 20:05:23 +0100 Subject: [PATCH 31/54] Bump serde from 1.0.219 to 
1.0.223 (#18920) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07fc7831e0..678b888e13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1250,9 +1250,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.223" +version = "1.0.224" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a505d71960adde88e293da5cb5eda57093379f64e61cf77bf0e6a63af07a7bac" +checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b" dependencies = [ "serde_core", "serde_derive", @@ -1260,18 +1260,18 @@ dependencies = [ [[package]] name = "serde_core" -version = "1.0.223" +version = "1.0.224" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20f57cbd357666aa7b3ac84a90b4ea328f1d4ddb6772b430caa5d9e1309bb9e9" +checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.223" +version = "1.0.224" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d428d07faf17e306e699ec1e91996e5a165ba5d6bce5b5155173e91a8a01a56" +checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0" dependencies = [ "proc-macro2", "quote", From 84d64251dc741938eed3e2f3ba41e031b119d966 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Tue, 16 Sep 2025 17:15:08 -0500 Subject: [PATCH 32/54] Remove `sentinel` logcontext where we log in `setup`, `start` and exit (#18870) Remove `sentinel` logcontext where we log in `setup`, `start`, and exit. Instead of having one giant PR that removes all places we use `sentinel` logcontext, I've decided to tackle this more piecemeal. This PR covers the parts involved when you just start up Synapse and exit it with no requests or activity going on in between. Part of https://github.com/element-hq/synapse/issues/18905 (Remove `sentinel` logcontext where we log in Synapse) Prerequisite for https://github.com/element-hq/synapse/pull/18868. Logging with the `sentinel` logcontext means we won't know which server the log came from. ### Why https://github.com/element-hq/synapse/blob/9cc400177822805e2a08d4d934daad6f3bc2a4df/docs/log_contexts.md#L71-L81 (docs updated in https://github.com/element-hq/synapse/pull/18900) ### Testing strategy 1. Run Synapse normally and with `daemonize: true`: `poetry run synapse_homeserver --config-path homeserver.yaml` 1. Execute some requests 1. Shut down the server 1. Look for any bad log entries in your homeserver logs: - `Expected logging context sentinel but found main` - `Expected logging context main was lost` - `Expected previous context` - `utime went backwards!`/`stime went backwards!` - `Called stop on logcontext POST-0 without recording a start rusage` 1. Look for any logs coming from the `sentinel` context With these changes, you should only see the following logs (not from Synapse) using the `sentinel` context if you start up Synapse and exit: `homeserver.log` ``` 2025-09-10 14:45:39,924 - asyncio - 64 - DEBUG - sentinel - Using selector: EpollSelector 2025-09-10 14:45:40,562 - twisted - 281 - INFO - sentinel - Received SIGINT, shutting down.
2025-09-10 14:45:40,562 - twisted - 281 - INFO - sentinel - (TCP Port 9322 Closed) 2025-09-10 14:45:40,563 - twisted - 281 - INFO - sentinel - (TCP Port 8008 Closed) 2025-09-10 14:45:40,563 - twisted - 281 - INFO - sentinel - (TCP Port 9093 Closed) 2025-09-10 14:45:40,564 - twisted - 281 - INFO - sentinel - Main loop terminated. ``` --- changelog.d/18870.misc | 1 + synapse/app/_base.py | 40 +++++++++++++++++------------------ synapse/app/generic_worker.py | 7 +++++- synapse/app/homeserver.py | 16 ++++++++------ synapse/util/daemonize.py | 22 ++++++++++++++----- 5 files changed, 53 insertions(+), 33 deletions(-) create mode 100644 changelog.d/18870.misc diff --git a/changelog.d/18870.misc b/changelog.d/18870.misc new file mode 100644 index 0000000000..e54ba4f37a --- /dev/null +++ b/changelog.d/18870.misc @@ -0,0 +1 @@ +Remove `sentinel` logcontext usage where we log in `setup`, `start` and exit. diff --git a/synapse/app/_base.py b/synapse/app/_base.py index bce6f4d82f..cf3d260e65 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -72,7 +72,7 @@ from synapse.events.auto_accept_invites import InviteAutoAccepter from synapse.events.presence_router import load_legacy_presence_router from synapse.handlers.auth import load_legacy_password_auth_providers from synapse.http.site import SynapseSite -from synapse.logging.context import PreserveLoggingContext +from synapse.logging.context import LoggingContext, PreserveLoggingContext from synapse.logging.opentracing import init_tracer from synapse.metrics import install_gc_manager, register_threadpool from synapse.metrics.background_process_metrics import run_as_background_process @@ -183,25 +183,23 @@ def start_reactor( if gc_thresholds: gc.set_threshold(*gc_thresholds) install_gc_manager() - run_command() - # make sure that we run the reactor with the sentinel log context, - # otherwise other PreserveLoggingContext instances will get confused - # and complain when they see the logcontext arbitrarily swapping - # between the sentinel and `run` logcontexts. - # - # We also need to drop the logcontext before forking if we're daemonizing, - # otherwise the cputime metrics get confused about the per-thread resource usage - # appearing to go backwards. - with PreserveLoggingContext(): - if daemonize: - assert pid_file is not None + # Reset the logging context when we start the reactor (whenever we yield control + # to the reactor, the `sentinel` logging context needs to be set so we don't + # leak the current logging context and erroneously apply it to the next task the + # reactor event loop picks up) + with PreserveLoggingContext(): + run_command() - if print_pidfile: - print(pid_file) + if daemonize: + assert pid_file is not None - daemonize_process(pid_file, logger) - run() + if print_pidfile: + print(pid_file) + + daemonize_process(pid_file, logger) + + run() def quit_with_error(error_string: str) -> NoReturn: @@ -601,10 +599,12 @@ async def start(hs: "HomeServer") -> None: hs.get_datastores().main.db_pool.start_profiling() hs.get_pusherpool().start() + def log_shutdown() -> None: + with LoggingContext("log_shutdown"): + logger.info("Shutting down...") + # Log when we start the shut down process. - hs.get_reactor().addSystemEventTrigger( - "before", "shutdown", logger.info, "Shutting down..." 
- ) + hs.get_reactor().addSystemEventTrigger("before", "shutdown", log_shutdown) setup_sentry(hs) setup_sdnotify(hs) diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 4f5bea6bd6..543b26d8ba 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -355,7 +355,12 @@ def start(config_options: List[str]) -> None: except Exception as e: handle_startup_exception(e) - register_start(_base.start, hs) + async def start() -> None: + # Re-establish log context now that we're back from the reactor + with LoggingContext("start"): + await _base.start(hs) + + register_start(start) # redirect stdio to the logs, if configured. if not hs.config.logging.no_redirect_stdio: diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index e027b5eaea..dfc4a00719 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -377,15 +377,17 @@ def setup(config_options: List[str]) -> SynapseHomeServer: handle_startup_exception(e) async def start() -> None: - # Load the OIDC provider metadatas, if OIDC is enabled. - if hs.config.oidc.oidc_enabled: - oidc = hs.get_oidc_handler() - # Loading the provider metadata also ensures the provider config is valid. - await oidc.load_metadata() + # Re-establish log context now that we're back from the reactor + with LoggingContext("start"): + # Load the OIDC provider metadatas, if OIDC is enabled. + if hs.config.oidc.oidc_enabled: + oidc = hs.get_oidc_handler() + # Loading the provider metadata also ensures the provider config is valid. + await oidc.load_metadata() - await _base.start(hs) + await _base.start(hs) - hs.get_datastores().main.db_pool.updates.start_doing_background_updates() + hs.get_datastores().main.db_pool.updates.start_doing_background_updates() register_start(start) diff --git a/synapse/util/daemonize.py b/synapse/util/daemonize.py index 9fdefc5a76..e653abff97 100644 --- a/synapse/util/daemonize.py +++ b/synapse/util/daemonize.py @@ -29,6 +29,11 @@ import sys from types import FrameType, TracebackType from typing import NoReturn, Optional, Type +from synapse.logging.context import ( + LoggingContext, + PreserveLoggingContext, +) + def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") -> None: """daemonize the current process @@ -64,8 +69,14 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") - pid_fh.write(old_pid) sys.exit(1) - # Fork, creating a new process for the child. - process_id = os.fork() + # Stop the existing context *before* we fork the process. Otherwise the cputime + # metrics get confused about the per-thread resource usage appearing to go backwards + # because we're comparing the resource usage from the original process to the forked + # process. `PreserveLoggingContext` already takes care of restarting the original + # context *after* the block. + with PreserveLoggingContext(): + # Fork, creating a new process for the child. + process_id = os.fork() if process_id != 0: # parent process: exit. @@ -140,9 +151,10 @@ def daemonize_process(pid_file: str, logger: logging.Logger, chdir: str = "/") - # Cleanup pid file at exit. 
def exit() -> None: - logger.warning("Stopping daemon.") - os.remove(pid_file) - sys.exit(0) + with LoggingContext("atexit"): + logger.warning("Stopping daemon.") + os.remove(pid_file) + sys.exit(0) atexit.register(exit) From 6f9fab10890e9ad3afffb34fe40238b7e73155b0 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 17 Sep 2025 13:54:47 -0500 Subject: [PATCH 33/54] Fix open redirect in legacy SSO flow (`idp`) (#18909) - Validate the `idp` parameter to only accept the ones that are known in the config file - URL-encode the `idp` parameter for safety's sake (this is the main fix) Fix https://github.com/matrix-org/internal-config/issues/1651 (internal link) Regressed in https://github.com/element-hq/synapse/pull/17972 --- changelog.d/18909.bugfix | 1 + synapse/api/urls.py | 13 +++++++- synapse/rest/synapse/client/pick_idp.py | 16 ++++++++++ tests/api/test_urls.py | 26 ++++++++++++++++ tests/rest/client/test_login.py | 41 ++++++++++--------------- 5 files changed, 72 insertions(+), 25 deletions(-) create mode 100644 changelog.d/18909.bugfix diff --git a/changelog.d/18909.bugfix b/changelog.d/18909.bugfix new file mode 100644 index 0000000000..10d17631f0 --- /dev/null +++ b/changelog.d/18909.bugfix @@ -0,0 +1 @@ +Fix open redirect in legacy SSO flow with the `idp` query parameter. diff --git a/synapse/api/urls.py b/synapse/api/urls.py index 655b5edd7a..baa6e2d390 100644 --- a/synapse/api/urls.py +++ b/synapse/api/urls.py @@ -22,6 +22,7 @@ """Contains the URL paths to prefix various aspects of the server with.""" import hmac +import urllib.parse from hashlib import sha256 from typing import Optional from urllib.parse import urlencode, urljoin @@ -96,11 +97,21 @@ class LoginSSORedirectURIBuilder: serialized_query_parameters = urlencode({"redirectUrl": client_redirect_url}) if idp_id: + # Since this is a user-controlled string, make it safe to include in a URL path. + url_encoded_idp_id = urllib.parse.quote( + idp_id, + # Since this defaults to `safe="/"`, we have to override it. We're + # working with an individual URL path parameter so there shouldn't be + # any slashes in it which could change the request path. + safe="", + encoding="utf8", + ) + resultant_url = urljoin( # We have to add a trailing slash to the base URL to ensure that the # last path segment is not stripped away when joining with another path. f"{base_url}/", - f"{idp_id}?{serialized_query_parameters}", + f"{url_encoded_idp_id}?{serialized_query_parameters}", ) else: resultant_url = f"{base_url}?{serialized_query_parameters}" diff --git a/synapse/rest/synapse/client/pick_idp.py b/synapse/rest/synapse/client/pick_idp.py index 9668a09c19..15c1b3ab49 100644 --- a/synapse/rest/synapse/client/pick_idp.py +++ b/synapse/rest/synapse/client/pick_idp.py @@ -63,6 +63,22 @@ class PickIdpResource(DirectServeHtmlResource): if not idp: return await self._serve_id_picker(request, client_redirect_url) + # Validate the `idp` query parameter. We should only be working with known IdPs. + # No need to waste further effort if we don't know about it. + # + # Although we primarily prevent open redirect attacks by URL encoding all of + # the parameters we use in the redirect URL below, this validation also helps + # prevent Synapse from crafting arbitrary URLs and being used in open redirect + # attacks (defense in depth).
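+ # + # For example (an illustrative attack, not taken from a real report), a request like + # /_synapse/client/pick_idp?redirectUrl=x&idp=https://evil.example must not be + # reflected into the Location header; unknown IdP IDs are rejected here rather + # than being redirected to.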
+ providers = self._sso_handler.get_identity_providers() + auth_provider = providers.get(idp) + if not auth_provider: + logger.info("Unknown idp %r", idp) + self._sso_handler.render_error( + request, "unknown_idp", "Unknown identity provider ID" + ) + return + # Otherwise, redirect to the login SSO redirect endpoint for the given IdP # (which will in turn take us to the the IdP's redirect URI). # diff --git a/tests/api/test_urls.py b/tests/api/test_urls.py index fecc7e3e2d..bb46008ad2 100644 --- a/tests/api/test_urls.py +++ b/tests/api/test_urls.py @@ -53,3 +53,29 @@ class LoginSSORedirectURIBuilderTestCase(HomeserverTestCase): ), "https://test/_matrix/client/v3/login/sso/redirect/oidc-github?redirectUrl=https%3A%2F%2Fx%3F%3Cab+c%3E%26q%22%2B%253D%252B%22%3D%22f%C3%B6%2526%3Do%22", ) + + def test_idp_id_with_slash_is_escaped(self) -> None: + """ + Test to make sure that we properly URL encode the IdP ID. + """ + self.assertEqual( + self.login_sso_redirect_url_builder.build_login_sso_redirect_uri( + idp_id="foo/bar", + client_redirect_url="http://example.com/redirect", + ), + "https://test/_matrix/client/v3/login/sso/redirect/foo%2Fbar?redirectUrl=http%3A%2F%2Fexample.com%2Fredirect", + ) + + def test_url_as_idp_id_is_escaped(self) -> None: + """ + Test to make sure that we properly URL encode the IdP ID. + + The IdP ID shouldn't be a URL. + """ + self.assertEqual( + self.login_sso_redirect_url_builder.build_login_sso_redirect_uri( + idp_id="http://should-not-be-url.com/", + client_redirect_url="http://example.com/redirect", + ), + "https://test/_matrix/client/v3/login/sso/redirect/http%3A%2F%2Fshould-not-be-url.com%2F?redirectUrl=http%3A%2F%2Fexample.com%2Fredirect", + ) diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index d3a7905ef2..8f9856fa2e 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -939,39 +939,32 @@ class MultiSSOTestCase(unittest.HomeserverTestCase): self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body["user_id"], "@user1:test") - def test_multi_sso_redirect_to_unknown(self) -> None: - """An unknown IdP should cause a 404""" + def test_multi_sso_redirect_unknown_idp(self) -> None: + """An unknown IdP should cause a 400 bad request error""" channel = self.make_request( "GET", "/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz", ) - self.assertEqual(channel.code, 302, channel.result) - location_headers = channel.headers.getRawHeaders("Location") - assert location_headers - sso_login_redirect_uri = location_headers[0] + self.assertEqual(channel.code, 400, channel.result) - # it should redirect us to the standard login SSO redirect flow - self.assertEqual( - sso_login_redirect_uri, - self.login_sso_redirect_url_builder.build_login_sso_redirect_uri( - idp_id="xyz", client_redirect_url="http://x" - ), - ) + def test_multi_sso_redirect_unknown_idp_as_url(self) -> None: + """ + An unknown IdP that looks like a URL should cause a 400 bad request error (to + avoid open redirects). - # follow the redirect + Ideally, we'd have another test for a known IdP with a URL as the `idp_id`, but + we can't configure that in our tests because the config validation on + `oidc_providers` only allows a subset of characters. If we could configure + `oidc_providers` with a URL as the `idp_id`, it should still be URL-encoded + properly to avoid open redirections. 
We do have `test_url_as_idp_id_is_escaped` + in the URL building tests to cover this case, but it is only a unit test vs + something at the REST layer here that covers things end-to-end. + """ channel = self.make_request( "GET", - # We have to make this relative to be compatible with `make_request(...)` - get_relative_uri_from_absolute_uri(sso_login_redirect_uri), - # We have to set the Host header to match the `public_baseurl` to avoid - # the extra redirect in the `SsoRedirectServlet` in order for the - # cookies to be visible. - custom_headers=[ - ("Host", SYNAPSE_SERVER_PUBLIC_HOSTNAME), - ], + "/_synapse/client/pick_idp?redirectUrl=something&idp=https://element.io/", ) - - self.assertEqual(channel.code, 404, channel.result) + self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None: """If the client tries to pick an unknown IdP, return a 404""" From b596faa4ecf2f22610e49d00e7eb186cd95a5e0e Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Thu, 18 Sep 2025 12:06:08 +0100 Subject: [PATCH 34/54] Cache `_get_e2e_cross_signing_signatures_for_devices` (#18899) --- changelog.d/18899.feature | 1 + synapse/storage/database.py | 11 +- synapse/storage/databases/main/cache.py | 34 +++ .../storage/databases/main/end_to_end_keys.py | 202 +++++++++++++----- synapse/util/caches/descriptors.py | 7 +- 5 files changed, 196 insertions(+), 59 deletions(-) create mode 100644 changelog.d/18899.feature diff --git a/changelog.d/18899.feature b/changelog.d/18899.feature new file mode 100644 index 0000000000..ee7141efc5 --- /dev/null +++ b/changelog.d/18899.feature @@ -0,0 +1 @@ +Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. \ No newline at end of file diff --git a/synapse/storage/database.py b/synapse/storage/database.py index cfec36e0fa..aae029f910 100644 --- a/synapse/storage/database.py +++ b/synapse/storage/database.py @@ -2653,8 +2653,7 @@ def make_in_list_sql_clause( # These overloads ensure that `columns` and `iterable` values have the same length. -# Suppress "Single overload definition, multiple required" complaint. -@overload # type: ignore[misc] +@overload def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, columns: Tuple[str, str], iterable: Collection[Tuple[Any, Any]], ) -> Tuple[str, list]: ... +@overload +def make_tuple_in_list_sql_clause( + database_engine: BaseDatabaseEngine, + columns: Tuple[str, str, str], + iterable: Collection[Tuple[Any, Any, Any]], +) -> Tuple[str, list]: ... + + def make_tuple_in_list_sql_clause( database_engine: BaseDatabaseEngine, columns: Tuple[str, ...], diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 7794926812..cad26fefa4 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -21,6 +21,7 @@ import itertools +import json import logging from typing import TYPE_CHECKING, Any, Collection, Iterable, List, Optional, Tuple @@ -62,6 +63,12 @@ PURGE_HISTORY_CACHE_NAME = "ph_cache_fake" # As above, but for invalidating room caches on room deletion DELETE_ROOM_CACHE_NAME = "dr_cache_fake" +# This cache takes a list of tuples as its first argument, which requires +# special handling. +GET_E2E_CROSS_SIGNING_SIGNATURES_FOR_DEVICE_CACHE_NAME = ( + "_get_e2e_cross_signing_signatures_for_device" +) + # How long between cache invalidation table cleanups, once we have caught up # with the backlog.
REGULAR_CLEANUP_INTERVAL_MS = Config.parse_duration("1h") @@ -270,6 +277,33 @@ class CacheInvalidationWorkerStore(SQLBaseStore): # room membership. # # self._membership_stream_cache.all_entities_changed(token) # type: ignore[attr-defined] + elif ( + row.cache_func + == GET_E2E_CROSS_SIGNING_SIGNATURES_FOR_DEVICE_CACHE_NAME + ): + # "keys" is a list of strings, where each string is a + # JSON-encoded representation of the tuple keys, i.e. + # keys: ['["@userid:domain", "DEVICEID"]','["@userid2:domain", "DEVICEID2"]'] + # + # This is a side-effect of not being able to send nested + # information over replication. + for json_str in row.keys: + try: + user_id, device_id = json.loads(json_str) + except (json.JSONDecodeError, TypeError): + logger.error( + "Failed to deserialise cache key as valid JSON: %s", + json_str, + ) + continue + + # Invalidate each key. + # + # Note: .invalidate takes a tuple of arguments, hence the need + # to nest our tuple in another tuple. + self._get_e2e_cross_signing_signatures_for_device.invalidate( # type: ignore[attr-defined] + ((user_id, device_id),) + ) else: self._attempt_to_invalidate_cache(row.cache_func, row.keys) diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index de72e66ceb..17ccefe6b5 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -20,6 +20,7 @@ # # import abc +import json from typing import ( TYPE_CHECKING, Any, @@ -354,15 +355,17 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker ) for batch in batch_iter(signature_query, 50): - cross_sigs_result = await self.db_pool.runInteraction( - "get_e2e_cross_signing_signatures_for_devices", - self._get_e2e_cross_signing_signatures_for_devices_txn, - batch, + cross_sigs_result = ( + await self._get_e2e_cross_signing_signatures_for_devices(batch) ) # add each cross-signing signature to the correct device in the result dict. - for user_id, key_id, device_id, signature in cross_sigs_result: + for ( + user_id, + device_id, + ), signature_list in cross_sigs_result.items(): target_device_result = result[user_id][device_id] + # We've only looked up cross-signatures for non-deleted devices with key # data. assert target_device_result is not None @@ -373,7 +376,9 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker signing_user_signatures = target_device_signatures.setdefault( user_id, {} ) - signing_user_signatures[key_id] = signature + + for key_id, signature in signature_list: + signing_user_signatures[key_id] = signature log_kv(result) return result @@ -479,41 +484,83 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker return result - def _get_e2e_cross_signing_signatures_for_devices_txn( - self, txn: LoggingTransaction, device_query: Iterable[Tuple[str, str]] - ) -> List[Tuple[str, str, str, str]]: - """Get cross-signing signatures for a given list of devices - - Returns signatures made by the owners of the devices. - - Returns: a list of results; each entry in the list is a tuple of - (user_id, key_id, target_device_id, signature). + @cached() + def _get_e2e_cross_signing_signatures_for_device( + self, + user_id_and_device_id: Tuple[str, str], + ) -> Sequence[Tuple[str, str]]: """ - signature_query_clauses = [] - signature_query_params = [] + The single-item version of `_get_e2e_cross_signing_signatures_for_devices`. + See @cachedList for why a separate method is needed. 
+ """ + raise NotImplementedError() - for user_id, device_id in device_query: - signature_query_clauses.append( - "target_user_id = ? AND target_device_id = ? AND user_id = ?" + @cachedList( + cached_method_name="_get_e2e_cross_signing_signatures_for_device", + list_name="device_query", + ) + async def _get_e2e_cross_signing_signatures_for_devices( + self, device_query: Iterable[Tuple[str, str]] + ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + """Get cross-signing signatures for a given list of user IDs and devices. + + Args: + An iterable containing tuples of (user ID, device ID). + + Returns: + A mapping of results. The keys are the original (user_id, device_id) + tuple, while the value is the matching list of tuples of + (key_id, signature). The value will be an empty list if no + signatures exist for the device. + + Given this method is annotated with `@cachedList`, the return dict's + keys match the tuples within `device_query`, so that cache entries can + be computed from the corresponding values. + + As results are cached, the return type is immutable. + """ + + def _get_e2e_cross_signing_signatures_for_devices_txn( + txn: LoggingTransaction, device_query: Iterable[Tuple[str, str]] + ) -> Mapping[Tuple[str, str], Sequence[Tuple[str, str]]]: + where_clause_sql, where_clause_params = make_tuple_in_list_sql_clause( + self.database_engine, + columns=("target_user_id", "target_device_id", "user_id"), + iterable=[ + (user_id, device_id, user_id) for user_id, device_id in device_query + ], ) - signature_query_params.extend([user_id, device_id, user_id]) - signature_sql = """ - SELECT user_id, key_id, target_device_id, signature - FROM e2e_cross_signing_signatures WHERE %s - """ % (" OR ".join("(" + q + ")" for q in signature_query_clauses)) + signature_sql = f""" + SELECT user_id, key_id, target_device_id, signature + FROM e2e_cross_signing_signatures WHERE {where_clause_sql} + """ - txn.execute(signature_sql, signature_query_params) - return cast( - List[ - Tuple[ - str, - str, - str, - str, - ] - ], - txn.fetchall(), + txn.execute(signature_sql, where_clause_params) + + devices_and_signatures: Dict[Tuple[str, str], List[Tuple[str, str]]] = {} + + # `@cachedList` requires we return one key for every item in `device_query`. + # Pre-populate `devices_and_signatures` with each key so that none are missing. + # + # If any are missing, they will be cached as `None`, which is not + # what callers expected. + for user_id, device_id in device_query: + devices_and_signatures.setdefault((user_id, device_id), []) + + # Populate the return dictionary with each found key_id and signature. 
+ for user_id, key_id, target_device_id, signature in txn.fetchall(): + signature_tuple = (key_id, signature) + devices_and_signatures[(user_id, target_device_id)].append( + signature_tuple + ) + + return devices_and_signatures + + return await self.db_pool.runInteraction( + "_get_e2e_cross_signing_signatures_for_devices_txn", + _get_e2e_cross_signing_signatures_for_devices_txn, + device_query, ) async def get_e2e_one_time_keys( @@ -1772,26 +1819,71 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker user_id: the user who made the signatures signatures: signatures to add """ - await self.db_pool.simple_insert_many( - "e2e_cross_signing_signatures", - keys=( - "user_id", - "key_id", - "target_user_id", - "target_device_id", - "signature", - ), - values=[ - ( - user_id, - item.signing_key_id, - item.target_user_id, - item.target_device_id, - item.signature, - ) + + def _store_e2e_cross_signing_signatures( + txn: LoggingTransaction, + signatures: "Iterable[SignatureListItem]", + ) -> None: + self.db_pool.simple_insert_many_txn( + txn, + "e2e_cross_signing_signatures", + keys=( + "user_id", + "key_id", + "target_user_id", + "target_device_id", + "signature", + ), + values=[ + ( + user_id, + item.signing_key_id, + item.target_user_id, + item.target_device_id, + item.signature, + ) + for item in signatures + ], + ) + + to_invalidate = [ + # Each entry is a tuple of arguments to + # `_get_e2e_cross_signing_signatures_for_device`, which + # itself takes a tuple. Hence the double-tuple. + ((user_id, item.target_device_id),) for item in signatures - ], - desc="add_e2e_signing_key", + ] + + if to_invalidate: + # Invalidate the local cache of this worker. + for cache_key in to_invalidate: + txn.call_after( + self._get_e2e_cross_signing_signatures_for_device.invalidate, + cache_key, + ) + + # Stream cache invalidate keys over replication. + # + # We can only send a primitive per function argument across + # replication. + # + # Encode the array of strings as a JSON string, and we'll unpack + # it on the other side. + to_send = [ + (json.dumps([user_id, item.target_device_id]),) + for item in signatures + ] + + self._send_invalidation_to_replication_bulk( + txn, + cache_name=self._get_e2e_cross_signing_signatures_for_device.__name__, + key_tuples=to_send, + ) + + await self.db_pool.runInteraction( + "add_e2e_signing_key", + _store_e2e_cross_signing_signatures, + signatures, ) diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index 9630cd6d26..47b8f4ddc8 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -579,9 +579,12 @@ def cachedList( Used to do batch lookups for an already created cache. One of the arguments is specified as a list that is iterated through to lookup keys in the original cache. A new tuple consisting of the (deduplicated) keys that weren't in - the cache gets passed to the original function, which is expected to results + the cache gets passed to the original function, which is expected to result in a map of key to value for each passed value. The new results are stored in the - original cache. Note that any missing values are cached as None. + original cache. + + Note that any values in the input that end up being missing from both the + cache and the returned dictionary will be cached as `None`. Args: cached_method_name: The name of the single-item lookup method. 
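To make the `@cachedList` contract in the patch above concrete, here is a minimal, standalone sketch of the batch-lookup pattern (the names are hypothetical; this is not Synapse's actual `@cached`/`@cachedList` machinery). The key point mirrors the pre-population step in `_get_e2e_cross_signing_signatures_for_devices_txn`: the batch function returns an entry for every requested key, so a device with no signatures is cached as an empty list rather than as `None`:

```python
from typing import Dict, List, Tuple

DeviceKey = Tuple[str, str]  # (user_id, device_id)
Signature = Tuple[str, str]  # (key_id, signature)

_cache: Dict[DeviceKey, List[Signature]] = {}


def _fetch_from_db(keys: List[DeviceKey]) -> List[Tuple[str, str, str, str]]:
    # Stand-in for the SQL query; returns rows of
    # (user_id, key_id, target_device_id, signature).
    return []


def get_signatures_for_devices(
    device_query: List[DeviceKey],
) -> Dict[DeviceKey, List[Signature]]:
    # Serve what we can from the cache, then batch-fetch the rest.
    results = {key: _cache[key] for key in device_query if key in _cache}
    missing = [key for key in device_query if key not in results]
    if missing:
        # Pre-populate every missing key with an empty list so that devices
        # with no signatures are cached as [] instead of being dropped
        # (which would cause them to be cached as None).
        fetched: Dict[DeviceKey, List[Signature]] = {key: [] for key in missing}
        for user_id, key_id, target_device_id, signature in _fetch_from_db(missing):
            fetched[(user_id, target_device_id)].append((key_id, signature))
        _cache.update(fetched)
        results.update(fetched)
    return results
```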
From 4367fb2d078c52959aeca0fe6874539c53e8360d Mon Sep 17 00:00:00 2001 From: Max Kratz Date: Thu, 18 Sep 2025 15:05:41 +0100 Subject: [PATCH 35/54] OIDC doc: adds missing `jwt_config` values to authentik example (#18931) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/18931.doc | 2 ++ docs/openid.md | 7 +++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog.d/18931.doc diff --git a/changelog.d/18931.doc b/changelog.d/18931.doc new file mode 100644 index 0000000000..8a2dcb8654 --- /dev/null +++ b/changelog.d/18931.doc @@ -0,0 +1,2 @@ +Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. +Contributed by @maxkratz. diff --git a/docs/openid.md b/docs/openid.md index f86ba189c7..819f754390 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -186,6 +186,7 @@ oidc_providers: 4. Note the slug of your application, Client ID and Client Secret. Note: RSA keys must be used for signing for Authentik, ECC keys do not work. +Note: The provider must have a signing key set and must not use an encryption key. Synapse config: ```yaml @@ -204,6 +205,12 @@ oidc_providers: config: localpart_template: "{{ user.preferred_username }}" display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize. +[...] +jwt_config: + enabled: true + secret: "your client secret" # TO BE FILLED (same as `client_secret` above) + algorithm: "RS256" + # (...other fields) ``` ### Dex From d80f515622a678289aad7068751a26c6bb256dcd Mon Sep 17 00:00:00 2001 From: Tulir Asokan Date: Mon, 22 Sep 2025 15:45:05 +0200 Subject: [PATCH 36/54] Update MSC4190 support (#18946) --- changelog.d/18946.misc | 1 + synapse/api/errors.py | 3 ++ synapse/rest/client/keys.py | 7 +++- synapse/rest/client/login.py | 7 ++++ synapse/rest/client/register.py | 14 ++++++-- synapse/storage/databases/main/appservice.py | 4 +++ tests/handlers/test_oauth_delegation.py | 6 +++- tests/rest/client/test_devices.py | 12 +++++-- tests/rest/client/test_login.py | 35 ++++++++++++++++++++ tests/rest/client/test_register.py | 29 ++++++++++++++++ tests/unittest.py | 2 ++ 11 files changed, 113 insertions(+), 7 deletions(-) create mode 100644 changelog.d/18946.misc diff --git a/changelog.d/18946.misc b/changelog.d/18946.misc new file mode 100644 index 0000000000..53c246a638 --- /dev/null +++ b/changelog.d/18946.misc @@ -0,0 +1 @@ +Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. 
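As a rough illustration of the behaviour the diffs below enforce (the homeserver URL and tokens here are made-up placeholders): an appservice with MSC4190 device management enabled must register users with `inhibit_login` set, and can no longer use `m.login.application_service` on `/login`:

```python
import requests

BASE = "https://synapse.example.com"  # assumed homeserver URL
AS_TOKEN = "as_token_placeholder"     # assumed appservice token

# Allowed: appservice registration with inhibit_login=True
# (no device or access token is created for the new user).
resp = requests.post(
    f"{BASE}/_matrix/client/v3/register",
    params={"kind": "user"},
    headers={"Authorization": f"Bearer {AS_TOKEN}"},
    json={
        "type": "m.login.application_service",
        "username": "as_user_alice",
        "inhibit_login": True,
    },
)
assert resp.status_code == 200

# Rejected: appservice login now fails with a 400 and the errcode
# IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED.
resp = requests.post(
    f"{BASE}/_matrix/client/v3/login",
    headers={"Authorization": f"Bearer {AS_TOKEN}"},
    json={
        "type": "m.login.application_service",
        "identifier": {"type": "m.id.user", "user": "as_user_alice"},
    },
)
assert resp.status_code == 400
```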
diff --git a/synapse/api/errors.py b/synapse/api/errors.py index ec4d707b7b..b3e391cd96 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -140,6 +140,9 @@ class Codes(str, Enum): # Part of MSC4155 INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED" + # Part of MSC4190 + APPSERVICE_LOGIN_UNSUPPORTED = "IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED" + # Part of MSC4306: Thread Subscriptions MSC4306_CONFLICTING_UNSUBSCRIPTION = ( "IO.ELEMENT.MSC4306.M_CONFLICTING_UNSUBSCRIPTION" diff --git a/synapse/rest/client/keys.py b/synapse/rest/client/keys.py index 9f39889c75..6cf480952e 100644 --- a/synapse/rest/client/keys.py +++ b/synapse/rest/client/keys.py @@ -399,10 +399,15 @@ class SigningKeyUploadServlet(RestServlet): if not keys_are_different: return 200, {} + # MSC4190 can skip UIA for replacing cross-signing keys as well. + is_appservice_with_msc4190 = ( + requester.app_service and requester.app_service.msc4190_device_management + ) + # The keys are different; is x-signing set up? If no, then this is first-time # setup, and that is allowed without UIA, per MSC3967. # If yes, then we need to authenticate the change. - if is_cross_signing_setup: + if is_cross_signing_setup and not is_appservice_with_msc4190: # With MSC3861, UIA is not possible. Instead, the auth service has to # explicitly mark the master key as replaceable. if self.hs.config.mas.enabled: diff --git a/synapse/rest/client/login.py b/synapse/rest/client/login.py index acb9111ad2..921232a3ea 100644 --- a/synapse/rest/client/login.py +++ b/synapse/rest/client/login.py @@ -216,6 +216,13 @@ class LoginRestServlet(RestServlet): "This login method is only valid for application services" ) + if appservice.msc4190_device_management: + raise SynapseError( + 400, + "This appservice has MSC4190 enabled, so appservice login cannot be used.", + errcode=Codes.APPSERVICE_LOGIN_UNSUPPORTED, + ) + if appservice.is_rate_limited(): await self._address_ratelimiter.ratelimit( None, request.getClientAddress().host diff --git a/synapse/rest/client/register.py b/synapse/rest/client/register.py index 102c04bb67..b42006e4ce 100644 --- a/synapse/rest/client/register.py +++ b/synapse/rest/client/register.py @@ -782,8 +782,12 @@ class RegisterRestServlet(RestServlet): user_id, appservice = await self.registration_handler.appservice_register( username, as_token ) - if appservice.msc4190_device_management: - body["inhibit_login"] = True + if appservice.msc4190_device_management and not body.get("inhibit_login"): + raise SynapseError( + 400, + "This appservice has MSC4190 enabled, so the inhibit_login parameter must be set to true.", + errcode=Codes.APPSERVICE_LOGIN_UNSUPPORTED, + ) return await self._create_registration_details( user_id, @@ -923,6 +927,12 @@ class RegisterAppServiceOnlyRestServlet(RestServlet): "Registration has been disabled. 
Only m.login.application_service registrations are allowed.", errcode=Codes.FORBIDDEN, ) + if not body.get("inhibit_login"): + raise SynapseError( + 400, + "This server uses OAuth2, so the inhibit_login parameter must be set to true for appservice registrations.", + errcode=Codes.APPSERVICE_LOGIN_UNSUPPORTED, + ) kind = parse_string(request, "kind", default="user") diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 9862e574fd..90ff0f0f12 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -83,6 +83,10 @@ class ApplicationServiceWorkerStore(RoomMemberWorkerStore): hs.hostname, hs.config.appservice.app_service_config_files ) self.exclusive_user_regex = _make_exclusive_regex(self.services_cache) + # When OAuth is enabled, force all appservices to enable MSC4190 too. + if hs.config.mas.enabled or hs.config.experimental.msc3861.enabled: + for appservice in self.services_cache: + appservice.msc4190_device_management = True def get_max_as_txn_id(txn: Cursor) -> int: logger.warning("Falling back to slow query, you should port to postgres") diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index d24614f6a3..b93e366b01 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -1219,7 +1219,11 @@ class DisabledEndpointsTestCase(HomeserverTestCase): channel = self.make_request( "POST", "/_matrix/client/v3/register", - {"username": "alice", "type": "m.login.application_service"}, + { + "username": "alice", + "type": "m.login.application_service", + "inhibit_login": True, + }, shorthand=False, access_token="i_am_an_app_service", ) diff --git a/tests/rest/client/test_devices.py b/tests/rest/client/test_devices.py index 2c498e97e1..309e6ec686 100644 --- a/tests/rest/client/test_devices.py +++ b/tests/rest/client/test_devices.py @@ -494,7 +494,9 @@ class MSC4190AppserviceDevicesTestCase(unittest.HomeserverTestCase): return self.hs def test_PUT_device(self) -> None: - self.register_appservice_user("alice", self.msc4190_service.token) + self.register_appservice_user( + "alice", self.msc4190_service.token, inhibit_login=True + ) self.register_appservice_user("bob", self.pre_msc_service.token) channel = self.make_request( @@ -542,7 +544,9 @@ class MSC4190AppserviceDevicesTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 404, channel.json_body) def test_DELETE_device(self) -> None: - self.register_appservice_user("alice", self.msc4190_service.token) + self.register_appservice_user( + "alice", self.msc4190_service.token, inhibit_login=True + ) # There should be no device channel = self.make_request( @@ -589,7 +593,9 @@ class MSC4190AppserviceDevicesTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.json_body, {"devices": []}) def test_POST_delete_devices(self) -> None: - self.register_appservice_user("alice", self.msc4190_service.token) + self.register_appservice_user( + "alice", self.msc4190_service.token, inhibit_login=True + ) # There should be no device channel = self.make_request( diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py index 8f9856fa2e..2f70a7a87e 100644 --- a/tests/rest/client/test_login.py +++ b/tests/rest/client/test_login.py @@ -1498,9 +1498,23 @@ class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): ApplicationService.NS_ALIASES: [], }, ) + self.msc4190_service = ApplicationService( + id="third__identifier", 
+ token="third_token", + sender=UserID.from_string("@as3bot:example.com"), + namespaces={ + ApplicationService.NS_USERS: [ + {"regex": r"@as3_user.*", "exclusive": False} + ], + ApplicationService.NS_ROOMS: [], + ApplicationService.NS_ALIASES: [], + }, + msc4190_device_management=True, + ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) + self.hs.get_datastores().main.services_cache.append(self.msc4190_service) return self.hs def test_login_appservice_user(self) -> None: @@ -1517,6 +1531,27 @@ class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200, msg=channel.result) + def test_login_appservice_msc4190_fail(self) -> None: + """Test that an appservice user can use /login""" + self.register_appservice_user( + "as3_user_alice", self.msc4190_service.token, inhibit_login=True + ) + + params = { + "type": login.LoginRestServlet.APPSERVICE_TYPE, + "identifier": {"type": "m.id.user", "user": "as3_user_alice"}, + } + channel = self.make_request( + b"POST", LOGIN_URL, params, access_token=self.msc4190_service.token + ) + + self.assertEqual(channel.code, 400, msg=channel.result) + self.assertEqual( + channel.json_body.get("errcode"), + Codes.APPSERVICE_LOGIN_UNSUPPORTED, + channel.json_body, + ) + def test_login_appservice_user_bot(self) -> None: """Test that the appservice bot can use /login""" self.register_appservice_user(AS_USER, self.service.token) diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py index 70e005caf4..0ffc64dd1f 100644 --- a/tests/rest/client/test_register.py +++ b/tests/rest/client/test_register.py @@ -136,6 +136,7 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): request_data = { "username": "as_user_kermit", "type": APP_SERVICE_REGISTRATION_TYPE, + "inhibit_login": True, } channel = self.make_request( @@ -147,6 +148,34 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase): self.assertLessEqual(det_data.items(), channel.json_body.items()) self.assertNotIn("access_token", channel.json_body) + def test_POST_appservice_msc4190_enabled_fail(self) -> None: + # With MSC4190 enabled, the registration should fail unless inhibit_login is set + as_token = "i_am_an_app_service" + + appservice = ApplicationService( + as_token, + id="1234", + namespaces={"users": [{"regex": r"@as_user.*", "exclusive": True}]}, + sender=UserID.from_string("@as:test"), + msc4190_device_management=True, + ) + + self.hs.get_datastores().main.services_cache.append(appservice) + request_data = { + "username": "as_user_kermit", + "type": APP_SERVICE_REGISTRATION_TYPE, + } + + channel = self.make_request( + b"POST", self.url + b"?access_token=i_am_an_app_service", request_data + ) + self.assertEqual(channel.code, 400, channel.json_body) + self.assertEqual( + channel.json_body.get("errcode"), + Codes.APPSERVICE_LOGIN_UNSUPPORTED, + channel.json_body, + ) + def test_POST_bad_password(self) -> None: request_data = {"username": "kermit", "password": 666} channel = self.make_request(b"POST", self.url, request_data) diff --git a/tests/unittest.py b/tests/unittest.py index 5e6957dc6d..c9f8c48665 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -782,6 +782,7 @@ class HomeserverTestCase(TestCase): self, username: str, appservice_token: str, + inhibit_login: bool = False, ) -> Tuple[str, Optional[str]]: """Register an appservice user as an application service. 
Requires the client-facing registration API be registered. @@ -802,6 +803,7 @@ class HomeserverTestCase(TestCase): { "username": username, "type": "m.login.application_service", + "inhibit_login": inhibit_login, }, access_token=appservice_token, ) From 83aca3f0972cf062f71127eaab95dbf57584ce03 Mon Sep 17 00:00:00 2001 From: SpiritCroc Date: Mon, 22 Sep 2025 15:50:52 +0200 Subject: [PATCH 37/54] Implement MSC4169: backwards-compatible redaction sending for rooms < v11 using the /send endpoint (#18898) Implement [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) While there is a dedicated API endpoint for redactions, being able to send redactions using the normal send endpoint is useful when using [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140) for sending delayed redactions to replicate expiring messages. Currently this would only work on rooms >= v11 but fail with an internal server error on older room versions when setting the `redacts` field in the content, since older rooms would require that field to be outside of `content`. We can address this by copying it over if necessary. Relevant spec at https://spec.matrix.org/v1.8/rooms/v11/#moving-the-redacts-property-of-mroomredaction-events-to-a-content-property --------- Co-authored-by: Tulir Asokan --- changelog.d/18898.feature | 1 + synapse/config/experimental.py | 3 +++ synapse/handlers/message.py | 27 +++++++++++++++++++++++++-- synapse/rest/client/versions.py | 2 ++ 4 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18898.feature diff --git a/changelog.d/18898.feature b/changelog.d/18898.feature new file mode 100644 index 0000000000..bf31dd55d1 --- /dev/null +++ b/changelog.d/18898.feature @@ -0,0 +1 @@ +Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. diff --git a/synapse/config/experimental.py b/synapse/config/experimental.py index d086deab3f..d7a3d67558 100644 --- a/synapse/config/experimental.py +++ b/synapse/config/experimental.py @@ -556,6 +556,9 @@ class ExperimentalConfig(Config): # MSC4133: Custom profile fields self.msc4133_enabled: bool = experimental.get("msc4133_enabled", False) + # MSC4169: Backwards-compatible redaction sending using `/send` + self.msc4169_enabled: bool = experimental.get("msc4169_enabled", False) + # MSC4210: Remove legacy mentions self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index d850b617d8..6db031a4b9 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -1013,14 +1013,37 @@ class EventCreationHandler: await self.clock.sleep(random.randint(1, 10)) raise ShadowBanError() - if ratelimit: + room_version = None + + if ( + event_dict["type"] == EventTypes.Redaction + and "redacts" in event_dict["content"] + and self.hs.config.experimental.msc4169_enabled + ): room_id = event_dict["room_id"] try: room_version = await self.store.get_room_version(room_id) except NotFoundError: - # The room doesn't exist. raise AuthError(403, f"User {requester.user} not in room {room_id}") + if not room_version.updated_redaction_rules: + # Legacy room versions need the "redacts" field outside of the event's + # content. However clients may still send it within the content, so move + # the field if necessary for compatibility. 
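+ # + # For example (with an illustrative event ID), a client may send a redaction + # into a v10 room as {"type": "m.room.redaction", "content": {"redacts": "$abc"}} + # and the code below rewrites it to {"type": "m.room.redaction", + # "redacts": "$abc", "content": {}} before the event is built.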
+ redacts = event_dict.get("redacts") or event_dict["content"].pop( + "redacts", None + ) + if redacts is not None and "redacts" not in event_dict: + event_dict["redacts"] = redacts + + if ratelimit: + if room_version is None: + room_id = event_dict["room_id"] + try: + room_version = await self.store.get_room_version(room_id) + except NotFoundError: + raise AuthError(403, f"User {requester.user} not in room {room_id}") + if room_version.updated_redaction_rules: redacts = event_dict["content"].get("redacts") else: diff --git a/synapse/rest/client/versions.py b/synapse/rest/client/versions.py index 1b8efd98cd..20395430d7 100644 --- a/synapse/rest/client/versions.py +++ b/synapse/rest/client/versions.py @@ -180,6 +180,8 @@ class VersionsRestServlet(RestServlet): "org.matrix.msc4155": self.config.experimental.msc4155_enabled, # MSC4306: Support for thread subscriptions "org.matrix.msc4306": self.config.experimental.msc4306_enabled, + # MSC4169: Backwards-compatible redaction sending using `/send` + "com.beeper.msc4169": self.config.experimental.msc4169_enabled, }, }, ) From 5a9ca1e3d9e2186afa3e2c2657e1f2d2f4ad1bd6 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 10:27:59 -0500 Subject: [PATCH 38/54] Introduce `Clock.call_when_running(...)` to include logcontext by default (#18944) Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. Background: > Ideally, nothing from the Synapse homeserver would be logged against the `sentinel` > logcontext as we want to know which server the logs came from. In practice, this is not > always the case yet especially outside of request handling. > > Global things outside of Synapse (e.g. Twisted reactor code) should run in the > `sentinel` logcontext. It's only when it calls into application code that a logcontext > gets activated. This means the reactor should be started in the `sentinel` logcontext, > and any time an awaitable yields control back to the reactor, it should reset the > logcontext to be the `sentinel` logcontext. This is important to avoid leaking the > current logcontext to the reactor (which would then get picked up and associated with > the next thing the reactor does). 
> > *-- `docs/log_contexts.md` Also adds a lint to prefer `Clock.call_when_running(...)` over `reactor.callWhenRunning(...)` Part of https://github.com/element-hq/synapse/issues/18905 --- changelog.d/18944.misc | 1 + scripts-dev/mypy_synapse_plugin.py | 40 ++++ scripts-dev/sign_json.py | 2 +- synapse/_scripts/synapse_port_db.py | 64 ++---- synapse/_scripts/update_synapse_database.py | 2 +- synapse/api/auth/mas.py | 2 +- synapse/api/auth/msc3861_delegated.py | 2 +- synapse/api/errors.py | 2 +- synapse/api/ratelimiting.py | 2 +- synapse/app/_base.py | 5 +- synapse/app/generic_worker.py | 6 +- synapse/app/homeserver.py | 18 +- synapse/appservice/scheduler.py | 2 +- synapse/events/builder.py | 2 +- synapse/federation/sender/__init__.py | 2 +- .../federation/sender/transaction_manager.py | 2 +- synapse/handlers/deactivate_account.py | 2 +- synapse/handlers/device.py | 2 +- synapse/handlers/devicemessage.py | 2 +- synapse/handlers/e2e_keys.py | 2 +- synapse/handlers/identity.py | 2 +- synapse/handlers/message.py | 3 +- synapse/handlers/oidc.py | 3 +- synapse/handlers/ui_auth/checkers.py | 2 +- synapse/http/client.py | 2 +- .../federation/matrix_federation_agent.py | 2 +- .../http/federation/well_known_resolver.py | 7 +- synapse/http/matrixfederationclient.py | 2 +- synapse/http/server.py | 10 +- synapse/http/servlet.py | 2 +- synapse/logging/handlers.py | 14 +- synapse/logging/opentracing.py | 2 +- synapse/media/_base.py | 2 +- synapse/media/media_storage.py | 2 +- synapse/media/oembed.py | 2 +- synapse/media/url_previewer.py | 2 +- synapse/module_api/__init__.py | 2 +- synapse/replication/tcp/commands.py | 2 +- synapse/replication/tcp/external_cache.py | 2 +- synapse/replication/tcp/protocol.py | 2 +- synapse/rest/client/sync.py | 2 +- synapse/rest/key/v2/remote_key_resource.py | 2 +- synapse/rest/well_known.py | 2 +- synapse/server.py | 2 +- synapse/storage/_base.py | 2 +- synapse/storage/background_updates.py | 3 +- .../storage/databases/main/account_data.py | 2 +- synapse/storage/databases/main/appservice.py | 2 +- .../storage/databases/main/censor_events.py | 2 +- .../storage/databases/main/delayed_events.py | 3 +- synapse/storage/databases/main/deviceinbox.py | 3 +- synapse/storage/databases/main/devices.py | 2 +- .../storage/databases/main/e2e_room_keys.py | 2 +- .../storage/databases/main/end_to_end_keys.py | 2 +- .../databases/main/event_federation.py | 2 +- .../databases/main/event_push_actions.py | 2 +- synapse/storage/databases/main/events.py | 2 +- .../databases/main/events_bg_updates.py | 2 +- synapse/storage/databases/main/lock.py | 2 +- synapse/storage/databases/main/push_rule.py | 3 +- synapse/storage/databases/main/pusher.py | 2 +- synapse/storage/databases/main/receipts.py | 2 +- synapse/storage/databases/main/room.py | 2 +- synapse/storage/databases/main/session.py | 2 +- .../storage/databases/main/sliding_sync.py | 2 +- synapse/storage/databases/main/tags.py | 2 +- .../storage/databases/main/task_scheduler.py | 2 +- synapse/storage/databases/main/ui_auth.py | 3 +- synapse/types/__init__.py | 16 +- synapse/util/__init__.py | 169 +------------- synapse/util/async_helpers.py | 5 +- synapse/util/batching_queue.py | 2 +- synapse/util/caches/expiringcache.py | 2 +- synapse/util/caches/lrucache.py | 7 +- synapse/util/caches/response_cache.py | 2 +- synapse/util/clock.py | 208 ++++++++++++++++++ synapse/util/json.py | 57 +++++ synapse/util/macaroons.py | 3 +- synapse/util/metrics.py | 2 +- synapse/util/ratelimitutils.py | 2 +- synapse/util/retryutils.py | 2 +- synapse/visibility.py 
| 2 +- synmark/__main__.py | 5 +- synmark/suites/logging.py | 2 +- tests/api/test_auth.py | 2 +- tests/api/test_filtering.py | 2 +- tests/api/test_urls.py | 2 +- tests/app/test_openid_listener.py | 2 +- tests/app/test_phone_stats_home.py | 2 +- tests/appservice/test_api.py | 2 +- tests/appservice/test_scheduler.py | 2 +- tests/config/test_room_directory.py | 2 +- tests/crypto/test_keyring.py | 2 +- tests/events/test_auto_accept_invites.py | 2 +- tests/events/test_presence_router.py | 2 +- tests/events/test_snapshot.py | 2 +- tests/federation/test_federation_catch_up.py | 2 +- tests/federation/test_federation_client.py | 2 +- tests/federation/test_federation_devices.py | 2 +- tests/federation/test_federation_media.py | 2 +- .../test_federation_out_of_band_membership.py | 2 +- tests/federation/test_federation_sender.py | 2 +- tests/federation/test_federation_server.py | 2 +- tests/federation/transport/test_knocking.py | 2 +- tests/handlers/test_admin.py | 2 +- tests/handlers/test_appservice.py | 2 +- tests/handlers/test_auth.py | 2 +- tests/handlers/test_cas.py | 2 +- tests/handlers/test_deactivate_account.py | 2 +- tests/handlers/test_device.py | 2 +- tests/handlers/test_directory.py | 2 +- tests/handlers/test_e2e_keys.py | 2 +- tests/handlers/test_e2e_room_keys.py | 2 +- tests/handlers/test_federation.py | 2 +- tests/handlers/test_federation_event.py | 2 +- tests/handlers/test_message.py | 2 +- tests/handlers/test_oauth_delegation.py | 2 +- tests/handlers/test_oidc.py | 2 +- tests/handlers/test_password_providers.py | 2 +- tests/handlers/test_presence.py | 2 +- tests/handlers/test_profile.py | 2 +- tests/handlers/test_receipts.py | 2 +- tests/handlers/test_register.py | 2 +- tests/handlers/test_room_member.py | 2 +- tests/handlers/test_room_policy.py | 2 +- tests/handlers/test_room_summary.py | 2 +- tests/handlers/test_saml.py | 2 +- tests/handlers/test_sliding_sync.py | 2 +- tests/handlers/test_sso.py | 2 +- tests/handlers/test_stats.py | 2 +- tests/handlers/test_sync.py | 2 +- tests/handlers/test_typing.py | 2 +- tests/handlers/test_user_directory.py | 2 +- tests/handlers/test_worker_lock.py | 2 +- tests/http/test_matrixfederationclient.py | 2 +- tests/http/test_simple_client.py | 2 +- tests/http/test_site.py | 2 +- tests/logging/test_opentracing.py | 18 +- tests/media/test_media_retention.py | 2 +- tests/media/test_media_storage.py | 2 +- tests/media/test_oembed.py | 2 +- tests/media/test_url_previewer.py | 2 +- tests/metrics/test_phone_home_stats.py | 2 +- tests/module_api/test_account_data_manager.py | 2 +- tests/module_api/test_api.py | 2 +- .../test_event_unsigned_addition.py | 2 +- tests/module_api/test_spamchecker.py | 2 +- tests/push/test_bulk_push_rule_evaluator.py | 2 +- tests/push/test_email.py | 2 +- tests/push/test_http.py | 2 +- tests/push/test_push_rule_evaluator.py | 2 +- tests/replication/_base.py | 2 +- tests/replication/storage/_base.py | 2 +- tests/replication/storage/test_events.py | 2 +- tests/replication/tcp/streams/test_events.py | 2 +- .../tcp/streams/test_thread_subscriptions.py | 2 +- tests/replication/test_auth.py | 2 +- tests/replication/test_federation_ack.py | 2 +- .../test_federation_sender_shard.py | 2 +- tests/replication/test_multi_media_repo.py | 2 +- tests/replication/test_pusher_shard.py | 2 +- .../test_sharded_event_persister.py | 2 +- tests/replication/test_sharded_receipts.py | 2 +- tests/rest/admin/test_admin.py | 2 +- tests/rest/admin/test_background_updates.py | 2 +- tests/rest/admin/test_device.py | 2 +- tests/rest/admin/test_event_reports.py 
| 2 +- tests/rest/admin/test_federation.py | 2 +- tests/rest/admin/test_media.py | 2 +- tests/rest/admin/test_registration_tokens.py | 2 +- tests/rest/admin/test_room.py | 2 +- tests/rest/admin/test_scheduled_tasks.py | 2 +- tests/rest/admin/test_server_notice.py | 2 +- tests/rest/admin/test_statistics.py | 2 +- tests/rest/admin/test_user.py | 2 +- tests/rest/admin/test_username_available.py | 2 +- .../sliding_sync/test_connection_tracking.py | 2 +- .../test_extension_account_data.py | 2 +- .../sliding_sync/test_extension_e2ee.py | 2 +- .../sliding_sync/test_extension_receipts.py | 2 +- .../test_extension_thread_subscriptions.py | 2 +- .../sliding_sync/test_extension_to_device.py | 2 +- .../sliding_sync/test_extension_typing.py | 2 +- .../client/sliding_sync/test_extensions.py | 2 +- .../client/sliding_sync/test_lists_filters.py | 2 +- .../sliding_sync/test_room_subscriptions.py | 2 +- .../client/sliding_sync/test_rooms_invites.py | 2 +- .../client/sliding_sync/test_rooms_meta.py | 2 +- .../sliding_sync/test_rooms_required_state.py | 2 +- .../sliding_sync/test_rooms_timeline.py | 2 +- .../client/sliding_sync/test_sliding_sync.py | 2 +- tests/rest/client/test_account.py | 2 +- tests/rest/client/test_auth.py | 2 +- tests/rest/client/test_capabilities.py | 2 +- tests/rest/client/test_consent.py | 2 +- tests/rest/client/test_delayed_events.py | 2 +- tests/rest/client/test_devices.py | 2 +- tests/rest/client/test_directory.py | 2 +- tests/rest/client/test_ephemeral_message.py | 2 +- tests/rest/client/test_events.py | 2 +- tests/rest/client/test_filter.py | 2 +- tests/rest/client/test_identity.py | 2 +- tests/rest/client/test_login.py | 2 +- tests/rest/client/test_login_token_request.py | 2 +- tests/rest/client/test_media.py | 2 +- tests/rest/client/test_mutual_rooms.py | 2 +- tests/rest/client/test_notifications.py | 2 +- tests/rest/client/test_owned_state.py | 2 +- tests/rest/client/test_password_policy.py | 2 +- tests/rest/client/test_power_levels.py | 2 +- tests/rest/client/test_presence.py | 2 +- tests/rest/client/test_profile.py | 2 +- tests/rest/client/test_read_marker.py | 2 +- tests/rest/client/test_receipts.py | 2 +- tests/rest/client/test_redactions.py | 2 +- tests/rest/client/test_register.py | 2 +- tests/rest/client/test_relations.py | 2 +- tests/rest/client/test_rendezvous.py | 2 +- tests/rest/client/test_reporting.py | 2 +- tests/rest/client/test_retention.py | 2 +- tests/rest/client/test_rooms.py | 2 +- tests/rest/client/test_shadow_banned.py | 2 +- tests/rest/client/test_sync.py | 2 +- tests/rest/client/test_third_party_rules.py | 2 +- .../rest/client/test_thread_subscriptions.py | 2 +- tests/rest/client/test_transactions.py | 2 +- tests/rest/client/test_typing.py | 2 +- tests/rest/client/test_upgrade_room.py | 2 +- tests/rest/key/v2/test_remote_key_resource.py | 2 +- tests/rest/media/test_domain_blocking.py | 2 +- tests/rest/media/test_url_preview.py | 2 +- tests/rest/synapse/mas/test_devices.py | 2 +- tests/rest/synapse/mas/test_users.py | 2 +- tests/server.py | 2 +- tests/server_notices/__init__.py | 2 +- tests/server_notices/test_consent.py | 2 +- .../test_resource_limits_server_notices.py | 2 +- tests/state/test_v21.py | 2 +- .../databases/main/test_deviceinbox.py | 2 +- .../databases/main/test_end_to_end_keys.py | 2 +- .../databases/main/test_events_worker.py | 2 +- tests/storage/databases/main/test_lock.py | 2 +- tests/storage/databases/main/test_receipts.py | 2 +- tests/storage/databases/main/test_room.py | 2 +- tests/storage/test__base.py | 2 +- 
tests/storage/test_account_data.py | 2 +- tests/storage/test_appservice.py | 2 +- tests/storage/test_background_update.py | 2 +- tests/storage/test_cleanup_extrems.py | 2 +- tests/storage/test_client_ips.py | 2 +- tests/storage/test_database.py | 2 +- tests/storage/test_devices.py | 2 +- tests/storage/test_directory.py | 2 +- tests/storage/test_e2e_room_keys.py | 2 +- tests/storage/test_end_to_end_keys.py | 2 +- tests/storage/test_event_chain.py | 2 +- tests/storage/test_event_federation.py | 3 +- tests/storage/test_event_push_actions.py | 2 +- tests/storage/test_events.py | 2 +- tests/storage/test_events_bg_updates.py | 2 +- tests/storage/test_id_generators.py | 2 +- tests/storage/test_monthly_active_users.py | 2 +- tests/storage/test_profile.py | 2 +- tests/storage/test_purge.py | 2 +- tests/storage/test_receipts.py | 2 +- tests/storage/test_redaction.py | 2 +- tests/storage/test_registration.py | 2 +- tests/storage/test_relations.py | 2 +- tests/storage/test_rollback_worker.py | 2 +- tests/storage/test_room.py | 2 +- tests/storage/test_room_search.py | 2 +- tests/storage/test_roommember.py | 2 +- tests/storage/test_sliding_sync_tables.py | 2 +- tests/storage/test_state.py | 2 +- tests/storage/test_state_deletion.py | 2 +- tests/storage/test_stream.py | 2 +- tests/storage/test_thread_subscriptions.py | 2 +- tests/storage/test_transactions.py | 2 +- tests/storage/test_txn_limit.py | 2 +- tests/storage/test_user_directory.py | 2 +- tests/storage/test_user_filters.py | 2 +- tests/test_mau.py | 2 +- tests/test_phone_home.py | 2 +- tests/test_server.py | 2 +- tests/test_state.py | 2 +- tests/test_terms_auth.py | 13 +- tests/test_utils/oidc.py | 2 +- tests/test_visibility.py | 2 +- tests/unittest.py | 2 +- tests/util/test_expiring_cache.py | 2 +- tests/util/test_logcontext.py | 2 +- tests/util/test_task_scheduler.py | 2 +- 292 files changed, 693 insertions(+), 527 deletions(-) create mode 100644 changelog.d/18944.misc create mode 100644 synapse/util/clock.py create mode 100644 synapse/util/json.py diff --git a/changelog.d/18944.misc b/changelog.d/18944.misc new file mode 100644 index 0000000000..9561aa9275 --- /dev/null +++ b/changelog.d/18944.misc @@ -0,0 +1 @@ +Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 610dec415a..439a75fc7e 100644 --- a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -68,6 +68,12 @@ PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode( category="per-homeserver-tenant-metrics", ) +PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode( + "prefer-synapse-clock-call-when-running", + "`synapse.util.Clock.call_when_running` should be used instead of `reactor.callWhenRunning`", + category="synapse-reactor-clock", +) + class Sentinel(enum.Enum): # defining a sentinel in this way allows mypy to correctly handle the @@ -229,9 +235,43 @@ class SynapsePlugin(Plugin): ): return check_is_cacheable_wrapper + if fullname in ( + "twisted.internet.interfaces.IReactorCore.callWhenRunning", + "synapse.types.ISynapseThreadlessReactor.callWhenRunning", + "synapse.types.ISynapseReactor.callWhenRunning", + ): + return check_call_when_running + return None +def check_call_when_running(ctx: MethodSigContext) -> CallableType: + """ + Ensure that the `reactor.callWhenRunning` callsites aren't used. 
+ + `synapse.util.Clock.call_when_running` should always be used instead of + `reactor.callWhenRunning`. + + Since `reactor.callWhenRunning` is a reactor callback, the callback will start out + with the sentinel logcontext. `synapse.util.Clock.call_when_running` instead starts a + default logcontext, so we can tell which server the logs came from. + + Args: + ctx: The `MethodSigContext` from mypy. + """ + signature: CallableType = ctx.default_signature + ctx.api.fail( + ( + "Expected all `reactor.callWhenRunning` calls to use `synapse.util.Clock.call_when_running` instead. " + "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from." + ), + ctx.context, + code=PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING, + ) + + return signature + + def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None: """ Cross-check the list of Prometheus metric classes against the diff --git a/scripts-dev/sign_json.py b/scripts-dev/sign_json.py index 14aacf9382..40e0c529da 100755 --- a/scripts-dev/sign_json.py +++ b/scripts-dev/sign_json.py @@ -30,7 +30,7 @@ from signedjson.sign import sign_json from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.util import json_encoder +from synapse.util.json import json_encoder def main() -> None: diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index a81db3cfbf..b0a067edcb 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -54,11 +54,11 @@ from twisted.internet import defer, reactor as reactor_ from synapse.config.database import DatabaseConnectionConfig from synapse.config.homeserver import HomeServerConfig from synapse.logging.context import ( - LoggingContext, make_deferred_yieldable, run_in_background, ) -from synapse.notifier import ReplicationNotifier +from synapse.server import HomeServer +from synapse.storage import DataStore from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn from synapse.storage.databases.main import FilteringWorkerStore from synapse.storage.databases.main.account_data import AccountDataWorkerStore @@ -98,8 +98,7 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor from synapse.storage.engines import create_engine from synapse.storage.prepare_database import prepare_database from synapse.types import ISynapseReactor -from synapse.util import SYNAPSE_VERSION, Clock -from synapse.util.stringutils import random_string +from synapse.util import SYNAPSE_VERSION # Cast safety: Twisted does some naughty magic which replaces the # twisted.internet.reactor module with a Reactor instance at runtime.
@@ -318,31 +317,16 @@ class Store( ) -class MockHomeserver: +class MockHomeserver(HomeServer): + DATASTORE_CLASS = DataStore + def __init__(self, config: HomeServerConfig): - self.clock = Clock(reactor) - self.config = config - self.hostname = config.server.server_name - self.version_string = SYNAPSE_VERSION - self.instance_id = random_string(5) - - def get_clock(self) -> Clock: - return self.clock - - def get_reactor(self) -> ISynapseReactor: - return reactor - - def get_instance_id(self) -> str: - return self.instance_id - - def get_instance_name(self) -> str: - return "master" - - def should_send_federation(self) -> bool: - return False - - def get_replication_notifier(self) -> ReplicationNotifier: - return ReplicationNotifier() + super().__init__( + hostname=config.server.server_name, + config=config, + reactor=reactor, + version_string=f"Synapse/{SYNAPSE_VERSION}", + ) class Porter: @@ -351,12 +335,12 @@ class Porter: sqlite_config: Dict[str, Any], progress: "Progress", batch_size: int, - hs_config: HomeServerConfig, + hs: HomeServer, ): self.sqlite_config = sqlite_config self.progress = progress self.batch_size = batch_size - self.hs_config = hs_config + self.hs = hs async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]: if table in APPEND_ONLY_TABLES: @@ -676,8 +660,7 @@ class Porter: engine = create_engine(db_config.config) - hs = MockHomeserver(self.hs_config) - server_name = hs.hostname + server_name = self.hs.hostname with make_conn( db_config=db_config, @@ -688,16 +671,16 @@ class Porter: engine.check_database( db_conn, allow_outdated_version=allow_outdated_version ) - prepare_database(db_conn, engine, config=self.hs_config) + prepare_database(db_conn, engine, config=self.hs.config) # Type safety: ignore that we're using Mock homeservers here. 
store = Store( DatabasePool( - hs, # type: ignore[arg-type] + self.hs, db_config, engine, ), db_conn, - hs, # type: ignore[arg-type] + self.hs, ) db_conn.commit() @@ -795,7 +778,7 @@ class Porter: return self.postgres_store = self.build_db_store( - self.hs_config.database.get_single_database() + self.hs.config.database.get_single_database() ) await self.remove_ignored_background_updates_from_database() @@ -1584,6 +1567,8 @@ def main() -> None: config = HomeServerConfig() config.parse_config_dict(hs_config, "", "") + hs = MockHomeserver(config) + def start(stdscr: Optional["curses.window"] = None) -> None: progress: Progress if stdscr: @@ -1595,15 +1580,14 @@ def main() -> None: sqlite_config=sqlite_config, progress=progress, batch_size=args.batch_size, - hs_config=config, + hs=hs, ) @defer.inlineCallbacks def run() -> Generator["defer.Deferred[Any]", Any, None]: - with LoggingContext("synapse_port_db_run"): - yield defer.ensureDeferred(porter.run()) + yield defer.ensureDeferred(porter.run()) - reactor.callWhenRunning(run) + hs.get_clock().call_when_running(run) reactor.run() diff --git a/synapse/_scripts/update_synapse_database.py b/synapse/_scripts/update_synapse_database.py index 3624db3544..caaecda161 100644 --- a/synapse/_scripts/update_synapse_database.py +++ b/synapse/_scripts/update_synapse_database.py @@ -74,7 +74,7 @@ def run_background_updates(hs: HomeServer) -> None: ) ) - reactor.callWhenRunning(run) + hs.get_clock().call_when_running(run) reactor.run() diff --git a/synapse/api/auth/mas.py b/synapse/api/auth/mas.py index 40b4a5bd34..ef82ea9cc7 100644 --- a/synapse/api/auth/mas.py +++ b/synapse/api/auth/mas.py @@ -43,9 +43,9 @@ from synapse.logging.opentracing import ( from synapse.metrics import SERVER_NAME_LABEL from synapse.synapse_rust.http_client import HttpClient from synapse.types import JsonDict, Requester, UserID, create_requester -from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext +from synapse.util.json import json_decoder from . import introspection_response_timer diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index c406c683e7..11a89dd3ed 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -48,9 +48,9 @@ from synapse.logging.opentracing import ( from synapse.metrics import SERVER_NAME_LABEL from synapse.synapse_rust.http_client import HttpClient from synapse.types import Requester, UserID, create_requester -from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext +from synapse.util.json import json_decoder from . 
import introspection_response_timer diff --git a/synapse/api/errors.py b/synapse/api/errors.py index b3e391cd96..551537fdda 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -30,7 +30,7 @@ from typing import Any, Dict, List, Optional, Union from twisted.web import http -from synapse.util import json_decoder +from synapse.util.json import json_decoder if typing.TYPE_CHECKING: from synapse.config.homeserver import HomeServerConfig diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 509ef6b2c1..9d1c7801bc 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -26,7 +26,7 @@ from synapse.api.errors import LimitExceededError from synapse.config.ratelimiting import RatelimitSettings from synapse.storage.databases.main import DataStore from synapse.types import Requester -from synapse.util import Clock +from synapse.util.clock import Clock if TYPE_CHECKING: # To avoid circular imports: diff --git a/synapse/app/_base.py b/synapse/app/_base.py index cf3d260e65..02c56496bf 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -241,7 +241,7 @@ def redirect_stdio_to_logs() -> None: def register_start( - cb: Callable[P, Awaitable], *args: P.args, **kwargs: P.kwargs + hs: "HomeServer", cb: Callable[P, Awaitable], *args: P.args, **kwargs: P.kwargs ) -> None: """Register a callback with the reactor, to be called once it is running @@ -278,7 +278,8 @@ def register_start( # on as normal. os._exit(1) - reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper())) + clock = hs.get_clock() + clock.call_when_running(lambda: defer.ensureDeferred(wrapper())) def listen_metrics(bind_addresses: StrCollection, port: int) -> None: diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 543b26d8ba..0849f25059 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -356,11 +356,9 @@ def start(config_options: List[str]) -> None: handle_startup_exception(e) async def start() -> None: - # Re-establish log context now that we're back from the reactor - with LoggingContext("start"): - await _base.start(hs) + await _base.start(hs) - register_start(start) + register_start(hs, start) # redirect stdio to the logs, if configured. if not hs.config.logging.no_redirect_stdio: diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index dfc4a00719..54c41c0c28 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -377,19 +377,17 @@ def setup(config_options: List[str]) -> SynapseHomeServer: handle_startup_exception(e) async def start() -> None: - # Re-establish log context now that we're back from the reactor - with LoggingContext("start"): - # Load the OIDC provider metadatas, if OIDC is enabled. - if hs.config.oidc.oidc_enabled: - oidc = hs.get_oidc_handler() - # Loading the provider metadata also ensures the provider config is valid. - await oidc.load_metadata() + # Load the OIDC provider metadatas, if OIDC is enabled. + if hs.config.oidc.oidc_enabled: + oidc = hs.get_oidc_handler() + # Loading the provider metadata also ensures the provider config is valid. 
+ await oidc.load_metadata() - await _base.start(hs) + await _base.start(hs) - hs.get_datastores().main.db_pool.updates.start_doing_background_updates() + hs.get_datastores().main.db_pool.updates.start_doing_background_updates() - register_start(start) + register_start(hs, start) return hs diff --git a/synapse/appservice/scheduler.py b/synapse/appservice/scheduler.py index 01f77c4cb6..c8678406a1 100644 --- a/synapse/appservice/scheduler.py +++ b/synapse/appservice/scheduler.py @@ -84,7 +84,7 @@ from synapse.logging.context import run_in_background from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.databases.main import DataStore from synapse.types import DeviceListUpdates, JsonMapping -from synapse.util import Clock +from synapse.util.clock import Clock if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 5e1913d389..1c9f78c7ca 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -38,7 +38,7 @@ from synapse.storage.databases.main import DataStore from synapse.synapse_rust.events import EventInternalMetadata from synapse.types import EventID, JsonDict, StrCollection from synapse.types.state import StateFilter -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.stringutils import random_string if TYPE_CHECKING: diff --git a/synapse/federation/sender/__init__.py b/synapse/federation/sender/__init__.py index 6baa233143..8e3619d1bc 100644 --- a/synapse/federation/sender/__init__.py +++ b/synapse/federation/sender/__init__.py @@ -178,7 +178,7 @@ from synapse.types import ( StrCollection, get_domain_from_id, ) -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.metrics import Measure from synapse.util.retryutils import filter_destinations_by_retry_limiter diff --git a/synapse/federation/sender/transaction_manager.py b/synapse/federation/sender/transaction_manager.py index 050982c499..b548d9ed70 100644 --- a/synapse/federation/sender/transaction_manager.py +++ b/synapse/federation/sender/transaction_manager.py @@ -36,7 +36,7 @@ from synapse.logging.opentracing import ( ) from synapse.metrics import SERVER_NAME_LABEL from synapse.types import JsonDict -from synapse.util import json_decoder +from synapse.util.json import json_decoder from synapse.util.metrics import measure_func if TYPE_CHECKING: diff --git a/synapse/handlers/deactivate_account.py b/synapse/handlers/deactivate_account.py index e4169321cc..c0684380a7 100644 --- a/synapse/handlers/deactivate_account.py +++ b/synapse/handlers/deactivate_account.py @@ -62,7 +62,7 @@ class DeactivateAccountHandler: # Start the user parter loop so it can resume parting users from rooms where # it left off (if it has work left to do). if hs.config.worker.worker_app is None: - hs.get_reactor().callWhenRunning(self._start_user_parting) + hs.get_clock().call_when_running(self._start_user_parting) else: self._notify_account_deactivated_client = ( ReplicationNotifyAccountDeactivatedServlet.make_client(hs) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index acae34e71f..65b8a95f8e 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -1002,7 +1002,7 @@ class DeviceWriterHandler(DeviceHandler): # rolling-restarting Synapse. if self._is_main_device_list_writer: # On start up check if there are any updates pending. 
- hs.get_reactor().callWhenRunning(self._handle_new_device_update_async) + hs.get_clock().call_when_running(self._handle_new_device_update_async) self.device_list_updater = DeviceListUpdater(hs, self) hs.get_federation_registry().register_edu_handler( EduTypes.DEVICE_LIST_UPDATE, diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index b43cbd9c15..860e24d79d 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -34,7 +34,7 @@ from synapse.logging.opentracing import ( set_tag, ) from synapse.types import JsonDict, Requester, StreamKeyType, UserID, get_domain_from_id -from synapse.util import json_encoder +from synapse.util.json import json_encoder from synapse.util.stringutils import random_string if TYPE_CHECKING: diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index b9abad2188..fa3d207a90 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -44,9 +44,9 @@ from synapse.types import ( get_domain_from_id, get_verify_key_from_cross_signing_key, ) -from synapse.util import json_decoder from synapse.util.async_helpers import Linearizer, concurrently_execute from synapse.util.cancellation import cancellable +from synapse.util.json import json_decoder from synapse.util.retryutils import ( NotRetryingDestination, filter_destinations_by_retry_limiter, diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index d96b585308..be757201fc 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -39,8 +39,8 @@ from synapse.http import RequestTimedOutError from synapse.http.client import SimpleHttpClient from synapse.http.site import SynapseRequest from synapse.types import JsonDict, Requester -from synapse.util import json_decoder from synapse.util.hash import sha256_and_url_safe_base64 +from synapse.util.json import json_decoder from synapse.util.stringutils import ( assert_valid_client_secret, random_string, diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 6db031a4b9..c8c86d8749 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -81,9 +81,10 @@ from synapse.types import ( create_requester, ) from synapse.types.state import StateFilter -from synapse.util import json_decoder, json_encoder, log_failure, unwrapFirstError +from synapse.util import log_failure, unwrapFirstError from synapse.util.async_helpers import Linearizer, gather_results from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.json import json_decoder, json_encoder from synapse.util.metrics import measure_func from synapse.visibility import get_effective_room_visibility_from_state diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index 8f3e717fb4..fc93c6b2a8 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -67,8 +67,9 @@ from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable from synapse.module_api import ModuleApi from synapse.types import JsonDict, UserID, map_username_to_mxid_localpart -from synapse.util import Clock, json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall +from synapse.util.clock import Clock +from synapse.util.json import json_decoder from synapse.util.macaroons import MacaroonGenerator, OidcSessionData from synapse.util.templates import _localpart_from_email_filter diff --git a/synapse/handlers/ui_auth/checkers.py b/synapse/handlers/ui_auth/checkers.py index 
32dca8c43b..f3c295d9f2 100644 --- a/synapse/handlers/ui_auth/checkers.py +++ b/synapse/handlers/ui_auth/checkers.py @@ -27,7 +27,7 @@ from twisted.web.client import PartialDownloadError from synapse.api.constants import LoginType from synapse.api.errors import Codes, LoginError, SynapseError -from synapse.util import json_decoder +from synapse.util.json import json_decoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/http/client.py b/synapse/http/client.py index 1f6d4dcd86..bbb0efe8b5 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -87,8 +87,8 @@ from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.logging.opentracing import set_tag, start_active_span, tags from synapse.metrics import SERVER_NAME_LABEL from synapse.types import ISynapseReactor, StrSequence -from synapse.util import json_decoder from synapse.util.async_helpers import timeout_deferred +from synapse.util.json import json_decoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/http/federation/matrix_federation_agent.py b/synapse/http/federation/matrix_federation_agent.py index 6ebadf0dbf..2d152d441e 100644 --- a/synapse/http/federation/matrix_federation_agent.py +++ b/synapse/http/federation/matrix_federation_agent.py @@ -49,7 +49,7 @@ from synapse.http.federation.well_known_resolver import WellKnownResolver from synapse.http.proxyagent import ProxyAgent from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.types import ISynapseReactor -from synapse.util import Clock +from synapse.util.clock import Clock logger = logging.getLogger(__name__) diff --git a/synapse/http/federation/well_known_resolver.py b/synapse/http/federation/well_known_resolver.py index 70242ad0ae..49e58c8228 100644 --- a/synapse/http/federation/well_known_resolver.py +++ b/synapse/http/federation/well_known_resolver.py @@ -27,7 +27,6 @@ from typing import Callable, Dict, Optional, Tuple import attr from twisted.internet import defer -from twisted.internet.interfaces import IReactorTime from twisted.web.client import RedirectAgent from twisted.web.http import stringToDatetime from twisted.web.http_headers import Headers @@ -35,8 +34,10 @@ from twisted.web.iweb import IAgent, IResponse from synapse.http.client import BodyExceededMaxSize, read_body_with_max_size from synapse.logging.context import make_deferred_yieldable -from synapse.util import Clock, json_decoder +from synapse.types import ISynapseThreadlessReactor from synapse.util.caches.ttlcache import TTLCache +from synapse.util.clock import Clock +from synapse.util.json import json_decoder from synapse.util.metrics import Measure # period to cache .well-known results for by default @@ -88,7 +89,7 @@ class WellKnownResolver: def __init__( self, server_name: str, - reactor: IReactorTime, + reactor: ISynapseThreadlessReactor, agent: IAgent, user_agent: bytes, well_known_cache: Optional[TTLCache[bytes, Optional[bytes]]] = None, diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 15f8e147ab..419e4444ab 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -89,8 +89,8 @@ from synapse.logging.context import make_deferred_yieldable, run_in_background from synapse.logging.opentracing import set_tag, start_active_span, tags from synapse.metrics import SERVER_NAME_LABEL from synapse.types import JsonDict -from synapse.util import json_decoder from 
synapse.util.async_helpers import AwakenableSleeper, Linearizer, timeout_deferred +from synapse.util.json import json_decoder from synapse.util.metrics import Measure from synapse.util.stringutils import parse_and_validate_server_name diff --git a/synapse/http/server.py b/synapse/http/server.py index e395f79894..0b987f6aa9 100644 --- a/synapse/http/server.py +++ b/synapse/http/server.py @@ -52,10 +52,11 @@ from zope.interface import implementer from twisted.internet import defer, interfaces, reactor from twisted.internet.defer import CancelledError -from twisted.internet.interfaces import IReactorTime from twisted.python import failure from twisted.web import resource +from synapse.types import ISynapseThreadlessReactor + try: from twisted.web.pages import notFound except ImportError: @@ -77,10 +78,11 @@ from synapse.api.errors import ( from synapse.config.homeserver import HomeServerConfig from synapse.logging.context import defer_to_thread, preserve_fn, run_in_background from synapse.logging.opentracing import active_span, start_active_span, trace_servlet -from synapse.util import Clock, json_encoder from synapse.util.caches import intern_dict from synapse.util.cancellation import is_function_cancellable +from synapse.util.clock import Clock from synapse.util.iterutils import chunk_seq +from synapse.util.json import json_encoder if TYPE_CHECKING: import opentracing @@ -410,7 +412,7 @@ class DirectServeJsonResource(_AsyncResource): clock: Optional[Clock] = None, ): if clock is None: - clock = Clock(cast(IReactorTime, reactor)) + clock = Clock(cast(ISynapseThreadlessReactor, reactor)) super().__init__(clock, extract_context) self.canonical_json = canonical_json @@ -589,7 +591,7 @@ class DirectServeHtmlResource(_AsyncResource): clock: Optional[Clock] = None, ): if clock is None: - clock = Clock(cast(IReactorTime, reactor)) + clock = Clock(cast(ISynapseThreadlessReactor, reactor)) super().__init__(clock, extract_context) diff --git a/synapse/http/servlet.py b/synapse/http/servlet.py index 69bdce2b83..71e809b3f1 100644 --- a/synapse/http/servlet.py +++ b/synapse/http/servlet.py @@ -51,7 +51,7 @@ from synapse.api.errors import Codes, SynapseError from synapse.http import redact_uri from synapse.http.server import HttpServer from synapse.types import JsonDict, RoomAlias, RoomID, StrCollection -from synapse.util import json_decoder +from synapse.util.json import json_decoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/logging/handlers.py b/synapse/logging/handlers.py index dec2a2c3dd..b7945aac72 100644 --- a/synapse/logging/handlers.py +++ b/synapse/logging/handlers.py @@ -60,8 +60,18 @@ class PeriodicallyFlushingMemoryHandler(MemoryHandler): else: reactor_to_use = reactor - # call our hook when the reactor start up - reactor_to_use.callWhenRunning(on_reactor_running) + # Call our hook when the reactor starts up + # + # type-ignore: Ideally, we'd use `Clock.call_when_running(...)`, but + # `PeriodicallyFlushingMemoryHandler` is instantiated via Python logging + # configuration, so it's not straightforward to pass in the homeserver's clock + # (and we don't want to burden other people's logging config with the details). + # + # The important reason why we want to use `Clock.call_when_running` is so that + # the callback runs with a logcontext as we want to know which server the logs + # came from. But since we don't log anything in the callback, it's safe to
+ reactor_to_use.callWhenRunning(on_reactor_running) # type: ignore[prefer-synapse-clock-call-when-running] def shouldFlush(self, record: LogRecord) -> bool: """ diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index b596b1abdb..405939d518 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -204,7 +204,7 @@ from twisted.web.http import Request from twisted.web.http_headers import Headers from synapse.config import ConfigError -from synapse.util import json_decoder, json_encoder +from synapse.util.json import json_decoder, json_encoder if TYPE_CHECKING: from synapse.http.site import SynapseRequest diff --git a/synapse/media/_base.py b/synapse/media/_base.py index 29911dab77..15b28074fd 100644 --- a/synapse/media/_base.py +++ b/synapse/media/_base.py @@ -54,8 +54,8 @@ from synapse.logging.context import ( make_deferred_yieldable, run_in_background, ) -from synapse.util import Clock from synapse.util.async_helpers import DeferredEvent +from synapse.util.clock import Clock from synapse.util.stringutils import is_ascii if TYPE_CHECKING: diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index afd33c02a1..99d002a8df 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -55,7 +55,7 @@ from synapse.api.errors import NotFoundError from synapse.logging.context import defer_to_thread, run_in_background from synapse.logging.opentracing import start_active_span, trace, trace_with_opname from synapse.media._base import ThreadedFileSender -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.file_consumer import BackgroundFileConsumer from ..types import JsonDict diff --git a/synapse/media/oembed.py b/synapse/media/oembed.py index 16d613d271..45b481f229 100644 --- a/synapse/media/oembed.py +++ b/synapse/media/oembed.py @@ -27,7 +27,7 @@ import attr from synapse.media.preview_html import parse_html_description from synapse.types import JsonDict -from synapse.util import json_decoder +from synapse.util.json import json_decoder if TYPE_CHECKING: from lxml import etree diff --git a/synapse/media/url_previewer.py b/synapse/media/url_previewer.py index 8f106a3d5f..81204913f7 100644 --- a/synapse/media/url_previewer.py +++ b/synapse/media/url_previewer.py @@ -46,9 +46,9 @@ from synapse.media.oembed import OEmbedProvider from synapse.media.preview_html import decode_body, parse_html_to_open_graph from synapse.metrics.background_process_metrics import run_as_background_process from synapse.types import JsonDict, UserID -from synapse.util import json_encoder from synapse.util.async_helpers import ObservableDeferred from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.json import json_encoder from synapse.util.stringutils import random_string if TYPE_CHECKING: diff --git a/synapse/module_api/__init__.py b/synapse/module_api/__init__.py index 6218135513..7a419145e0 100644 --- a/synapse/module_api/__init__.py +++ b/synapse/module_api/__init__.py @@ -158,9 +158,9 @@ from synapse.types import ( create_requester, ) from synapse.types.state import StateFilter -from synapse.util import Clock from synapse.util.async_helpers import maybe_awaitable from synapse.util.caches.descriptors import CachedFunction, cached as _cached +from synapse.util.clock import Clock from synapse.util.frozenutils import freeze if TYPE_CHECKING: diff --git a/synapse/replication/tcp/commands.py b/synapse/replication/tcp/commands.py index 6ab5356660..8eec68c3dd 100644 --- 
a/synapse/replication/tcp/commands.py +++ b/synapse/replication/tcp/commands.py @@ -29,7 +29,7 @@ import logging from typing import List, Optional, Tuple, Type, TypeVar from synapse.replication.tcp.streams._base import StreamRow -from synapse.util import json_decoder, json_encoder +from synapse.util.json import json_decoder, json_encoder logger = logging.getLogger(__name__) diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py index 497b26fcaf..bcdd55d2e6 100644 --- a/synapse/replication/tcp/external_cache.py +++ b/synapse/replication/tcp/external_cache.py @@ -27,7 +27,7 @@ from prometheus_client import Counter, Histogram from synapse.logging import opentracing from synapse.logging.context import make_deferred_yieldable from synapse.metrics import SERVER_NAME_LABEL -from synapse.util import json_decoder, json_encoder +from synapse.util.json import json_decoder, json_encoder if TYPE_CHECKING: from txredisapi import ConnectionHandler diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 2ec25bf43d..25a7868cd7 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -55,7 +55,7 @@ from synapse.replication.tcp.commands import ( ServerCommand, parse_command_from_line, ) -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.stringutils import random_string if TYPE_CHECKING: diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index c424ca5325..bb63b51599 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -58,8 +58,8 @@ from synapse.logging.opentracing import log_kv, set_tag, trace_with_opname from synapse.rest.admin.experimental_features import ExperimentalFeature from synapse.types import JsonDict, Requester, SlidingSyncStreamToken, StreamToken from synapse.types.rest.client import SlidingSyncBody -from synapse.util import json_decoder from synapse.util.caches.lrucache import LruCache +from synapse.util.json import json_decoder from ._base import client_patterns, set_timeline_upper_limit diff --git a/synapse/rest/key/v2/remote_key_resource.py b/synapse/rest/key/v2/remote_key_resource.py index fea0b9706d..94c679b9e7 100644 --- a/synapse/rest/key/v2/remote_key_resource.py +++ b/synapse/rest/key/v2/remote_key_resource.py @@ -38,8 +38,8 @@ from synapse.http.servlet import ( from synapse.storage.keys import FetchKeyResultForRemote from synapse.types import JsonDict from synapse.types.rest import RequestBodyModel -from synapse.util import json_decoder from synapse.util.async_helpers import yieldable_gather_results +from synapse.util.json import json_decoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/rest/well_known.py b/synapse/rest/well_known.py index e4fe4c45ef..ae8c6a8fc0 100644 --- a/synapse/rest/well_known.py +++ b/synapse/rest/well_known.py @@ -28,7 +28,7 @@ from synapse.api.errors import NotFoundError from synapse.http.server import DirectServeJsonResource from synapse.http.site import SynapseRequest from synapse.types import JsonDict -from synapse.util import json_encoder +from synapse.util.json import json_encoder from synapse.util.stringutils import parse_server_name if TYPE_CHECKING: diff --git a/synapse/server.py b/synapse/server.py index 3fb29a7817..00862eb137 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -156,7 +156,7 @@ from synapse.storage.controllers import StorageControllers from synapse.streams.events import EventSources from 
synapse.synapse_rust.rendezvous import RendezvousHandler from synapse.types import DomainSpecificString, ISynapseReactor -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.distributor import Distributor from synapse.util.macaroons import MacaroonGenerator from synapse.util.ratelimitutils import FederationRateLimiter diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index d55c9e18ed..f214f55897 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -29,8 +29,8 @@ from synapse.storage.database import ( make_in_list_sql_clause, # noqa: F401 ) from synapse.types import get_domain_from_id -from synapse.util import json_decoder from synapse.util.caches.descriptors import CachedFunction +from synapse.util.json import json_decoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/background_updates.py b/synapse/storage/background_updates.py index acc0abee63..9aa9e51aeb 100644 --- a/synapse/storage/background_updates.py +++ b/synapse/storage/background_updates.py @@ -45,7 +45,8 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage.engines import PostgresEngine from synapse.storage.types import Connection, Cursor from synapse.types import JsonDict, StrCollection -from synapse.util import Clock, json_encoder +from synapse.util.clock import Clock +from synapse.util.json import json_encoder from . import engines diff --git a/synapse/storage/databases/main/account_data.py b/synapse/storage/databases/main/account_data.py index c049789e44..16876e5461 100644 --- a/synapse/storage/databases/main/account_data.py +++ b/synapse/storage/databases/main/account_data.py @@ -48,9 +48,9 @@ from synapse.storage.databases.main.push_rule import PushRulesWorkerStore from synapse.storage.invite_rule import InviteRulesConfig from synapse.storage.util.id_generators import MultiWriterIdGenerator from synapse.types import JsonDict, JsonMapping -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached from synapse.util.caches.stream_change_cache import StreamChangeCache +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/appservice.py b/synapse/storage/databases/main/appservice.py index 90ff0f0f12..97dbbb1493 100644 --- a/synapse/storage/databases/main/appservice.py +++ b/synapse/storage/databases/main/appservice.py @@ -42,8 +42,8 @@ from synapse.storage.databases.main.roommember import RoomMemberWorkerStore from synapse.storage.types import Cursor from synapse.storage.util.sequence import build_sequence_generator from synapse.types import DeviceListUpdates, JsonMapping -from synapse.util import json_encoder from synapse.util.caches.descriptors import _CacheContext, cached +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/censor_events.py b/synapse/storage/databases/main/censor_events.py index 5b15fd707d..3f9f482add 100644 --- a/synapse/storage/databases/main/censor_events.py +++ b/synapse/storage/databases/main/censor_events.py @@ -32,7 +32,7 @@ from synapse.storage.database import ( ) from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.databases.main.events_worker import EventsWorkerStore -from synapse.util import json_encoder +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server 
import HomeServer diff --git a/synapse/storage/databases/main/delayed_events.py b/synapse/storage/databases/main/delayed_events.py index c88682d55c..8428ee6ed4 100644 --- a/synapse/storage/databases/main/delayed_events.py +++ b/synapse/storage/databases/main/delayed_events.py @@ -22,7 +22,8 @@ from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import LoggingTransaction, StoreError from synapse.storage.engines import PostgresEngine from synapse.types import JsonDict, RoomID -from synapse.util import json_encoder, stringutils as stringutils +from synapse.util import stringutils +from synapse.util.json import json_encoder logger = logging.getLogger(__name__) diff --git a/synapse/storage/databases/main/deviceinbox.py b/synapse/storage/databases/main/deviceinbox.py index c10e2d2611..f6f3c94a0d 100644 --- a/synapse/storage/databases/main/deviceinbox.py +++ b/synapse/storage/databases/main/deviceinbox.py @@ -53,10 +53,11 @@ from synapse.storage.database import ( ) from synapse.storage.util.id_generators import MultiWriterIdGenerator from synapse.types import JsonDict, StrCollection -from synapse.util import Duration, json_encoder +from synapse.util import Duration from synapse.util.caches.expiringcache import ExpiringCache from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.iterutils import batch_iter +from synapse.util.json import json_encoder from synapse.util.stringutils import parse_and_validate_server_name if TYPE_CHECKING: diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index a28cc40a95..fc1e1c73f1 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -64,11 +64,11 @@ from synapse.types import ( StrCollection, get_verify_key_from_cross_signing_key, ) -from synapse.util import json_decoder, json_encoder from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.cancellation import cancellable from synapse.util.iterutils import batch_iter +from synapse.util.json import json_decoder, json_encoder from synapse.util.stringutils import shortstr if TYPE_CHECKING: diff --git a/synapse/storage/databases/main/e2e_room_keys.py b/synapse/storage/databases/main/e2e_room_keys.py index 904ae5cb58..d978e115e4 100644 --- a/synapse/storage/databases/main/e2e_room_keys.py +++ b/synapse/storage/databases/main/e2e_room_keys.py @@ -41,7 +41,7 @@ from synapse.storage.database import ( LoggingTransaction, ) from synapse.types import JsonDict, JsonSerializable, StreamKeyType -from synapse.util import json_encoder +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/end_to_end_keys.py b/synapse/storage/databases/main/end_to_end_keys.py index 17ccefe6b5..cc389d1582 100644 --- a/synapse/storage/databases/main/end_to_end_keys.py +++ b/synapse/storage/databases/main/end_to_end_keys.py @@ -61,10 +61,10 @@ from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.engines import PostgresEngine from synapse.storage.util.id_generators import MultiWriterIdGenerator from synapse.types import JsonDict, JsonMapping, MultiWriterStreamToken -from synapse.util import json_decoder, json_encoder from synapse.util.caches.descriptors import cached, cachedList from synapse.util.cancellation import cancellable from synapse.util.iterutils import 
batch_iter +from synapse.util.json import json_decoder, json_encoder if TYPE_CHECKING: from synapse.handlers.e2e_keys import SignatureListItem diff --git a/synapse/storage/databases/main/event_federation.py b/synapse/storage/databases/main/event_federation.py index 26a91109df..5c9bd2e848 100644 --- a/synapse/storage/databases/main/event_federation.py +++ b/synapse/storage/databases/main/event_federation.py @@ -59,11 +59,11 @@ from synapse.storage.databases.main.events_worker import EventsWorkerStore from synapse.storage.databases.main.signatures import SignatureWorkerStore from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.types import JsonDict, StrCollection -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached from synapse.util.caches.lrucache import LruCache from synapse.util.cancellation import cancellable from synapse.util.iterutils import batch_iter +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/event_push_actions.py b/synapse/storage/databases/main/event_push_actions.py index f42023418e..4db0230421 100644 --- a/synapse/storage/databases/main/event_push_actions.py +++ b/synapse/storage/databases/main/event_push_actions.py @@ -107,8 +107,8 @@ from synapse.storage.database import ( from synapse.storage.databases.main.receipts import ReceiptsWorkerStore from synapse.storage.databases.main.stream import StreamWorkerStore from synapse.types import JsonDict, StrCollection -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/events.py b/synapse/storage/databases/main/events.py index a50e889b9d..b6037468b3 100644 --- a/synapse/storage/databases/main/events.py +++ b/synapse/storage/databases/main/events.py @@ -83,9 +83,9 @@ from synapse.types import ( ) from synapse.types.handlers import SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES from synapse.types.state import StateFilter -from synapse.util import json_encoder from synapse.util.events import get_plain_text_topic_from_event_content from synapse.util.iterutils import batch_iter, sorted_topologically +from synapse.util.json import json_encoder from synapse.util.stringutils import non_null_str_or_none if TYPE_CHECKING: diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index 8a59091da0..0a0102ee64 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -58,8 +58,8 @@ from synapse.types import JsonDict, RoomStreamToken, StateMap, StrCollection from synapse.types.handlers import SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES from synapse.types.state import StateFilter from synapse.types.storage import _BackgroundUpdates -from synapse.util import json_encoder from synapse.util.iterutils import batch_iter +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index e733f65cb1..27c3578a31 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -38,7 +38,7 @@ from synapse.storage.database import ( LoggingTransaction, ) from synapse.types import ISynapseReactor -from synapse.util import Clock +from synapse.util.clock 
import Clock from synapse.util.stringutils import random_string if TYPE_CHECKING: diff --git a/synapse/storage/databases/main/push_rule.py b/synapse/storage/databases/main/push_rule.py index d686140556..1860be1713 100644 --- a/synapse/storage/databases/main/push_rule.py +++ b/synapse/storage/databases/main/push_rule.py @@ -56,10 +56,11 @@ from synapse.storage.push_rule import InconsistentRuleException, RuleNotFoundExc from synapse.storage.util.id_generators import IdGenerator, MultiWriterIdGenerator from synapse.synapse_rust.push import FilteredPushRules, PushRule, PushRules from synapse.types import JsonDict -from synapse.util import json_encoder, unwrapFirstError +from synapse.util import unwrapFirstError from synapse.util.async_helpers import gather_results from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.stream_change_cache import StreamChangeCache +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/pusher.py b/synapse/storage/databases/main/pusher.py index 9a0a12b5c1..1b2aa79ab1 100644 --- a/synapse/storage/databases/main/pusher.py +++ b/synapse/storage/databases/main/pusher.py @@ -42,8 +42,8 @@ from synapse.storage.database import ( ) from synapse.storage.util.id_generators import MultiWriterIdGenerator from synapse.types import JsonDict -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/receipts.py b/synapse/storage/databases/main/receipts.py index d74bb0184a..ff4eb9acb2 100644 --- a/synapse/storage/databases/main/receipts.py +++ b/synapse/storage/databases/main/receipts.py @@ -55,10 +55,10 @@ from synapse.types import ( PersistedPosition, StrCollection, ) -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached, cachedList from synapse.util.caches.stream_change_cache import StreamChangeCache from synapse.util.iterutils import batch_iter +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index 6ffc3aed34..9f03c084a5 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -65,8 +65,8 @@ from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.types import Cursor from synapse.storage.util.id_generators import IdGenerator, MultiWriterIdGenerator from synapse.types import JsonDict, RetentionPolicy, StrCollection, ThirdPartyInstanceID -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached, cachedList +from synapse.util.json import json_encoder from synapse.util.stringutils import MXC_REGEX if TYPE_CHECKING: diff --git a/synapse/storage/databases/main/session.py b/synapse/storage/databases/main/session.py index 8a1331d4c8..8a5fa8386c 100644 --- a/synapse/storage/databases/main/session.py +++ b/synapse/storage/databases/main/session.py @@ -30,7 +30,7 @@ from synapse.storage.database import ( LoggingTransaction, ) from synapse.types import JsonDict -from synapse.util import json_encoder +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/sliding_sync.py b/synapse/storage/databases/main/sliding_sync.py 
index 72ec8e6b90..f7af3e88d3 100644 --- a/synapse/storage/databases/main/sliding_sync.py +++ b/synapse/storage/databases/main/sliding_sync.py @@ -35,8 +35,8 @@ from synapse.types.handlers.sliding_sync import ( RoomStatusMap, RoomSyncConfig, ) -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/tags.py b/synapse/storage/databases/main/tags.py index 97b190bccc..94cf7f4052 100644 --- a/synapse/storage/databases/main/tags.py +++ b/synapse/storage/databases/main/tags.py @@ -30,8 +30,8 @@ from synapse.storage.database import LoggingTransaction from synapse.storage.databases.main.account_data import AccountDataWorkerStore from synapse.storage.util.id_generators import AbstractStreamIdGenerator from synapse.types import JsonDict, JsonMapping -from synapse.util import json_encoder from synapse.util.caches.descriptors import cached +from synapse.util.json import json_encoder logger = logging.getLogger(__name__) diff --git a/synapse/storage/databases/main/task_scheduler.py b/synapse/storage/databases/main/task_scheduler.py index 4956870b1a..2d4804fef6 100644 --- a/synapse/storage/databases/main/task_scheduler.py +++ b/synapse/storage/databases/main/task_scheduler.py @@ -29,7 +29,7 @@ from synapse.storage.database import ( make_in_list_sql_clause, ) from synapse.types import JsonDict, JsonMapping, ScheduledTask, TaskStatus -from synapse.util import json_encoder +from synapse.util.json import json_encoder if TYPE_CHECKING: from synapse.server import HomeServer diff --git a/synapse/storage/databases/main/ui_auth.py b/synapse/storage/databases/main/ui_auth.py index 17bd0ac09a..569925e39f 100644 --- a/synapse/storage/databases/main/ui_auth.py +++ b/synapse/storage/databases/main/ui_auth.py @@ -27,7 +27,8 @@ from synapse.api.errors import StoreError from synapse.storage._base import SQLBaseStore, db_to_json from synapse.storage.database import LoggingTransaction from synapse.types import JsonDict -from synapse.util import json_encoder, stringutils +from synapse.util import stringutils +from synapse.util.json import json_encoder @attr.s(slots=True, auto_attribs=True) diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index 2d5b07ab8f..0386cb77d6 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -116,13 +116,27 @@ StrSequence = Union[Tuple[str, ...], List[str]] # Note that this seems to require inheriting *directly* from Interface in order # for mypy-zope to realize it is an interface. -class ISynapseReactor( +class ISynapseThreadlessReactor( IReactorTCP, IReactorSSL, IReactorUNIX, IReactorPluggableNameResolver, IReactorTime, IReactorCore, + Interface, +): + """ + The interfaces necessary for Synapse to function (without threads). + + Helpful because we use `twisted.internet.testing.MemoryReactorClock` in tests, which + doesn't implement `IReactorThreads`. + """ + + +# Note that this seems to require inheriting *directly* from Interface in order # for mypy-zope to realize it is an interface.
+class ISynapseReactor( + ISynapseThreadlessReactor, IReactorThreads, Interface, ): diff --git a/synapse/util/__init__.py b/synapse/util/__init__.py index 36129c3a67..2ae2e245a9 100644 --- a/synapse/util/__init__.py +++ b/synapse/util/__init__.py @@ -20,12 +20,9 @@ # import collections.abc -import json import logging import typing from typing import ( - Any, - Callable, Dict, Iterator, Mapping, @@ -36,17 +33,11 @@ from typing import ( ) import attr -from immutabledict import immutabledict from matrix_common.versionstring import get_distribution_version_string -from typing_extensions import ParamSpec -from twisted.internet import defer, task -from twisted.internet.interfaces import IDelayedCall, IReactorTime -from twisted.internet.task import LoopingCall +from twisted.internet import defer from twisted.python.failure import Failure -from synapse.logging import context - if typing.TYPE_CHECKING: pass @@ -62,41 +53,6 @@ class Duration: DAY_MS = 24 * HOUR_MS -def _reject_invalid_json(val: Any) -> None: - """Do not allow Infinity, -Infinity, or NaN values in JSON.""" - raise ValueError("Invalid JSON value: '%s'" % val) - - -def _handle_immutabledict(obj: Any) -> Dict[Any, Any]: - """Helper for json_encoder. Makes immutabledicts serializable by returning - the underlying dict - """ - if type(obj) is immutabledict: - # fishing the protected dict out of the object is a bit nasty, - # but we don't really want the overhead of copying the dict. - try: - # Safety: we catch the AttributeError immediately below. - return obj._dict - except AttributeError: - # If all else fails, resort to making a copy of the immutabledict - return dict(obj) - raise TypeError( - "Object of type %s is not JSON serializable" % obj.__class__.__name__ - ) - - -# A custom JSON encoder which: -# * handles immutabledicts -# * produces valid JSON (no NaNs etc) -# * reduces redundant whitespace -json_encoder = json.JSONEncoder( - allow_nan=False, separators=(",", ":"), default=_handle_immutabledict -) - -# Create a custom decoder to reject Python extensions to JSON. -json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json) - - def unwrapFirstError(failure: Failure) -> Failure: # Deprecated: you probably just want to catch defer.FirstError and reraise # the subFailure's value, which will do a better job of preserving stacktraces. @@ -105,129 +61,6 @@ def unwrapFirstError(failure: Failure) -> Failure: return failure.value.subFailure -P = ParamSpec("P") - - -@attr.s(slots=True) -class Clock: - """ - A Clock wraps a Twisted reactor and provides utilities on top of it. - - Args: - reactor: The Twisted reactor to use. - """ - - _reactor: IReactorTime = attr.ib() - - async def sleep(self, seconds: float) -> None: - d: defer.Deferred[float] = defer.Deferred() - with context.PreserveLoggingContext(): - self._reactor.callLater(seconds, d.callback, seconds) - await d - - def time(self) -> float: - """Returns the current system time in seconds since epoch.""" - return self._reactor.seconds() - - def time_msec(self) -> int: - """Returns the current system time in milliseconds since epoch.""" - return int(self.time() * 1000) - - def looping_call( - self, - f: Callable[P, object], - msec: float, - *args: P.args, - **kwargs: P.kwargs, - ) -> LoopingCall: - """Call a function repeatedly. - - Waits `msec` initially before calling `f` for the first time. - - If the function given to `looping_call` returns an awaitable/deferred, the next - call isn't scheduled until after the returned awaitable has finished. 
We get - this functionality thanks to this function being a thin wrapper around - `twisted.internet.task.LoopingCall`. - - Note that the function will be called with no logcontext, so if it is anything - other than trivial, you probably want to wrap it in run_as_background_process. - - Args: - f: The function to call repeatedly. - msec: How long to wait between calls in milliseconds. - *args: Positional arguments to pass to function. - **kwargs: Key arguments to pass to function. - """ - return self._looping_call_common(f, msec, False, *args, **kwargs) - - def looping_call_now( - self, - f: Callable[P, object], - msec: float, - *args: P.args, - **kwargs: P.kwargs, - ) -> LoopingCall: - """Call a function immediately, and then repeatedly thereafter. - - As with `looping_call`: subsequent calls are not scheduled until after the - the Awaitable returned by a previous call has finished. - - Also as with `looping_call`: the function is called with no logcontext and - you probably want to wrap it in `run_as_background_process`. - - Args: - f: The function to call repeatedly. - msec: How long to wait between calls in milliseconds. - *args: Positional arguments to pass to function. - **kwargs: Key arguments to pass to function. - """ - return self._looping_call_common(f, msec, True, *args, **kwargs) - - def _looping_call_common( - self, - f: Callable[P, object], - msec: float, - now: bool, - *args: P.args, - **kwargs: P.kwargs, - ) -> LoopingCall: - """Common functionality for `looping_call` and `looping_call_now`""" - call = task.LoopingCall(f, *args, **kwargs) - call.clock = self._reactor - d = call.start(msec / 1000.0, now=now) - d.addErrback(log_failure, "Looping call died", consumeErrors=False) - return call - - def call_later( - self, delay: float, callback: Callable, *args: Any, **kwargs: Any - ) -> IDelayedCall: - """Call something later - - Note that the function will be called with no logcontext, so if it is anything - other than trivial, you probably want to wrap it in run_as_background_process. - - Args: - delay: How long to wait in seconds. - callback: Function to call - *args: Postional arguments to pass to function. - **kwargs: Key arguments to pass to function. 
- """ - - def wrapped_callback(*args: Any, **kwargs: Any) -> None: - with context.PreserveLoggingContext(): - callback(*args, **kwargs) - - with context.PreserveLoggingContext(): - return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) - - def cancel_call_later(self, timer: IDelayedCall, ignore_errs: bool = False) -> None: - try: - timer.cancel() - except Exception: - if not ignore_errs: - raise - - def log_failure( failure: Failure, msg: str, consumeErrors: bool = True ) -> Optional[Failure]: diff --git a/synapse/util/async_helpers.py b/synapse/util/async_helpers.py index c21b7887f9..1c343f8d3e 100644 --- a/synapse/util/async_helpers.py +++ b/synapse/util/async_helpers.py @@ -65,7 +65,8 @@ from synapse.logging.context import ( run_coroutine_in_background, run_in_background, ) -from synapse.util import Clock +from synapse.types import ISynapseThreadlessReactor +from synapse.util.clock import Clock logger = logging.getLogger(__name__) @@ -566,7 +567,7 @@ class Linearizer: if not clock: from twisted.internet import reactor - clock = Clock(cast(IReactorTime, reactor)) + clock = Clock(cast(ISynapseThreadlessReactor, reactor)) self._clock = clock self.max_count = max_count diff --git a/synapse/util/batching_queue.py b/synapse/util/batching_queue.py index 4c0f129423..4c4037412a 100644 --- a/synapse/util/batching_queue.py +++ b/synapse/util/batching_queue.py @@ -39,7 +39,7 @@ from twisted.internet import defer from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable from synapse.metrics import SERVER_NAME_LABEL from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util import Clock +from synapse.util.clock import Clock logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/expiringcache.py b/synapse/util/caches/expiringcache.py index 1962a3fdfa..305af5051c 100644 --- a/synapse/util/caches/expiringcache.py +++ b/synapse/util/caches/expiringcache.py @@ -29,8 +29,8 @@ from twisted.internet import defer from synapse.config import cache as cache_config from synapse.metrics.background_process_metrics import run_as_background_process -from synapse.util import Clock from synapse.util.caches import EvictionReason, register_cache +from synapse.util.clock import Clock logger = logging.getLogger(__name__) diff --git a/synapse/util/caches/lrucache.py b/synapse/util/caches/lrucache.py index 927162700a..187380c433 100644 --- a/synapse/util/caches/lrucache.py +++ b/synapse/util/caches/lrucache.py @@ -46,20 +46,21 @@ from typing import ( ) from twisted.internet import defer, reactor -from twisted.internet.interfaces import IReactorTime from synapse.config import cache as cache_config from synapse.metrics.background_process_metrics import ( run_as_background_process, ) from synapse.metrics.jemalloc import get_jemalloc_stats -from synapse.util import Clock, caches +from synapse.types import ISynapseThreadlessReactor +from synapse.util import caches from synapse.util.caches import CacheMetric, EvictionReason, register_cache from synapse.util.caches.treecache import ( TreeCache, iterate_tree_cache_entry, iterate_tree_cache_items, ) +from synapse.util.clock import Clock from synapse.util.linked_list import ListNode if TYPE_CHECKING: @@ -496,7 +497,7 @@ class LruCache(Generic[KT, VT]): # Default `clock` to something sensible. Note that we rename it to # `real_clock` so that mypy doesn't think its still `Optional`. 
if clock is None: - real_clock = Clock(cast(IReactorTime, reactor)) + real_clock = Clock(cast(ISynapseThreadlessReactor, reactor)) else: real_clock = clock diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 49a9151916..79e34262df 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -41,9 +41,9 @@ from synapse.logging.opentracing import ( start_active_span, start_active_span_follows_from, ) -from synapse.util import Clock from synapse.util.async_helpers import AbstractObservableDeferred, ObservableDeferred from synapse.util.caches import EvictionReason, register_cache +from synapse.util.clock import Clock logger = logging.getLogger(__name__) diff --git a/synapse/util/clock.py b/synapse/util/clock.py new file mode 100644 index 0000000000..8d6ab007ba --- /dev/null +++ b/synapse/util/clock.py @@ -0,0 +1,208 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# <https://www.gnu.org/licenses/agpl-3.0.html>. +# +# + + +from typing import ( + Any, + Callable, +) + +import attr +from typing_extensions import ParamSpec + +from twisted.internet import defer, task +from twisted.internet.interfaces import IDelayedCall +from twisted.internet.task import LoopingCall + +from synapse.logging import context +from synapse.types import ISynapseThreadlessReactor +from synapse.util import log_failure + +P = ParamSpec("P") + + +@attr.s(slots=True) +class Clock: + """ + A Clock wraps a Twisted reactor and provides utilities on top of it. + + Args: + reactor: The Twisted reactor to use. + """ + + _reactor: ISynapseThreadlessReactor = attr.ib() + + async def sleep(self, seconds: float) -> None: + d: defer.Deferred[float] = defer.Deferred() + with context.PreserveLoggingContext(): + self._reactor.callLater(seconds, d.callback, seconds) + await d + + def time(self) -> float: + """Returns the current system time in seconds since epoch.""" + return self._reactor.seconds() + + def time_msec(self) -> int: + """Returns the current system time in milliseconds since epoch.""" + return int(self.time() * 1000) + + def looping_call( + self, + f: Callable[P, object], + msec: float, + *args: P.args, + **kwargs: P.kwargs, + ) -> LoopingCall: + """Call a function repeatedly. + + Waits `msec` initially before calling `f` for the first time. + + If the function given to `looping_call` returns an awaitable/deferred, the next + call isn't scheduled until after the returned awaitable has finished. We get + this functionality thanks to this function being a thin wrapper around + `twisted.internet.task.LoopingCall`. + + Note that the function will be called with no logcontext, so if it is anything + other than trivial, you probably want to wrap it in run_as_background_process. + + Args: + f: The function to call repeatedly. + msec: How long to wait between calls in milliseconds. + *args: Positional arguments to pass to function. + **kwargs: Keyword arguments to pass to function.
+ """ + return self._looping_call_common(f, msec, False, *args, **kwargs) + + def looping_call_now( + self, + f: Callable[P, object], + msec: float, + *args: P.args, + **kwargs: P.kwargs, + ) -> LoopingCall: + """Call a function immediately, and then repeatedly thereafter. + + As with `looping_call`: subsequent calls are not scheduled until after the + the Awaitable returned by a previous call has finished. + + Also as with `looping_call`: the function is called with no logcontext and + you probably want to wrap it in `run_as_background_process`. + + Args: + f: The function to call repeatedly. + msec: How long to wait between calls in milliseconds. + *args: Positional arguments to pass to function. + **kwargs: Key arguments to pass to function. + """ + return self._looping_call_common(f, msec, True, *args, **kwargs) + + def _looping_call_common( + self, + f: Callable[P, object], + msec: float, + now: bool, + *args: P.args, + **kwargs: P.kwargs, + ) -> LoopingCall: + """Common functionality for `looping_call` and `looping_call_now`""" + call = task.LoopingCall(f, *args, **kwargs) + call.clock = self._reactor + d = call.start(msec / 1000.0, now=now) + d.addErrback(log_failure, "Looping call died", consumeErrors=False) + return call + + def call_later( + self, delay: float, callback: Callable, *args: Any, **kwargs: Any + ) -> IDelayedCall: + """Call something later + + Note that the function will be called with no logcontext, so if it is anything + other than trivial, you probably want to wrap it in run_as_background_process. + + Args: + delay: How long to wait in seconds. + callback: Function to call + *args: Postional arguments to pass to function. + **kwargs: Key arguments to pass to function. + """ + + def wrapped_callback(*args: Any, **kwargs: Any) -> None: + with context.PreserveLoggingContext(): + callback(*args, **kwargs) + + with context.PreserveLoggingContext(): + return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) + + def cancel_call_later(self, timer: IDelayedCall, ignore_errs: bool = False) -> None: + try: + timer.cancel() + except Exception: + if not ignore_errs: + raise + + def call_when_running( + self, + callback: Callable[P, object], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + """ + Call a function when the reactor is running. + + If the reactor has not started, the callable will be scheduled to run when it + does start. Otherwise, the callable will be invoked immediately. + + Args: + callback: Function to call + *args: Postional arguments to pass to function. + **kwargs: Key arguments to pass to function. + """ + + def wrapped_callback(*args: Any, **kwargs: Any) -> None: + # Since this callback can be invoked immediately if the reactor is already + # running, we can't always assume that we're running in the sentinel + # logcontext (i.e. we can't assert that we're in the sentinel context like + # we can in other methods). + # + # We will only be running in the sentinel logcontext if the reactor was not + # running when `call_when_running` was invoked and later starts up. + # + # assert context.current_context() is context.SENTINEL_CONTEXT + + # Because this is a callback from the reactor, we will be using the + # `sentinel` log context at this point. We want the function to log with + # some logcontext as we want to know which server the logs came from. 
+ # + # We use `PreserveLoggingContext` to prevent our new `call_when_running` + # logcontext from finishing as soon as we exit this function, in case `callback` + # returns an awaitable/deferred which would continue running and may try to + # restore the `call_when_running` context when it's done (because it's trying to + # adhere to the Synapse logcontext rules). + # + # This also ensures that we return to the `sentinel` context when we exit + # this function and yield control back to the reactor to avoid leaking the + # current logcontext to the reactor (which would then get picked up and + # associated with the next thing the reactor does). + with context.PreserveLoggingContext( + context.LoggingContext("call_when_running") + ): + # We use `run_in_background` to reset the logcontext after `callback` (or the + # awaitable returned by `callback`) completes to avoid leaking the current + # logcontext to the reactor. + context.run_in_background(callback, *args, **kwargs) + + # We can ignore the lint here since this class is the one location + # where callWhenRunning should be called. + self._reactor.callWhenRunning(wrapped_callback, *args, **kwargs) # type: ignore[prefer-synapse-clock-call-when-running] diff --git a/synapse/util/json.py b/synapse/util/json.py new file mode 100644 index 0000000000..e6db55f8e4 --- /dev/null +++ b/synapse/util/json.py @@ -0,0 +1,57 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# <https://www.gnu.org/licenses/agpl-3.0.html>. +# +# + +import json +from typing import ( + Any, + Dict, +) + +from immutabledict import immutabledict + + +def _reject_invalid_json(val: Any) -> None: + """Do not allow Infinity, -Infinity, or NaN values in JSON.""" + raise ValueError("Invalid JSON value: '%s'" % val) + + +def _handle_immutabledict(obj: Any) -> Dict[Any, Any]: + """Helper for json_encoder. Makes immutabledicts serializable by returning + the underlying dict. + """ + if type(obj) is immutabledict: + # fishing the protected dict out of the object is a bit nasty, + # but we don't really want the overhead of copying the dict. + try: + # Safety: we catch the AttributeError immediately below. + return obj._dict + except AttributeError: + # If all else fails, resort to making a copy of the immutabledict + return dict(obj) + raise TypeError( + "Object of type %s is not JSON serializable" % obj.__class__.__name__ + ) + + +# A custom JSON encoder which: +# * handles immutabledicts +# * produces valid JSON (no NaNs etc) +# * reduces redundant whitespace +json_encoder = json.JSONEncoder( + allow_nan=False, separators=(",", ":"), default=_handle_immutabledict +) + +# Create a custom decoder to reject Python extensions to JSON.
+json_decoder = json.JSONDecoder(parse_constant=_reject_invalid_json) diff --git a/synapse/util/macaroons.py b/synapse/util/macaroons.py index 6fa15543ec..d683a57ab1 100644 --- a/synapse/util/macaroons.py +++ b/synapse/util/macaroons.py @@ -28,7 +28,8 @@ import attr import pymacaroons from pymacaroons.exceptions import MacaroonVerificationFailedException -from synapse.util import Clock, stringutils +from synapse.util import stringutils +from synapse.util.clock import Clock MacaroonType = Literal["access", "delete_pusher", "session"] diff --git a/synapse/util/metrics.py b/synapse/util/metrics.py index 608a4d4848..09ccdb8ca3 100644 --- a/synapse/util/metrics.py +++ b/synapse/util/metrics.py @@ -42,7 +42,7 @@ from synapse.logging.context import ( current_context, ) from synapse.metrics import SERVER_NAME_LABEL, InFlightGauge -from synapse.util import Clock +from synapse.util.clock import Clock logger = logging.getLogger(__name__) diff --git a/synapse/util/ratelimitutils.py b/synapse/util/ratelimitutils.py index 88edc07161..695eb462bf 100644 --- a/synapse/util/ratelimitutils.py +++ b/synapse/util/ratelimitutils.py @@ -53,7 +53,7 @@ from synapse.logging.context import ( ) from synapse.logging.opentracing import start_active_span from synapse.metrics import SERVER_NAME_LABEL, Histogram, LaterGauge -from synapse.util import Clock +from synapse.util.clock import Clock if typing.TYPE_CHECKING: from contextlib import _GeneratorContextManager diff --git a/synapse/util/retryutils.py b/synapse/util/retryutils.py index 149df405b3..42a0cc7aa8 100644 --- a/synapse/util/retryutils.py +++ b/synapse/util/retryutils.py @@ -27,7 +27,7 @@ from synapse.api.errors import CodeMessageException from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage import DataStore from synapse.types import StrCollection -from synapse.util import Clock +from synapse.util.clock import Clock if TYPE_CHECKING: from synapse.notifier import Notifier diff --git a/synapse/visibility.py b/synapse/visibility.py index d460d8f4c2..662f2636d0 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -55,7 +55,7 @@ from synapse.types import ( get_domain_from_id, ) from synapse.types.state import StateFilter -from synapse.util import Clock +from synapse.util.clock import Clock logger = logging.getLogger(__name__) filtered_event_logger = logging.getLogger("synapse.visibility.filtered_event_debug") diff --git a/synmark/__main__.py b/synmark/__main__.py index 4944c2f3b0..82717c4fc7 100644 --- a/synmark/__main__.py +++ b/synmark/__main__.py @@ -62,7 +62,10 @@ def make_test( return res d.addBoth(on_done) - reactor.callWhenRunning(lambda: d.callback(True)) + # type-ignore: This is outside of Synapse (just a utility benchmark script) + # so we don't need to worry about which server the logs are coming from + # (`Clock.call_when_running` manages the logcontext for us). + reactor.callWhenRunning(lambda: d.callback(True)) # type: ignore[prefer-synapse-clock-call-when-running] reactor.run() # mypy thinks this is an object for some reason. 
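To make the effect of the new `synapse.util.json` module concrete, here is a minimal sketch (not part of the patch itself) of how the relocated encoder and decoder behave: the `default` hook serialises `immutabledict`s, `allow_nan=False` rejects NaN/Infinity on encode, and `parse_constant` rejects non-standard literals such as `NaN` on decode.

```python
from immutabledict import immutabledict

from synapse.util.json import json_decoder, json_encoder

# immutabledicts are handled by the `_handle_immutabledict` default hook,
# and `separators=(",", ":")` strips redundant whitespace.
assert json_encoder.encode(immutabledict({"a": 1})) == '{"a":1}'

# `allow_nan=False` means values with no valid JSON representation are refused.
try:
    json_encoder.encode({"bad": float("nan")})
except ValueError:
    pass

# The decoder likewise rejects Python extensions to JSON via `parse_constant`.
try:
    json_decoder.decode('{"bad": NaN}')
except ValueError:
    pass
```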
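The synmark hunk above also shows the intended division of labour for the new `prefer-synapse-clock-call-when-running` lint: benchmark scripts outside Synapse may call `reactor.callWhenRunning` directly (with a type-ignore), while Synapse code goes through `Clock.call_when_running` so the callback runs in its own logcontext. A hypothetical usage sketch follows; the `_on_startup` callback is invented for illustration, and the `cast` mirrors the one used in `Linearizer` and `LruCache` above.

```python
from typing import cast

from twisted.internet import reactor

from synapse.types import ISynapseThreadlessReactor
from synapse.util.clock import Clock


def _on_startup() -> None:
    print("reactor is running")


clock = Clock(cast(ISynapseThreadlessReactor, reactor))

# Instead of `reactor.callWhenRunning(_on_startup)` (which would run the
# callback with no logcontext and now trips the lint), schedule it via the
# Clock: the callback runs in a "call_when_running" logcontext and control
# returns to the sentinel context afterwards.
clock.call_when_running(_on_startup)
```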
diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py index 32282ba6cb..03bf5a94a7 100644 --- a/synmark/suites/logging.py +++ b/synmark/suites/logging.py @@ -37,7 +37,7 @@ from synapse.config.logger import _setup_stdlib_logging from synapse.logging import RemoteHandler from synapse.synapse_rust import reset_logging_config from synapse.types import ISynapseReactor -from synapse.util import Clock +from synapse.util.clock import Clock class LineCounter(LineOnlyReceiver): diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index b8fb21ab0d..2f6a76970c 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -39,7 +39,7 @@ from synapse.appservice import ApplicationService from synapse.server import HomeServer from synapse.storage.databases.main.registration import TokenLookupResult from synapse.types import Requester, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import override_config diff --git a/tests/api/test_filtering.py b/tests/api/test_filtering.py index 8ad9a5a6f7..d74878a4e1 100644 --- a/tests/api/test_filtering.py +++ b/tests/api/test_filtering.py @@ -33,7 +33,7 @@ from synapse.api.filtering import Filter from synapse.api.presence import UserPresenceState from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.frozenutils import freeze from tests import unittest diff --git a/tests/api/test_urls.py b/tests/api/test_urls.py index bb46008ad2..00f54237ab 100644 --- a/tests/api/test_urls.py +++ b/tests/api/test_urls.py @@ -17,7 +17,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.urls import LoginSSORedirectURIBuilder from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/app/test_openid_listener.py b/tests/app/test_openid_listener.py index 63cb5ff46f..6ca514d557 100644 --- a/tests/app/test_openid_listener.py +++ b/tests/app/test_openid_listener.py @@ -29,7 +29,7 @@ from synapse.app.homeserver import SynapseHomeServer from synapse.config.server import parse_listener_def from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests.server import make_request from tests.unittest import HomeserverTestCase diff --git a/tests/app/test_phone_stats_home.py b/tests/app/test_phone_stats_home.py index 93af614def..73c3a9fd98 100644 --- a/tests/app/test_phone_stats_home.py +++ b/tests/app/test_phone_stats_home.py @@ -2,7 +2,7 @@ import synapse from synapse.app.phone_stats_home import start_phone_stats_home from synapse.rest.client import login, room from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.server import ThreadedMemoryReactorClock from tests.unittest import HomeserverTestCase diff --git a/tests/appservice/test_api.py b/tests/appservice/test_api.py index 5eba6d20c8..085dfd2d1d 100644 --- a/tests/appservice/test_api.py +++ b/tests/appservice/test_api.py @@ -26,7 +26,7 @@ from twisted.internet.testing import MemoryReactor from synapse.appservice import ApplicationService from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import 
override_config diff --git a/tests/appservice/test_scheduler.py b/tests/appservice/test_scheduler.py index 11319bc52d..9498ea1279 100644 --- a/tests/appservice/test_scheduler.py +++ b/tests/appservice/test_scheduler.py @@ -41,7 +41,7 @@ from synapse.events import EventBase from synapse.logging.context import make_deferred_yieldable from synapse.server import HomeServer from synapse.types import DeviceListUpdates, JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index 5f3d8be2a5..4e044245e2 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -27,7 +27,7 @@ import synapse.rest.client.room from synapse.config._base import RootConfig from synapse.config.room_directory import RoomDirectoryConfig from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import override_config diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py index 80f9bd097e..1dc0de73fd 100644 --- a/tests/crypto/test_keyring.py +++ b/tests/crypto/test_keyring.py @@ -49,7 +49,7 @@ from synapse.logging.context import ( from synapse.server import HomeServer from synapse.storage.keys import FetchKeyResult from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import logcontext_clean, override_config diff --git a/tests/events/test_auto_accept_invites.py b/tests/events/test_auto_accept_invites.py index 8f1dc86984..fa7ea64105 100644 --- a/tests/events/test_auto_accept_invites.py +++ b/tests/events/test_auto_accept_invites.py @@ -41,7 +41,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import StreamToken, UserID, UserInfo, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests.handlers.test_sync import generate_sync_config from tests.unittest import ( diff --git a/tests/events/test_presence_router.py b/tests/events/test_presence_router.py index f7d55223b1..696d9dd6e2 100644 --- a/tests/events/test_presence_router.py +++ b/tests/events/test_presence_router.py @@ -34,7 +34,7 @@ from synapse.rest import admin from synapse.rest.client import login, presence, room from synapse.server import HomeServer from synapse.types import JsonDict, StreamToken, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests.handlers.test_sync import SyncRequestKey, generate_sync_config from tests.unittest import ( diff --git a/tests/events/test_snapshot.py b/tests/events/test_snapshot.py index 6d24730ed7..4d1dca08ef 100644 --- a/tests/events/test_snapshot.py +++ b/tests/events/test_snapshot.py @@ -26,7 +26,7 @@ from synapse.events.snapshot import EventContext from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.test_utils.event_injection import create_event diff --git a/tests/federation/test_federation_catch_up.py b/tests/federation/test_federation_catch_up.py index f99911b102..5edb651767 100644 --- a/tests/federation/test_federation_catch_up.py +++ b/tests/federation/test_federation_catch_up.py @@ -16,7 +16,7 @@ from synapse.rest 
import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.retryutils import NotRetryingDestination from tests.test_utils import event_injection diff --git a/tests/federation/test_federation_client.py b/tests/federation/test_federation_client.py index df688cd21f..0535aed107 100644 --- a/tests/federation/test_federation_client.py +++ b/tests/federation/test_federation_client.py @@ -30,7 +30,7 @@ from synapse.events import EventBase from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils import FakeResponse, event_injection from tests.unittest import FederatingHomeserverTestCase diff --git a/tests/federation/test_federation_devices.py b/tests/federation/test_federation_devices.py index bf6204a7e3..c935669ce2 100644 --- a/tests/federation/test_federation_devices.py +++ b/tests/federation/test_federation_devices.py @@ -26,7 +26,7 @@ from twisted.internet.testing import MemoryReactor from synapse.handlers.device import DeviceListUpdater from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.retryutils import NotRetryingDestination from tests import unittest diff --git a/tests/federation/test_federation_media.py b/tests/federation/test_federation_media.py index b9ec2794a3..1e849fa605 100644 --- a/tests/federation/test_federation_media.py +++ b/tests/federation/test_federation_media.py @@ -32,7 +32,7 @@ from synapse.media.storage_provider import ( ) from synapse.server import HomeServer from synapse.types import UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.media.test_media_storage import small_png diff --git a/tests/federation/test_federation_out_of_band_membership.py b/tests/federation/test_federation_out_of_band_membership.py index acf343930f..fa4e7c63ba 100644 --- a/tests/federation/test_federation_out_of_band_membership.py +++ b/tests/federation/test_federation_out_of_band_membership.py @@ -50,7 +50,7 @@ from synapse.types import JsonDict, MutableStateMap, StateMap from synapse.types.handlers.sliding_sync import ( StateValues, ) -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.utils import test_timeout diff --git a/tests/federation/test_federation_sender.py b/tests/federation/test_federation_sender.py index b8dd61d04f..27b69a9180 100644 --- a/tests/federation/test_federation_sender.py +++ b/tests/federation/test_federation_sender.py @@ -36,7 +36,7 @@ from synapse.rest.client import login from synapse.server import HomeServer from synapse.storage.databases.main.events_worker import EventMetadata from synapse.types import JsonDict, ReadReceipt -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/federation/test_federation_server.py b/tests/federation/test_federation_server.py index 52fd32ba85..262f94367c 100644 --- a/tests/federation/test_federation_server.py +++ b/tests/federation/test_federation_server.py @@ -40,7 +40,7 @@ from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.storage.controllers.state import server_acl_evaluator_from_event 
from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import override_config diff --git a/tests/federation/transport/test_knocking.py b/tests/federation/transport/test_knocking.py index 14345be0f3..a243938255 100644 --- a/tests/federation/transport/test_knocking.py +++ b/tests/federation/transport/test_knocking.py @@ -31,7 +31,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import RoomAlias -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils import event_injection from tests.unittest import FederatingHomeserverTestCase, HomeserverTestCase diff --git a/tests/handlers/test_admin.py b/tests/handlers/test_admin.py index 906d241f1a..49bd3ba3f4 100644 --- a/tests/handlers/test_admin.py +++ b/tests/handlers/test_admin.py @@ -31,7 +31,7 @@ from synapse.api.room_versions import RoomVersions from synapse.rest.client import knock, login, room from synapse.server import HomeServer from synapse.types import UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_appservice.py b/tests/handlers/test_appservice.py index a47b03b143..999d7f5e6c 100644 --- a/tests/handlers/test_appservice.py +++ b/tests/handlers/test_appservice.py @@ -45,7 +45,7 @@ from synapse.types import ( StreamKeyType, UserID, ) -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.stringutils import random_string from tests import unittest diff --git a/tests/handlers/test_auth.py b/tests/handlers/test_auth.py index 0d9940c63e..acefd707f5 100644 --- a/tests/handlers/test_auth.py +++ b/tests/handlers/test_auth.py @@ -29,7 +29,7 @@ from synapse.api.errors import AuthError, ResourceLimitError from synapse.rest import admin from synapse.rest.client import login from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_cas.py b/tests/handlers/test_cas.py index 9de5e67863..f677f3be2a 100644 --- a/tests/handlers/test_cas.py +++ b/tests/handlers/test_cas.py @@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor from synapse.handlers.cas import CasResponse from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase, override_config diff --git a/tests/handlers/test_deactivate_account.py b/tests/handlers/test_deactivate_account.py index b7b8387780..1b749cee1f 100644 --- a/tests/handlers/test_deactivate_account.py +++ b/tests/handlers/test_deactivate_account.py @@ -28,7 +28,7 @@ from synapse.rest.client import account, login, room from synapse.server import HomeServer from synapse.synapse_rust.push import PushRule from synapse.types import UserID, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/handlers/test_device.py b/tests/handlers/test_device.py index 195cdfeaef..5b04da8640 100644 --- a/tests/handlers/test_device.py +++ b/tests/handlers/test_device.py @@ -35,7 +35,7 @@ from synapse.rest.client import devices, login, register from synapse.server import HomeServer from synapse.storage.databases.main.appservice import _make_exclusive_regex from synapse.types import JsonDict, UserID, 
create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.task_scheduler import TaskScheduler from tests import unittest diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 4d6243ef74..45b8f2353a 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -31,7 +31,7 @@ from synapse.events import EventBase from synapse.rest.client import directory, login, room from synapse.server import HomeServer from synapse.types import JsonDict, RoomAlias, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index fda485d413..4f0b1574b3 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -35,7 +35,7 @@ from synapse.handlers.device import DeviceWriterHandler from synapse.server import HomeServer from synapse.storage.databases.main.appservice import _make_exclusive_regex from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import override_config diff --git a/tests/handlers/test_e2e_room_keys.py b/tests/handlers/test_e2e_room_keys.py index 9b280659ab..910c24c167 100644 --- a/tests/handlers/test_e2e_room_keys.py +++ b/tests/handlers/test_e2e_room_keys.py @@ -27,7 +27,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.errors import SynapseError from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 4de90e6578..a88ed6207c 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -43,7 +43,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.storage.databases.main.events_worker import EventCacheEntry -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.events import generate_fake_event_id from tests import unittest diff --git a/tests/handlers/test_federation_event.py b/tests/handlers/test_federation_event.py index 02dd60e76d..3b3f12796e 100644 --- a/tests/handlers/test_federation_event.py +++ b/tests/handlers/test_federation_event.py @@ -39,7 +39,7 @@ from synapse.server import HomeServer from synapse.state import StateResolutionStore from synapse.state.v2 import _mainline_sort, _reverse_topological_power_sort from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.test_utils import event_injection diff --git a/tests/handlers/test_message.py b/tests/handlers/test_message.py index 0a1092eae4..4262e805e7 100644 --- a/tests/handlers/test_message.py +++ b/tests/handlers/test_message.py @@ -31,7 +31,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.stringutils import random_string from tests import unittest diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index b93e366b01..4640f35a1e 100644 --- a/tests/handlers/test_oauth_delegation.py +++ 
b/tests/handlers/test_oauth_delegation.py @@ -54,7 +54,7 @@ from synapse.rest import admin from synapse.rest.client import account, devices, keys, login, logout, register from synapse.server import HomeServer from synapse.types import JsonDict, UserID, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests.server import FakeChannel from tests.test_utils import get_awaitable_result diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index db37e7d185..5207382f00 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -31,7 +31,7 @@ from synapse.handlers.sso import MappingException from synapse.http.site import SynapseRequest from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.macaroons import get_value_from_macaroon from synapse.util.stringutils import random_string diff --git a/tests/handlers/test_password_providers.py b/tests/handlers/test_password_providers.py index 0a78fe0304..aa41875063 100644 --- a/tests/handlers/test_password_providers.py +++ b/tests/handlers/test_password_providers.py @@ -35,7 +35,7 @@ from synapse.module_api import ModuleApi from synapse.rest.client import account, devices, login, logout, register from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.server import FakeChannel diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 51b6c60531..de1bc90c67 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -57,7 +57,7 @@ from synapse.server import HomeServer from synapse.storage.database import LoggingDatabaseConnection from synapse.storage.keys import FetchKeyResult from synapse.types import JsonDict, UserID, get_domain_from_id -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.replication._base import BaseMultiWorkerStreamTestCase diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index 93934e9ff7..73426c7b04 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -30,7 +30,7 @@ from synapse.api.errors import AuthError, SynapseError from synapse.rest import admin from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_receipts.py b/tests/handlers/test_receipts.py index cf04ac6e00..4febccbfcf 100644 --- a/tests/handlers/test_receipts.py +++ b/tests/handlers/test_receipts.py @@ -27,7 +27,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.constants import EduTypes, ReceiptTypes from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py index 43ded2fc10..5e2eb8dee7 100644 --- a/tests/handlers/test_register.py +++ b/tests/handlers/test_register.py @@ -43,7 +43,7 @@ from synapse.types import ( UserID, create_requester, ) -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import override_config from tests.utils import mock_getRawHeaders diff --git 
a/tests/handlers/test_room_member.py b/tests/handlers/test_room_member.py
index 3084f180f5..92c7c36602 100644
--- a/tests/handlers/test_room_member.py
+++ b/tests/handlers/test_room_member.py
@@ -15,7 +15,7 @@ from synapse.federation.federation_base import (
 from synapse.federation.federation_client import SendJoinResult
 from synapse.server import HomeServer
 from synapse.types import UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
diff --git a/tests/handlers/test_room_policy.py b/tests/handlers/test_room_policy.py
index 3ea6f13cce..d1d0c484fa 100644
--- a/tests/handlers/test_room_policy.py
+++ b/tests/handlers/test_room_policy.py
@@ -23,7 +23,7 @@ from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID
 from synapse.types.handlers.policy_server import RECOMMENDATION_OK, RECOMMENDATION_SPAM
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import event_injection
diff --git a/tests/handlers/test_room_summary.py b/tests/handlers/test_room_summary.py
index 27646d7365..00592b9871 100644
--- a/tests/handlers/test_room_summary.py
+++ b/tests/handlers/test_room_summary.py
@@ -42,7 +42,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/handlers/test_saml.py b/tests/handlers/test_saml.py
index 98a4276a3a..f7cbf91113 100644
--- a/tests/handlers/test_saml.py
+++ b/tests/handlers/test_saml.py
@@ -30,7 +30,7 @@ from synapse.api.errors import RedirectException
 from synapse.module_api import ModuleApi
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase, override_config
 
diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py
index 8c390f0c57..1ffd15cadb 100644
--- a/tests/handlers/test_sliding_sync.py
+++ b/tests/handlers/test_sliding_sync.py
@@ -46,7 +46,7 @@ from synapse.storage.util.id_generators import MultiWriterIdGenerator
 from synapse.types import JsonDict, StateMap, StreamToken, UserID, create_requester
 from synapse.types.handlers.sliding_sync import PerConnectionState, SlidingSyncConfig
 from synapse.types.state import StateFilter
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.replication._base import BaseMultiWorkerStreamTestCase
diff --git a/tests/handlers/test_sso.py b/tests/handlers/test_sso.py
index 896e4fac9a..b09d0a42f5 100644
--- a/tests/handlers/test_sso.py
+++ b/tests/handlers/test_sso.py
@@ -27,7 +27,7 @@ from twisted.web.http_headers import Headers
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.client import RawHeaders
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import SMALL_PNG, FakeResponse
diff --git a/tests/handlers/test_stats.py b/tests/handlers/test_stats.py
index cd17cd86e0..abec5c2e39 100644
--- a/tests/handlers/test_stats.py
+++ b/tests/handlers/test_stats.py
@@ -26,7 +26,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.storage.databases.main import stats
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/handlers/test_sync.py b/tests/handlers/test_sync.py
index 9d3e88c126..c61788fe90 100644
--- a/tests/handlers/test_sync.py
+++ b/tests/handlers/test_sync.py
@@ -50,7 +50,7 @@ from synapse.types import (
     UserID,
     create_requester,
 )
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 import tests.unittest
 import tests.utils
diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py
index 614b12c62a..4d2807151e 100644
--- a/tests/handlers/test_typing.py
+++ b/tests/handlers/test_typing.py
@@ -36,7 +36,7 @@ from synapse.handlers.typing import FORGET_TIMEOUT, TypingWriterHandler
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
 from synapse.server import HomeServer
 from synapse.types import JsonDict, Requester, StreamKeyType, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock
diff --git a/tests/handlers/test_user_directory.py b/tests/handlers/test_user_directory.py
index 7458fe0885..1ba0be51a2 100644
--- a/tests/handlers/test_user_directory.py
+++ b/tests/handlers/test_user_directory.py
@@ -32,7 +32,7 @@ from synapse.rest.client import login, register, room, user_directory
 from synapse.server import HomeServer
 from synapse.storage.roommember import ProfileInfo
 from synapse.types import JsonDict, UserID, UserProfile, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.storage.test_user_directory import GetUserDirectoryTables
diff --git a/tests/handlers/test_worker_lock.py b/tests/handlers/test_worker_lock.py
index 3d3904eac7..61ff51ff92 100644
--- a/tests/handlers/test_worker_lock.py
+++ b/tests/handlers/test_worker_lock.py
@@ -26,7 +26,7 @@ from twisted.internet import defer
 from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.replication._base import BaseMultiWorkerStreamTestCase
diff --git a/tests/http/test_matrixfederationclient.py b/tests/http/test_matrixfederationclient.py
index 224883b635..44752d69cb 100644
--- a/tests/http/test_matrixfederationclient.py
+++ b/tests/http/test_matrixfederationclient.py
@@ -48,7 +48,7 @@ from synapse.logging.context import (
     current_context,
 )
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import FakeTransport
diff --git a/tests/http/test_simple_client.py b/tests/http/test_simple_client.py
index c5ead59988..25c0e1081b 100644
--- a/tests/http/test_simple_client.py
+++ b/tests/http/test_simple_client.py
@@ -29,7 +29,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.http import RequestTimedOutError
 from synapse.http.client import SimpleHttpClient
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/http/test_site.py b/tests/http/test_site.py
index 2eca4587e7..9e6d929c9e 100644
--- a/tests/http/test_site.py
+++ b/tests/http/test_site.py
@@ -24,7 +24,7 @@ from twisted.internet.testing import MemoryReactor, StringTransport
 
 from synapse.app.homeserver import SynapseHomeServer
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/logging/test_opentracing.py b/tests/logging/test_opentracing.py
index 5fe57d100e..d102d50138 100644
--- a/tests/logging/test_opentracing.py
+++ b/tests/logging/test_opentracing.py
@@ -35,7 +35,7 @@ from synapse.logging.opentracing import (
     tag_args,
     trace_with_opname,
 )
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 try:
     import opentracing
@@ -159,7 +159,13 @@ class TracingScopeTestCase(TestCase):
     def test_overlapping_spans(self) -> None:
         """Overlapping spans which are not neatly nested should work"""
         reactor = MemoryReactorClock()
-        clock = Clock(reactor)
+        # type-ignore: mypy-zope doesn't seem to recognise that `MemoryReactorClock`
+        # implements `ISynapseThreadlessReactor` (combination of the normal Twisted
+        # Reactor/Clock interfaces), via inheritance from
+        # `twisted.internet.testing.MemoryReactor` and `twisted.internet.testing.Clock`
+        clock = Clock(
+            reactor  # type: ignore[arg-type]
+        )
 
         scopes = []
 
@@ -223,7 +229,13 @@
         parent.
         """
         reactor = MemoryReactorClock()
-        clock = Clock(reactor)
+        # type-ignore: mypy-zope doesn't seem to recognise that `MemoryReactorClock`
+        # implements `ISynapseThreadlessReactor` (combination of the normal Twisted
+        # Reactor/Clock interfaces), via inheritance from
+        # `twisted.internet.testing.MemoryReactor` and `twisted.internet.testing.Clock`
+        clock = Clock(
+            reactor  # type: ignore[arg-type]
+        )
 
         scope_map: Dict[str, opentracing.Scope] = {}
 
diff --git a/tests/media/test_media_retention.py b/tests/media/test_media_retention.py
index 6e01b9aecb..aec1adb040 100644
--- a/tests/media/test_media_retention.py
+++ b/tests/media/test_media_retention.py
@@ -30,7 +30,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, register, room
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import (
     random_string,
 )
diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py
index bf334c0371..28c4ce676a 100644
--- a/tests/media/test_media_storage.py
+++ b/tests/media/test_media_storage.py
@@ -56,7 +56,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, media
 from synapse.server import HomeServer
 from synapse.types import JsonDict, RoomAlias
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeChannel
diff --git a/tests/media/test_oembed.py b/tests/media/test_oembed.py
index afae7e048c..dc13c03df3 100644
--- a/tests/media/test_oembed.py
+++ b/tests/media/test_oembed.py
@@ -29,7 +29,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.media.oembed import OEmbedProvider, OEmbedResult
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/media/test_url_previewer.py b/tests/media/test_url_previewer.py
index bd7190e3e9..3d706c7e90 100644
--- a/tests/media/test_url_previewer.py
+++ b/tests/media/test_url_previewer.py
@@ -23,7 +23,7 @@ import os
 from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/metrics/test_phone_home_stats.py b/tests/metrics/test_phone_home_stats.py
index cf18d8635d..4462385dae 100644
--- a/tests/metrics/test_phone_home_stats.py
+++ b/tests/metrics/test_phone_home_stats.py
@@ -23,7 +23,7 @@ from synapse.app.phone_stats_home import (
 from synapse.rest import admin, login, register, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock
diff --git a/tests/module_api/test_account_data_manager.py b/tests/module_api/test_account_data_manager.py
index 6539871c11..0397c6a786 100644
--- a/tests/module_api/test_account_data_manager.py
+++ b/tests/module_api/test_account_data_manager.py
@@ -23,7 +23,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.api.errors import SynapseError
 from synapse.rest import admin
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/module_api/test_api.py b/tests/module_api/test_api.py
index 6b761de36d..86f987f292 100644
--- a/tests/module_api/test_api.py
+++ b/tests/module_api/test_api.py
@@ -36,7 +36,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, notifications, presence, profile, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.events.test_presence_router import send_presence_update, sync_presence
 from tests.replication._base import BaseMultiWorkerStreamTestCase
diff --git a/tests/module_api/test_event_unsigned_addition.py b/tests/module_api/test_event_unsigned_addition.py
index 52e3858e6f..b6b43c469f 100644
--- a/tests/module_api/test_event_unsigned_addition.py
+++ b/tests/module_api/test_event_unsigned_addition.py
@@ -24,7 +24,7 @@ from synapse.events import EventBase
 from synapse.rest import admin, login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/module_api/test_spamchecker.py b/tests/module_api/test_spamchecker.py
index fa19232ee9..3f4d1d9d5f 100644
--- a/tests/module_api/test_spamchecker.py
+++ b/tests/module_api/test_spamchecker.py
@@ -20,7 +20,7 @@ from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.rest import admin, login, room, room_upgrade_rest_servlet
 from synapse.server import HomeServer
 from synapse.types import Codes, JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.server import FakeChannel
 from tests.unittest import HomeserverTestCase
diff --git a/tests/push/test_bulk_push_rule_evaluator.py b/tests/push/test_bulk_push_rule_evaluator.py
index 7342a72dff..560d7234ec 100644
--- a/tests/push/test_bulk_push_rule_evaluator.py
+++ b/tests/push/test_bulk_push_rule_evaluator.py
@@ -34,7 +34,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, push_rule, register, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase, override_config
 
diff --git a/tests/push/test_email.py b/tests/push/test_email.py
index 4d9e42ac2c..80a22044dd 100644
--- a/tests/push/test_email.py
+++ b/tests/push/test_email.py
@@ -35,7 +35,7 @@ from synapse.push.emailpusher import EmailPusher
 from synapse.rest.client import login, room
 from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.server import FakeSite, make_request
 from tests.unittest import HomeserverTestCase
diff --git a/tests/push/test_http.py b/tests/push/test_http.py
index 370233c730..4c8aae5782 100644
--- a/tests/push/test_http.py
+++ b/tests/push/test_http.py
@@ -32,7 +32,7 @@ from synapse.rest.admin.experimental_features import ExperimentalFeature
 from synapse.rest.client import login, push_rule, pusher, receipts, room, versions
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase, override_config
 
diff --git a/tests/push/test_push_rule_evaluator.py b/tests/push/test_push_rule_evaluator.py
index 3a351acffa..718c9614e5 100644
--- a/tests/push/test_push_rule_evaluator.py
+++ b/tests/push/test_push_rule_evaluator.py
@@ -36,7 +36,7 @@ from synapse.server import HomeServer
 from synapse.storage.databases.main.appservice import _make_exclusive_regex
 from synapse.synapse_rust.push import PushRuleEvaluator
 from synapse.types import JsonDict, JsonMapping, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.frozenutils import freeze
 
 from tests import unittest
diff --git a/tests/replication/_base.py b/tests/replication/_base.py
index e756021937..36d3213908 100644
--- a/tests/replication/_base.py
+++ b/tests/replication/_base.py
@@ -38,7 +38,7 @@ from synapse.replication.tcp.protocol import (
 )
 from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeTransport
diff --git a/tests/replication/storage/_base.py b/tests/replication/storage/_base.py
index 97e744127c..fb99cb2335 100644
--- a/tests/replication/storage/_base.py
+++ b/tests/replication/storage/_base.py
@@ -25,7 +25,7 @@ from unittest.mock import Mock
 from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseStreamTestCase
 
diff --git a/tests/replication/storage/test_events.py b/tests/replication/storage/test_events.py
index b3ca204995..fce3269005 100644
--- a/tests/replication/storage/test_events.py
+++ b/tests/replication/storage/test_events.py
@@ -38,7 +38,7 @@ from synapse.storage.databases.main.event_push_actions import (
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
 from synapse.storage.roommember import RoomsForUser
 from synapse.types import PersistedEventPosition
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from ._base import BaseWorkerStoreTestCase
 
diff --git a/tests/replication/tcp/streams/test_events.py b/tests/replication/tcp/streams/test_events.py
index cd6fe53a96..782dad39f5 100644
--- a/tests/replication/tcp/streams/test_events.py
+++ b/tests/replication/tcp/streams/test_events.py
@@ -38,7 +38,7 @@ from synapse.replication.tcp.streams.events import (
 from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseStreamTestCase
 from tests.test_utils.event_injection import inject_event, inject_member_event
diff --git a/tests/replication/tcp/streams/test_thread_subscriptions.py b/tests/replication/tcp/streams/test_thread_subscriptions.py
index 7283aa851e..04e46b9d93 100644
--- a/tests/replication/tcp/streams/test_thread_subscriptions.py
+++ b/tests/replication/tcp/streams/test_thread_subscriptions.py
@@ -20,7 +20,7 @@ from synapse.replication.tcp.streams._base import (
 )
 from synapse.server import HomeServer
 from synapse.storage.database import LoggingTransaction
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseStreamTestCase
 
diff --git a/tests/replication/test_auth.py b/tests/replication/test_auth.py
index 640ed4e8f3..30f636b3f1 100644
--- a/tests/replication/test_auth.py
+++ b/tests/replication/test_auth.py
@@ -24,7 +24,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.rest.client import register
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import FakeChannel, make_request
diff --git a/tests/replication/test_federation_ack.py b/tests/replication/test_federation_ack.py
index 440c1d45af..e6b9ea5383 100644
--- a/tests/replication/test_federation_ack.py
+++ b/tests/replication/test_federation_ack.py
@@ -28,7 +28,7 @@ from synapse.replication.tcp.commands import FederationAckCommand
 from synapse.replication.tcp.protocol import IReplicationConnection
 from synapse.replication.tcp.streams.federation import FederationStream
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/replication/test_federation_sender_shard.py b/tests/replication/test_federation_sender_shard.py
index 1fed4ec631..92259f2542 100644
--- a/tests/replication/test_federation_sender_shard.py
+++ b/tests/replication/test_federation_sender_shard.py
@@ -41,7 +41,7 @@ from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.storage.keys import FetchKeyResult
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import get_clock
diff --git a/tests/replication/test_multi_media_repo.py b/tests/replication/test_multi_media_repo.py
index 228a803c1d..f712ad1fe3 100644
--- a/tests/replication/test_multi_media_repo.py
+++ b/tests/replication/test_multi_media_repo.py
@@ -30,7 +30,7 @@ from twisted.web.server import Request
 from synapse.rest import admin
 from synapse.rest.client import login, media
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.http import (
     TestServerTLSConnectionFactory,
diff --git a/tests/replication/test_pusher_shard.py b/tests/replication/test_pusher_shard.py
index d63054c631..033711b9b8 100644
--- a/tests/replication/test_pusher_shard.py
+++ b/tests/replication/test_pusher_shard.py
@@ -27,7 +27,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 
diff --git a/tests/replication/test_sharded_event_persister.py b/tests/replication/test_sharded_event_persister.py
index 797ad003ef..f37394c1f5 100644
--- a/tests/replication/test_sharded_event_persister.py
+++ b/tests/replication/test_sharded_event_persister.py
@@ -27,7 +27,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.storage.util.id_generators import MultiWriterIdGenerator
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
diff --git a/tests/replication/test_sharded_receipts.py b/tests/replication/test_sharded_receipts.py
index 6b3ecdad78..b986f33542 100644
--- a/tests/replication/test_sharded_receipts.py
+++ b/tests/replication/test_sharded_receipts.py
@@ -28,7 +28,7 @@ from synapse.rest.client import login, receipts, room, sync
 from synapse.server import HomeServer
 from synapse.storage.util.id_generators import MultiWriterIdGenerator
 from synapse.types import StreamToken
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.replication._base import BaseMultiWorkerStreamTestCase
 from tests.server import make_request
diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py
index b74e8388e9..2a17389feb 100644
--- a/tests/rest/admin/test_admin.py
+++ b/tests/rest/admin/test_admin.py
@@ -33,7 +33,7 @@ from synapse.rest.admin import VersionServlet
 from synapse.rest.client import login, media, room
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import SMALL_PNG
diff --git a/tests/rest/admin/test_background_updates.py b/tests/rest/admin/test_background_updates.py
index dd116e79f1..25112baaa2 100644
--- a/tests/rest/admin/test_background_updates.py
+++ b/tests/rest/admin/test_background_updates.py
@@ -30,7 +30,7 @@ from synapse.rest.client import login
 from synapse.server import HomeServer
 from synapse.storage.background_updates import BackgroundUpdater
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_device.py b/tests/rest/admin/test_device.py
index c564e0c9a7..4dff59e180 100644
--- a/tests/rest/admin/test_device.py
+++ b/tests/rest/admin/test_device.py
@@ -29,7 +29,7 @@ from synapse.api.errors import Codes
 from synapse.handlers.device import DeviceWriterHandler
 from synapse.rest.client import devices, login
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_event_reports.py b/tests/rest/admin/test_event_reports.py
index a6f958658f..28be7fcd97 100644
--- a/tests/rest/admin/test_event_reports.py
+++ b/tests/rest/admin/test_event_reports.py
@@ -27,7 +27,7 @@ from synapse.api.errors import Codes
 from synapse.rest.client import login, reporting, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_federation.py b/tests/rest/admin/test_federation.py
index cfea480bf0..d0b57d1faa 100644
--- a/tests/rest/admin/test_federation.py
+++ b/tests/rest/admin/test_federation.py
@@ -29,7 +29,7 @@ from synapse.api.errors import Codes
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py
index f863b5f8e7..3bf9f67e09 100644
--- a/tests/rest/admin/test_media.py
+++ b/tests/rest/admin/test_media.py
@@ -32,7 +32,7 @@ from synapse.api.errors import Codes
 from synapse.media.filepath import MediaFilePaths
 from synapse.rest.client import login, profile, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import SMALL_CMYK_JPEG, SMALL_PNG
diff --git a/tests/rest/admin/test_registration_tokens.py b/tests/rest/admin/test_registration_tokens.py
index b8e111c804..9afe86b724 100644
--- a/tests/rest/admin/test_registration_tokens.py
+++ b/tests/rest/admin/test_registration_tokens.py
@@ -28,7 +28,7 @@ import synapse.rest.admin
 from synapse.api.errors import Codes
 from synapse.rest.client import login
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_room.py b/tests/rest/admin/test_room.py
index ee5d0419ab..30b2de26e4 100644
--- a/tests/rest/admin/test_room.py
+++ b/tests/rest/admin/test_room.py
@@ -45,7 +45,7 @@ from synapse.storage.databases.main.purge_events import (
     purge_room_tables_with_room_id_column,
 )
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.task_scheduler import TaskScheduler
 
 from tests import unittest
diff --git a/tests/rest/admin/test_scheduled_tasks.py b/tests/rest/admin/test_scheduled_tasks.py
index ea7afc0101..16b80e214b 100644
--- a/tests/rest/admin/test_scheduled_tasks.py
+++ b/tests/rest/admin/test_scheduled_tasks.py
@@ -22,7 +22,7 @@ from synapse.api.errors import Codes
 from synapse.rest.client import login
 from synapse.server import HomeServer
 from synapse.types import JsonMapping, ScheduledTask, TaskStatus
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/admin/test_server_notice.py b/tests/rest/admin/test_server_notice.py
index 1f77e31d48..ebb6867d7c 100644
--- a/tests/rest/admin/test_server_notice.py
+++ b/tests/rest/admin/test_server_notice.py
@@ -28,7 +28,7 @@ from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.storage.roommember import RoomsForUser
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import random_string
 
 from tests import unittest
diff --git a/tests/rest/admin/test_statistics.py b/tests/rest/admin/test_statistics.py
index 10efc4ef8b..4026c47a23 100644
--- a/tests/rest/admin/test_statistics.py
+++ b/tests/rest/admin/test_statistics.py
@@ -29,7 +29,7 @@ from synapse.api.errors import Codes
 from synapse.rest.client import login
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import SMALL_PNG
diff --git a/tests/rest/admin/test_user.py b/tests/rest/admin/test_user.py
index 4432b6a7a0..ca41cd6c31 100644
--- a/tests/rest/admin/test_user.py
+++ b/tests/rest/admin/test_user.py
@@ -61,7 +61,7 @@ from synapse.rest.client import (
 from synapse.server import HomeServer
 from synapse.storage.databases.main.client_ips import LAST_SEEN_GRANULARITY
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.replication._base import BaseMultiWorkerStreamTestCase
diff --git a/tests/rest/admin/test_username_available.py b/tests/rest/admin/test_username_available.py
index 9c3ab3e64c..b2c1d7ac0a 100644
--- a/tests/rest/admin/test_username_available.py
+++ b/tests/rest/admin/test_username_available.py
@@ -26,7 +26,7 @@ import synapse.rest.admin
 from synapse.api.errors import Codes, SynapseError
 from synapse.rest.client import login
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/sliding_sync/test_connection_tracking.py b/tests/rest/client/sliding_sync/test_connection_tracking.py
index f8ce1104a8..16d13fcc86 100644
--- a/tests/rest/client/sliding_sync/test_connection_tracking.py
+++ b/tests/rest/client/sliding_sync/test_connection_tracking.py
@@ -21,7 +21,7 @@ import synapse.rest.admin
 from synapse.api.constants import EventTypes
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_extension_account_data.py b/tests/rest/client/sliding_sync/test_extension_account_data.py
index 5949065722..8e08b4a2ac 100644
--- a/tests/rest/client/sliding_sync/test_extension_account_data.py
+++ b/tests/rest/client/sliding_sync/test_extension_account_data.py
@@ -24,7 +24,7 @@ from synapse.api.constants import AccountDataTypes
 from synapse.rest.client import login, room, sendtodevice, sync
 from synapse.server import HomeServer
 from synapse.types import StreamKeyType
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.server import TimedOutException
diff --git a/tests/rest/client/sliding_sync/test_extension_e2ee.py b/tests/rest/client/sliding_sync/test_extension_e2ee.py
index baf6a5882e..4a5e407038 100644
--- a/tests/rest/client/sliding_sync/test_extension_e2ee.py
+++ b/tests/rest/client/sliding_sync/test_extension_e2ee.py
@@ -21,7 +21,7 @@ import synapse.rest.admin
 from synapse.rest.client import devices, login, room, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict, StreamKeyType
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.server import TimedOutException
diff --git a/tests/rest/client/sliding_sync/test_extension_receipts.py b/tests/rest/client/sliding_sync/test_extension_receipts.py
index 1bba3038db..8c02217cde 100644
--- a/tests/rest/client/sliding_sync/test_extension_receipts.py
+++ b/tests/rest/client/sliding_sync/test_extension_receipts.py
@@ -22,7 +22,7 @@ from synapse.api.constants import EduTypes, ReceiptTypes
 from synapse.rest.client import login, receipts, room, sync
 from synapse.server import HomeServer
 from synapse.types import StreamKeyType
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.server import TimedOutException
diff --git a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py
index 775c4f96c9..4e151b9aae 100644
--- a/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py
+++ b/tests/rest/client/sliding_sync/test_extension_thread_subscriptions.py
@@ -21,7 +21,7 @@ import synapse.rest.admin
 from synapse.rest.client import login, room, sync, thread_subscriptions
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_extension_to_device.py b/tests/rest/client/sliding_sync/test_extension_to_device.py
index 151a5be665..a77b0a2e9f 100644
--- a/tests/rest/client/sliding_sync/test_extension_to_device.py
+++ b/tests/rest/client/sliding_sync/test_extension_to_device.py
@@ -22,7 +22,7 @@ import synapse.rest.admin
 from synapse.rest.client import login, sendtodevice, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict, StreamKeyType
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.server import TimedOutException
diff --git a/tests/rest/client/sliding_sync/test_extension_typing.py b/tests/rest/client/sliding_sync/test_extension_typing.py
index 37c90d6ec2..68b935aaf9 100644
--- a/tests/rest/client/sliding_sync/test_extension_typing.py
+++ b/tests/rest/client/sliding_sync/test_extension_typing.py
@@ -22,7 +22,7 @@ from synapse.api.constants import EduTypes
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.types import StreamKeyType
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.server import TimedOutException
diff --git a/tests/rest/client/sliding_sync/test_extensions.py b/tests/rest/client/sliding_sync/test_extensions.py
index 0643596e59..1c167b0414 100644
--- a/tests/rest/client/sliding_sync/test_extensions.py
+++ b/tests/rest/client/sliding_sync/test_extensions.py
@@ -23,7 +23,7 @@ import synapse.rest.admin
 from synapse.api.constants import ReceiptTypes
 from synapse.rest.client import login, receipts, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_lists_filters.py b/tests/rest/client/sliding_sync/test_lists_filters.py
index 57d00a2a7a..3b7b2a16d8 100644
--- a/tests/rest/client/sliding_sync/test_lists_filters.py
+++ b/tests/rest/client/sliding_sync/test_lists_filters.py
@@ -28,7 +28,7 @@ from synapse.events import StrippedStateEvent
 from synapse.rest.client import login, room, sync, tags
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_room_subscriptions.py b/tests/rest/client/sliding_sync/test_room_subscriptions.py
index b78e4f2045..811478f1ba 100644
--- a/tests/rest/client/sliding_sync/test_room_subscriptions.py
+++ b/tests/rest/client/sliding_sync/test_room_subscriptions.py
@@ -22,7 +22,7 @@ import synapse.rest.admin
 from synapse.api.constants import EventTypes, HistoryVisibility
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_rooms_invites.py b/tests/rest/client/sliding_sync/test_rooms_invites.py
index a0f4ccd2cc..5a463303dd 100644
--- a/tests/rest/client/sliding_sync/test_rooms_invites.py
+++ b/tests/rest/client/sliding_sync/test_rooms_invites.py
@@ -22,7 +22,7 @@ from synapse.api.constants import EventTypes, HistoryVisibility
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_rooms_meta.py b/tests/rest/client/sliding_sync/test_rooms_meta.py
index 4559bc7646..9e3f8aaf94 100644
--- a/tests/rest/client/sliding_sync/test_rooms_meta.py
+++ b/tests/rest/client/sliding_sync/test_rooms_meta.py
@@ -22,7 +22,7 @@ from synapse.api.constants import EventContentFields, EventTypes, Membership
 from synapse.api.room_versions import RoomVersions
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.test_utils.event_injection import create_event
diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py
index cfff167c6e..210280bc48 100644
--- a/tests/rest/client/sliding_sync/test_rooms_required_state.py
+++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py
@@ -23,7 +23,7 @@ from synapse.api.constants import EventContentFields, EventTypes, JoinRules, Mem
 from synapse.handlers.sliding_sync import StateValues
 from synapse.rest.client import knock, login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 from tests.test_utils.event_injection import mark_event_as_partial_state
diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py
index 3d950eb20b..44a6068c11 100644
--- a/tests/rest/client/sliding_sync/test_rooms_timeline.py
+++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py
@@ -23,7 +23,7 @@ from synapse.api.constants import EventTypes
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.types import StrSequence
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
 
diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py
index ea4ee16359..8da5863b3a 100644
--- a/tests/rest/client/sliding_sync/test_sliding_sync.py
+++ b/tests/rest/client/sliding_sync/test_sliding_sync.py
@@ -42,7 +42,7 @@ from synapse.types import (
     StreamKeyType,
     StreamToken,
 )
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import random_string
 
 from tests import unittest
diff --git a/tests/rest/client/test_account.py b/tests/rest/client/test_account.py
index 9a3202bd93..773f49dfc9 100644
--- a/tests/rest/client/test_account.py
+++ b/tests/rest/client/test_account.py
@@ -39,7 +39,7 @@ from synapse.rest.synapse.client.password_reset import PasswordResetSubmitTokenR
 from synapse.server import HomeServer
 from synapse.storage._base import db_to_json
 from synapse.types import JsonDict, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeSite, make_request
diff --git a/tests/rest/client/test_auth.py b/tests/rest/client/test_auth.py
index 4fe506845c..f5b7f95721 100644
--- a/tests/rest/client/test_auth.py
+++ b/tests/rest/client/test_auth.py
@@ -35,7 +35,7 @@ from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.server import HomeServer
 from synapse.storage.database import LoggingTransaction
 from synapse.types import JsonDict, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.handlers.test_oidc import HAS_OIDC
diff --git a/tests/rest/client/test_capabilities.py b/tests/rest/client/test_capabilities.py
index 8ae1cc935a..0eec313061 100644
--- a/tests/rest/client/test_capabilities.py
+++ b/tests/rest/client/test_capabilities.py
@@ -25,7 +25,7 @@ import synapse.rest.admin
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
 from synapse.rest.client import capabilities, login
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/rest/client/test_consent.py b/tests/rest/client/test_consent.py
index 1a64b3984f..d127f3abd2 100644
--- a/tests/rest/client/test_consent.py
+++ b/tests/rest/client/test_consent.py
@@ -28,7 +28,7 @@ from synapse.api.urls import ConsentURIBuilder
 from synapse.rest.client import login, room
 from synapse.rest.consent import consent_resource
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeSite, make_request
diff --git a/tests/rest/client/test_delayed_events.py b/tests/rest/client/test_delayed_events.py
index 4b338d333f..221a4902f2 100644
--- a/tests/rest/client/test_delayed_events.py
+++ b/tests/rest/client/test_delayed_events.py
@@ -26,7 +26,7 @@ from synapse.rest import admin
 from synapse.rest.client import delayed_events, login, room, versions
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import HomeserverTestCase
diff --git a/tests/rest/client/test_devices.py b/tests/rest/client/test_devices.py
index 309e6ec686..de80b7c186 100644
--- a/tests/rest/client/test_devices.py
+++ b/tests/rest/client/test_devices.py
@@ -29,7 +29,7 @@ from synapse.rest import admin, devices, sync
 from synapse.rest.client import keys, login, register
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_directory.py b/tests/rest/client/test_directory.py
index 6548ac6fa8..f67f495dd2 100644
--- a/tests/rest/client/test_directory.py
+++ b/tests/rest/client/test_directory.py
@@ -26,7 +26,7 @@ from synapse.rest import admin
 from synapse.rest.client import directory, login, room
 from synapse.server import HomeServer
 from synapse.types import RoomAlias, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import random_string
 
 from tests import unittest
diff --git a/tests/rest/client/test_ephemeral_message.py b/tests/rest/client/test_ephemeral_message.py
index 5b5c220825..6d806c630d 100644
--- a/tests/rest/client/test_ephemeral_message.py
+++ b/tests/rest/client/test_ephemeral_message.py
@@ -26,7 +26,7 @@ from synapse.rest import admin
 from synapse.rest.client import room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_events.py b/tests/rest/client/test_events.py
index 142509bbf7..65371f4c7e 100644
--- a/tests/rest/client/test_events.py
+++ b/tests/rest/client/test_events.py
@@ -29,7 +29,7 @@ import synapse.rest.admin
 from synapse.api.constants import EduTypes
 from synapse.rest.client import events, login, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_filter.py b/tests/rest/client/test_filter.py
index 4153fb322d..0897c67ac3 100644
--- a/tests/rest/client/test_filter.py
+++ b/tests/rest/client/test_filter.py
@@ -25,7 +25,7 @@ from synapse.api.errors import Codes
 from synapse.rest.client import filter
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_identity.py b/tests/rest/client/test_identity.py
index 87af18f473..cc29d0c01d 100644
--- a/tests/rest/client/test_identity.py
+++ b/tests/rest/client/test_identity.py
@@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor
 import synapse.rest.admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_login.py b/tests/rest/client/test_login.py
index 2f70a7a87e..c54e409a6c 100644
--- a/tests/rest/client/test_login.py
+++ b/tests/rest/client/test_login.py
@@ -52,7 +52,7 @@ from synapse.rest.client.account import WhoamiRestServlet
 from synapse.rest.synapse.client import build_synapse_client_resource_tree
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.handlers.test_oidc import HAS_OIDC
diff --git a/tests/rest/client/test_login_token_request.py b/tests/rest/client/test_login_token_request.py
index 202d2cf351..835336f3d9 100644
--- a/tests/rest/client/test_login_token_request.py
+++ b/tests/rest/client/test_login_token_request.py
@@ -24,7 +24,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.rest import admin
 from synapse.rest.client import login, login_token_request, versions
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py
index ec6760feea..91bf94b672 100644
--- a/tests/rest/client/test_media.py
+++ b/tests/rest/client/test_media.py
@@ -59,7 +59,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, media
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import parse_and_validate_mxc_uri
 
 from tests import unittest
diff --git a/tests/rest/client/test_mutual_rooms.py b/tests/rest/client/test_mutual_rooms.py
index 2e37284680..8580d09006 100644
--- a/tests/rest/client/test_mutual_rooms.py
+++ b/tests/rest/client/test_mutual_rooms.py
@@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor
 import synapse.rest.admin
 from synapse.rest.client import login, mutual_rooms, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeChannel
diff --git a/tests/rest/client/test_notifications.py b/tests/rest/client/test_notifications.py
index ec66567817..e00152389b 100644
--- a/tests/rest/client/test_notifications.py
+++ b/tests/rest/client/test_notifications.py
@@ -26,7 +26,7 @@ from twisted.internet.testing import MemoryReactor
 import synapse.rest.admin
 from synapse.rest.client import login, notifications, receipts, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/rest/client/test_owned_state.py b/tests/rest/client/test_owned_state.py
index 386b95d616..f927d74c37 100644
--- a/tests/rest/client/test_owned_state.py
+++ b/tests/rest/client/test_owned_state.py
@@ -10,7 +10,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/rest/client/test_password_policy.py b/tests/rest/client/test_password_policy.py
index 33bab684e3..5e98d8f1fd 100644
--- a/tests/rest/client/test_password_policy.py
+++ b/tests/rest/client/test_password_policy.py
@@ -28,7 +28,7 @@ from synapse.api.errors import Codes
 from synapse.rest import admin
 from synapse.rest.client import account, login, password_policy, register
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_power_levels.py b/tests/rest/client/test_power_levels.py
index 39ea9acef6..41610891fc 100644
--- a/tests/rest/client/test_power_levels.py
+++ b/tests/rest/client/test_power_levels.py
@@ -27,7 +27,7 @@ from synapse.events.utils import CANONICALJSON_MAX_INT, CANONICALJSON_MIN_INT
 from synapse.rest import admin
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/rest/client/test_presence.py b/tests/rest/client/test_presence.py
index 7138cc92c2..5256c933e0 100644
--- a/tests/rest/client/test_presence.py
+++ b/tests/rest/client/test_presence.py
@@ -26,7 +26,7 @@ from synapse.handlers.presence import PresenceHandler
 from synapse.rest.client import presence
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/rest/client/test_profile.py b/tests/rest/client/test_profile.py
index 936e573bcd..18b3d3a089 100644
--- a/tests/rest/client/test_profile.py
+++ b/tests/rest/client/test_profile.py
@@ -36,7 +36,7 @@ from synapse.rest.client import login, profile, room
 from synapse.server import HomeServer
 from synapse.storage.databases.main.profile import MAX_PROFILE_SIZE
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.utils import USE_POSTGRES_FOR_TESTS
diff --git a/tests/rest/client/test_read_marker.py b/tests/rest/client/test_read_marker.py
index a27eb9453b..c8bb0da5e6 100644
--- a/tests/rest/client/test_read_marker.py
+++ b/tests/rest/client/test_read_marker.py
@@ -25,7 +25,7 @@ from synapse.api.constants import EventTypes
 from synapse.rest import admin
 from synapse.rest.client import login, read_marker, register, room
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_receipts.py b/tests/rest/client/test_receipts.py
index ae4818c412..0c1b631b8e 100644
--- a/tests/rest/client/test_receipts.py
+++ b/tests/rest/client/test_receipts.py
@@ -28,7 +28,7 @@ from synapse.api.constants import EduTypes, EventTypes, HistoryVisibility, Recei
 from synapse.rest.client import login, receipts, room, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_redactions.py b/tests/rest/client/test_redactions.py
index d435a9e393..e3ca108d03 100644
--- a/tests/rest/client/test_redactions.py
+++ b/tests/rest/client/test_redactions.py
@@ -32,7 +32,7 @@ from synapse.server import HomeServer
 from synapse.storage._base import db_to_json
 from synapse.storage.database import LoggingTransaction
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase, override_config
 
diff --git a/tests/rest/client/test_register.py b/tests/rest/client/test_register.py
index 0ffc64dd1f..c7c81aa81c 100644
--- a/tests/rest/client/test_register.py
+++ b/tests/rest/client/test_register.py
@@ -39,7 +39,7 @@ from synapse.rest.client import account, account_validity, login, logout, regist
 from synapse.server import HomeServer
 from synapse.storage._base import db_to_json
 from synapse.types import JsonDict, UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock
diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py
index fd1e87296c..21fb86367a 100644
--- a/tests/rest/client/test_relations.py
+++ b/tests/rest/client/test_relations.py
@@ -30,7 +30,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, register, relations, room, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeChannel
diff --git a/tests/rest/client/test_rendezvous.py b/tests/rest/client/test_rendezvous.py
index 01401f73da..160f852705 100644
--- a/tests/rest/client/test_rendezvous.py
+++ b/tests/rest/client/test_rendezvous.py
@@ -28,7 +28,7 @@ from twisted.web.resource import Resource
 from synapse.rest.client import rendezvous
 from synapse.rest.synapse.client.rendezvous import MSC4108RendezvousSessionResource
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/rest/client/test_reporting.py b/tests/rest/client/test_reporting.py
index 5e5af34b42..0fd02f65a6 100644
--- a/tests/rest/client/test_reporting.py
+++ b/tests/rest/client/test_reporting.py
@@ -26,7 +26,7 @@ import synapse.rest.admin
 from synapse.rest.client import login, reporting, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/rest/client/test_retention.py b/tests/rest/client/test_retention.py
index 24b007f779..7a816a66e0 100644
--- a/tests/rest/client/test_retention.py
+++ b/tests/rest/client/test_retention.py
@@ -27,7 +27,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.visibility import filter_events_for_client
 
 from tests import unittest
diff --git a/tests/rest/client/test_rooms.py b/tests/rest/client/test_rooms.py
index d3b5e26132..feae5f77cd 100644
--- a/tests/rest/client/test_rooms.py
+++ b/tests/rest/client/test_rooms.py
@@ -60,7 +60,7 @@ from synapse.rest.client import (
 )
 from synapse.server import HomeServer
 from synapse.types import JsonDict, RoomAlias, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import random_string
 
 from tests import unittest
diff --git a/tests/rest/client/test_shadow_banned.py b/tests/rest/client/test_shadow_banned.py
index b990a8600b..bb240b943e 100644
--- a/tests/rest/client/test_shadow_banned.py
+++ b/tests/rest/client/test_shadow_banned.py
@@ -34,7 +34,7 @@ from synapse.rest.client import (
 )
 from synapse.server import HomeServer
 from synapse.types import UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py
index 7f3cf5affb..e949bb69e6 100644
--- a/tests/rest/client/test_sync.py
+++ b/tests/rest/client/test_sync.py
@@ -36,7 +36,7 @@ from synapse.api.constants import (
 from synapse.rest.client import devices, knock, login, read_marker, receipts, room, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.federation.transport.test_knocking import (
diff --git a/tests/rest/client/test_third_party_rules.py b/tests/rest/client/test_third_party_rules.py
index f14ca8237a..4161faa11f 100644
--- a/tests/rest/client/test_third_party_rules.py
+++ b/tests/rest/client/test_third_party_rules.py
@@ -36,7 +36,7 @@ from synapse.rest import admin
 from synapse.rest.client import account, login, profile, room
 from synapse.server import HomeServer
 from synapse.types import JsonDict, Requester, StateMap
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.frozenutils import unfreeze
 
 from tests import unittest
diff --git a/tests/rest/client/test_thread_subscriptions.py b/tests/rest/client/test_thread_subscriptions.py
index 3fbf3c5bfa..5aae07ef50 100644
--- a/tests/rest/client/test_thread_subscriptions.py
+++ b/tests/rest/client/test_thread_subscriptions.py
@@ -20,7 +20,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, profile, room, thread_subscriptions
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_transactions.py b/tests/rest/client/test_transactions.py
index 5f42acb391..967f783680 100644
--- a/tests/rest/client/test_transactions.py
+++ b/tests/rest/client/test_transactions.py
@@ -28,7 +28,7 @@ from twisted.internet import defer, reactor as _reactor
 from synapse.logging.context import SENTINEL_CONTEXT, LoggingContext, current_context
 from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
 from synapse.types import ISynapseReactor, JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.utils import MockClock
diff --git a/tests/rest/client/test_typing.py b/tests/rest/client/test_typing.py
index ce2504156c..19817d0321 100644
--- a/tests/rest/client/test_typing.py
+++ b/tests/rest/client/test_typing.py
@@ -27,7 +27,7 @@ from synapse.api.constants import EduTypes
 from synapse.rest.client import room
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/rest/client/test_upgrade_room.py b/tests/rest/client/test_upgrade_room.py
index 66fddc5475..da114e505d 100644
--- a/tests/rest/client/test_upgrade_room.py
+++ b/tests/rest/client/test_upgrade_room.py
@@ -28,7 +28,7 @@ from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.rest import admin
 from synapse.rest.client import login, room, room_upgrade_rest_servlet
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import FakeChannel
diff --git a/tests/rest/key/v2/test_remote_key_resource.py b/tests/rest/key/v2/test_remote_key_resource.py
index 3717d70b6b..cf8241438c 100644
--- a/tests/rest/key/v2/test_remote_key_resource.py
+++ b/tests/rest/key/v2/test_remote_key_resource.py
@@ -36,7 +36,7 @@ from synapse.rest.key.v2 import KeyResource
 from synapse.server import HomeServer
 from synapse.storage.keys import FetchKeyResult
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.httpresourcetree import create_resource_tree
 from synapse.util.stringutils import random_string
 
diff --git a/tests/rest/media/test_domain_blocking.py b/tests/rest/media/test_domain_blocking.py
index 3feade4a4b..9eb0222102 100644
--- a/tests/rest/media/test_domain_blocking.py
+++ b/tests/rest/media/test_domain_blocking.py
@@ -25,7 +25,7 @@ from twisted.web.resource import Resource
 
 from synapse.media._base import FileInfo
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils import SMALL_PNG
diff --git a/tests/rest/media/test_url_preview.py b/tests/rest/media/test_url_preview.py
index e096780ce2..7c8d2fc998 100644
--- a/tests/rest/media/test_url_preview.py
+++ b/tests/rest/media/test_url_preview.py
@@ -36,7 +36,7 @@ from synapse.config.oembed import OEmbedEndpointConfig
 from synapse.media.url_previewer import IMAGE_CACHE_EXPIRY_MS
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 from synapse.util.stringutils import parse_and_validate_mxc_uri
 
 from tests import unittest
diff --git a/tests/rest/synapse/mas/test_devices.py b/tests/rest/synapse/mas/test_devices.py
index 458878c13c..6b7596f1c6 100644
--- a/tests/rest/synapse/mas/test_devices.py
+++ b/tests/rest/synapse/mas/test_devices.py
@@ -15,7 +15,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import skip_unless
 from tests.utils import HAS_AUTHLIB
diff --git a/tests/rest/synapse/mas/test_users.py b/tests/rest/synapse/mas/test_users.py
index b236aceaf2..4e8cf90700 100644
--- a/tests/rest/synapse/mas/test_users.py
+++ b/tests/rest/synapse/mas/test_users.py
@@ -18,7 +18,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.appservice import ApplicationService
 from synapse.server import HomeServer
 from synapse.types import JsonDict, UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import skip_unless
 from tests.utils import HAS_AUTHLIB
diff --git a/tests/server.py b/tests/server.py
index 7432db1ac8..f66ce070c6 100644
--- a/tests/server.py
+++ b/tests/server.py
@@ -103,7 +103,7 @@ from synapse.storage.database import LoggingDatabaseConnection, make_pool
 from synapse.storage.engines import BaseDatabaseEngine, create_engine
 from synapse.storage.prepare_database import prepare_database
 from synapse.types import ISynapseReactor, JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.utils import (
     LEAVE_DB,
diff --git a/tests/server_notices/__init__.py b/tests/server_notices/__init__.py
index 1d23a126de..eca52930db 100644
--- a/tests/server_notices/__init__.py
+++ b/tests/server_notices/__init__.py
@@ -19,7 +19,7 @@ import synapse.rest.admin
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/server_notices/test_consent.py b/tests/server_notices/test_consent.py
index db4a6370e8..45ed5b39d6 100644
--- a/tests/server_notices/test_consent.py
+++ b/tests/server_notices/test_consent.py
@@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor
 import synapse.rest.admin
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py
index 0da12f14cd..dd38528a7d 100644
--- a/tests/server_notices/test_resource_limits_server_notices.py
+++ b/tests/server_notices/test_resource_limits_server_notices.py
@@ -32,7 +32,7 @@ from synapse.server_notices.resource_limits_server_notices import (
 )
 from synapse.server_notices.server_notices_sender import ServerNoticesSender
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/state/test_v21.py b/tests/state/test_v21.py
index 5e46b69fef..ff1715d4f7 100644
--- a/tests/state/test_v21.py
+++ b/tests/state/test_v21.py
@@ -38,7 +38,7 @@ from synapse.state.v2 import (
     resolve_events_with_store,
 )
 from synapse.types import StateMap
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.state.test_v2 import TestStateResolutionStore
diff --git a/tests/storage/databases/main/test_deviceinbox.py b/tests/storage/databases/main/test_deviceinbox.py
index d3ddeaa57e..dbf362a3cc 100644
--- a/tests/storage/databases/main/test_deviceinbox.py
+++ b/tests/storage/databases/main/test_deviceinbox.py
@@ -30,7 +30,7 @@ from synapse.server import HomeServer
 from synapse.storage.databases.main.deviceinbox import (
     DEVICE_FEDERATION_INBOX_CLEANUP_DELAY_MS,
 )
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/databases/main/test_end_to_end_keys.py b/tests/storage/databases/main/test_end_to_end_keys.py
index 3992fc3264..d0dd8f866b 100644
--- a/tests/storage/databases/main/test_end_to_end_keys.py
+++ b/tests/storage/databases/main/test_end_to_end_keys.py
@@ -26,7 +26,7 @@ from synapse.server import HomeServer
 from synapse.storage._base import db_to_json
 from synapse.storage.database import LoggingTransaction
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/databases/main/test_events_worker.py b/tests/storage/databases/main/test_events_worker.py
index f23609aee3..81a3447dbb 100644
--- a/tests/storage/databases/main/test_events_worker.py
+++ b/tests/storage/databases/main/test_events_worker.py
@@ -38,8 +38,8 @@ from synapse.storage.databases.main.events_worker import (
     EventsWorkerStore,
 )
 from synapse.storage.types import Connection
-from synapse.util import Clock
 from synapse.util.async_helpers import yieldable_gather_results
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.test_utils.event_injection import create_event, inject_event
diff --git a/tests/storage/databases/main/test_lock.py b/tests/storage/databases/main/test_lock.py
index e18e0f2792..4296fcde13 100644
--- a/tests/storage/databases/main/test_lock.py
+++ b/tests/storage/databases/main/test_lock.py
@@ -27,7 +27,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
 from synapse.storage.databases.main.lock import _LOCK_TIMEOUT_MS, _RENEWAL_INTERVAL_MS
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/databases/main/test_receipts.py b/tests/storage/databases/main/test_receipts.py
index 4141f868d6..d084f5c2ba 100644
--- a/tests/storage/databases/main/test_receipts.py
+++ b/tests/storage/databases/main/test_receipts.py
@@ -27,7 +27,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.storage.database import LoggingTransaction
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/databases/main/test_room.py b/tests/storage/databases/main/test_room.py
index dda4294e63..4ed775ad76 100644
--- a/tests/storage/databases/main/test_room.py
+++ b/tests/storage/databases/main/test_room.py
@@ -28,7 +28,7 @@ from synapse.rest import admin
 from synapse.rest.client import login, room
 from synapse.server import HomeServer
 from synapse.storage.databases.main.room import _BackgroundUpdates
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py
index 7d260b7915..5e773a5545 100644
--- a/tests/storage/test__base.py
+++ b/tests/storage/test__base.py
@@ -25,7 +25,7 @@ from typing import Generator, List, Tuple, cast
 from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/test_account_data.py b/tests/storage/test_account_data.py
index 794cefd04d..13c4be988e 100644
--- a/tests/storage/test_account_data.py
+++ b/tests/storage/test_account_data.py
@@ -26,7 +26,7 @@ from twisted.internet.testing import MemoryReactor
 from synapse.api.constants import AccountDataTypes
 from synapse.api.errors import Codes, SynapseError
 from synapse.server import HomeServer
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py
index 759fad6af1..b4df92c7a1 100644
--- a/tests/storage/test_appservice.py
+++ b/tests/storage/test_appservice.py
@@ -39,7 +39,7 @@ from synapse.storage.databases.main.appservice import (
     ApplicationServiceTransactionStore,
 )
 from synapse.types import DeviceListUpdates
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/test_background_update.py b/tests/storage/test_background_update.py
index 89a3b54a25..cf63b50c2f 100644
--- a/tests/storage/test_background_update.py
+++ b/tests/storage/test_background_update.py
@@ -37,7 +37,7 @@ from synapse.storage.background_updates import (
 from synapse.storage.database import LoggingTransaction
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.unittest import override_config
diff --git a/tests/storage/test_cleanup_extrems.py b/tests/storage/test_cleanup_extrems.py
index 94fb8e01a1..054e4a4d0b 100644
--- a/tests/storage/test_cleanup_extrems.py
+++ b/tests/storage/test_cleanup_extrems.py
@@ -31,7 +31,7 @@ from synapse.server import HomeServer
 from synapse.storage import prepare_database
 from synapse.storage.types import Cursor
 from synapse.types import UserID, create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py
index de95272b52..1cd97a9dd7 100644
--- a/tests/storage/test_client_ips.py
+++ b/tests/storage/test_client_ips.py
@@ -35,7 +35,7 @@ from synapse.storage.databases.main.client_ips import (
     DeviceLastConnectionInfo,
 )
 from synapse.types import UserID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 from tests.server import make_request
diff --git a/tests/storage/test_database.py b/tests/storage/test_database.py
index 5e5937ff17..fd6963bb82 100644
--- a/tests/storage/test_database.py
+++ b/tests/storage/test_database.py
@@ -33,7 +33,7 @@ from synapse.storage.database import (
     LoggingTransaction,
     make_tuple_comparison_clause,
 )
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/test_devices.py b/tests/storage/test_devices.py
index e8ea813668..bd6fcd8eeb 100644
--- a/tests/storage/test_devices.py
+++ b/tests/storage/test_devices.py
@@ -27,7 +27,7 @@ import synapse.api.errors
 from synapse.api.constants import EduTypes
 from synapse.server import HomeServer
 from synapse.types import JsonDict
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test_directory.py b/tests/storage/test_directory.py
index 26bf6cf391..4fb81e38cd 100644
--- a/tests/storage/test_directory.py
+++ b/tests/storage/test_directory.py
@@ -23,7 +23,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
 from synapse.types import RoomAlias, RoomID
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test_e2e_room_keys.py b/tests/storage/test_e2e_room_keys.py
index f390d11e41..2397d7b80e 100644
--- a/tests/storage/test_e2e_room_keys.py
+++ b/tests/storage/test_e2e_room_keys.py
@@ -23,7 +23,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
 from synapse.storage.databases.main.e2e_room_keys import RoomKey
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests import unittest
 
diff --git a/tests/storage/test_end_to_end_keys.py b/tests/storage/test_end_to_end_keys.py
index e46999022a..24fdb0bf6d 100644
--- a/tests/storage/test_end_to_end_keys.py
+++ b/tests/storage/test_end_to_end_keys.py
@@ -22,7 +22,7 @@ from twisted.internet.testing import MemoryReactor
 
 from synapse.server import HomeServer
 
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test_event_chain.py b/tests/storage/test_event_chain.py
index b2480a139d..fe9bb7bcca 100644
--- a/tests/storage/test_event_chain.py
+++ b/tests/storage/test_event_chain.py
@@ -37,7 +37,7 @@ from synapse.storage.database import LoggingTransaction
 from synapse.storage.databases.main.events import _LinkMap
 from synapse.storage.types import Cursor
 from synapse.types import create_requester
-from synapse.util import Clock
+from synapse.util.clock import Clock
 
 from tests.unittest import HomeserverTestCase
 
diff --git a/tests/storage/test_event_federation.py b/tests/storage/test_event_federation.py
index 2f79068f6b..ee9cf3687f 100644
--- a/tests/storage/test_event_federation.py
+++ b/tests/storage/test_event_federation.py
@@ -53,7 +53,8 @@ from synapse.storage.database import LoggingTransaction
 from synapse.storage.types import Cursor
 from synapse.synapse_rust.events import EventInternalMetadata
 from synapse.types import JsonDict
-from synapse.util import Clock, json_encoder
+from synapse.util.clock import Clock
+from synapse.util.json import json_encoder
 
 import tests.unittest
 import tests.utils
diff --git a/tests/storage/test_event_push_actions.py b/tests/storage/test_event_push_actions.py
index 640490a6e5..30ba1ad94a 100644
--- a/tests/storage/test_event_push_actions.py
+++
b/tests/storage/test_event_push_actions.py @@ -29,7 +29,7 @@ from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.storage.databases.main.event_push_actions import NotifCounts from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py index 6d2e4e4bbe..93e9eab039 100644 --- a/tests/storage/test_events.py +++ b/tests/storage/test_events.py @@ -32,7 +32,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import StateMap -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_events_bg_updates.py b/tests/storage/test_events_bg_updates.py index 7bbb5849a0..a1375aa4ac 100644 --- a/tests/storage/test_events_bg_updates.py +++ b/tests/storage/test_events_bg_updates.py @@ -20,7 +20,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.constants import MAX_DEPTH from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_id_generators.py b/tests/storage/test_id_generators.py index 9e949af482..4c1311a00e 100644 --- a/tests/storage/test_id_generators.py +++ b/tests/storage/test_id_generators.py @@ -35,7 +35,7 @@ from synapse.storage.util.sequence import ( PostgresSequenceGenerator, SequenceGenerator, ) -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase from tests.utils import USE_POSTGRES_FOR_TESTS diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 78ef2e67a2..e684c6c161 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -24,7 +24,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.constants import UserTypes from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import default_config, override_config diff --git a/tests/storage/test_profile.py b/tests/storage/test_profile.py index 0f14e00e51..dbaf298697 100644 --- a/tests/storage/test_profile.py +++ b/tests/storage/test_profile.py @@ -25,7 +25,7 @@ from synapse.server import HomeServer from synapse.storage.database import LoggingTransaction from synapse.storage.engines import PostgresEngine from synapse.types import UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py index 38d0cd6eb2..2894530d52 100644 --- a/tests/storage/test_purge.py +++ b/tests/storage/test_purge.py @@ -25,7 +25,7 @@ from synapse.rest.client import room from synapse.server import HomeServer from synapse.types.state import StateFilter from synapse.types.storage import _BackgroundUpdates -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_receipts.py b/tests/storage/test_receipts.py index 8f7f736175..10ded391f4 100644 --- a/tests/storage/test_receipts.py +++ b/tests/storage/test_receipts.py @@ 
-26,7 +26,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.constants import ReceiptTypes from synapse.server import HomeServer from synapse.types import UserID, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils.event_injection import create_event from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index a9c0d7d9a9..7565376a59 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -31,7 +31,7 @@ from synapse.events.builder import EventBuilder from synapse.server import HomeServer from synapse.synapse_rust.events import EventInternalMetadata from synapse.types import JsonDict, RoomID, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.utils import create_room diff --git a/tests/storage/test_registration.py b/tests/storage/test_registration.py index 992ccc779b..7ee81fabb6 100644 --- a/tests/storage/test_registration.py +++ b/tests/storage/test_registration.py @@ -24,7 +24,7 @@ from synapse.api.constants import UserTypes from synapse.api.errors import ThreepidValidationError from synapse.server import HomeServer from synapse.types import JsonDict, UserID, UserInfo -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase, override_config diff --git a/tests/storage/test_relations.py b/tests/storage/test_relations.py index 0f3e3fe7eb..96cee365f2 100644 --- a/tests/storage/test_relations.py +++ b/tests/storage/test_relations.py @@ -23,7 +23,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.constants import MAIN_TIMELINE from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/storage/test_rollback_worker.py b/tests/storage/test_rollback_worker.py index af69b93cf8..f61eb2e319 100644 --- a/tests/storage/test_rollback_worker.py +++ b/tests/storage/test_rollback_worker.py @@ -29,7 +29,7 @@ from synapse.storage.database import LoggingDatabaseConnection from synapse.storage.prepare_database import PrepareDatabaseException, prepare_database from synapse.storage.schema import SCHEMA_VERSION from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index a8a75d2973..f8c5260fa2 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -24,7 +24,7 @@ from twisted.internet.testing import MemoryReactor from synapse.api.room_versions import RoomVersions from synapse.server import HomeServer from synapse.types import RoomAlias, RoomID, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_room_search.py b/tests/storage/test_room_search.py index f7eaa83ec6..e530e59fa6 100644 --- a/tests/storage/test_room_search.py +++ b/tests/storage/test_room_search.py @@ -33,7 +33,7 @@ from synapse.storage.databases.main import DataStore from synapse.storage.databases.main.search import Phrase, SearchToken, _tokenize_query from synapse.storage.engines import PostgresEngine from synapse.storage.engines.sqlite import Sqlite3Engine -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import 
HomeserverTestCase, skip_unless from tests.utils import USE_POSTGRES_FOR_TESTS diff --git a/tests/storage/test_roommember.py b/tests/storage/test_roommember.py index fd489022a8..b8933d957b 100644 --- a/tests/storage/test_roommember.py +++ b/tests/storage/test_roommember.py @@ -33,7 +33,7 @@ from synapse.server import HomeServer from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary from synapse.storage.roommember import MemberSummary from synapse.types import UserID, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.server import TestHomeServer diff --git a/tests/storage/test_sliding_sync_tables.py b/tests/storage/test_sliding_sync_tables.py index 1a7a0b4c5c..f0df166bab 100644 --- a/tests/storage/test_sliding_sync_tables.py +++ b/tests/storage/test_sliding_sync_tables.py @@ -39,7 +39,7 @@ from synapse.storage.databases.main.events_bg_updates import ( ) from synapse.types import create_requester from synapse.types.storage import _BackgroundUpdates -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils.event_injection import create_event from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index cbf68b3032..bf6da71549 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -32,7 +32,7 @@ from synapse.events import EventBase from synapse.server import HomeServer from synapse.types import JsonDict, RoomID, StateMap, UserID from synapse.types.state import StateFilter -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_state_deletion.py b/tests/storage/test_state_deletion.py index 58cd118567..d4079c372e 100644 --- a/tests/storage/test_state_deletion.py +++ b/tests/storage/test_state_deletion.py @@ -20,7 +20,7 @@ from twisted.internet.testing import MemoryReactor from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils.event_injection import create_event from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_stream.py b/tests/storage/test_stream.py index ba2af1e044..0777c254c0 100644 --- a/tests/storage/test_stream.py +++ b/tests/storage/test_stream.py @@ -49,7 +49,7 @@ from synapse.types import ( UserID, create_requester, ) -from synapse.util import Clock +from synapse.util.clock import Clock from tests.test_utils.event_injection import create_event from tests.unittest import FederatingHomeserverTestCase, HomeserverTestCase diff --git a/tests/storage/test_thread_subscriptions.py b/tests/storage/test_thread_subscriptions.py index 2ce369247f..3f78308e45 100644 --- a/tests/storage/test_thread_subscriptions.py +++ b/tests/storage/test_thread_subscriptions.py @@ -24,7 +24,7 @@ from synapse.storage.databases.main.thread_subscriptions import ( ) from synapse.storage.engines.sqlite import Sqlite3Engine from synapse.types import EventOrderings -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/storage/test_transactions.py b/tests/storage/test_transactions.py index 7b2ac9fce1..a58c33aff6 100644 --- a/tests/storage/test_transactions.py +++ b/tests/storage/test_transactions.py @@ -22,7 +22,7 @@ from twisted.internet.testing import MemoryReactor from 
synapse.server import HomeServer from synapse.storage.databases.main.transactions import DestinationRetryTimings -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/storage/test_txn_limit.py b/tests/storage/test_txn_limit.py index 4722da5005..6bf4ce48f1 100644 --- a/tests/storage/test_txn_limit.py +++ b/tests/storage/test_txn_limit.py @@ -23,7 +23,7 @@ from twisted.internet.testing import MemoryReactor from synapse.server import HomeServer from synapse.storage.types import Cursor -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/storage/test_user_directory.py b/tests/storage/test_user_directory.py index 255de298f3..26e045135e 100644 --- a/tests/storage/test_user_directory.py +++ b/tests/storage/test_user_directory.py @@ -37,7 +37,7 @@ from synapse.storage.databases.main.user_directory import ( ) from synapse.storage.roommember import ProfileInfo from synapse.types import UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests.server import ThreadedMemoryReactorClock from tests.test_utils.event_injection import inject_member_event diff --git a/tests/storage/test_user_filters.py b/tests/storage/test_user_filters.py index 8d928aa55c..954a420cc0 100644 --- a/tests/storage/test_user_filters.py +++ b/tests/storage/test_user_filters.py @@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor from synapse.server import HomeServer from synapse.storage.database import LoggingTransaction from synapse.storage.engines import PostgresEngine -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest diff --git a/tests/test_mau.py b/tests/test_mau.py index 1000870aa9..fa98242bf7 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -30,7 +30,7 @@ from synapse.appservice import ApplicationService from synapse.rest.client import register, sync from synapse.server import HomeServer from synapse.types import JsonDict, UserID -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest from tests.unittest import override_config diff --git a/tests/test_phone_home.py b/tests/test_phone_home.py index 0b230ed0f5..ab21a5dde4 100644 --- a/tests/test_phone_home.py +++ b/tests/test_phone_home.py @@ -29,7 +29,7 @@ from synapse.rest import admin from synapse.rest.client import login, sync from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests.unittest import HomeserverTestCase diff --git a/tests/test_server.py b/tests/test_server.py index 0aa3584518..69efceafe8 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -36,8 +36,8 @@ from synapse.http.server import ( from synapse.http.site import SynapseRequest, SynapseSite from synapse.logging.context import make_deferred_yieldable from synapse.types import JsonDict -from synapse.util import Clock from synapse.util.cancellation import cancellable +from synapse.util.clock import Clock from tests import unittest from tests.http.server._base import test_disconnect diff --git a/tests/test_state.py b/tests/test_state.py index adb72b0730..16446c16bc 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -43,7 +43,7 @@ from synapse.events.snapshot import EventContext from synapse.state import StateHandler, StateResolutionHandler, _make_state_cache_entry from synapse.types import MutableStateMap, 
StateMap from synapse.types.state import StateFilter -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.macaroons import MacaroonGenerator from tests import unittest diff --git a/tests/test_terms_auth.py b/tests/test_terms_auth.py index 8065ae4b8a..bd5e6520ac 100644 --- a/tests/test_terms_auth.py +++ b/tests/test_terms_auth.py @@ -20,13 +20,12 @@ from unittest.mock import Mock -from twisted.internet.interfaces import IReactorTime -from twisted.internet.testing import MemoryReactor, MemoryReactorClock +from twisted.internet.testing import MemoryReactor from synapse.rest.client.register import register_servlets from synapse.server import HomeServer from synapse.types import JsonDict -from synapse.util import Clock +from synapse.util.clock import Clock from tests import unittest @@ -49,13 +48,7 @@ class TermsTestCase(unittest.HomeserverTestCase): ) return config - def prepare( - self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer - ) -> None: - # type-ignore: mypy-zope doesn't seem to recognise that MemoryReactorClock - # implements IReactorTime, via inheritance from twisted.internet.testing.Clock - self.clock: IReactorTime = MemoryReactorClock() # type: ignore[assignment] - self.hs_clock = Clock(self.clock) + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.url = "/_matrix/client/r0/register" self.registration_handler = Mock() self.auth_handler = Mock() diff --git a/tests/test_utils/oidc.py b/tests/test_utils/oidc.py index 5bf5e5cb0c..f2de8bded5 100644 --- a/tests/test_utils/oidc.py +++ b/tests/test_utils/oidc.py @@ -33,7 +33,7 @@ from twisted.web.http_headers import Headers from twisted.web.iweb import IResponse from synapse.server import HomeServer -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.stringutils import random_string from tests.test_utils import FakeResponse diff --git a/tests/test_visibility.py b/tests/test_visibility.py index 285e28e0f9..9a8cad6454 100644 --- a/tests/test_visibility.py +++ b/tests/test_visibility.py @@ -31,7 +31,7 @@ from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer from synapse.types import create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.visibility import filter_events_for_client, filter_events_for_server from tests import unittest diff --git a/tests/unittest.py b/tests/unittest.py index c9f8c48665..7d46f84496 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -81,7 +81,7 @@ from synapse.rest import RegisterServletsFunc from synapse.server import HomeServer from synapse.storage.keys import FetchKeyResult from synapse.types import JsonDict, Requester, UserID, create_requester -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.httpresourcetree import create_resource_tree from tests.server import ( diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py index 75bf50e644..bfcc6cd12f 100644 --- a/tests/util/test_expiring_cache.py +++ b/tests/util/test_expiring_cache.py @@ -21,8 +21,8 @@ from typing import List, cast -from synapse.util import Clock from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.clock import Clock from tests.utils import MockClock diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index af36e685d7..43912d05da 100644 --- a/tests/util/test_logcontext.py +++ 
b/tests/util/test_logcontext.py @@ -34,7 +34,7 @@ from synapse.logging.context import ( run_in_background, ) from synapse.types import ISynapseReactor -from synapse.util import Clock +from synapse.util.clock import Clock from .. import unittest diff --git a/tests/util/test_task_scheduler.py b/tests/util/test_task_scheduler.py index 2171f91b4d..e97f0ed611 100644 --- a/tests/util/test_task_scheduler.py +++ b/tests/util/test_task_scheduler.py @@ -25,7 +25,7 @@ from twisted.internet.testing import MemoryReactor from synapse.server import HomeServer from synapse.types import JsonMapping, ScheduledTask, TaskStatus -from synapse.util import Clock +from synapse.util.clock import Clock from synapse.util.task_scheduler import TaskScheduler from tests.replication._base import BaseMultiWorkerStreamTestCase From 9a88d25f8edbd11fd2c6cb509f1b6297dc1949c2 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 10:55:45 -0500 Subject: [PATCH 39/54] Fix `run_in_background` not being awaited properly, causing `LoggingContext` problems (#18937) Basically, this searches for any instance of `run_in_background(...)` and makes sure we wrap the deferred in `make_deferred_yieldable(...)` if we try to `await` the result, so that it follows the [Synapse logcontext rules](https://github.com/element-hq/synapse/blob/develop/docs/log_contexts.md). Turns out, we only have this problem in some tests (phew). Part of https://github.com/element-hq/synapse/issues/18905 --- changelog.d/18937.misc | 1 + tests/handlers/test_federation.py | 36 ++++++++++++------------------- 2 files changed, 15 insertions(+), 22 deletions(-) create mode 100644 changelog.d/18937.misc diff --git a/changelog.d/18937.misc b/changelog.d/18937.misc new file mode 100644 index 0000000000..ab1e9c4d47 --- /dev/null +++ b/changelog.d/18937.misc @@ -0,0 +1 @@ +Fix `run_in_background` not being awaited properly in some tests, causing `LoggingContext` problems.
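To make the pattern described above concrete, here is a minimal sketch of the logcontext-safe way to await a deferred produced by `run_in_background` (not taken from the patch; `do_work` and `caller` are illustrative placeholders):

```python
from synapse.logging.context import make_deferred_yieldable, run_in_background


async def do_work() -> str:
    # Illustrative placeholder for whatever the background task does.
    return "done"


async def caller() -> str:
    # Problematic: awaiting the deferred from `run_in_background` directly does
    # not reset/restore the logcontext correctly, so our current logcontext can
    # leak into the reactor while we wait:
    #
    #     result = await run_in_background(do_work)
    #
    # Correct: wrap it in `make_deferred_yieldable`, which resets the logcontext
    # to the sentinel while we are suspended and restores it when the result is
    # ready.
    result = await make_deferred_yieldable(run_in_background(do_work))
    return result
```

In the test changes below, the `LoggingContext`/`run_in_background` scaffolding is instead dropped entirely in favour of `self.get_success(...)` on the coroutine, which achieves the same thing.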
diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index a88ed6207c..77ea03e67d 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -38,7 +38,6 @@ from synapse.api.room_versions import RoomVersions from synapse.events import EventBase, make_event_from_dict from synapse.federation.federation_base import event_from_pdu_json from synapse.federation.federation_client import SendJoinResult -from synapse.logging.context import LoggingContext, run_in_background from synapse.rest import admin from synapse.rest.client import login, room from synapse.server import HomeServer @@ -149,11 +148,9 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): room_version, ) - with LoggingContext("send_rejected"): - d = run_in_background( - self.hs.get_federation_event_handler().on_receive_pdu, OTHER_SERVER, ev - ) - self.get_success(d) + self.get_success( + self.hs.get_federation_event_handler().on_receive_pdu(OTHER_SERVER, ev) + ) # that should have been rejected e = self.get_success(self.store.get_event(ev.event_id, allow_rejected=True)) @@ -203,11 +200,9 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): room_version, ) - with LoggingContext("send_rejected"): - d = run_in_background( - self.hs.get_federation_event_handler().on_receive_pdu, OTHER_SERVER, ev - ) - self.get_success(d) + self.get_success( + self.hs.get_federation_event_handler().on_receive_pdu(OTHER_SERVER, ev) + ) # that should have been rejected e = self.get_success(self.store.get_event(ev.event_id, allow_rejected=True)) @@ -323,15 +318,14 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): current_depth = 1 limit = 100 - with LoggingContext("receive_pdu"): - # Make sure backfill still works - d = run_in_background( - self.hs.get_federation_handler().maybe_backfill, + # Make sure backfill still works + self.get_success( + self.hs.get_federation_handler().maybe_backfill( room_id, current_depth, limit, ) - self.get_success(d) + ) def test_backfill_ignores_known_events(self) -> None: """ @@ -491,13 +485,11 @@ class FederationTestCase(unittest.FederatingHomeserverTestCase): # the auth code requires that a signature exists, but doesn't check that # signature... go figure. join_event.signatures[other_server] = {"x": "y"} - with LoggingContext("send_join"): - d = run_in_background( - self.hs.get_federation_event_handler().on_send_membership_event, - other_server, - join_event, + self.get_success( + self.hs.get_federation_event_handler().on_send_membership_event( + other_server, join_event ) - self.get_success(d) + ) # sanity-check: the room should show that the new user is a member r = self.get_success(self.store.get_partial_current_state_ids(room_id)) From 8d5d87fb0a548c8d99490b11f01bc2ec136f02bf Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 11:02:08 -0500 Subject: [PATCH 40/54] Fix `run_as_background_process` not being awaited properly, causing `LoggingContext` problems (#18938) Basically, this searches for any instance of `run_as_background_process(...)` and makes sure we wrap the deferred in `make_deferred_yieldable(...)` if we try to `await` the result, so that it follows the [Synapse logcontext rules](https://github.com/element-hq/synapse/blob/develop/docs/log_contexts.md).
Part of https://github.com/element-hq/synapse/issues/18905 --- changelog.d/18938.misc | 1 + synapse/handlers/delayed_events.py | 11 +++++++---- 2 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 changelog.d/18938.misc diff --git a/changelog.d/18938.misc b/changelog.d/18938.misc new file mode 100644 index 0000000000..ab31cb7299 --- /dev/null +++ b/changelog.d/18938.misc @@ -0,0 +1 @@ +Fix `run_as_background_process` not being awaited properly, causing `LoggingContext` problems in the experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140) delayed events implementation. diff --git a/synapse/handlers/delayed_events.py b/synapse/handlers/delayed_events.py index a6749801a5..fa24014438 100644 --- a/synapse/handlers/delayed_events.py +++ b/synapse/handlers/delayed_events.py @@ -21,9 +21,12 @@ from synapse.api.constants import EventTypes from synapse.api.errors import ShadowBanError from synapse.api.ratelimiting import Ratelimiter from synapse.config.workers import MAIN_PROCESS_INSTANCE_NAME +from synapse.logging.context import make_deferred_yieldable from synapse.logging.opentracing import set_tag from synapse.metrics import SERVER_NAME_LABEL, event_processing_positions -from synapse.metrics.background_process_metrics import run_as_background_process +from synapse.metrics.background_process_metrics import ( + run_as_background_process, +) from synapse.replication.http.delayed_events import ( ReplicationAddedDelayedEventRestServlet, ) @@ -328,7 +331,7 @@ class DelayedEventsHandler: requester, (requester.user.to_string(), requester.device_id), ) - await self._initialized_from_db + await make_deferred_yieldable(self._initialized_from_db) next_send_ts = await self._store.cancel_delayed_event( delay_id=delay_id, @@ -354,7 +357,7 @@ class DelayedEventsHandler: requester, (requester.user.to_string(), requester.device_id), ) - await self._initialized_from_db + await make_deferred_yieldable(self._initialized_from_db) next_send_ts = await self._store.restart_delayed_event( delay_id=delay_id, @@ -380,7 +383,7 @@ class DelayedEventsHandler: # Use standard request limiter for sending delayed events on-demand, # as an on-demand send is similar to sending a regular event. await self._request_ratelimiter.ratelimit(requester) - await self._initialized_from_db + await make_deferred_yieldable(self._initialized_from_db) event, next_send_ts = await self._store.process_target_delayed_event( delay_id=delay_id, From d05f44a1c6e87396ee738ea81d8cb6a4f51a9609 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 11:47:22 -0500 Subject: [PATCH 41/54] Introduce `Clock.add_system_event_trigger(...)` to include logcontext by default (#18945) Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. Background: > Ideally, nothing from the Synapse homeserver would be logged against the `sentinel` > logcontext as we want to know which server the logs came from. In practice, this is not > always the case yet especially outside of request handling. > > Global things outside of Synapse (e.g. Twisted reactor code) should run in the > `sentinel` logcontext. It's only when it calls into application code that a logcontext > gets activated. This means the reactor should be started in the `sentinel` logcontext, and any time an awaitable yields control back to the reactor, it should reset the logcontext to be the `sentinel` logcontext.
This is important to avoid leaking the > current logcontext to the reactor (which would then get picked up and associated with > the next thing the reactor does). > > *-- `docs/log_contexts.md` Also adds a lint to prefer `Clock.add_system_event_trigger(...)` over `reactor.addSystemEventTrigger(...)`. Part of https://github.com/element-hq/synapse/issues/18905 --- changelog.d/18945.misc | 1 + scripts-dev/mypy_synapse_plugin.py | 40 ++++++++++++++ synapse/app/_base.py | 8 +-- synapse/handlers/presence.py | 4 +- synapse/server.py | 2 +- synapse/storage/databases/main/client_ips.py | 2 +- synapse/storage/databases/main/lock.py | 2 +- synapse/util/clock.py | 56 ++++++++++++++++++++ 8 files changed, 107 insertions(+), 8 deletions(-) create mode 100644 changelog.d/18945.misc diff --git a/changelog.d/18945.misc b/changelog.d/18945.misc new file mode 100644 index 0000000000..e49077c8f2 --- /dev/null +++ b/changelog.d/18945.misc @@ -0,0 +1 @@ +Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. diff --git a/scripts-dev/mypy_synapse_plugin.py b/scripts-dev/mypy_synapse_plugin.py index 439a75fc7e..e170aabdae 100644 --- a/scripts-dev/mypy_synapse_plugin.py +++ b/scripts-dev/mypy_synapse_plugin.py @@ -74,6 +74,12 @@ PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode( category="synapse-reactor-clock", ) +PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER = ErrorCode( + "prefer-synapse-clock-add-system-event-trigger", + "`synapse.util.Clock.add_system_event_trigger` should be used instead of `reactor.addSystemEventTrigger`", + category="synapse-reactor-clock", +) + class Sentinel(enum.Enum): # defining a sentinel in this way allows mypy to correctly handle the @@ -242,6 +248,13 @@ class SynapsePlugin(Plugin): ): return check_call_when_running + if fullname in ( + "twisted.internet.interfaces.IReactorCore.addSystemEventTrigger", + "synapse.types.ISynapseThreadlessReactor.addSystemEventTrigger", + "synapse.types.ISynapseReactor.addSystemEventTrigger", + ): + return check_add_system_event_trigger + return None @@ -272,6 +285,33 @@ def check_call_when_running(ctx: MethodSigContext) -> CallableType: return signature +def check_add_system_event_trigger(ctx: MethodSigContext) -> CallableType: + """ + Ensure that `reactor.addSystemEventTrigger` callsites aren't used. + + `synapse.util.Clock.add_system_event_trigger` should always be used instead of + `reactor.addSystemEventTrigger`. + + Since the callback registered via `reactor.addSystemEventTrigger` is invoked by the + reactor, it will start out with the sentinel logcontext. `synapse.util.Clock` starts a + default logcontext as we want to know which server the logs came from. + + Args: + ctx: The `MethodSigContext` from mypy. + """ + signature: CallableType = ctx.default_signature + ctx.api.fail( + ( + "Expected all `reactor.addSystemEventTrigger` calls to use `synapse.util.Clock.add_system_event_trigger` instead. " + "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
+ ), + ctx.context, + code=PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER, + ) + + return signature + + def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None: """ Cross-check the list of Prometheus metric classes against the diff --git a/synapse/app/_base.py b/synapse/app/_base.py index 02c56496bf..1cf76d2a0b 100644 --- a/synapse/app/_base.py +++ b/synapse/app/_base.py @@ -518,7 +518,9 @@ async def start(hs: "HomeServer") -> None: # numbers of DNS requests don't starve out other users of the threadpool. resolver_threadpool = ThreadPool(name="gai_resolver") resolver_threadpool.start() - reactor.addSystemEventTrigger("during", "shutdown", resolver_threadpool.stop) + hs.get_clock().add_system_event_trigger( + "during", "shutdown", resolver_threadpool.stop + ) reactor.installNameResolver( GAIResolver(reactor, getThreadPool=lambda: resolver_threadpool) ) @@ -605,7 +607,7 @@ async def start(hs: "HomeServer") -> None: logger.info("Shutting down...") # Log when we start the shut down process. - hs.get_reactor().addSystemEventTrigger("before", "shutdown", log_shutdown) + hs.get_clock().add_system_event_trigger("before", "shutdown", log_shutdown) setup_sentry(hs) setup_sdnotify(hs) @@ -720,7 +722,7 @@ def setup_sdnotify(hs: "HomeServer") -> None: # we're not using systemd. sdnotify(b"READY=1\nMAINPID=%i" % (os.getpid(),)) - hs.get_reactor().addSystemEventTrigger( + hs.get_clock().add_system_event_trigger( "before", "shutdown", sdnotify, b"STOPPING=1" ) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index d7de20f884..c787c847bd 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -541,7 +541,7 @@ class WorkerPresenceHandler(BasePresenceHandler): self.send_stop_syncing, UPDATE_SYNCING_USERS_MS ) - hs.get_reactor().addSystemEventTrigger( + hs.get_clock().add_system_event_trigger( "before", "shutdown", run_as_background_process, @@ -842,7 +842,7 @@ class PresenceHandler(BasePresenceHandler): # have not yet been persisted self.unpersisted_users_changes: Set[str] = set() - hs.get_reactor().addSystemEventTrigger( + hs.get_clock().add_system_event_trigger( "before", "shutdown", run_as_background_process, diff --git a/synapse/server.py b/synapse/server.py index 00862eb137..118dee7051 100644 --- a/synapse/server.py +++ b/synapse/server.py @@ -1007,7 +1007,7 @@ class HomeServer(metaclass=abc.ABCMeta): ) media_threadpool.start() - self.get_reactor().addSystemEventTrigger( + self.get_clock().add_system_event_trigger( "during", "shutdown", media_threadpool.stop ) diff --git a/synapse/storage/databases/main/client_ips.py b/synapse/storage/databases/main/client_ips.py index cf7bc4ac69..c7a330cc83 100644 --- a/synapse/storage/databases/main/client_ips.py +++ b/synapse/storage/databases/main/client_ips.py @@ -455,7 +455,7 @@ class ClientIpWorkerStore(ClientIpBackgroundUpdateStore, MonthlyActiveUsersWorke self._client_ip_looper = self._clock.looping_call( self._update_client_ips_batch, 5 * 1000 ) - self.hs.get_reactor().addSystemEventTrigger( + self.hs.get_clock().add_system_event_trigger( "before", "shutdown", self._update_client_ips_batch ) diff --git a/synapse/storage/databases/main/lock.py b/synapse/storage/databases/main/lock.py index 27c3578a31..d0e4a91b59 100644 --- a/synapse/storage/databases/main/lock.py +++ b/synapse/storage/databases/main/lock.py @@ -99,7 +99,7 @@ class LockStore(SQLBaseStore): # lead to a race, as we may drop the lock while we are still processing. 
# However, a) it should be a small window, b) the lock is best effort # anyway and c) we want to really avoid leaking locks when we restart. - hs.get_reactor().addSystemEventTrigger( + hs.get_clock().add_system_event_trigger( "before", "shutdown", self._on_shutdown, diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 8d6ab007ba..043b06a108 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -206,3 +206,59 @@ class Clock: # We can ignore the lint here since this class is the one location # callWhenRunning should be called. self._reactor.callWhenRunning(wrapped_callback, *args, **kwargs) # type: ignore[prefer-synapse-clock-call-when-running] + + def add_system_event_trigger( + self, + phase: str, + event_type: str, + callback: Callable[P, object], + *args: P.args, + **kwargs: P.kwargs, + ) -> None: + """ + Add a function to be called when a system event occurs. + + Equivalent to `reactor.addSystemEventTrigger` (see that docstring for more + details), but ensures that the callback is run in a logging context. + + Args: + phase: a time to call the event -- either the string 'before', 'after', or + 'during', describing when to call it relative to the event's execution. + event_type: this is a string describing the type of event. + callback: Function to call. + *args: Positional arguments to pass to the function. + **kwargs: Keyword arguments to pass to the function. + """ + + def wrapped_callback(*args: Any, **kwargs: Any) -> None: + assert context.current_context() is context.SENTINEL_CONTEXT, ( + "Expected `add_system_event_trigger` callback from the reactor to start with the sentinel logcontext " + f"but saw {context.current_context()}. In other words, another task shouldn't have " + "leaked their logcontext to us." + ) + + # Because this is a callback from the reactor, we will be using the + # `sentinel` log context at this point. We want the function to log with + # some logcontext as we want to know which server the logs came from. + # + # We use `PreserveLoggingContext` to prevent our new `system_event` + # logcontext from finishing as soon as we exit this function, in case `callback` + # returns an awaitable/deferred which would continue running and may try to + # restore the `system_event` context when it's done (because it's trying to + # adhere to the Synapse logcontext rules). + # + # This also ensures that we return to the `sentinel` context when we exit + # this function and yield control back to the reactor to avoid leaking the + # current logcontext to the reactor (which would then get picked up and + # associated with the next thing the reactor does) + with context.PreserveLoggingContext(context.LoggingContext("system_event")): + # We use `run_in_background` to reset the logcontext after `callback` (or the + # awaitable returned by `callback`) completes to avoid leaking the current + # logcontext to the reactor + context.run_in_background(callback, *args, **kwargs) + + # We can ignore the lint here since this class is the one location + # `addSystemEventTrigger` should be called.
+ self._reactor.addSystemEventTrigger( + phase, event_type, wrapped_callback, *args, **kwargs + ) # type: ignore[prefer-synapse-clock-add-system-event-trigger] From e7d98d3429902919b0dc34462153caafb4114138 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 14:51:13 -0500 Subject: [PATCH 42/54] Remove `sentinel` logcontext in `Clock` utilities (`looping_call`, `looping_call_now`, `call_later`) (#18907) Part of https://github.com/element-hq/synapse/issues/18905 Lints for ensuring we use `Clock.call_later` instead of `reactor.callLater`, etc. are coming in https://github.com/element-hq/synapse/pull/18944 ### Testing strategy 1. Configure Synapse to log at the `DEBUG` level 1. Start Synapse: `poetry run synapse_homeserver --config-path homeserver.yaml` 1. Wait 10 seconds for the [database profiling loop](https://github.com/element-hq/synapse/blob/9cc400177822805e2a08d4d934daad6f3bc2a4df/synapse/storage/database.py#L711) to execute 1. Notice the logcontext being used for the `Total database time` log line Before (`sentinel`): ``` 2025-09-10 16:36:58,651 - synapse.storage.TIME - 707 - DEBUG - sentinel - Total database time: 0.646% {room_forgetter_stream_pos(2): 0.131%, reap_monthly_active_users(1): 0.083%, get_device_change_last_converted_pos(1): 0.078%} ``` After (`looping_call`): ``` 2025-09-10 16:36:58,651 - synapse.storage.TIME - 707 - DEBUG - looping_call - Total database time: 0.646% {room_forgetter_stream_pos(2): 0.131%, reap_monthly_active_users(1): 0.083%, get_device_change_last_converted_pos(1): 0.078%} ``` --- changelog.d/18907.misc | 1 + synapse/util/clock.py | 86 +++++++++++-- tests/push/test_email.py | 3 +- tests/util/test_logcontext.py | 226 +++++++++++++++++++++++++++++++--- 4 files changed, 286 insertions(+), 30 deletions(-) create mode 100644 changelog.d/18907.misc diff --git a/changelog.d/18907.misc b/changelog.d/18907.misc new file mode 100644 index 0000000000..4fca9ec8fb --- /dev/null +++ b/changelog.d/18907.misc @@ -0,0 +1 @@ +Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. diff --git a/synapse/util/clock.py b/synapse/util/clock.py index 043b06a108..d28dbac357 100644 --- a/synapse/util/clock.py +++ b/synapse/util/clock.py @@ -23,6 +23,7 @@ import attr from typing_extensions import ParamSpec from twisted.internet import defer, task +from twisted.internet.defer import Deferred from twisted.internet.interfaces import IDelayedCall from twisted.internet.task import LoopingCall @@ -46,6 +47,8 @@ class Clock: async def sleep(self, seconds: float) -> None: d: defer.Deferred[float] = defer.Deferred() + # Start the task in the `sentinel` logcontext, to avoid leaking the current context + # into the reactor once it finishes. with context.PreserveLoggingContext(): self._reactor.callLater(seconds, d.callback, seconds) await d @@ -74,8 +77,9 @@ this functionality thanks to this function being a thin wrapper around `twisted.internet.task.LoopingCall`. - Note that the function will be called with no logcontext, so if it is anything - other than trivial, you probably want to wrap it in run_as_background_process. + Note that the function will be called with a generic `looping_call` logcontext, so + if it is anything other than a trivial task, you probably want to wrap it in + `run_as_background_process` to give it a more specific label and track metrics. Args: f: The function to call repeatedly.
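The remaining `clock.py` hunks below apply the same wrapping pattern that PATCH 41 introduced for `add_system_event_trigger`. As a distilled, standalone sketch (the helper name `wrap_reactor_callback` and the `"reactor_callback"` label are illustrative, not part of the patch):

```python
from typing import Any, Callable

from synapse.logging import context


def wrap_reactor_callback(callback: Callable[..., object]) -> Callable[..., None]:
    """Sketch of the shared pattern: attribute reactor callbacks to a logcontext."""

    def wrapped(*args: Any, **kwargs: Any) -> None:
        # The reactor invokes us in the `sentinel` logcontext. Enter a named
        # logcontext so any logging is attributable, and keep it alive past this
        # frame with `PreserveLoggingContext` in case `callback` is async.
        with context.PreserveLoggingContext(context.LoggingContext("reactor_callback")):
            # `run_in_background` resets the logcontext to the sentinel when the
            # (possibly async) callback yields control back to the reactor.
            context.run_in_background(callback, *args, **kwargs)

    return wrapped
```

The `assert context.current_context() is context.SENTINEL_CONTEXT` check seen in each `wrapped_*` function in these hunks is the other half of the contract: nothing upstream may leak its logcontext into the reactor.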
@@ -97,8 +101,9 @@ As with `looping_call`: subsequent calls are not scheduled until after the Awaitable returned by a previous call has finished. - Also as with `looping_call`: the function is called with no logcontext and - you probably want to wrap it in `run_as_background_process`. + Note that the function will be called with a generic `looping_call` logcontext, so + if it is anything other than a trivial task, you probably want to wrap it in + `run_as_background_process` to give it a more specific label and track metrics. Args: f: The function to call repeatedly. @@ -117,9 +122,43 @@ **kwargs: P.kwargs, ) -> LoopingCall: """Common functionality for `looping_call` and `looping_call_now`""" - call = task.LoopingCall(f, *args, **kwargs) + + def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred: + assert context.current_context() is context.SENTINEL_CONTEXT, ( + "Expected `looping_call` callback from the reactor to start with the sentinel logcontext " + f"but saw {context.current_context()}. In other words, another task shouldn't have " + "leaked their logcontext to us." + ) + + # Because this is a callback from the reactor, we will be using the + # `sentinel` log context at this point. We want the function to log with + # some logcontext as we want to know which server the logs came from. + # + # We use `PreserveLoggingContext` to prevent our new `looping_call` + # logcontext from finishing as soon as we exit this function, in case `f` + # returns an awaitable/deferred which would continue running and may try to + # restore the `looping_call` context when it's done (because it's trying to + # adhere to the Synapse logcontext rules). + # + # This also ensures that we return to the `sentinel` context when we exit + # this function and yield control back to the reactor to avoid leaking the + # current logcontext to the reactor (which would then get picked up and + # associated with the next thing the reactor does) + with context.PreserveLoggingContext(context.LoggingContext("looping_call")): + # We use `run_in_background` to reset the logcontext after `f` (or the + # awaitable returned by `f`) completes to avoid leaking the current + # logcontext to the reactor + return context.run_in_background(f, *args, **kwargs) + + call = task.LoopingCall(wrapped_f, *args, **kwargs) call.clock = self._reactor - d = call.start(msec / 1000.0, now=now) + # If `now=True`, the function will be called here immediately so we need to be + # in the sentinel context now. + # + # We want to start the task in the `sentinel` logcontext, to avoid leaking the + # current context into the reactor after the function finishes. + with context.PreserveLoggingContext(): + d = call.start(msec / 1000.0, now=now) d.addErrback(log_failure, "Looping call died", consumeErrors=False) return call @@ -128,8 +167,9 @@ ) -> IDelayedCall: """Call something later - Note that the function will be called with no logcontext, so if it is anything - other than trivial, you probably want to wrap it in run_as_background_process. + Note that the function will be called with a generic `call_later` logcontext, so + if it is anything other than a trivial task, you probably want to wrap it in + `run_as_background_process` to give it a more specific label and track metrics. Args: delay: How long to wait in seconds.
@@ -139,11 +179,33 @@ """ def wrapped_callback(*args: Any, **kwargs: Any) -> None: - with context.PreserveLoggingContext(): - callback(*args, **kwargs) + assert context.current_context() is context.SENTINEL_CONTEXT, ( + "Expected `call_later` callback from the reactor to start with the sentinel logcontext " + f"but saw {context.current_context()}. In other words, another task shouldn't have " + "leaked their logcontext to us." + ) - with context.PreserveLoggingContext(): - return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) + # Because this is a callback from the reactor, we will be using the + # `sentinel` log context at this point. We want the function to log with + # some logcontext as we want to know which server the logs came from. + # + # We use `PreserveLoggingContext` to prevent our new `call_later` + # logcontext from finishing as soon as we exit this function, in case `callback` + # returns an awaitable/deferred which would continue running and may try to + # restore the `call_later` context when it's done (because it's trying to + # adhere to the Synapse logcontext rules). + # + # This also ensures that we return to the `sentinel` context when we exit + # this function and yield control back to the reactor to avoid leaking the + # current logcontext to the reactor (which would then get picked up and + # associated with the next thing the reactor does) + with context.PreserveLoggingContext(context.LoggingContext("call_later")): + # We use `run_in_background` to reset the logcontext after `callback` (or the + # awaitable returned by `callback`) completes to avoid leaking the current + # logcontext to the reactor + context.run_in_background(callback, *args, **kwargs) + + return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs) def cancel_call_later(self, timer: IDelayedCall, ignore_errs: bool = False) -> None: try: diff --git a/tests/push/test_email.py b/tests/push/test_email.py index 80a22044dd..26819e2d3c 100644 --- a/tests/push/test_email.py +++ b/tests/push/test_email.py @@ -31,6 +31,7 @@ from twisted.internet.testing import MemoryReactor import synapse.rest.admin from synapse.api.errors import Codes, SynapseError +from synapse.logging.context import make_deferred_yieldable from synapse.push.emailpusher import EmailPusher from synapse.rest.client import login, room from synapse.rest.synapse.client.unsubscribe import UnsubscribeResource @@ -89,7 +90,7 @@ class EmailPusherTests(HomeserverTestCase): # This mocks out synapse.reactor.send_email._sendmail. d: Deferred = Deferred() self.email_attempts.append((d, args, kwargs)) - return d + return make_deferred_yieldable(d) hs.get_send_email_handler()._sendmail = sendmail # type: ignore[assignment] diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 43912d05da..0ecf712bab 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -19,6 +19,7 @@ # # +import logging from typing import Callable, Generator, cast import twisted.python.failure @@ -28,6 +29,7 @@ from synapse.logging.context import ( SENTINEL_CONTEXT, LoggingContext, PreserveLoggingContext, + _Sentinel, current_context, make_deferred_yieldable, nested_logging_context, @@ -36,7 +38,10 @@ from synapse.types import ISynapseReactor from synapse.util.clock import Clock -from ..
import unittest +from tests import unittest +from tests.unittest import logcontext_clean + +logger = logging.getLogger(__name__) reactor = cast(ISynapseReactor, _reactor) @@ -44,33 +49,212 @@ reactor = cast(ISynapseReactor, _reactor) class LoggingContextTestCase(unittest.TestCase): def _check_test_key(self, value: str) -> None: context = current_context() - assert isinstance(context, LoggingContext) - self.assertEqual(context.name, value) + assert isinstance(context, LoggingContext) or isinstance(context, _Sentinel), ( + f"Expected LoggingContext({value}) but saw {context}" + ) + self.assertEqual( + str(context), value, f"Expected LoggingContext({value}) but saw {context}" + ) + @logcontext_clean def test_with_context(self) -> None: with LoggingContext("test"): self._check_test_key("test") + @logcontext_clean async def test_sleep(self) -> None: + """ + Test `Clock.sleep` + """ clock = Clock(reactor) + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + async def competing_callback() -> None: - with LoggingContext("competing"): - await clock.sleep(0) - self._check_test_key("competing") + nonlocal callback_finished + try: + # A callback from the reactor should start with the sentinel context. In + # other words, another task shouldn't have leaked their context to us. + self._check_test_key("sentinel") + + with LoggingContext("competing"): + await clock.sleep(0) + self._check_test_key("competing") + + self._check_test_key("sentinel") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. + callback_finished = True reactor.callLater(0, lambda: defer.ensureDeferred(competing_callback())) - with LoggingContext("one"): + with LoggingContext("foo"): await clock.sleep(0) - self._check_test_key("one") + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") + + @logcontext_clean + async def test_looping_call(self) -> None: + """ + Test `Clock.looping_call` + """ + clock = Clock(reactor) + + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + + async def competing_callback() -> None: + nonlocal callback_finished + try: + # A `looping_call` callback should have *some* logcontext since we should know + # which server spawned this loop and which server the logs came from. + self._check_test_key("looping_call") + + with LoggingContext("competing"): + await clock.sleep(0) + self._check_test_key("competing") + + self._check_test_key("looping_call") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. 
+ callback_finished = True + + with LoggingContext("foo"): + lc = clock.looping_call( + lambda: defer.ensureDeferred(competing_callback()), 0 + ) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") + + # Stop the looping call to prevent "Reactor was unclean" errors + lc.stop() + + @logcontext_clean + async def test_looping_call_now(self) -> None: + """ + Test `Clock.looping_call_now` + """ + clock = Clock(reactor) + + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + + async def competing_callback() -> None: + nonlocal callback_finished + try: + # A `looping_call` callback should have *some* logcontext since we should know + # which server spawned this loop and which server the logs came from. + self._check_test_key("looping_call") + + with LoggingContext("competing"): + await clock.sleep(0) + self._check_test_key("competing") + + self._check_test_key("looping_call") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. + callback_finished = True + + with LoggingContext("foo"): + lc = clock.looping_call_now( + lambda: defer.ensureDeferred(competing_callback()), 0 + ) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") + + # Stop the looping call to prevent "Reactor was unclean" errors + lc.stop() + + @logcontext_clean + async def test_call_later(self) -> None: + """ + Test `Clock.call_later` + """ + clock = Clock(reactor) + + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + + async def competing_callback() -> None: + nonlocal callback_finished + try: + # A `call_later` callback should have *some* logcontext since we should know + # which server spawned this loop and which server the logs came from. + self._check_test_key("call_later") + + with LoggingContext("competing"): + await clock.sleep(0) + self._check_test_key("competing") + + self._check_test_key("call_later") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. + callback_finished = True + + with LoggingContext("foo"): + clock.call_later(0, lambda: defer.ensureDeferred(competing_callback())) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") def _test_run_in_background(self, function: Callable[[], object]) -> defer.Deferred: sentinel_context = current_context() callback_completed = False - with LoggingContext("one"): + with LoggingContext("foo"): # fire off function, but don't wait on it. 
d2 = run_in_background(function) @@ -81,7 +265,7 @@ class LoggingContextTestCase(unittest.TestCase): d2.addCallback(cb) - self._check_test_key("one") + self._check_test_key("foo") # now wait for the function under test to have run, and check that # the logcontext is left in a sane state. @@ -105,12 +289,14 @@ class LoggingContextTestCase(unittest.TestCase): # test is done once d2 finishes return d2 + @logcontext_clean def test_run_in_background_with_blocking_fn(self) -> defer.Deferred: async def blocking_function() -> None: await Clock(reactor).sleep(0) return self._test_run_in_background(blocking_function) + @logcontext_clean def test_run_in_background_with_non_blocking_fn(self) -> defer.Deferred: @defer.inlineCallbacks def nonblocking_function() -> Generator["defer.Deferred[object]", object, None]: @@ -119,6 +305,7 @@ class LoggingContextTestCase(unittest.TestCase): return self._test_run_in_background(nonblocking_function) + @logcontext_clean def test_run_in_background_with_chained_deferred(self) -> defer.Deferred: # a function which returns a deferred which looks like it has been # called, but is actually paused @@ -127,22 +314,25 @@ class LoggingContextTestCase(unittest.TestCase): return self._test_run_in_background(testfunc) + @logcontext_clean def test_run_in_background_with_coroutine(self) -> defer.Deferred: async def testfunc() -> None: - self._check_test_key("one") + self._check_test_key("foo") d = defer.ensureDeferred(Clock(reactor).sleep(0)) self.assertIs(current_context(), SENTINEL_CONTEXT) await d - self._check_test_key("one") + self._check_test_key("foo") return self._test_run_in_background(testfunc) + @logcontext_clean def test_run_in_background_with_nonblocking_coroutine(self) -> defer.Deferred: async def testfunc() -> None: - self._check_test_key("one") + self._check_test_key("foo") return self._test_run_in_background(testfunc) + @logcontext_clean @defer.inlineCallbacks def test_make_deferred_yieldable( self, @@ -156,7 +346,7 @@ class LoggingContextTestCase(unittest.TestCase): sentinel_context = current_context() - with LoggingContext("one"): + with LoggingContext("foo"): d1 = make_deferred_yieldable(blocking_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(current_context(), sentinel_context) @@ -164,15 +354,16 @@ class LoggingContextTestCase(unittest.TestCase): yield d1 # now it should be restored - self._check_test_key("one") + self._check_test_key("foo") + @logcontext_clean @defer.inlineCallbacks def test_make_deferred_yieldable_with_chained_deferreds( self, ) -> Generator["defer.Deferred[object]", object, None]: sentinel_context = current_context() - with LoggingContext("one"): + with LoggingContext("foo"): d1 = make_deferred_yieldable(_chained_deferred_function()) # make sure that the context was reset by make_deferred_yieldable self.assertIs(current_context(), sentinel_context) @@ -180,8 +371,9 @@ class LoggingContextTestCase(unittest.TestCase): yield d1 # now it should be restored - self._check_test_key("one") + self._check_test_key("foo") + @logcontext_clean def test_nested_logging_context(self) -> None: with LoggingContext("foo"): nested_context = nested_logging_context(suffix="bar") From 5be7679dd9dec79b72a2c541ab61efbbd79e1d62 Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Mon, 22 Sep 2025 14:53:02 -0500 Subject: [PATCH 43/54] Split loading config vs homeserver `setup` (#18933) This allows us to get access to `server_name` so we can use it when creating the `LoggingContext("main")` in the future (pre-requisite for 
https://github.com/element-hq/synapse/pull/18868). This also allows us more flexibility to parse config however we want and set up a Synapse homeserver, like what we do in [Synapse Pro for Small Hosts](https://github.com/element-hq/synapse-small-hosts). Split out from https://github.com/element-hq/synapse/pull/18868 --- changelog.d/18933.misc | 1 + synapse/app/admin_cmd.py | 17 +++++++++----- synapse/app/appservice.py | 7 +++--- synapse/app/client_reader.py | 7 +++--- synapse/app/event_creator.py | 7 +++--- synapse/app/federation_reader.py | 7 +++--- synapse/app/federation_sender.py | 7 +++--- synapse/app/frontend_proxy.py | 7 +++--- synapse/app/generic_worker.py | 22 +++++++++++++++---- synapse/app/homeserver.py | 28 ++++++++++++++++++++---- synapse/app/media_repository.py | 7 +++--- synapse/app/pusher.py | 7 +++--- synapse/app/synchrotron.py | 7 +++--- synapse/app/user_dir.py | 7 +++--- synapse/config/_base.py | 8 +++++-- tests/app/test_homeserver_start.py | 5 ++++- tests/config/test_registration_config.py | 5 ++++- 17 files changed, 108 insertions(+), 48 deletions(-) create mode 100644 changelog.d/18933.misc diff --git a/changelog.d/18933.misc b/changelog.d/18933.misc new file mode 100644 index 0000000000..3308d367e7 --- /dev/null +++ b/changelog.d/18933.misc @@ -0,0 +1 @@ +Split loading config from homeserver `setup`. diff --git a/synapse/app/admin_cmd.py b/synapse/app/admin_cmd.py index 7c9b94c65c..c0c594577e 100644 --- a/synapse/app/admin_cmd.py +++ b/synapse/app/admin_cmd.py @@ -24,7 +24,7 @@ import logging import os import sys import tempfile -from typing import List, Mapping, Optional, Sequence +from typing import List, Mapping, Optional, Sequence, Tuple from twisted.internet import defer, task @@ -256,7 +256,7 @@ class FileExfiltrationWriter(ExfiltrationWriter): return self.base_directory -def start(config_options: List[str]) -> None: +def load_config(argv_options: List[str]) -> Tuple[HomeServerConfig, argparse.Namespace]: parser = argparse.ArgumentParser(description="Synapse Admin Command") HomeServerConfig.add_arguments_to_parser(parser) @@ -282,11 +282,15 @@ def start(config_options: List[str]) -> None: export_data_parser.set_defaults(func=export_data_command) try: - config, args = HomeServerConfig.load_config_with_parser(parser, config_options) + config, args = HomeServerConfig.load_config_with_parser(parser, argv_options) except ConfigError as e: sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) + return config, args + + +def start(config: HomeServerConfig, args: argparse.Namespace) -> None: if config.worker.worker_app is not None: assert config.worker.worker_app == "synapse.app.admin_cmd" @@ -325,7 +329,7 @@ def start(config_options: List[str]) -> None: # command.
async def run() -> None: - with LoggingContext("command"): + with LoggingContext(name="command"): await _base.start(ss) await args.func(ss, args) @@ -337,5 +341,6 @@ def start(config_options: List[str]) -> None: if __name__ == "__main__": - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config, args = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config, args) diff --git a/synapse/app/appservice.py b/synapse/app/appservice.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/appservice.py +++ b/synapse/app/appservice.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/client_reader.py b/synapse/app/client_reader.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/client_reader.py +++ b/synapse/app/client_reader.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/event_creator.py b/synapse/app/event_creator.py index f0d41ed22c..351cf93b76 100644 --- a/synapse/app/event_creator.py +++ b/synapse/app/event_creator.py @@ -20,13 +20,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/federation_reader.py b/synapse/app/federation_reader.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/federation_reader.py +++ b/synapse/app/federation_reader.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/frontend_proxy.py b/synapse/app/frontend_proxy.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/frontend_proxy.py +++ b/synapse/app/frontend_proxy.py @@ -21,13 +21,14 @@ import sys -from 
synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index 0849f25059..afc6b85eae 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -310,13 +310,26 @@ class GenericWorkerServer(HomeServer): self.get_replication_command_handler().start_replication(self) -def start(config_options: List[str]) -> None: +def load_config(argv_options: List[str]) -> HomeServerConfig: + """ + Parse the commandline and config files (does not generate config) + + Args: + argv_options: The options passed to Synapse. Usually `sys.argv[1:]`. + + Returns: + Config object. + """ try: - config = HomeServerConfig.load_config("Synapse worker", config_options) + config = HomeServerConfig.load_config("Synapse worker", argv_options) except ConfigError as e: sys.stderr.write("\n" + str(e) + "\n") sys.exit(1) + return config + + +def start(config: HomeServerConfig) -> None: # For backwards compatibility let any of the old app names. assert config.worker.worker_app in ( "synapse.app.appservice", @@ -368,8 +381,9 @@ def start(config_options: List[str]) -> None: def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index 54c41c0c28..163f7c70ab 100644 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -308,17 +308,21 @@ class SynapseHomeServer(HomeServer): logger.warning("Unrecognized listener type: %s", listener.type) -def setup(config_options: List[str]) -> SynapseHomeServer: +def load_or_generate_config(argv_options: List[str]) -> HomeServerConfig: """ + Parse the commandline and config files + + Supports generation of config files, so is used for the main homeserver app. + Args: - config_options_options: The options passed to Synapse. Usually `sys.argv[1:]`. + argv_options: The options passed to Synapse. Usually `sys.argv[1:]`. Returns: A homeserver instance. """ try: config = HomeServerConfig.load_or_generate_config( - "Synapse Homeserver", config_options + "Synapse Homeserver", argv_options ) except ConfigError as e: sys.stderr.write("\n") @@ -332,6 +336,20 @@ def setup(config_options: List[str]) -> SynapseHomeServer: # generating config files and shouldn't try to continue. sys.exit(0) + return config + + +def setup(config: HomeServerConfig) -> SynapseHomeServer: + """ + Create and setup a Synapse homeserver instance given a configuration. + + Args: + config: The configuration for the homeserver. + + Returns: + A homeserver instance. + """ + if config.worker.worker_app: raise ConfigError( "You have specified `worker_app` in the config but are attempting to start a non-worker " @@ -405,10 +423,12 @@ def run(hs: HomeServer) -> None: def main() -> None: + homeserver_config = load_or_generate_config(sys.argv[1:]) + with LoggingContext("main"): # check base requirements check_requirements() - hs = setup(sys.argv[1:]) + hs = setup(homeserver_config) # redirect stdio to the logs, if configured. 
if not hs.config.logging.no_redirect_stdio: diff --git a/synapse/app/media_repository.py b/synapse/app/media_repository.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/media_repository.py +++ b/synapse/app/media_repository.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/pusher.py b/synapse/app/pusher.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/pusher.py +++ b/synapse/app/pusher.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/synchrotron.py b/synapse/app/synchrotron.py index c85ce2869a..95a253dbb9 100644 --- a/synapse/app/synchrotron.py +++ b/synapse/app/synchrotron.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/app/user_dir.py b/synapse/app/user_dir.py index dadcc48779..b6385381b4 100644 --- a/synapse/app/user_dir.py +++ b/synapse/app/user_dir.py @@ -21,13 +21,14 @@ import sys -from synapse.app.generic_worker import start +from synapse.app.generic_worker import load_config, start from synapse.util.logcontext import LoggingContext def main() -> None: - with LoggingContext("main"): - start(sys.argv[1:]) + homeserver_config = load_config(sys.argv[1:]) + with LoggingContext(name="main"): + start(homeserver_config) if __name__ == "__main__": diff --git a/synapse/config/_base.py b/synapse/config/_base.py index 191253ddda..f3b6e9f887 100644 --- a/synapse/config/_base.py +++ b/synapse/config/_base.py @@ -646,12 +646,16 @@ class RootConfig: @classmethod def load_or_generate_config( - cls: Type[TRootConfig], description: str, argv: List[str] + cls: Type[TRootConfig], description: str, argv_options: List[str] ) -> Optional[TRootConfig]: """Parse the commandline and config files Supports generation of config files, so is used for the main homeserver app. + Args: + description: TODO + argv_options: The options passed to Synapse. Usually `sys.argv[1:]`. 
+ Returns: Config object, or None if --generate-config or --generate-keys was set """ @@ -747,7 +751,7 @@ class RootConfig: ) cls.invoke_all_static("add_arguments", parser) - config_args = parser.parse_args(argv) + config_args = parser.parse_args(argv_options) config_files = find_config_files(search_paths=config_args.config_path) diff --git a/tests/app/test_homeserver_start.py b/tests/app/test_homeserver_start.py index 9dc20800b2..0d257c98aa 100644 --- a/tests/app/test_homeserver_start.py +++ b/tests/app/test_homeserver_start.py @@ -37,4 +37,7 @@ class HomeserverAppStartTestCase(ConfigFileTestCase): self.add_lines_to_config([" main:", " host: 127.0.0.1", " port: 1234"]) # Ensure that starting master process with worker config raises an exception with self.assertRaises(ConfigError): - synapse.app.homeserver.setup(["-c", self.config_file]) + homeserver_config = synapse.app.homeserver.load_or_generate_config( + ["-c", self.config_file] + ) + synapse.app.homeserver.setup(homeserver_config) diff --git a/tests/config/test_registration_config.py b/tests/config/test_registration_config.py index 7fd6df2f93..a8520c91d1 100644 --- a/tests/config/test_registration_config.py +++ b/tests/config/test_registration_config.py @@ -112,4 +112,7 @@ class RegistrationConfigTestCase(ConfigFileTestCase): # Test that allowing open registration without verification raises an error with self.assertRaises(ConfigError): - synapse.app.homeserver.setup(["-c", self.config_file]) + homeserver_config = synapse.app.homeserver.load_or_generate_config( + ["-c", self.config_file] + ) + synapse.app.homeserver.setup(homeserver_config) From ddc7627b2255abd6a53806405d5c8fcdf0630477 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 23 Sep 2025 09:47:30 +0100 Subject: [PATCH 44/54] Fix performance regression related to delayed events processing (#18926) --- changelog.d/18926.bugfix | 1 + synapse/handlers/delayed_events.py | 108 ++++++++++++++++-- synapse/handlers/presence.py | 2 +- synapse/handlers/sliding_sync/room_lists.py | 8 +- synapse/storage/controllers/state.py | 2 + .../storage/databases/main/delayed_events.py | 15 +++ .../storage/databases/main/events_worker.py | 33 ++++++ .../storage/databases/main/state_deltas.py | 2 + synapse/util/sentinel.py | 21 ++++ tests/storage/test_events.py | 73 +++++++++++- 10 files changed, 245 insertions(+), 20 deletions(-) create mode 100644 changelog.d/18926.bugfix create mode 100644 synapse/util/sentinel.py diff --git a/changelog.d/18926.bugfix b/changelog.d/18926.bugfix new file mode 100644 index 0000000000..c450313764 --- /dev/null +++ b/changelog.d/18926.bugfix @@ -0,0 +1 @@ +Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. 
\ No newline at end of file diff --git a/synapse/handlers/delayed_events.py b/synapse/handlers/delayed_events.py index fa24014438..d47e3fd263 100644 --- a/synapse/handlers/delayed_events.py +++ b/synapse/handlers/delayed_events.py @@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, List, Optional, Set, Tuple from twisted.internet.interfaces import IDelayedCall from synapse.api.constants import EventTypes -from synapse.api.errors import ShadowBanError +from synapse.api.errors import ShadowBanError, SynapseError from synapse.api.ratelimiting import Ratelimiter from synapse.config.workers import MAIN_PROCESS_INSTANCE_NAME from synapse.logging.context import make_deferred_yieldable @@ -48,6 +48,7 @@ from synapse.types import ( ) from synapse.util.events import generate_fake_event_id from synapse.util.metrics import Measure +from synapse.util.sentinel import Sentinel if TYPE_CHECKING: from synapse.server import HomeServer @@ -149,10 +150,37 @@ class DelayedEventsHandler: ) async def _unsafe_process_new_event(self) -> None: + # We purposefully fetch the current max room stream ordering before + doing anything else, as it could increment during processing of state + deltas. We want to avoid updating `delayed_events_stream_pos` past + the stream ordering of the state deltas we've processed. Otherwise + we'll leave gaps in our processing. + room_max_stream_ordering = self._store.get_room_max_stream_ordering() + + # Check that there are actually any delayed events to process. If not, bail early. + delayed_events_count = await self._store.get_count_of_delayed_events() + if delayed_events_count == 0: + # There are no delayed events to process. Update the + # `delayed_events_stream_pos` to the latest `events` stream pos and + # exit early. + self._event_pos = room_max_stream_ordering + + logger.debug( + "No delayed events to process. Updating `delayed_events_stream_pos` to max stream ordering (%s)", + room_max_stream_ordering, + ) + + await self._store.update_delayed_events_stream_pos(room_max_stream_ordering) + + event_processing_positions.labels( + name="delayed_events", **{SERVER_NAME_LABEL: self.server_name} + ).set(room_max_stream_ordering) + + return + # If self._event_pos is None then means we haven't fetched it from the DB yet if self._event_pos is None: self._event_pos = await self._store.get_delayed_events_stream_pos() - room_max_stream_ordering = self._store.get_room_max_stream_ordering() if self._event_pos > room_max_stream_ordering: # apparently, we've processed more events than exist in the database! # this can happen if events are removed with history purge or similar. @@ -170,7 +198,7 @@ class DelayedEventsHandler: self._clock, name="delayed_events_delta", server_name=self.server_name ): room_max_stream_ordering = self._store.get_room_max_stream_ordering() - if self._event_pos == room_max_stream_ordering: + if self._event_pos >= room_max_stream_ordering: return logger.debug( @@ -205,23 +233,81 @@ class DelayedEventsHandler: Process current state deltas to cancel other users' pending delayed events that target the same state. """ + # Get the senders of each delta's state event (as sender information is + # not currently stored in the `current_state_deltas` table). + event_id_and_sender_dict = await self._store.get_senders_for_event_ids( + [delta.event_id for delta in deltas if delta.event_id is not None] + ) + + # Note: No need to batch as `get_current_state_deltas` will only ever + # return 100 rows at a time.
for delta in deltas: + logger.debug( + "Handling: %r %r, %s", delta.event_type, delta.state_key, delta.event_id + ) + + # `delta.event_id` and `delta.sender` can be `None` in a few valid + # cases (see the docstring of + # `get_current_state_delta_membership_changes_for_user` for details). if delta.event_id is None: - logger.debug( - "Not handling delta for deleted state: %r %r", + # TODO: Differentiate between this being caused by a state reset + # which removed a user from a room, or the homeserver + # purposefully having left the room. We can do so by checking + # whether there are any local memberships still left in the + # room. If so, then this is the result of a state reset. + # + # If it is a state reset, we should avoid cancelling new, + # delayed state events due to old state resurfacing. So we + # should skip and log a warning in this case. + # + # If the homeserver has left the room, then we should cancel all + # delayed state events intended for this room, as there is no + # need to try and send a delayed event into a room we've left. + logger.warning( + "Skipping state delta (%r, %r) without corresponding event ID. " + "This can happen if the homeserver has left the room (in which " + "case this can be ignored), or if there has been a state reset " + "which has caused the sender to be kicked out of the room", delta.event_type, delta.state_key, ) continue - logger.debug( - "Handling: %r %r, %s", delta.event_type, delta.state_key, delta.event_id + sender_str = event_id_and_sender_dict.get( + delta.event_id, Sentinel.UNSET_SENTINEL ) - - event = await self._store.get_event(delta.event_id, allow_none=True) - if not event: + if sender_str is None: + # An event exists, but the `sender` field was "null" and Synapse + # incorrectly accepted the event. This is not expected. + logger.error( + "Skipping state delta with event ID '%s' as 'sender' was None. " + "This is unexpected - please report it as a bug!", + delta.event_id, + ) + continue + if sender_str is Sentinel.UNSET_SENTINEL: + # We have an event ID, but the event was not found in the + # datastore. This can happen if a room, or its history, is + # purged. State deltas related to the room are left behind, but + # the event no longer exists. + # + # As we cannot get the sender of this event, we can't calculate + # whether to cancel delayed events related to this one. So we skip. 
+ logger.debug( + "Skipping state delta with event ID '%s' - the room, or its history, may have been purged", + delta.event_id, + ) + continue + + try: + sender = UserID.from_string(sender_str) + except SynapseError as e: + logger.error( + "Skipping state delta with Matrix User ID '%s' that failed to parse: %s", + sender_str, + e, + ) continue - sender = UserID.from_string(event.sender) next_send_ts = await self._store.cancel_delayed_state_events( room_id=delta.room_id, diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index c787c847bd..63cdf66198 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -1548,7 +1548,7 @@ class PresenceHandler(BasePresenceHandler): self.clock, name="presence_delta", server_name=self.server_name ): room_max_stream_ordering = self.store.get_room_max_stream_ordering() - if self._event_pos == room_max_stream_ordering: + if self._event_pos >= room_max_stream_ordering: return logger.debug( diff --git a/synapse/handlers/sliding_sync/room_lists.py b/synapse/handlers/sliding_sync/room_lists.py index e196199f8a..19116590f7 100644 --- a/synapse/handlers/sliding_sync/room_lists.py +++ b/synapse/handlers/sliding_sync/room_lists.py @@ -13,7 +13,6 @@ # -import enum import logging from itertools import chain from typing import ( @@ -75,6 +74,7 @@ from synapse.types.handlers.sliding_sync import ( ) from synapse.types.state import StateFilter from synapse.util import MutableOverlayMapping +from synapse.util.sentinel import Sentinel if TYPE_CHECKING: from synapse.server import HomeServer @@ -83,12 +83,6 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -class Sentinel(enum.Enum): - # defining a sentinel in this way allows mypy to correctly handle the - # type of a dictionary lookup and subsequent type narrowing. - UNSET_SENTINEL = object() - - # Helper definition for the types that we might return. We do this to avoid # copying data between types (which can be expensive for many rooms). RoomsForUserType = Union[RoomsForUserStateReset, RoomsForUser, RoomsForUserSlidingSync] diff --git a/synapse/storage/controllers/state.py b/synapse/storage/controllers/state.py index 8997f4526f..ad90a1be13 100644 --- a/synapse/storage/controllers/state.py +++ b/synapse/storage/controllers/state.py @@ -682,6 +682,8 @@ class StateStorageController: - the stream id which these results go up to - list of current_state_delta_stream rows. If it is empty, we are up to date. + + A maximum of 100 rows will be returned. """ # FIXME(faster_joins): what do we do here? 
# https://github.com/matrix-org/synapse/issues/13008 diff --git a/synapse/storage/databases/main/delayed_events.py b/synapse/storage/databases/main/delayed_events.py index 8428ee6ed4..78f55b983f 100644 --- a/synapse/storage/databases/main/delayed_events.py +++ b/synapse/storage/databases/main/delayed_events.py @@ -183,6 +183,21 @@ class DelayedEventsStore(SQLBaseStore): "restart_delayed_event", restart_delayed_event_txn ) + async def get_count_of_delayed_events(self) -> int: + """Returns the number of pending delayed events in the DB.""" + + def _get_count_of_delayed_events(txn: LoggingTransaction) -> int: + sql = "SELECT count(*) FROM delayed_events" + + txn.execute(sql) + resp = txn.fetchone() + return resp[0] if resp is not None else 0 + + return await self.db_pool.runInteraction( + "get_count_of_delayed_events", + _get_count_of_delayed_events, + ) + async def get_all_delayed_events_for_user( self, user_localpart: str, diff --git a/synapse/storage/databases/main/events_worker.py b/synapse/storage/databases/main/events_worker.py index cc031d8996..31e2312211 100644 --- a/synapse/storage/databases/main/events_worker.py +++ b/synapse/storage/databases/main/events_worker.py @@ -2135,6 +2135,39 @@ class EventsWorkerStore(SQLBaseStore): return rows, to_token, True + async def get_senders_for_event_ids( + self, event_ids: Collection[str] + ) -> Dict[str, Optional[str]]: + """ + Given a sequence of event IDs, return the sender associated with each. + + Args: + event_ids: A collection of event IDs as strings. + + Returns: + A dict of event ID -> sender of the event. + + If a given event ID does not exist in the `events` table, then no entry + for that event ID will be returned. + """ + + def _get_senders_for_event_ids( + txn: LoggingTransaction, + ) -> Dict[str, Optional[str]]: + rows = self.db_pool.simple_select_many_txn( + txn=txn, + table="events", + column="event_id", + iterable=event_ids, + keyvalues={}, + retcols=["event_id", "sender"], + ) + return dict(rows) + + return await self.db_pool.runInteraction( + "get_senders_for_event_ids", _get_senders_for_event_ids + ) + @cached(max_entries=5000) async def get_event_ordering(self, event_id: str, room_id: str) -> Tuple[int, int]: res = await self.db_pool.simple_select_one( diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 00f87cc3a1..303b232d7b 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -94,6 +94,8 @@ class StateDeltasStore(SQLBaseStore): - the stream id which these results go up to - list of current_state_delta_stream rows. If it is empty, we are up to date. + + A maximum of 100 rows will be returned. """ prev_stream_id = int(prev_stream_id) diff --git a/synapse/util/sentinel.py b/synapse/util/sentinel.py new file mode 100644 index 0000000000..c8434fc97a --- /dev/null +++ b/synapse/util/sentinel.py @@ -0,0 +1,21 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# <https://www.gnu.org/licenses/agpl-3.0.html>.
+# + +import enum + + +class Sentinel(enum.Enum): + # defining a sentinel in this way allows mypy to correctly handle the + # type of a dictionary lookup and subsequent type narrowing. + UNSET_SENTINEL = object() diff --git a/tests/storage/test_events.py b/tests/storage/test_events.py index 93e9eab039..25a380e325 100644 --- a/tests/storage/test_events.py +++ b/tests/storage/test_events.py @@ -20,7 +20,7 @@ # import logging -from typing import List, Optional +from typing import Dict, List, Optional from twisted.internet.testing import MemoryReactor @@ -39,6 +39,77 @@ from tests.unittest import HomeserverTestCase logger = logging.getLogger(__name__) +class EventsTestCase(HomeserverTestCase): + servlets = [ + admin.register_servlets, + room.register_servlets, + login.register_servlets, + ] + + def prepare( + self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer + ) -> None: + self._store = self.hs.get_datastores().main + + def test_get_senders_for_event_ids(self) -> None: + """Tests the `get_senders_for_event_ids` storage function.""" + + users_and_tokens: Dict[str, str] = {} + for localpart_suffix in range(10): + localpart = f"user_{localpart_suffix}" + user_id = self.register_user(localpart, "rabbit") + token = self.login(localpart, "rabbit") + + users_and_tokens[user_id] = token + + room_creator_user_id = self.register_user("room_creator", "rabbit") + room_creator_token = self.login("room_creator", "rabbit") + users_and_tokens[room_creator_user_id] = room_creator_token + + # Create a room and invite some users. + room_id = self.helper.create_room_as( + room_creator_user_id, tok=room_creator_token + ) + event_ids_to_senders: Dict[str, str] = {} + for user_id, token in users_and_tokens.items(): + if user_id == room_creator_user_id: + continue + + self.helper.invite( + room=room_id, + targ=user_id, + tok=room_creator_token, + ) + + # Have the user accept the invite and join the room. + self.helper.join( + room=room_id, + user=user_id, + tok=token, + ) + + # Have the user send an event. + response = self.helper.send_event( + room_id=room_id, + type="m.room.message", + content={ + "msgtype": "m.text", + "body": f"hello, I'm {user_id}!", + }, + tok=token, + ) + + # Record the event ID and sender. + event_id = response["event_id"] + event_ids_to_senders[event_id] = user_id + + # Check that `get_senders_for_event_ids` returns the correct data. 
+ response = self.get_success( + self._store.get_senders_for_event_ids(list(event_ids_to_senders.keys())) + ) + self.assert_dict(event_ids_to_senders, response) + + class ExtremPruneTestCase(HomeserverTestCase): servlets = [ admin.register_servlets, From daf33e49544cf42376ad6deb4bce16539ff8af53 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 23 Sep 2025 13:28:34 +0100 Subject: [PATCH 45/54] 1.139.0rc1 --- CHANGES.md | 67 +++++++++++++++++++++++++++++++ changelog.d/18583.removal | 1 - changelog.d/18641.bugfix | 1 - changelog.d/18695.feature | 1 - changelog.d/18791.misc | 1 - changelog.d/18819.misc | 1 - changelog.d/18823.bugfix | 1 - changelog.d/18846.feature | 1 - changelog.d/18848.feature | 1 - changelog.d/18856.doc | 1 - changelog.d/18870.misc | 1 - changelog.d/18874.misc | 1 - changelog.d/18875.bugfix | 1 - changelog.d/18878.docker | 1 - changelog.d/18886.misc | 1 - changelog.d/18898.feature | 1 - changelog.d/18899.feature | 1 - changelog.d/18900.misc | 1 - changelog.d/18906.misc | 1 - changelog.d/18907.misc | 1 - changelog.d/18909.bugfix | 1 - changelog.d/18910.misc | 1 - changelog.d/18926.bugfix | 1 - changelog.d/18931.doc | 2 - changelog.d/18933.misc | 1 - changelog.d/18937.misc | 1 - changelog.d/18938.misc | 1 - changelog.d/18944.misc | 1 - changelog.d/18945.misc | 1 - changelog.d/18946.misc | 1 - debian/changelog | 6 +++ pyproject.toml | 2 +- schema/synapse-config.schema.yaml | 2 +- 33 files changed, 75 insertions(+), 32 deletions(-) delete mode 100644 changelog.d/18583.removal delete mode 100644 changelog.d/18641.bugfix delete mode 100644 changelog.d/18695.feature delete mode 100644 changelog.d/18791.misc delete mode 100644 changelog.d/18819.misc delete mode 100644 changelog.d/18823.bugfix delete mode 100644 changelog.d/18846.feature delete mode 100644 changelog.d/18848.feature delete mode 100644 changelog.d/18856.doc delete mode 100644 changelog.d/18870.misc delete mode 100644 changelog.d/18874.misc delete mode 100644 changelog.d/18875.bugfix delete mode 100644 changelog.d/18878.docker delete mode 100644 changelog.d/18886.misc delete mode 100644 changelog.d/18898.feature delete mode 100644 changelog.d/18899.feature delete mode 100644 changelog.d/18900.misc delete mode 100644 changelog.d/18906.misc delete mode 100644 changelog.d/18907.misc delete mode 100644 changelog.d/18909.bugfix delete mode 100644 changelog.d/18910.misc delete mode 100644 changelog.d/18926.bugfix delete mode 100644 changelog.d/18931.doc delete mode 100644 changelog.d/18933.misc delete mode 100644 changelog.d/18937.misc delete mode 100644 changelog.d/18938.misc delete mode 100644 changelog.d/18944.misc delete mode 100644 changelog.d/18945.misc delete mode 100644 changelog.d/18946.misc diff --git a/CHANGES.md b/CHANGES.md index 91fe74c60f..8ed2bb2400 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,70 @@ +# Synapse 1.139.0rc1 (2025-09-23) + +## Features + +- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. ([\#18695](https://github.com/element-hq/synapse/issues/18695)) +- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow a newer draft. 
([\#18846](https://github.com/element-hq/synapse/issues/18846)) +- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848)) +- Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. ([\#18898](https://github.com/element-hq/synapse/issues/18898)) +- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899)) + +## Bugfixes + +- Ensure all PDUs sent via `/send` pass canonical JSON checks. ([\#18641](https://github.com/element-hq/synapse/issues/18641)) +- Fix bug where we did not send invite revocations over federation. ([\#18823](https://github.com/element-hq/synapse/issues/18823)) +- Fix prefixed support for [MSC4133](https://github.com/matrix-org/matrix-spec-proposals/pull/4133). ([\#18875](https://github.com/element-hq/synapse/issues/18875)) +- Fix open redirect in legacy SSO flow with the `idp` query parameter. ([\#18909](https://github.com/element-hq/synapse/issues/18909)) +- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926)) + +## Updates to the Docker image + +- Suppress "Applying schema" log noise bulk when `SYNAPSE_LOG_TESTING` is set. ([\#18878](https://github.com/element-hq/synapse/issues/18878)) + +## Improved Documentation + +- Clarify Python dependency constraints in our deprecation policy. ([\#18856](https://github.com/element-hq/synapse/issues/18856)) +- Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. Contributed by @maxkratz. ([\#18931](https://github.com/element-hq/synapse/issues/18931)) + +## Deprecations and Removals + +- Remove obsolete and experimental `/sync/e2ee` endpoint. ([\#18583](https://github.com/element-hq/synapse/issues/18583)) + +## Internal Changes + +- Fix `LaterGauge` metrics to collect from all servers. ([\#18791](https://github.com/element-hq/synapse/issues/18791)) +- Configure Synapse to run [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) Complement tests. ([\#18819](https://github.com/element-hq/synapse/issues/18819)) +- Remove `sentinel` logcontext usage where we log in `setup`, `start` and `exit`. ([\#18870](https://github.com/element-hq/synapse/issues/18870)) +- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. ([\#18874](https://github.com/element-hq/synapse/issues/18874)) +- Start background tasks after we fork the process (daemonize). ([\#18886](https://github.com/element-hq/synapse/issues/18886)) +- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. ([\#18900](https://github.com/element-hq/synapse/issues/18900), [\#18906](https://github.com/element-hq/synapse/issues/18906)) +- Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. ([\#18907](https://github.com/element-hq/synapse/issues/18907)) +- Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon. 
([\#18910](https://github.com/element-hq/synapse/issues/18910)) +- Split loading config from homeserver `setup`. ([\#18933](https://github.com/element-hq/synapse/issues/18933)) +- Fix `run_in_background` not being awaited properly in some tests causing `LoggingContext` problems. ([\#18937](https://github.com/element-hq/synapse/issues/18937)) +- Fix `run_as_background_process` not being awaited properly causing `LoggingContext` problems in experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938)) +- Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944)) +- Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945)) +- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946)) + + + +### Updates to locked dependencies + +* Bump actions/setup-go from 5.5.0 to 6.0.0. ([\#18891](https://github.com/element-hq/synapse/issues/18891)) +* Bump actions/setup-python from 5.6.0 to 6.0.0. ([\#18890](https://github.com/element-hq/synapse/issues/18890)) +* Bump authlib from 1.6.1 to 1.6.3. ([\#18921](https://github.com/element-hq/synapse/issues/18921)) +* Bump jsonschema from 4.25.0 to 4.25.1. ([\#18897](https://github.com/element-hq/synapse/issues/18897)) +* Bump log from 0.4.27 to 0.4.28. ([\#18892](https://github.com/element-hq/synapse/issues/18892)) +* Bump phonenumbers from 9.0.12 to 9.0.13. ([\#18893](https://github.com/element-hq/synapse/issues/18893)) +* Bump pydantic from 2.11.7 to 2.11.9. ([\#18922](https://github.com/element-hq/synapse/issues/18922)) +* Bump serde from 1.0.219 to 1.0.223. ([\#18920](https://github.com/element-hq/synapse/issues/18920)) +* Bump serde_json from 1.0.143 to 1.0.145. ([\#18919](https://github.com/element-hq/synapse/issues/18919)) +* Bump sigstore/cosign-installer from 3.9.2 to 3.10.0. ([\#18917](https://github.com/element-hq/synapse/issues/18917)) +* Bump towncrier from 24.8.0 to 25.8.0. ([\#18894](https://github.com/element-hq/synapse/issues/18894)) +* Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915. ([\#18918](https://github.com/element-hq/synapse/issues/18918)) +* Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895)) +* Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924)) + # Synapse 1.138.0 (2025-09-09) No significant changes since 1.138.0rc1. diff --git a/changelog.d/18583.removal b/changelog.d/18583.removal deleted file mode 100644 index d7baa85147..0000000000 --- a/changelog.d/18583.removal +++ /dev/null @@ -1 +0,0 @@ -Remove obsolete and experimental `/sync/e2ee` endpoint. \ No newline at end of file diff --git a/changelog.d/18641.bugfix b/changelog.d/18641.bugfix deleted file mode 100644 index 8f2a2e3d8b..0000000000 --- a/changelog.d/18641.bugfix +++ /dev/null @@ -1 +0,0 @@ -Ensure all PDUs sent via `/send` pass canonical JSON checks. 
diff --git a/changelog.d/18695.feature b/changelog.d/18695.feature deleted file mode 100644 index 1481a27f23..0000000000 --- a/changelog.d/18695.feature +++ /dev/null @@ -1 +0,0 @@ -Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. \ No newline at end of file diff --git a/changelog.d/18791.misc b/changelog.d/18791.misc deleted file mode 100644 index 6ecd498286..0000000000 --- a/changelog.d/18791.misc +++ /dev/null @@ -1 +0,0 @@ -Fix `LaterGauge` metrics to collect from all servers. diff --git a/changelog.d/18819.misc b/changelog.d/18819.misc deleted file mode 100644 index c76e050e79..0000000000 --- a/changelog.d/18819.misc +++ /dev/null @@ -1 +0,0 @@ -Configure Synapse to run MSC4306: Thread Subscriptions Complement tests. \ No newline at end of file diff --git a/changelog.d/18823.bugfix b/changelog.d/18823.bugfix deleted file mode 100644 index 473c865aa4..0000000000 --- a/changelog.d/18823.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where we did not send invite revocations over federation. diff --git a/changelog.d/18846.feature b/changelog.d/18846.feature deleted file mode 100644 index 4a873d4446..0000000000 --- a/changelog.d/18846.feature +++ /dev/null @@ -1 +0,0 @@ -Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow newer draft. \ No newline at end of file diff --git a/changelog.d/18848.feature b/changelog.d/18848.feature deleted file mode 100644 index 302a6e7b66..0000000000 --- a/changelog.d/18848.feature +++ /dev/null @@ -1 +0,0 @@ -Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks for media repository. diff --git a/changelog.d/18856.doc b/changelog.d/18856.doc deleted file mode 100644 index 0e5e55377f..0000000000 --- a/changelog.d/18856.doc +++ /dev/null @@ -1 +0,0 @@ -Clarify Python dependency constraints in our deprecation policy. diff --git a/changelog.d/18870.misc b/changelog.d/18870.misc deleted file mode 100644 index e54ba4f37a..0000000000 --- a/changelog.d/18870.misc +++ /dev/null @@ -1 +0,0 @@ -Remove `sentinel` logcontext usage where we log in `setup`, `start` and exit. diff --git a/changelog.d/18874.misc b/changelog.d/18874.misc deleted file mode 100644 index 729befb5e8..0000000000 --- a/changelog.d/18874.misc +++ /dev/null @@ -1 +0,0 @@ -Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. diff --git a/changelog.d/18875.bugfix b/changelog.d/18875.bugfix deleted file mode 100644 index 3bda7a1d18..0000000000 --- a/changelog.d/18875.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix prefixed support for MSC4133. diff --git a/changelog.d/18878.docker b/changelog.d/18878.docker deleted file mode 100644 index cf74f67cc8..0000000000 --- a/changelog.d/18878.docker +++ /dev/null @@ -1 +0,0 @@ -Suppress "Applying schema" log noise bulk when `SYNAPSE_LOG_TESTING` is set. diff --git a/changelog.d/18886.misc b/changelog.d/18886.misc deleted file mode 100644 index d0d32e59ab..0000000000 --- a/changelog.d/18886.misc +++ /dev/null @@ -1 +0,0 @@ -Start background tasks after we fork the process (daemonize). 
diff --git a/changelog.d/18898.feature b/changelog.d/18898.feature deleted file mode 100644 index bf31dd55d1..0000000000 --- a/changelog.d/18898.feature +++ /dev/null @@ -1 +0,0 @@ -Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. diff --git a/changelog.d/18899.feature b/changelog.d/18899.feature deleted file mode 100644 index ee7141efc5..0000000000 --- a/changelog.d/18899.feature +++ /dev/null @@ -1 +0,0 @@ -Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. \ No newline at end of file diff --git a/changelog.d/18900.misc b/changelog.d/18900.misc deleted file mode 100644 index d7d8b47eb0..0000000000 --- a/changelog.d/18900.misc +++ /dev/null @@ -1 +0,0 @@ -Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. diff --git a/changelog.d/18906.misc b/changelog.d/18906.misc deleted file mode 100644 index d7d8b47eb0..0000000000 --- a/changelog.d/18906.misc +++ /dev/null @@ -1 +0,0 @@ -Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. diff --git a/changelog.d/18907.misc b/changelog.d/18907.misc deleted file mode 100644 index 4fca9ec8fb..0000000000 --- a/changelog.d/18907.misc +++ /dev/null @@ -1 +0,0 @@ -Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. diff --git a/changelog.d/18909.bugfix b/changelog.d/18909.bugfix deleted file mode 100644 index 10d17631f0..0000000000 --- a/changelog.d/18909.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix open redirect in legacy SSO flow with the `idp` query parameter. diff --git a/changelog.d/18910.misc b/changelog.d/18910.misc deleted file mode 100644 index d5bd3ef314..0000000000 --- a/changelog.d/18910.misc +++ /dev/null @@ -1 +0,0 @@ -Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon. \ No newline at end of file diff --git a/changelog.d/18926.bugfix b/changelog.d/18926.bugfix deleted file mode 100644 index c450313764..0000000000 --- a/changelog.d/18926.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. \ No newline at end of file diff --git a/changelog.d/18931.doc b/changelog.d/18931.doc deleted file mode 100644 index 8a2dcb8654..0000000000 --- a/changelog.d/18931.doc +++ /dev/null @@ -1,2 +0,0 @@ -Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. -Contributed by @maxkratz. diff --git a/changelog.d/18933.misc b/changelog.d/18933.misc deleted file mode 100644 index 3308d367e7..0000000000 --- a/changelog.d/18933.misc +++ /dev/null @@ -1 +0,0 @@ -Split loading config from homeserver `setup`. diff --git a/changelog.d/18937.misc b/changelog.d/18937.misc deleted file mode 100644 index ab1e9c4d47..0000000000 --- a/changelog.d/18937.misc +++ /dev/null @@ -1 +0,0 @@ -Fix `run_in_background` not be awaited properly in some tests causing `LoggingContext` problems. 
diff --git a/changelog.d/18938.misc b/changelog.d/18938.misc deleted file mode 100644 index ab31cb7299..0000000000 --- a/changelog.d/18938.misc +++ /dev/null @@ -1 +0,0 @@ -Fix `run_as_background_process` not be awaited properly causing `LoggingContext` problems in experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. diff --git a/changelog.d/18944.misc b/changelog.d/18944.misc deleted file mode 100644 index 9561aa9275..0000000000 --- a/changelog.d/18944.misc +++ /dev/null @@ -1 +0,0 @@ -Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. diff --git a/changelog.d/18945.misc b/changelog.d/18945.misc deleted file mode 100644 index e49077c8f2..0000000000 --- a/changelog.d/18945.misc +++ /dev/null @@ -1 +0,0 @@ -Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. diff --git a/changelog.d/18946.misc b/changelog.d/18946.misc deleted file mode 100644 index 53c246a638..0000000000 --- a/changelog.d/18946.misc +++ /dev/null @@ -1 +0,0 @@ -Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. diff --git a/debian/changelog b/debian/changelog index 035d06ad2b..f047143923 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium + + * New Synapse release 1.139.0rc1. + + -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100 + matrix-synapse-py3 (1.138.0) stable; urgency=medium * New Synapse release 1.138.0. diff --git a/pyproject.toml b/pyproject.toml index f9dd0ca26b..68824812ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.138.0" +version = "1.139.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors <packages@matrix.org>"] license = "AGPL-3.0-or-later" diff --git a/schema/synapse-config.schema.yaml b/schema/synapse-config.schema.yaml index fdce4219ae..2a7f94a700 100644 --- a/schema/synapse-config.schema.yaml +++ b/schema/synapse-config.schema.yaml @@ -1,5 +1,5 @@ $schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json -$id: https://element-hq.github.io/synapse/schema/synapse/v1.138/synapse-config.schema.json +$id: https://element-hq.github.io/synapse/schema/synapse/v1.139/synapse-config.schema.json type: object properties: modules: From d308469e90f6a9528c052d678b30c2b63939dbeb Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 23 Sep 2025 14:28:38 +0100 Subject: [PATCH 46/54] Update changelog to move MSC4190 entry to Features --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 8ed2bb2400..26cb661742 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -7,6 +7,7 @@ - Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848)) - Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper.
([\#18898](https://github.com/element-hq/synapse/issues/18898)) - Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899)) +- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946)) ## Bugfixes @@ -44,7 +45,6 @@ - Fix `run_as_background_process` not being awaited properly causing `LoggingContext` problems in experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938)) - Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944)) - Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945)) -- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946)) From fd29e3219c932d5cca6b0443b306144335475c37 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 23 Sep 2025 15:28:40 +0100 Subject: [PATCH 47/54] Drop support for Ubuntu 24.10 'Oracular Oriole', add support for Ubuntu 25.04 'Plucky Puffin' (#18962) --- changelog.d/18962.misc | 1 + scripts-dev/build_debian_packages.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18962.misc diff --git a/changelog.d/18962.misc b/changelog.d/18962.misc new file mode 100644 index 0000000000..22f762a37f --- /dev/null +++ b/changelog.d/18962.misc @@ -0,0 +1 @@ +Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. \ No newline at end of file diff --git a/scripts-dev/build_debian_packages.py b/scripts-dev/build_debian_packages.py index 6ee695b2ba..6150dc15a3 100755 --- a/scripts-dev/build_debian_packages.py +++ b/scripts-dev/build_debian_packages.py @@ -32,7 +32,7 @@ DISTS = ( "debian:sid", # (rolling distro, no EOL) "ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04) "ubuntu:noble", # 24.04 LTS (EOL 2029-06) - "ubuntu:oracular", # 24.10 (EOL 2025-07) + "ubuntu:plucky", # 25.04 (EOL 2026-01) "debian:trixie", # (EOL not specified yet) ) From b10f3f595922e5d21da46f5501a121a85bd67ee4 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 23 Sep 2025 15:31:49 +0100 Subject: [PATCH 48/54] 1.139.0rc2 --- CHANGES.md | 9 +++++++++ changelog.d/18962.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/18962.misc diff --git a/CHANGES.md b/CHANGES.md index 26cb661742..5936e777ed 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.139.0rc2 (2025-09-23) + +## Internal Changes + +- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. 
([\#18962](https://github.com/element-hq/synapse/issues/18962)) + + + + # Synapse 1.139.0rc1 (2025-09-23) ## Features diff --git a/changelog.d/18962.misc b/changelog.d/18962.misc deleted file mode 100644 index 22f762a37f..0000000000 --- a/changelog.d/18962.misc +++ /dev/null @@ -1 +0,0 @@ -Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index f047143923..c378473551 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium + + * New Synapse release 1.139.0rc2. + + -- Synapse Packaging team Tue, 23 Sep 2025 15:31:42 +0100 + matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium * New Synapse release 1.139.0rc1. diff --git a/pyproject.toml b/pyproject.toml index 68824812ba..846bf78e08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.139.0rc1" +version = "1.139.0rc2" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 5857d2de59ffb80e042627319513a94c08b8d4c3 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 23 Sep 2025 15:34:26 +0100 Subject: [PATCH 49/54] Note ubuntu release support update in the upgrade notes --- docs/upgrade.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/upgrade.md b/docs/upgrade.md index 082d204b58..9855ee99c7 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -117,6 +117,14 @@ each upgrade are complete before moving on to the next upgrade, to avoid stacking them up. You can monitor the currently running background updates with [the Admin API](usage/administration/admin_api/background_updates.html#status). +# Upgrading to v1.139.0 + +## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin + +Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul +2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu +24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin. + # Upgrading to v1.136.0 ## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process` From 9c4ba13a107f811e6b8da0c49bd88bd6c2665de0 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 23 Sep 2025 16:27:38 +0100 Subject: [PATCH 50/54] Add entry to v1.139.0 upgrade notes about appservices and /register requests --- docs/upgrade.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/docs/upgrade.md b/docs/upgrade.md index 9855ee99c7..d4f37227dd 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -125,6 +125,21 @@ Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul 2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu 24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin. +## `/register` requests from old application service implementations may break when using MAS + +Application Services that do not set `inhibit_login=true` when calling `POST +/_matrix/client/v3/register` will receive the error +`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. 
This is a +result of [MSC4190: Device management for application +services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) which +adds new endpoints for application services to create encryption-ready devices, +rather than using `/login`, or `/register` without `inhibit_login=true`. + +If an application service you use starts to fail with the mentioned error, +ensure it is up to date. If it is, then kindly let the author know that they +need to update their implementation to call `/register` with +`inhibit_login=true`. + # Upgrading to v1.136.0 ## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process` From acb9ec3c38e2c9af1d63dd097b8190baafce84dd Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 24 Sep 2025 10:24:47 -0500 Subject: [PATCH 51/54] Fix `run_coroutine_in_background(...)` incorrectly handling logcontext (#18964) Regressed in https://github.com/element-hq/synapse/pull/18900#discussion_r2331554278 (see conversation there for more context) ### How is this a regression? > To give this an update with more hindsight; this logic *was* redundant with the early return and it is safe to remove this complexity :white_check_mark: > > It seems like this actually has to do with completed vs incomplete deferreds... > > To explain how things previously worked *without* the early-return shortcut: > > With the normal case of **incomplete awaitable**, we store the `calling_context` and the `f` function is called and runs until it yields to the reactor. Because `f` follows the logcontext rules, it sets the `sentinel` logcontext. Then in `run_in_background(...)`, we restore the `calling_context`, store the current `ctx` (which is `sentinel`) and return. When the deferred completes, we restore `ctx` (which is `sentinel`) before yielding to the reactor again (all good :white_check_mark:) > > With the other case where we see a **completed awaitable**, we store the `calling_context` and the `f` function is called and runs to completion (no logcontext change). *This is where the shortcut would kick in but I'm going to continue explaining as if we commented out the shortcut.* -- Then in `run_in_background(...)`, we restore the `calling_context`, store the current `ctx` (which is same as the `calling_context`). Because the deferred is already completed, our extra callback is called immediately and we restore `ctx` (which is same as the `calling_context`). Since we never yield to the reactor, the `calling_context` is perfect as that's what we want again (all good :white_check_mark:) > > --- > > But this also means that our early-return shortcut is no longer just an optimization and is *necessary* to act correctly in the **completed awaitable** case as we want to return with the `calling_context` and not reset to the `sentinel` context. I've updated the comment in https://github.com/element-hq/synapse/pull/18964 to explain the necessity as it's currently just described as an optimization. > > But because we made the same change to `run_coroutine_in_background(...)` which didn't have the same early-return shortcut, we regressed the correct behavior :x: . This is being fixed in https://github.com/element-hq/synapse/pull/18964 > > > *-- @MadLittleMods, https://github.com/element-hq/synapse/pull/18900#discussion_r2373582917*
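To make the invariant above concrete, here is a minimal sketch of the behavior that regressed (an illustration for this commit message, not code from this patch; it assumes a development checkout where `synapse.logging.context` is importable):

```python
from synapse.logging.context import (
    LoggingContext,
    current_context,
    run_coroutine_in_background,
)


async def already_done() -> str:
    # Completes without ever yielding to the reactor, so the resulting
    # deferred is already called by the time the helper inspects it.
    return "done"


with LoggingContext("request"):
    run_coroutine_in_background(already_done())
    # The helper must return with the calling logcontext ("request") intact.
    # The regression instead reset it to the sentinel context at this point,
    # which is what surfaces as the "metrics will be lost" warnings below.
    assert current_context().name == "request"
```

### How did we find this problem?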
Spawning from @wrjlewis [seeing](https://matrix.to/#/!SGNQGPGUwtcPBUotTL:matrix.org/$h3TxxPVlqC6BTL07dbrsz6PmaUoZxLiXnSTEY-QYDtA?via=jki.re&via=matrix.org&via=element.io) `Starting metrics collection 'typing.get_new_events' from sentinel context: metrics will be lost` in the logs:
More logs:

```
synapse.http.request_metrics - 222 - ERROR - sentinel - Trying to stop RequestMetrics in the sentinel context.
2025-09-23 14:43:19,712 - synapse.util.metrics - 212 - WARNING - sentinel - Starting metrics collection 'typing.get_new_events' from sentinel context: metrics will be lost
2025-09-23 14:43:19,713 - synapse.rest.client.sync - 851 - INFO - sentinel - Client has disconnected; not serializing response.
2025-09-23 14:43:19,713 - synapse.http.server - 825 - WARNING - sentinel - Not sending response to request , already disconnected.
2025-09-23 14:43:19,713 - synapse.access.http.8008 - 515 - INFO - sentinel - 92.40.194.87 - 8008 - {@me:wi11.co.uk} Processed request: 30.005sec/-8.041sec (0.001sec, 0.000sec) (0.000sec/0.002sec/2) 0B 200! "POST /_matrix/client/unstable/org.matrix.simplified_msc3575/
```
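For context, the `Starting metrics collection ... from sentinel context: metrics will be lost` warning in these logs is emitted by Synapse's `Measure` helper when it is entered without an active logcontext. A rough sketch of the trigger condition (illustrative only; `measure_from_sentinel` is a hypothetical stand-in for the real typing handler):

```python
from twisted.internet import reactor

from synapse.util import Clock
from synapse.util.metrics import Measure


def measure_from_sentinel() -> None:
    # Called from the sentinel context, Measure has no logcontext to charge
    # its resource usage to, so entering the block logs the warning:
    #   "Starting metrics collection 'typing.get_new_events' from sentinel
    #   context: metrics will be lost"
    with Measure(Clock(reactor), "typing.get_new_events"):
        pass
```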
From the logs there, we can see things relating to `typing.get_new_events` and `/_matrix/client/unstable/org.matrix.simplified_msc3575/sync` which led me to trying out Sliding Sync with the typing extension enabled and allowed me to reproduce the problem locally. Sliding Sync is a unique scenario as it's the only place we use `gather_optional_coroutines(...)` -> `run_coroutine_in_background(...)` (introduced in https://github.com/element-hq/synapse/pull/17884) to exhibit this behavior.

### Testing strategy

1. Configure Synapse to enable [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186): Simplified Sliding Sync which is actually under [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575)
   ```yaml
   experimental_features:
     msc3575_enabled: true
   ```
1. Start synapse: `poetry run synapse_homeserver --config-path homeserver.yaml`
1. Make a Sliding Sync request with one of the extensions enabled
   ```http
   POST http://localhost:8008/_matrix/client/unstable/org.matrix.simplified_msc3575/sync

   {
     "lists": {},
     "room_subscriptions": {
       "!FlgJYGQKAIvAscfBhq:my.synapse.linux.server": {
         "required_state": [],
         "timeline_limit": 1
       }
     },
     "extensions": {
       "typing": {
         "enabled": true
       }
     }
   }
   ```
1. Open your homeserver logs and notice warnings about `Starting ... from sentinel context: metrics will be lost`

--- changelog.d/18964.misc | 1 + synapse/logging/context.py | 83 +++++++-------- tests/util/test_logcontext.py | 184 ++++++++++++++++++++++++++-------- 3 files changed, 181 insertions(+), 87 deletions(-) create mode 100644 changelog.d/18964.misc diff --git a/changelog.d/18964.misc b/changelog.d/18964.misc new file mode 100644 index 0000000000..69be53ad27 --- /dev/null +++ b/changelog.d/18964.misc @@ -0,0 +1 @@ +Fix `run_coroutine_in_background(...)` incorrectly handling logcontext. diff --git a/synapse/logging/context.py b/synapse/logging/context.py index aa4b98e7c7..b5b434f3a8 100644 --- a/synapse/logging/context.py +++ b/synapse/logging/context.py @@ -802,13 +802,15 @@ def run_in_background( deferred returned by the function completes. To explain how the log contexts work here: - - When `run_in_background` is called, the current context is stored ("original"), - we kick off the background task in the current context, and we restore that - original context before returning - - When the background task finishes, we don't want to leak our context into the - reactor which would erroneously get attached to the next operation picked up by - the event loop. We add a callback to the deferred which will clear the logging - context after it finishes and yields control back to the reactor. + - When `run_in_background` is called, the calling logcontext is stored + ("original"), we kick off the background task in the current context, and we + restore that original context before returning. + - For a completed deferred, that's the end of the story. + - For an incomplete deferred, when the background task finishes, we don't want to + leak our context into the reactor which would erroneously get attached to the + next operation picked up by the event loop. We add a callback to the deferred + which will clear the logging context after it finishes and yields control back to + the reactor.
Useful for wrapping functions that return a deferred or coroutine, which you don't yield or await on (for instance because you want to pass it to @@ -857,22 +859,36 @@ def run_in_background( # The deferred has already completed if d.called and not d.paused: - # The function should have maintained the logcontext, so we can - # optimise out the messing about + # If the function messes with logcontexts, we can assume it follows the Synapse + # logcontext rules (Rules for functions returning awaitables: "If the awaitable + # is already complete, the function returns with the same logcontext it started + # with."). If the function doesn't touch logcontexts at all, we can also assume + # the logcontext is unchanged. + # + # Either way, the function should have maintained the calling logcontext, so we + # can avoid messing with it further. Additionally, if the deferred has already + # completed, then it would be a mistake to then add a deferred callback (below) + # to reset the logcontext to the sentinel logcontext as that would run + # immediately (remember our goal is to maintain the calling logcontext when we + # return). return d - # The function may have reset the context before returning, so we need to restore it - # now. + # Since the function we called may follow the Synapse logcontext rules (Rules for + # functions returning awaitables: "If the awaitable is incomplete, the function + # clears the logcontext before returning"), the function may have reset the + # logcontext before returning, so we need to restore the calling logcontext now + # before we return ourselves. # # Our goal is to have the caller logcontext unchanged after firing off the # background task and returning. set_current_context(calling_context) - # The original logcontext will be restored when the deferred completes, but - # there is nothing waiting for it, so it will get leaked into the reactor (which - # would then get picked up by the next thing the reactor does). We therefore - # need to reset the logcontext here (set the `sentinel` logcontext) before - # yielding control back to the reactor. + # If the function we called is playing nice and following the Synapse logcontext + # rules, it will restore the original calling logcontext when the deferred completes; + # but there is nothing waiting for it, so it will get leaked into the reactor (which + # would then get picked up by the next thing the reactor does). We therefore need to + # reset the logcontext here (set the `sentinel` logcontext) before yielding control + # back to the reactor. + # + # (If this feels asymmetric, consider it this way: we are # effectively forking a new thread of execution. We are @@ -894,10 +910,9 @@ def run_coroutine_in_background( Useful for wrapping coroutines that you don't yield or await on (for instance because you want to pass it to deferred.gatherResults()). - This is a special case of `run_in_background` where we can accept a - coroutine directly rather than a function. We can do this because coroutines - do not run until called, and so calling an async function without awaiting - cannot change the log contexts. + This is a special case of `run_in_background` where we can accept a coroutine + directly rather than a function. We can do this because coroutines do not continue + running once they have yielded.
This is an ergonomic helper so we can do this: ```python @@ -908,33 +923,7 @@ def run_coroutine_in_background( run_in_background(lambda: func1(arg1)) ``` """ - calling_context = current_context() - - # Wrap the coroutine in a deferred, which will have the side effect of executing the - # coroutine in the background. - d = defer.ensureDeferred(coroutine) - - # The function may have reset the context before returning, so we need to restore it - # now. - # - # Our goal is to have the caller logcontext unchanged after firing off the - # background task and returning. - set_current_context(calling_context) - - # The original logcontext will be restored when the deferred completes, but - # there is nothing waiting for it, so it will get leaked into the reactor (which - # would then get picked up by the next thing the reactor does). We therefore - # need to reset the logcontext here (set the `sentinel` logcontext) before - # yielding control back to the reactor. - # - # (If this feels asymmetric, consider it this way: we are - # effectively forking a new thread of execution. We are - # probably currently within a ``with LoggingContext()`` block, - # which is supposed to have a single entry and exit point. But - # by spawning off another deferred, we are effectively - # adding a new exit point.) - d.addBoth(_set_context_cb, SENTINEL_CONTEXT) - return d + return run_in_background(lambda: coroutine) T = TypeVar("T") diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py index 0ecf712bab..651dd84483 100644 --- a/tests/util/test_logcontext.py +++ b/tests/util/test_logcontext.py @@ -22,7 +22,6 @@ import logging from typing import Callable, Generator, cast -import twisted.python.failure from twisted.internet import defer, reactor as _reactor from synapse.logging.context import ( @@ -33,6 +32,7 @@ from synapse.logging.context import ( current_context, make_deferred_yieldable, nested_logging_context, + run_coroutine_in_background, run_in_background, ) from synapse.types import ISynapseReactor @@ -249,73 +249,80 @@ class LoggingContextTestCase(unittest.TestCase): # Back to the sentinel context self._check_test_key("sentinel") - def _test_run_in_background(self, function: Callable[[], object]) -> defer.Deferred: - sentinel_context = current_context() + async def _test_run_in_background(self, function: Callable[[], object]) -> None: + clock = Clock(reactor) - callback_completed = False + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False with LoggingContext("foo"): - # fire off function, but don't wait on it. - d2 = run_in_background(function) + # Fire off the function, but don't wait on it. + deferred = run_in_background(function) + self._check_test_key("foo") - def cb(res: object) -> object: - nonlocal callback_completed - callback_completed = True - return res + def callback(result: object) -> object: + nonlocal callback_finished + callback_finished = True + # Pass through the result + return result - d2.addCallback(cb) + # We `addBoth` because when exceptions happen, we still want to mark the + # callback as finished so that the test can complete and we see the + # underlying error. + deferred.addBoth(callback) self._check_test_key("foo") - # now wait for the function under test to have run, and check that - # the logcontext is left in a sane state. - d2 = defer.Deferred() + # Now wait for the function under test to have run, and check that + # the logcontext is left in a sane state. 
+ while not callback_finished: + await clock.sleep(0) + self._check_test_key("foo") - def check_logcontext() -> None: - if not callback_completed: - reactor.callLater(0.01, check_logcontext) - return + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) - # make sure that the context was reset before it got thrown back - # into the reactor - try: - self.assertIs(current_context(), sentinel_context) - d2.callback(None) - except BaseException: - d2.errback(twisted.python.failure.Failure()) - - reactor.callLater(0.01, check_logcontext) - - # test is done once d2 finishes - return d2 + # Back to the sentinel context + self._check_test_key("sentinel") @logcontext_clean - def test_run_in_background_with_blocking_fn(self) -> defer.Deferred: + async def test_run_in_background_with_blocking_fn(self) -> None: async def blocking_function() -> None: await Clock(reactor).sleep(0) - return self._test_run_in_background(blocking_function) + await self._test_run_in_background(blocking_function) @logcontext_clean - def test_run_in_background_with_non_blocking_fn(self) -> defer.Deferred: + async def test_run_in_background_with_non_blocking_fn(self) -> None: @defer.inlineCallbacks def nonblocking_function() -> Generator["defer.Deferred[object]", object, None]: with PreserveLoggingContext(): yield defer.succeed(None) - return self._test_run_in_background(nonblocking_function) + await self._test_run_in_background(nonblocking_function) @logcontext_clean - def test_run_in_background_with_chained_deferred(self) -> defer.Deferred: + async def test_run_in_background_with_chained_deferred(self) -> None: # a function which returns a deferred which looks like it has been # called, but is actually paused def testfunc() -> defer.Deferred: return make_deferred_yieldable(_chained_deferred_function()) - return self._test_run_in_background(testfunc) + await self._test_run_in_background(testfunc) @logcontext_clean - def test_run_in_background_with_coroutine(self) -> defer.Deferred: + async def test_run_in_background_with_coroutine(self) -> None: + """ + Test `run_in_background` with a coroutine that yields control back to the + reactor. + + This will stress the logic around incomplete deferreds in `run_in_background`. + """ + async def testfunc() -> None: self._check_test_key("foo") d = defer.ensureDeferred(Clock(reactor).sleep(0)) @@ -323,14 +330,111 @@ class LoggingContextTestCase(unittest.TestCase): await d self._check_test_key("foo") - return self._test_run_in_background(testfunc) + await self._test_run_in_background(testfunc) @logcontext_clean - def test_run_in_background_with_nonblocking_coroutine(self) -> defer.Deferred: + async def test_run_in_background_with_nonblocking_coroutine(self) -> None: + """ + Test `run_in_background` with a "nonblocking" coroutine (never yields control + back to the reactor). + + This will stress the logic around completed deferreds in `run_in_background`. + """ + async def testfunc() -> None: self._check_test_key("foo") - return self._test_run_in_background(testfunc) + await self._test_run_in_background(testfunc) + + @logcontext_clean + async def test_run_coroutine_in_background(self) -> None: + """ + Test `run_coroutine_in_background` with a coroutine that yields control back to the + reactor. + + This will stress the logic around incomplete deferreds in `run_coroutine_in_background`. 
+ """ + clock = Clock(reactor) + + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + + async def competing_callback() -> None: + nonlocal callback_finished + try: + # The callback should have the same logcontext as the caller + self._check_test_key("foo") + + with LoggingContext("competing"): + await clock.sleep(0) + self._check_test_key("competing") + + self._check_test_key("foo") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. + callback_finished = True + + with LoggingContext("foo"): + run_coroutine_in_background(competing_callback()) + self._check_test_key("foo") + await clock.sleep(0) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") + + @logcontext_clean + async def test_run_coroutine_in_background_with_nonblocking_coroutine(self) -> None: + """ + Test `run_coroutine_in_background` with a "nonblocking" coroutine (never yields control + back to the reactor). + + This will stress the logic around completed deferreds in `run_coroutine_in_background`. + """ + # Sanity check that we start in the sentinel context + self._check_test_key("sentinel") + + callback_finished = False + + async def competing_callback() -> None: + nonlocal callback_finished + try: + # The callback should have the same logcontext as the caller + self._check_test_key("foo") + + with LoggingContext("competing"): + # We `await` here but there is nothing to wait for here since the + # deferred is already complete so we should immediately continue + # executing in the same context. + await defer.succeed(None) + + self._check_test_key("competing") + + self._check_test_key("foo") + finally: + # When exceptions happen, we still want to mark the callback as finished + # so that the test can complete and we see the underlying error. + callback_finished = True + + with LoggingContext("foo"): + run_coroutine_in_background(competing_callback()) + self._check_test_key("foo") + + self.assertTrue( + callback_finished, + "Callback never finished which means the test probably didn't wait long enough", + ) + + # Back to the sentinel context + self._check_test_key("sentinel") @logcontext_clean @defer.inlineCallbacks From e2ec3b7d0dbbc53609feb82ac8907982ebdd6876 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Thu, 25 Sep 2025 12:14:20 +0100 Subject: [PATCH 52/54] 1.139.0rc3 --- CHANGES.md | 9 +++++++++ changelog.d/18964.misc | 1 - debian/changelog | 6 ++++++ pyproject.toml | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/18964.misc diff --git a/CHANGES.md b/CHANGES.md index 5936e777ed..d9a95f8e72 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +# Synapse 1.139.0rc3 (2025-09-25) + +## Bugfixes + +- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964)) + + + + # Synapse 1.139.0rc2 (2025-09-23) ## Internal Changes diff --git a/changelog.d/18964.misc b/changelog.d/18964.misc deleted file mode 100644 index 69be53ad27..0000000000 --- a/changelog.d/18964.misc +++ /dev/null @@ -1 +0,0 @@ -Fix `run_coroutine_in_background(...)` incorrectly handling logcontext. 
diff --git a/debian/changelog b/debian/changelog index c378473551..8f8877638a 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium + + * New Synapse release 1.139.0rc3. + + -- Synapse Packaging team Thu, 25 Sep 2025 12:13:23 +0100 + matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium * New Synapse release 1.139.0rc2. diff --git a/pyproject.toml b/pyproject.toml index 846bf78e08..7f58386087 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.139.0rc2" +version = "1.139.0rc3" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 72020f3f2c1890e6b262001bcdd6f642b729b9a9 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 30 Sep 2025 11:58:59 +0100 Subject: [PATCH 53/54] 1.139.0 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index d9a95f8e72..9984efc99e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# Synapse 1.139.0 (2025-09-30) + +No significant changes since 1.139.0rc3. + + + + # Synapse 1.139.0rc3 (2025-09-25) ## Bugfixes diff --git a/debian/changelog b/debian/changelog index 8f8877638a..f3a2314dca 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.139.0) stable; urgency=medium + + * New Synapse release 1.139.0. + + -- Synapse Packaging team Tue, 30 Sep 2025 11:58:55 +0100 + matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium * New Synapse release 1.139.0rc3. diff --git a/pyproject.toml b/pyproject.toml index 7f58386087..0f886a6b6a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.139.0rc3" +version = "1.139.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 0aeb95fb07066636362bf109e8da98969e6667a3 Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Tue, 30 Sep 2025 12:05:28 +0100 Subject: [PATCH 54/54] Add MAS note to 1.139.0 changelog --- CHANGES.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/CHANGES.md b/CHANGES.md index 9984efc99e..e8b04c419c 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,15 @@ # Synapse 1.139.0 (2025-09-30) +### `/register` requests from old application service implementations may break when using MAS + +If you are using Matrix Authentication Service (MAS), as of this release any +Application Services that do not set `inhibit_login=true` when calling `POST +/_matrix/client/v3/register` will receive the error +`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see [the +upgrade +notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas) +for more information. + No significant changes since 1.139.0rc3.