From 0bcb651b3f2b0b6e33649e118f859fc72bc15659 Mon Sep 17 00:00:00 2001
From: Patrick Cloke
Date: Thu, 7 Apr 2022 07:08:23 -0400
Subject: [PATCH 1/3] Support the v1 endpoint for `/relations`. (#12403)

Now that MSC2675 has passed FCP and the implementation is compliant with
the final version.
---
 changelog.d/12403.feature           |  1 +
 synapse/rest/client/relations.py    |  2 +-
 tests/rest/client/test_relations.py | 22 +++++++++++-----------
 3 files changed, 13 insertions(+), 12 deletions(-)
 create mode 100644 changelog.d/12403.feature

diff --git a/changelog.d/12403.feature b/changelog.d/12403.feature
new file mode 100644
index 0000000000..5b55e86ecb
--- /dev/null
+++ b/changelog.d/12403.feature
@@ -0,0 +1 @@
+Support the stable `v1` endpoint for `/relations`, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py
index 55c96a2af3..3cae6d2b55 100644
--- a/synapse/rest/client/relations.py
+++ b/synapse/rest/client/relations.py
@@ -35,7 +35,7 @@ class RelationPaginationServlet(RestServlet):
     PATTERNS = client_patterns(
         "/rooms/(?P<room_id>[^/]*)/relations/(?P<parent_id>[^/]*)"
         "(/(?P<relation_type>[^/]*)(/(?P<event_type>[^/]*))?)?$",
-        releases=(),
+        releases=("v1",),
     )
 
     def __init__(self, hs: "HomeServer"):
diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py
index 419eef166a..2f2ec3a685 100644
--- a/tests/rest/client/test_relations.py
+++ b/tests/rest/client/test_relations.py
@@ -125,7 +125,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Request the relations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -138,7 +138,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Fetch the bundled aggregations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/event/{self.parent_id}",
+            f"/_matrix/client/v3/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -340,7 +340,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # They should be ignored when fetching relations.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{room2}/relations/{parent_id}",
+            f"/_matrix/client/v1/rooms/{room2}/relations/{parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -633,7 +633,7 @@ class RelationsTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -685,7 +685,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # Only the "good" annotation should be found.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -710,7 +710,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # annotation.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -731,7 +731,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -762,7 +762,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         # Request the relations again, but with a different direction.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations"
+            f"/_matrix/client/v1/rooms/{self.room}/relations"
             f"/{self.parent_id}?limit=1&org.matrix.msc3715.dir=f",
             access_token=self.user_token,
         )
@@ -801,7 +801,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -865,7 +865,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         for from_token in (sync_prev_batch, messages_end):
             channel = self.make_request(
                 "GET",
-                f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
+                f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
                 access_token=self.user_token,
             )
             self.assertEqual(200, channel.code, channel.json_body)
@@ -1088,7 +1088,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         # It should also be included when the entire thread is requested.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)

From 3a7e97c7ade17a47517aadc0e9e305a1894119ac Mon Sep 17 00:00:00 2001
From: David Robertson
Date: Thu, 7 Apr 2022 12:43:31 +0100
Subject: [PATCH 2/3] Poetry: use locked environment in Docker images (#12385)

---
 .dockerignore            |  6 ++-
 changelog.d/12385.docker |  1 +
 docker/Dockerfile        | 80 +++++++++++++++++++++++++++++-----------
 docker/start.py          | 10 ++---
 4 files changed, 70 insertions(+), 27 deletions(-)
 create mode 100644 changelog.d/12385.docker

diff --git a/.dockerignore b/.dockerignore
index 434231fce9..a236760cf1 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -4,8 +4,12 @@
 # things to include
 !docker
 !synapse
-!MANIFEST.in
 !README.rst
+!pyproject.toml
+!poetry.lock
+
+# TODO: remove these once we have moved over to using poetry-core in pyproject.toml
+!MANIFEST.in
 !setup.py
 
 **/__pycache__
diff --git a/changelog.d/12385.docker b/changelog.d/12385.docker
new file mode 100644
index 0000000000..abe2127ea0
--- /dev/null
+++ b/changelog.d/12385.docker
@@ -0,0 +1 @@
+Bundle locked versions of dependencies into the Docker image.
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 24b5515eb9..6009da7db7 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -14,20 +14,61 @@
 #    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
 #
+# Irritatingly, there is no blessed guide on how to distribute an application with its
+# poetry-managed environment in a docker image. We have opted for
+# `poetry export | pip install -r /dev/stdin`, but there are known bugs in
+# `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
+# In case we get bitten by those bugs in the future, the recommendations here might
+# be useful:
+#     https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
+#     https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
+
+
 
 ARG PYTHON_VERSION=3.9
 
 ###
-### Stage 0: builder
+### Stage 0: generate requirements.txt
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
+
+# RUN --mount is specific to buildkit and is documented at
+# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
+# Here we use it to set up a cache for apt (and below for pip), to improve
+# rebuild speeds on slow connections.
+RUN \
+   --mount=type=cache,target=/var/cache/apt,sharing=locked \
+   --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update && apt-get install -y git \
+  && rm -rf /var/lib/apt/lists/*
+
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
+# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
+# https://github.com/python-poetry/poetry/pull/5156 and
+# https://github.com/python-poetry/poetry/issues/5141 ;
+# without it, we generate a requirements.txt with incorrect environment markers,
+# which causes necessary packages to be omitted when we `pip install`.
+#
+# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
+# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
+
+WORKDIR /synapse
+
+# Copy just what we need to run `poetry export`...
+COPY pyproject.toml poetry.lock README.rst /synapse/
+
+RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
+
+###
+### Stage 1: builder
 ###
 FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 
 # install the OS build deps
-#
-# RUN --mount is specific to buildkit and is documented at
-# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
-# Here we use it to set up a cache for apt, to improve rebuild speeds on
-# slow connections.
-#
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -45,30 +86,27 @@ RUN \
     zlib1g-dev \
   && rm -rf /var/lib/apt/lists/*
 
-# Copy just what we need to pip install
-COPY MANIFEST.in README.rst setup.py /synapse/
-COPY synapse/__init__.py /synapse/synapse/__init__.py
-COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
-
 # To speed up rebuilds, install all of the dependencies before we copy over
-# the whole synapse project so that we this layer in the Docker cache can be
+# the whole synapse project, so that this layer in the Docker cache can be
 # used while you develop on the source
 #
-# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
+# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
+COPY --from=requirements /synapse/requirements.txt /synapse/
 RUN --mount=type=cache,target=/root/.cache/pip \
-  pip install --prefix="/install" --no-warn-script-location \
-    /synapse[all]
+  pip install --prefix="/install" --no-warn-script-location -r /synapse/requirements.txt
 
-# Copy over the rest of the project
+# Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
+# ... and what we need to `pip install`.
+# TODO: once pyproject.toml declares poetry-core as its build system, we'll need to copy
+# pyproject.toml here, ditching setup.py and MANIFEST.in.
+COPY setup.py MANIFEST.in README.rst /synapse/
 
-# Install the synapse package itself and all of its children packages.
-#
-# This is aiming at installing only the `packages=find_packages(...)` from `setup.py`
+# Install the synapse package itself.
 RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 
 ###
-### Stage 1: runtime
+### Stage 2: runtime
 ###
 FROM docker.io/python:${PYTHON_VERSION}-slim
 
diff --git a/docker/start.py b/docker/start.py
index ec9eeb49ae..ac62bbc8ba 100755
--- a/docker/start.py
+++ b/docker/start.py
@@ -108,7 +108,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
 
     # Hopefully we already have a signing key, but generate one if not.
     args = [
-        "python",
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--config-path",
@@ -158,7 +158,7 @@ def run_generate_config(environ, ownership):
 
     # generate the main config file, and a signing key.
    args = [
-        "python",
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--server-name",
@@ -175,7 +175,7 @@ def run_generate_config(environ, ownership):
         "--open-private-ports",
     ]
     # log("running %s" % (args, ))
-    os.execv("/usr/local/bin/python", args)
+    os.execv(sys.executable, args)
 
 
 def main(args, environ):
@@ -254,12 +254,12 @@ running with 'migrate_config'. See the README for more details.
log("Starting synapse with args " + " ".join(args)) - args = ["python"] + args + args = [sys.executable] + args if ownership is not None: args = ["gosu", ownership] + args os.execve("/usr/sbin/gosu", args, environ) else: - os.execve("/usr/local/bin/python", args, environ) + os.execve(sys.executable, args, environ) if __name__ == "__main__": From d1cd96ce2966bc5bd268eac56a485e785779e9e0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 7 Apr 2022 13:18:29 +0100 Subject: [PATCH 3/3] Add opentracing spans to calls to external cache (#12380) --- changelog.d/12380.misc | 1 + synapse/logging/opentracing.py | 3 +++ synapse/replication/tcp/external_cache.py | 31 +++++++++++++++-------- 3 files changed, 24 insertions(+), 11 deletions(-) create mode 100644 changelog.d/12380.misc diff --git a/changelog.d/12380.misc b/changelog.d/12380.misc new file mode 100644 index 0000000000..178a00321a --- /dev/null +++ b/changelog.d/12380.misc @@ -0,0 +1 @@ +Add opentracing spans to calls to external cache. diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 3ebed5c161..f86ee9aac7 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -289,6 +289,9 @@ class SynapseTags: # Uniqueish ID of a database transaction DB_TXN_ID = "db.txn_id" + # The name of the external cache + CACHE_NAME = "cache.name" + class SynapseBaggage: FORCE_TRACING = "synapse-force-tracing" diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py index bf7d017968..a448dd7eb1 100644 --- a/synapse/replication/tcp/external_cache.py +++ b/synapse/replication/tcp/external_cache.py @@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Any, Optional from prometheus_client import Counter, Histogram +from synapse.logging import opentracing from synapse.logging.context import make_deferred_yieldable from synapse.util import json_decoder, json_encoder @@ -93,14 +94,18 @@ class ExternalCache: logger.debug("Caching %s %s: %r", cache_name, key, encoded_value) - with response_timer.labels("set").time(): - return await make_deferred_yieldable( - self._redis_connection.set( - self._get_redis_key(cache_name, key), - encoded_value, - pexpire=expiry_ms, + with opentracing.start_active_span( + "ExternalCache.set", + tags={opentracing.SynapseTags.CACHE_NAME: cache_name}, + ): + with response_timer.labels("set").time(): + return await make_deferred_yieldable( + self._redis_connection.set( + self._get_redis_key(cache_name, key), + encoded_value, + pexpire=expiry_ms, + ) ) - ) async def get(self, cache_name: str, key: str) -> Optional[Any]: """Look up a key/value in the named cache.""" @@ -108,10 +113,14 @@ class ExternalCache: if self._redis_connection is None: return None - with response_timer.labels("get").time(): - result = await make_deferred_yieldable( - self._redis_connection.get(self._get_redis_key(cache_name, key)) - ) + with opentracing.start_active_span( + "ExternalCache.get", + tags={opentracing.SynapseTags.CACHE_NAME: cache_name}, + ): + with response_timer.labels("get").time(): + result = await make_deferred_yieldable( + self._redis_connection.get(self._get_redis_key(cache_name, key)) + ) logger.debug("Got cache result %s %s: %r", cache_name, key, result)