diff --git a/changelog.d/12380.misc b/changelog.d/12380.misc
new file mode 100644
index 0000000000..178a00321a
--- /dev/null
+++ b/changelog.d/12380.misc
@@ -0,0 +1 @@
+Add opentracing spans to calls to the external cache.
diff --git a/changelog.d/12385.docker b/changelog.d/12385.docker
new file mode 100644
index 0000000000..abe2127ea0
--- /dev/null
+++ b/changelog.d/12385.docker
@@ -0,0 +1 @@
+Bundle locked versions of dependencies into the Docker image.
\ No newline at end of file
diff --git a/changelog.d/12403.feature b/changelog.d/12403.feature
new file mode 100644
index 0000000000..5b55e86ecb
--- /dev/null
+++ b/changelog.d/12403.feature
@@ -0,0 +1 @@
+Support the stable `v1` endpoint for `/relations`, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 132ac56d24..bedf09b1ba 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -14,31 +14,59 @@
 #    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
 #
-ARG PYTHON_VERSION=3.9
-
-FROM docker.io/python:${PYTHON_VERSION}-slim as base
-
-###
-### Stage 0: builder
-###
-
 # Irritatingly, there is no blessed guide on how to distribute an application with its
-# poetry-managed environment in a docker image. For a while,
-# `poetry export | pip install -r /dev/stdin` seemed plausible but is limited by bugs
+# poetry-managed environment in a docker image. We have opted for
+# `poetry export | pip install -r /dev/stdin`, but there are known bugs
 # in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
-# This is inspired from:
+# In case we get bitten by those bugs in the future, the recommendations here might
+# be useful:
 #   https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
 #   https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
 
-FROM base as builder
+
+
+ARG PYTHON_VERSION=3.9
+
+###
+### Stage 0: generate requirements.txt
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
 
 # RUN --mount is specific to buildkit and is documented at
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
-# Here we use it to set up a cache for pip (below, for apt and poetry), to improve
+# Here we use it to set up a cache for apt (and below for pip), to improve
 # rebuild speeds on slow connections.
-# We install poetry as --user so that it doesn't end up in the system-wide python
-# installation. That gets copied later into the runtime image.
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update && apt-get install -y git \
+  && rm -rf /var/lib/apt/lists/*
+
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
+# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
+# https://github.com/python-poetry/poetry/pull/5156 and
+# https://github.com/python-poetry/poetry/issues/5141 ;
+# without it, we generate a requirements.txt with incorrect environment markers,
+# which causes necessary packages to be omitted when we `pip install`.
+#
+# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
+# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
 RUN --mount=type=cache,target=/root/.cache/pip \
-  pip install --user poetry==1.1.12
+  pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
+
+WORKDIR /synapse
+
+# Copy just what we need to run `poetry export`...
+COPY pyproject.toml poetry.lock README.rst /synapse/
+
+RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
+
+###
+### Stage 1: builder
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 
 # install the OS build deps
 RUN \
@@ -58,33 +86,25 @@ RUN \
     zlib1g-dev \
     && rm -rf /var/lib/apt/lists/*
 
-WORKDIR /synapse
-
-# Copy just what we need to run `poetry install`
-COPY pyproject.toml poetry.lock README.rst /synapse/
-
-# Install to the Python installation which hosts `pip`. In this case, it's the system
-# Python.
-ENV POETRY_VIRTUALENVS_IN_PROJECT=true \
-    POETRY_VIRTUALENVS_CREATE=true \
-    POETRY_HOME=/opt/poetry
 # To speed up rebuilds, install all of the dependencies before we copy over
 # the whole synapse project, so that this layer in the Docker cache can be
 # used while you develop on the source
-RUN --mount=type=cache,target=/opt/poetry/artifacts \
-  --mount=type=cache,target=/opt/poetry/.cache/pypoetry/cache \
-  /root/.local/bin/poetry install --no-dev --no-root --no-interaction --no-ansi --extras all
+#
+# This aims to install the `[tool.poetry.dependencies]` from pyproject.toml.
+COPY --from=requirements /synapse/requirements.txt /synapse/
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --prefix="/install" --no-warn-script-location -r /synapse/requirements.txt
 
-# Copy over the synapse source code.
+# Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
+# ... and what we need to `pip install`.
+COPY pyproject.toml poetry.lock README.rst /synapse/
 
-# Install the synapse package itself, by omitting the --no-root argument
-RUN --mount=type=cache,target=/opt/poetry/artifacts \
-  --mount=type=cache,target=/opt/poetry/cache \
-  /root/.local/bin/poetry install --no-dev --no-interaction --no-ansi --extras all
+# Install the synapse package itself.
+RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 
 ###
-### Stage 1: runtime
+### Stage 2: runtime
 ###
 
-FROM base
+FROM docker.io/python:${PYTHON_VERSION}-slim
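For context on the environment-marker bug called out in the Dockerfile comments: `pip` evaluates each requirement's marker against the running interpreter and silently skips any requirement whose marker is false, so a wrongly exported marker means a needed package simply never gets installed. A minimal sketch using the `packaging` library (the requirement line is an invented example, not one from our lockfile):

```python
# Illustrative only: how pip decides whether a pinned requirement applies
# to the current interpreter. The requirement string below is a made-up
# example, not a line from Synapse's exported requirements.txt.
from packaging.requirements import Requirement

req = Requirement('backports.zoneinfo==0.2.1; python_version < "3.9"')

# pip evaluates the marker against the running interpreter's environment;
# if it is False, the package is silently skipped. A wrongly generated
# marker therefore surfaces as a "missing dependency" only at runtime.
if req.marker is None or req.marker.evaluate():
    print(f"would install {req.name}{req.specifier}")
else:
    print(f"would skip {req.name} (marker: {req.marker})")
```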
diff --git a/docker/start.py b/docker/start.py
index 9161c6ce3a..3de2635eb1 100755
--- a/docker/start.py
+++ b/docker/start.py
@@ -9,8 +9,6 @@ import sys
 
 import jinja2
 
-VIRTUALENV_INTERPRETER = "/synapse/.venv/bin/python"
-
 
 # Utility functions
 def log(txt):
@@ -110,7 +108,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
 
     # Hopefully we already have a signing key, but generate one if not.
     args = [
-        VIRTUALENV_INTERPRETER,
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--config-path",
@@ -160,7 +158,7 @@ def run_generate_config(environ, ownership):
 
     # generate the main config file, and a signing key.
     args = [
-        VIRTUALENV_INTERPRETER,
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
        "--server-name",
@@ -177,7 +175,7 @@
         "--open-private-ports",
     ]
     # log("running %s" % (args, ))
-    os.execv(VIRTUALENV_INTERPRETER, args)
+    os.execv(sys.executable, args)
 
 
 def main(args, environ):
@@ -256,12 +254,12 @@ running with 'migrate_config'. See the README for more details.
 
     log("Starting synapse with args " + " ".join(args))
 
-    args = [VIRTUALENV_INTERPRETER] + args
+    args = [sys.executable] + args
 
     if ownership is not None:
         args = ["gosu", ownership] + args
         os.execve("/usr/sbin/gosu", args, environ)
    else:
-        os.execve(VIRTUALENV_INTERPRETER, args, environ)
+        os.execve(sys.executable, args, environ)
 
 if __name__ == "__main__":
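Dropping `VIRTUALENV_INTERPRETER` works because the dependencies are now installed into the image's system Python, so `sys.executable` always points at the interpreter that is running `start.py`. A standalone sketch of the exec pattern used above (`http.server` stands in for `synapse.app.homeserver`):

```python
import os
import sys

# By exec* convention, args[0] is the program name (argv[0]); the path of
# the file to execute is passed separately as execv's first argument.
# http.server is just a stdlib stand-in for synapse.app.homeserver here.
args = [sys.executable, "-m", "http.server", "--help"]

# execv replaces the current process image: nothing after this line runs
# if the exec succeeds, and the new program keeps our PID, which is what a
# Docker entrypoint wants for signal handling as PID 1.
os.execv(sys.executable, args)
```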
log("Starting synapse with args " + " ".join(args)) - args = [VIRTUALENV_INTERPRETER] + args + args = [sys.executable] + args if ownership is not None: args = ["gosu", ownership] + args os.execve("/usr/sbin/gosu", args, environ) else: - os.execve(VIRTUALENV_INTERPRETER, args, environ) + os.execve(sys.executable, args, environ) if __name__ == "__main__": diff --git a/synapse/logging/opentracing.py b/synapse/logging/opentracing.py index 3ebed5c161..f86ee9aac7 100644 --- a/synapse/logging/opentracing.py +++ b/synapse/logging/opentracing.py @@ -289,6 +289,9 @@ class SynapseTags: # Uniqueish ID of a database transaction DB_TXN_ID = "db.txn_id" + # The name of the external cache + CACHE_NAME = "cache.name" + class SynapseBaggage: FORCE_TRACING = "synapse-force-tracing" diff --git a/synapse/replication/tcp/external_cache.py b/synapse/replication/tcp/external_cache.py index bf7d017968..a448dd7eb1 100644 --- a/synapse/replication/tcp/external_cache.py +++ b/synapse/replication/tcp/external_cache.py @@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Any, Optional from prometheus_client import Counter, Histogram +from synapse.logging import opentracing from synapse.logging.context import make_deferred_yieldable from synapse.util import json_decoder, json_encoder @@ -93,14 +94,18 @@ class ExternalCache: logger.debug("Caching %s %s: %r", cache_name, key, encoded_value) - with response_timer.labels("set").time(): - return await make_deferred_yieldable( - self._redis_connection.set( - self._get_redis_key(cache_name, key), - encoded_value, - pexpire=expiry_ms, + with opentracing.start_active_span( + "ExternalCache.set", + tags={opentracing.SynapseTags.CACHE_NAME: cache_name}, + ): + with response_timer.labels("set").time(): + return await make_deferred_yieldable( + self._redis_connection.set( + self._get_redis_key(cache_name, key), + encoded_value, + pexpire=expiry_ms, + ) ) - ) async def get(self, cache_name: str, key: str) -> Optional[Any]: """Look up a key/value in the named cache.""" @@ -108,10 +113,14 @@ class ExternalCache: if self._redis_connection is None: return None - with response_timer.labels("get").time(): - result = await make_deferred_yieldable( - self._redis_connection.get(self._get_redis_key(cache_name, key)) - ) + with opentracing.start_active_span( + "ExternalCache.get", + tags={opentracing.SynapseTags.CACHE_NAME: cache_name}, + ): + with response_timer.labels("get").time(): + result = await make_deferred_yieldable( + self._redis_connection.get(self._get_redis_key(cache_name, key)) + ) logger.debug("Got cache result %s %s: %r", cache_name, key, result) diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py index 55c96a2af3..3cae6d2b55 100644 --- a/synapse/rest/client/relations.py +++ b/synapse/rest/client/relations.py @@ -35,7 +35,7 @@ class RelationPaginationServlet(RestServlet): PATTERNS = client_patterns( "/rooms/(?P[^/]*)/relations/(?P[^/]*)" "(/(?P[^/]*)(/(?P[^/]*))?)?$", - releases=(), + releases=("v1",), ) def __init__(self, hs: "HomeServer"): diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py index 419eef166a..2f2ec3a685 100644 --- a/tests/rest/client/test_relations.py +++ b/tests/rest/client/test_relations.py @@ -125,7 +125,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase): # Request the relations of the event. 
diff --git a/synapse/rest/client/relations.py b/synapse/rest/client/relations.py
index 55c96a2af3..3cae6d2b55 100644
--- a/synapse/rest/client/relations.py
+++ b/synapse/rest/client/relations.py
@@ -35,7 +35,7 @@ class RelationPaginationServlet(RestServlet):
     PATTERNS = client_patterns(
         "/rooms/(?P<room_id>[^/]*)/relations/(?P<parent_id>[^/]*)"
         "(/(?P<relation_type>[^/]*)(/(?P<event_type>[^/]*))?)?$",
-        releases=(),
+        releases=("v1",),
     )
 
     def __init__(self, hs: "HomeServer"):
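With `releases=("v1",)`, the servlet is registered under the stable `/_matrix/client/v1` prefix in addition to the unstable one, which is what the test updates that follow switch over to. For illustration, the same call made against a running homeserver with `requests` (base URL, room/event IDs and token are placeholders, not values from this PR):

```python
# Illustrative request to the now-stable relations endpoint; all
# identifiers below are placeholders.
import requests

resp = requests.get(
    "https://example.com/_matrix/client/v1/rooms/!room:example.com"
    "/relations/$parent_event",
    headers={"Authorization": "Bearer <access_token>"},
    params={"limit": 1},
    timeout=10,
)
resp.raise_for_status()
# Per MSC2675, the related events are returned in "chunk".
print([event["event_id"] for event in resp.json()["chunk"]])
```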
diff --git a/tests/rest/client/test_relations.py b/tests/rest/client/test_relations.py
index 419eef166a..2f2ec3a685 100644
--- a/tests/rest/client/test_relations.py
+++ b/tests/rest/client/test_relations.py
@@ -125,7 +125,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Request the relations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -138,7 +138,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Fetch the bundled aggregations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/event/{self.parent_id}",
+            f"/_matrix/client/v3/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -340,7 +340,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # They should be ignored when fetching relations.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{room2}/relations/{parent_id}",
+            f"/_matrix/client/v1/rooms/{room2}/relations/{parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -633,7 +633,7 @@ class RelationsTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -685,7 +685,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # Only the "good" annotation should be found.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -710,7 +710,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # annotation.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -731,7 +731,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -762,7 +762,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         # Request the relations again, but with a different direction.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations"
+            f"/_matrix/client/v1/rooms/{self.room}/relations"
             f"/{self.parent_id}?limit=1&org.matrix.msc3715.dir=f",
             access_token=self.user_token,
         )
@@ -801,7 +801,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
 
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -865,7 +865,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         for from_token in (sync_prev_batch, messages_end):
             channel = self.make_request(
                 "GET",
-                f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
+                f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
                 access_token=self.user_token,
             )
             self.assertEqual(200, channel.code, channel.json_body)
@@ -1088,7 +1088,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         # It should also be included when the entire thread is requested.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
            access_token=self.user_token,
        )
        self.assertEqual(200, channel.code, channel.json_body)
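The `from`/`next_batch` handling exercised in the pagination tests follows the usual Matrix pagination contract: pass the previous response's `next_batch` token as `from`, and stop when no token is returned. A hedged sketch of walking every page of relations (same placeholder server, IDs and token as above):

```python
# Illustrative pagination over /relations via next_batch, mirroring the
# from-token handling in the tests above. All identifiers are placeholders.
import requests

URL = (
    "https://example.com/_matrix/client/v1/rooms/!room:example.com"
    "/relations/$parent_event"
)
HEADERS = {"Authorization": "Bearer <access_token>"}

from_token = None
while True:
    params = {"limit": 1}
    if from_token is not None:
        params["from"] = from_token  # resume where the last page stopped
    page = requests.get(URL, headers=HEADERS, params=params, timeout=10).json()
    for event in page["chunk"]:
        print(event["event_id"])
    from_token = page.get("next_batch")
    if from_token is None:  # the server omits next_batch on the final page
        break
```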