
Merge branch 'develop' into dmr/pyproject-poetry

Signed-off-by: Dan Callahan <danc@element.io>
Dan Callahan
2022-04-07 14:14:29 +01:00
9 changed files with 98 additions and 65 deletions

changelog.d/12380.misc (new file)

@@ -0,0 +1 @@
+Add opentracing spans to calls to external cache.

changelog.d/12385.docker (new file)

@@ -0,0 +1 @@
+Bundle locked versions of dependencies into the Docker image.

changelog.d/… (new file)

@@ -0,0 +1 @@
+Support the stable `v1` endpoint for `/relations`, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).

docker/Dockerfile

@@ -14,31 +14,59 @@
 # DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
 #
-ARG PYTHON_VERSION=3.9
-FROM docker.io/python:${PYTHON_VERSION}-slim as base
-###
-### Stage 0: builder
-###
 # Irritatingly, there is no blessed guide on how to distribute an application with its
-# poetry-managed environment in a docker image. For a while,
-# `poetry export | pip install -r /dev/stdin` seemed plausible but is limited by bugs
+# poetry-managed environment in a docker image. We have opted for
+# `poetry export | pip install -r /dev/stdin`, but there are known bugs
 # in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
-# This is inspired from:
+# In case we get bitten by those bugs in the future, the recommendations here might
+# be useful:
 # https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
 # https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
-FROM base as builder
+ARG PYTHON_VERSION=3.9
+###
+### Stage 0: generate requirements.txt
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
 # RUN --mount is specific to buildkit and is documented at
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
-# Here we use it to set up a cache for pip (below, for apt and poetry), to improve
+# Here we use it to set up a cache for apt (and below for pip), to improve
 # rebuild speeds on slow connections.
-# We install poetry as --user so that it doesn't end up in the system-wide python
-# installation. That gets copied later into the runtime image.
+RUN \
+    --mount=type=cache,target=/var/cache/apt,sharing=locked \
+    --mount=type=cache,target=/var/lib/apt,sharing=locked \
+    apt-get update && apt-get install -y git \
+    && rm -rf /var/lib/apt/lists/*
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
+# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
+# https://github.com/python-poetry/poetry/pull/5156 and
+# https://github.com/python-poetry/poetry/issues/5141 ;
+# without it, we generate a requirements.txt with incorrect environment markers,
+# which causes necessary packages to be omitted when we `pip install`.
+#
+# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
+# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
 RUN --mount=type=cache,target=/root/.cache/pip \
-    pip install --user poetry==1.1.12
+    pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
 WORKDIR /synapse
+# Copy just what we need to run `poetry export`...
+COPY pyproject.toml poetry.lock README.rst /synapse/
+RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
+###
+### Stage 1: builder
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 # install the OS build deps
 RUN \
@@ -58,33 +86,25 @@ RUN \
     zlib1g-dev \
     && rm -rf /var/lib/apt/lists/*
 WORKDIR /synapse
-# Copy just what we need to run `poetry install`
-COPY pyproject.toml poetry.lock README.rst /synapse/
-# Install to the Python installation which hosts `pip`. In this case, it's the system
-# Python.
-ENV POETRY_VIRTUALENVS_IN_PROJECT=true \
-    POETRY_VIRTUALENVS_CREATE=true \
-    POETRY_HOME=/opt/poetry
 # To speed up rebuilds, install all of the dependencies before we copy over
 # the whole synapse project, so that this layer in the Docker cache can be
 # used while you develop on the source
-RUN --mount=type=cache,target=/opt/poetry/artifacts \
-    --mount=type=cache,target=/opt/poetry/.cache/pypoetry/cache \
-    /root/.local/bin/poetry install --no-dev --no-root --no-interaction --no-ansi --extras all
+#
+# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
+COPY --from=requirements /synapse/requirements.txt /synapse/
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install --prefix="/install" --no-warn-script-location -r /synapse/requirements.txt
-# Copy over the synapse source code.
+# Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
+# ... and what we need to `pip install`.
+COPY pyproject.toml poetry.lock README.rst /synapse/
-# Install the synapse package itself, by omitting the --no-root argument
-RUN --mount=type=cache,target=/opt/poetry/artifacts \
-    --mount=type=cache,target=/opt/poetry/cache \
-    /root/.local/bin/poetry install --no-dev --no-interaction --no-ansi --extras all
+# Install the synapse package itself.
+RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 ###
-### Stage 1: runtime
+### Stage 2: runtime
 ###
-FROM base
+FROM docker.io/python:${PYTHON_VERSION}-slim
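
The net effect of the two build stages above is just "lock → export → pip install". As a minimal sketch, the same flow outside Docker (assuming `poetry` and `pip` are on PATH and you are in a checkout containing pyproject.toml and poetry.lock) would look like:

import subprocess

# Stage 0 equivalent: resolve poetry.lock into a pinned requirements.txt.
subprocess.run(
    ["poetry", "export", "--extras", "all", "-o", "requirements.txt"],
    check=True,
)

# Stage 1 equivalent: install those exact pins into a standalone prefix,
# mirroring `pip install --prefix="/install" ... -r /synapse/requirements.txt`.
subprocess.run(
    ["pip", "install", "--prefix", "./install", "--no-warn-script-location",
     "-r", "requirements.txt"],
    check=True,
)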

docker/start.py

@@ -9,8 +9,6 @@ import sys
 import jinja2
-VIRTUALENV_INTERPRETER = "/synapse/.venv/bin/python"
 # Utility functions
 def log(txt):
@@ -110,7 +108,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
     # Hopefully we already have a signing key, but generate one if not.
     args = [
-        VIRTUALENV_INTERPRETER,
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--config-path",
@@ -160,7 +158,7 @@ def run_generate_config(environ, ownership):
     # generate the main config file, and a signing key.
     args = [
-        VIRTUALENV_INTERPRETER,
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--server-name",
@@ -177,7 +175,7 @@ def run_generate_config(environ, ownership):
"--open-private-ports",
]
# log("running %s" % (args, ))
os.execv(VIRTUALENV_INTERPRETER, args)
os.execv(sys.executable, args)
def main(args, environ):
@@ -256,12 +254,12 @@ running with 'migrate_config'. See the README for more details.
log("Starting synapse with args " + " ".join(args))
args = [VIRTUALENV_INTERPRETER] + args
args = [sys.executable] + args
if ownership is not None:
args = ["gosu", ownership] + args
os.execve("/usr/sbin/gosu", args, environ)
else:
os.execve(VIRTUALENV_INTERPRETER, args, environ)
os.execve(sys.executable, args, environ)
if __name__ == "__main__":
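
The replacement works because `sys.executable` is the absolute path of the interpreter running start.py, so the re-exec no longer cares whether Synapse lives in /synapse/.venv or the system Python. A minimal sketch of the pattern (the `--help` argument is just a harmless placeholder):

import os
import sys

# argv[0] is conventionally the program name; the rest are the real arguments,
# as passed to `python -m synapse.app.homeserver` in start.py above.
args = [sys.executable, "-m", "synapse.app.homeserver", "--help"]

# execv replaces the current process image and never returns on success.
os.execv(sys.executable, args)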

synapse/logging/opentracing.py

@@ -289,6 +289,9 @@ class SynapseTags:
     # Uniqueish ID of a database transaction
     DB_TXN_ID = "db.txn_id"
+    # The name of the external cache
+    CACHE_NAME = "cache.name"
 class SynapseBaggage:
     FORCE_TRACING = "synapse-force-tracing"

synapse/replication/tcp/external_cache.py

@@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Any, Optional
 from prometheus_client import Counter, Histogram
+from synapse.logging import opentracing
 from synapse.logging.context import make_deferred_yieldable
 from synapse.util import json_decoder, json_encoder
@@ -93,14 +94,18 @@ class ExternalCache:
logger.debug("Caching %s %s: %r", cache_name, key, encoded_value)
with response_timer.labels("set").time():
return await make_deferred_yieldable(
self._redis_connection.set(
self._get_redis_key(cache_name, key),
encoded_value,
pexpire=expiry_ms,
with opentracing.start_active_span(
"ExternalCache.set",
tags={opentracing.SynapseTags.CACHE_NAME: cache_name},
):
with response_timer.labels("set").time():
return await make_deferred_yieldable(
self._redis_connection.set(
self._get_redis_key(cache_name, key),
encoded_value,
pexpire=expiry_ms,
)
)
)
async def get(self, cache_name: str, key: str) -> Optional[Any]:
"""Look up a key/value in the named cache."""
@@ -108,10 +113,14 @@ class ExternalCache:
         if self._redis_connection is None:
             return None
-        with response_timer.labels("get").time():
-            result = await make_deferred_yieldable(
-                self._redis_connection.get(self._get_redis_key(cache_name, key))
-            )
+        with opentracing.start_active_span(
+            "ExternalCache.get",
+            tags={opentracing.SynapseTags.CACHE_NAME: cache_name},
+        ):
+            with response_timer.labels("get").time():
+                result = await make_deferred_yieldable(
+                    self._redis_connection.get(self._get_redis_key(cache_name, key))
+                )
         logger.debug("Got cache result %s %s: %r", cache_name, key, result)

synapse/rest/client/relations.py

@@ -35,7 +35,7 @@ class RelationPaginationServlet(RestServlet):
     PATTERNS = client_patterns(
         "/rooms/(?P<room_id>[^/]*)/relations/(?P<parent_id>[^/]*)"
         "(/(?P<relation_type>[^/]*)(/(?P<event_type>[^/]*))?)?$",
-        releases=(),
+        releases=("v1",),
     )
     def __init__(self, hs: "HomeServer"):
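
For readers unfamiliar with `client_patterns`: the `releases` tuple controls which stable prefixes a servlet is mounted on, in addition to the unstable prefix. A rough sketch of the idea (simplified; the real helper lives in Synapse's client REST layer and takes more options):

from typing import Iterable, List

def client_patterns_sketch(
    path_regex: str, releases: Iterable[str] = ("r0", "v3")
) -> List[str]:
    # Always expose the unstable prefix, plus one pattern per listed release.
    patterns = ["^/_matrix/client/unstable" + path_regex]
    for release in releases:
        patterns.append(f"^/_matrix/client/{release}" + path_regex)
    return patterns

# releases=() meant unstable-only; releases=("v1",) adds the stable
# /_matrix/client/v1/... route exercised by the updated tests below.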

tests/rest/client/test_relations.py

@@ -125,7 +125,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Request the relations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -138,7 +138,7 @@ class BaseRelationsTestCase(unittest.HomeserverTestCase):
         # Fetch the bundled aggregations of the event.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/event/{self.parent_id}",
+            f"/_matrix/client/v3/rooms/{self.room}/event/{self.parent_id}",
             access_token=self.user_token,
         )
         self.assertEquals(200, channel.code, channel.json_body)
@@ -340,7 +340,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # They should be ignored when fetching relations.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{room2}/relations/{parent_id}",
+            f"/_matrix/client/v1/rooms/{room2}/relations/{parent_id}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -633,7 +633,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -685,7 +685,7 @@ class RelationsTestCase(BaseRelationsTestCase):
# Only the "good" annotation should be found.
channel = self.make_request(
"GET",
f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
access_token=self.user_token,
)
self.assertEqual(200, channel.code, channel.json_body)
@@ -710,7 +710,7 @@ class RelationsTestCase(BaseRelationsTestCase):
         # annotation.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=10",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=10",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -731,7 +731,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -762,7 +762,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         # Request the relations again, but with a different direction.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations"
+            f"/_matrix/client/v1/rooms/{self.room}/relations"
             f"/{self.parent_id}?limit=1&org.matrix.msc3715.dir=f",
             access_token=self.user_token,
         )
@@ -801,7 +801,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1{from_token}",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)
@@ -865,7 +865,7 @@ class RelationPaginationTestCase(BaseRelationsTestCase):
         for from_token in (sync_prev_batch, messages_end):
             channel = self.make_request(
                 "GET",
-                f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
+                f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?from={from_token}",
                 access_token=self.user_token,
             )
             self.assertEqual(200, channel.code, channel.json_body)
@@ -1088,7 +1088,7 @@ class BundledAggregationsTestCase(BaseRelationsTestCase):
         # It should also be included when the entire thread is requested.
         channel = self.make_request(
             "GET",
-            f"/_matrix/client/unstable/rooms/{self.room}/relations/{self.parent_id}?limit=1",
+            f"/_matrix/client/v1/rooms/{self.room}/relations/{self.parent_id}?limit=1",
             access_token=self.user_token,
         )
         self.assertEqual(200, channel.code, channel.json_body)