Compare commits
214 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| ba33466939 | |||
| 8cb9abe8e7 | |||
| 0a6f1d1ac2 | |||
| a50fb411b3 | |||
| b82fff66df | |||
| f46b223354 | |||
| f5668f0b4a | |||
| 09b4f6e46d | |||
| 01c8f9ca69 | |||
| e5a76ec00b | |||
| 103f51d867 | |||
| f8f06fc773 | |||
| 05e8a5d298 | |||
| 3e2e76ca15 | |||
| ecef741add | |||
| d0c1f4ca4c | |||
| 4bc8cb4669 | |||
| eed38c5027 | |||
| c1482a352a | |||
| b80bb7e452 | |||
| 798deb3a10 | |||
| a1f87f57ff | |||
| fbdee86004 | |||
| 7dec4ce7e4 | |||
| dbe016e258 | |||
| 0921d93dcd | |||
| b121a3ad2b | |||
| dfc7646504 | |||
| 9f512ff537 | |||
| 88fe72cc1e | |||
| f8d3ee9570 | |||
| 3c758d9808 | |||
| aaaff98202 | |||
| 7efddbebef | |||
| 960b4fb409 | |||
| a743f7d33e | |||
| 0b014eb25e | |||
| 535a689cfc | |||
| 6b3e0ea6bd | |||
| 8af8a9bce5 | |||
| 8e2759f2d8 | |||
| 0922462fc7 | |||
| 73d8ded0b0 | |||
| e3a49f4784 | |||
| d24cd17820 | |||
| 36d8b83888 | |||
| 32545d2e26 | |||
| 5a275a2377 | |||
| 58c657322a | |||
| aa28110264 | |||
| 4bdbebccb9 | |||
| ba1588461b | |||
| a468768104 | |||
| 9535fd0f9c | |||
| 9b1f360091 | |||
| 643c0c50c1 | |||
| 320186319a | |||
| e31d06f6f0 | |||
| 86cf6a3a17 | |||
| 3810730ba5 | |||
| 641f43ba81 | |||
| 1783156dbc | |||
| 4e13743738 | |||
| 3ad74b63e5 | |||
| 5f8173dd80 | |||
| ab3165efb7 | |||
| 4586119f0b | |||
| 214f3b7d21 | |||
| 772bad2562 | |||
| 3cdf5a1386 | |||
| 961ee75a9b | |||
| 5f72ea1bde | |||
| 85ca963c1a | |||
| 98ec375b26 | |||
| e630722f11 | |||
| 0cd182f296 | |||
| dd5cc37aa4 | |||
| 95a038c106 | |||
| 2e2d8cc2f9 | |||
| 7851a2c62f | |||
| 78e4d96a4d | |||
| 7732c4902c | |||
| 36af768c13 | |||
| 1a90c1e3af | |||
| d1cd96ce29 | |||
| 3a7e97c7ad | |||
| 0bcb651b3f | |||
| 350062661c | |||
| f931c0602a | |||
| 6902e9ff2b | |||
| 05a37f4008 | |||
| 2cf74cf2fc | |||
| 6fe757d69e | |||
| ae01a7edd3 | |||
| 793d03e2c5 | |||
| 573cd0f92f | |||
| 7ec9b06303 | |||
| fd1e7d0fc2 | |||
| 163fd686b5 | |||
| 79e7c2c426 | |||
| 31c1209c50 | |||
| 9c4c49991d | |||
| 800ba87cc8 | |||
| ab3fdcf960 | |||
| 41b5f72677 | |||
| 66053b6bfb | |||
| d666fc02fa | |||
| ac80bfba42 | |||
| 42d8710f38 | |||
| 708d88b1a2 | |||
| 7a95e80418 | |||
| efdbcfd6af | |||
| ca7e34cb57 | |||
| a7293ef16f | |||
| 5218fe7670 | |||
| f608e6c8cf | |||
| 9633eb2162 | |||
| b446c99ac9 | |||
| 5c9e39e619 | |||
| 80839a44f1 | |||
| f0b03186d9 | |||
| 33ebee47e4 | |||
| c4cf916ed7 | |||
| 336bff1104 | |||
| 993d90f82b | |||
| 21351820e0 | |||
| b7762b0c9f | |||
| f871222880 | |||
| 319a805cd3 | |||
| 9b43df1f7b | |||
| e4409301ba | |||
| 4e900ece42 | |||
| bebf994ee8 | |||
| 6927d87254 | |||
| 11df4ec6c2 | |||
| 5e88143dff | |||
| 34a8370d7b | |||
| adbf975623 | |||
| 15cdcf8f30 | |||
| 5a32ec59b2 | |||
| 9a3f1f5383 | |||
| f96b85eca8 | |||
| c31c1091d4 | |||
| d8d0271977 | |||
| 2fc15ac718 | |||
| a7fb66e800 | |||
| 19a1d6a42a | |||
| c8cbd66d3b | |||
| 9b67715bc3 | |||
| 437a8ed9ef | |||
| e0bb268134 | |||
| 1f32b90b0f | |||
| 4d693f9b79 | |||
| 013f3f5e44 | |||
| 8a519f8abc | |||
| a2b00a4486 | |||
| 8a5d691140 | |||
| 512007f829 | |||
| e9220adffc | |||
| 28a64807b2 | |||
| d653f6fbec | |||
| c20d0ca6c2 | |||
| b690fe749b | |||
| 6f2943714b | |||
| 287a9c1e20 | |||
| ac95167d2f | |||
| 4ba55a620f | |||
| 8cd760fca8 | |||
| 89f11f8c6f | |||
| a4643a685c | |||
| 3c41d87b67 | |||
| 7ca8ee67a5 | |||
| 38adf14998 | |||
| 14662d3c18 | |||
| fffb3c4c8f | |||
| 61aae18d45 | |||
| 5859e2fe0c | |||
| 8b7b371ff6 | |||
| b0659a112d | |||
| 1800bd47a8 | |||
| 9925f9b8b0 | |||
| 1642abd77e | |||
| 84eb14c4d2 | |||
| 0004260952 | |||
| a503c2c388 | |||
| e689cae47d | |||
| 088f3ae182 | |||
| 8810c93e82 | |||
| 4df10d3214 | |||
| 5436b014f4 | |||
| e78d4f61fc | |||
| f4c5e5864c | |||
| c5776780f0 | |||
| 692b82838e | |||
| 516d092ff9 | |||
| 831d4797ab | |||
| 6b26536a52 | |||
| a701a09f9b | |||
| 34baf76451 | |||
| 01211e0c16 | |||
| d9bc65918e | |||
| 9d21ecf7ce | |||
| 0a59f977a2 | |||
| 6134b3079e | |||
| 1530cef192 | |||
| afa17f0eab | |||
| bf9d549e3a | |||
| 8fe930c215 | |||
| 80e0e1f35e | |||
| 2177e356bc | |||
| c46065fa3d | |||
| 872dbb0181 | |||
| 12d1f82db2 | |||
| 9e06e22064 |
@@ -0,0 +1,4 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
@@ -1,8 +0,0 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
@@ -2,29 +2,24 @@

# Test for the export-data admin command against sqlite and postgres

# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
@@ -37,14 +32,14 @@ else
fi

# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite database to postgres so we can check command works against postgres
echo "+++ Port SQLite3 database to postgres"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on postgres database
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
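The hunks above stop at the point where the script verifies its own output. A minimal sketch of that kind of check, assuming the same `/tmp/export_data*` paths used in the commands above, would be:

```sh
# Illustrative existence check only; the paths match the --output-directory
# arguments used earlier in this script.
for dir in /tmp/export_data /tmp/export_data2; do
    if [ -d "$dir/rooms" ]; then
        echo "$dir contains the expected rooms directory"
    else
        echo "$dir is missing the rooms directory" >&2
        exit 1
    fi
done
```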
@@ -1,6 +1,9 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
@@ -9,12 +12,70 @@ set -ex

apt-get update
apt-get install -y \
python3 python3-dev python3-pip python3-venv \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev
python3 python3-dev python3-pip python3-venv pipx \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

exec tox -e py3-old
# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Make the pyopenssl 17.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1.
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
-e "s/[~>]=/==/g" \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far as the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
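The sed invocation above rewrites dependency bounds in place. A hedged illustration of its effect on a single made-up dependency line (the package names here are examples, not taken from the real `pyproject.toml`):

```sh
# Lower/tilde bounds become exact pins:
echo 'attrs = ">=19.2.0"' | sed -e "s/[~>]=/==/g"
# prints: attrs = "==19.2.0"

# Caret bounds are left alone, which is what the TODO about 's/\^/==/g' refers to;
# applying that blindly would also pin `python = "^3.7"`, breaking the 3.8 runner.
echo 'frozendict = "^2.1.2"' | sed -e "s/[~>]=/==/g"
# prints: frozendict = "^2.1.2" (unchanged)
```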
@@ -1,43 +1,37 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - sets up synapse and deps
# - configures synapse and a postgres server.
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db

#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

#####

@@ -48,12 +42,12 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db

update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
.ci/scripts/postgres_exec.py \
poetry run .ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+2
-2
@@ -4,8 +4,8 @@
# things to include
!docker
!synapse
!MANIFEST.in
!README.rst
!setup.py
!pyproject.toml
!poetry.lock

**/__pycache__
@@ -22,7 +22,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
with:
mdbook-version: '0.4.9'
mdbook-version: '0.4.17'

- name: Build the documentation
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
@@ -0,0 +1,156 @@
|
||||
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
|
||||
# dependencies which match the broad requirements. Since most CI runs are against
|
||||
# the locked poetry environment, run specifically against the latest dependencies to
|
||||
# know if there's an upcoming breaking change.
|
||||
#
|
||||
# As an overview this workflow:
|
||||
# - checks out develop,
|
||||
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
|
||||
# - runs mypy and test suites in that checkout.
|
||||
#
|
||||
# Based on the twisted trunk CI job.
|
||||
|
||||
name: Latest dependencies
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 7 * * *
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||
# poetry-core versions), so we install with poetry.
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "1.2.0b1"
|
||||
# Dump installed versions for debugging.
|
||||
- run: poetry run pip list > before.txt
|
||||
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
|
||||
# `pip install matrix-synapse[all]` as closely as possible.
|
||||
- run: poetry update --no-dev
|
||||
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
||||
- run: poetry run mypy
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- database: "sqlite"
|
||||
- database: "postgres"
|
||||
postgres-version: "14"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install .[all,test]
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: python -m twisted.trial --jobs=2 tests
|
||||
env:
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
|
||||
sytest:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:testing
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sytest-tag: focal
|
||||
|
||||
- sytest-tag: focal
|
||||
postgres: postgres
|
||||
workers: workers
|
||||
redis: redis
|
||||
env:
|
||||
POSTGRES: ${{ matrix.postgres && 1}}
|
||||
WORKERS: ${{ matrix.workers && 1 }}
|
||||
REDIS: ${{ matrix.redis && 1 }}
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Ensure sytest runs `pip install`
|
||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
||||
run: rm /src/poetry.lock
|
||||
working-directory: /src
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
|
||||
# TODO: run complement (as with twisted trunk, see #12473).
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure()
|
||||
needs:
|
||||
# TODO: should mypy be included here? It feels more brittle than the other two.
|
||||
- mypy
|
||||
- trial
|
||||
- sytest
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
update_existing: true
|
||||
filename: .ci/latest_deps_build_failed_issue_template.md
|
||||
|
||||
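For local debugging, the jobs above can be approximated in a checkout with the commands the workflow itself runs; this is only a rough equivalent, and assumes poetry is already installed:

```sh
poetry install --extras all           # start from the locked environment
poetry run pip list > before.txt
poetry update --no-dev                # lift runtime deps to their latest releases
poetry run pip list > after.txt
diff -u before.txt after.txt || true  # see what moved
poetry run mypy
poetry run trial --jobs=2 tests
```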
+30
-50
@@ -15,25 +15,18 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install -e .
|
||||
- run: pip install .
|
||||
- run: scripts-dev/generate_sample_config.sh --check
|
||||
- run: scripts-dev/config-lint.sh
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
toxenv:
|
||||
- "check_codestyle"
|
||||
- "check_isort"
|
||||
- "mypy"
|
||||
- "packaging"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install tox
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
# This does a vanilla `poetry install` - no extras. I'm slightly anxious
|
||||
# that we might skip some typechecks on code that uses extras. However,
|
||||
# I think the right way to fix this is to mark any extras needed for
|
||||
# typechecking as development dependencies. To detect this, we ought to
|
||||
# turn up mypy's strictness: disallow unknown imports and accept fewer
|
||||
# uses of `Any`.
|
||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -72,23 +65,23 @@ jobs:
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
database: ["sqlite"]
|
||||
toxenv: ["py"]
|
||||
extras: ["all"]
|
||||
include:
|
||||
# Newest Python without optional deps
|
||||
- python-version: "3.10"
|
||||
toxenv: "py-noextras"
|
||||
extras: ""
|
||||
|
||||
# Oldest Python with PostgreSQL
|
||||
- python-version: "3.7"
|
||||
database: "postgres"
|
||||
postgres-version: "10"
|
||||
toxenv: "py"
|
||||
extras: "all"
|
||||
|
||||
# Newest Python with newest PostgreSQL
|
||||
- python-version: "3.10"
|
||||
database: "postgres"
|
||||
postgres-version: "14"
|
||||
toxenv: "py"
|
||||
extras: "all"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -100,17 +93,16 @@ jobs:
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- run: pip install tox
|
||||
extras: ${{ matrix.extras }}
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
@@ -129,6 +121,7 @@ jobs:
|
||||
|| true
|
||||
|
||||
trial-olddeps:
|
||||
# Note: sqlite only; no postgres
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
@@ -136,11 +129,11 @@ jobs:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Test with old deps
|
||||
uses: docker://ubuntu:focal # For old python and sqlite
|
||||
# Note: focal seems to be using 3.8, but the oldest is 3.7?
|
||||
# See https://github.com/matrix-org/synapse/issues/12343
|
||||
with:
|
||||
workdir: /github/workspace
|
||||
entrypoint: .ci/scripts/test_old_deps.sh
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
@@ -156,23 +149,24 @@ jobs:
|
||||
|
||||
trial-pypy:
|
||||
# Very slow; only run if the branch name includes 'pypy'
|
||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["pypy-3.7"]
|
||||
extras: ["all"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
@@ -271,9 +265,10 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.9"
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_export_data_command.sh
|
||||
|
||||
portdb:
|
||||
@@ -308,9 +303,10 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_synapse_port_db.sh
|
||||
|
||||
complement:
|
||||
@@ -362,27 +358,11 @@ jobs:
|
||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||
done
|
||||
|
||||
# Build initial Synapse image
|
||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
||||
working-directory: synapse
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
|
||||
# Build a ready-to-run Synapse image based on the initial image above.
|
||||
# This new image includes a config file, keys for signing and TLS, and
|
||||
# other settings to make it suitable for testing under Complement.
|
||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
||||
working-directory: complement/dockerfiles
|
||||
|
||||
# Run Complement
|
||||
- run: |
|
||||
set -o pipefail
|
||||
go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt
|
||||
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
env:
|
||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
||||
working-directory: complement
|
||||
|
||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||
tests-done:
|
||||
|
||||
@@ -6,16 +6,25 @@ on:
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e mypy
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
extras: "all"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- run: poetry run mypy
|
||||
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -23,14 +32,15 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: 3.7
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
python-version: "3.x"
|
||||
extras: "all test"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- run: poetry run trial --jobs 2 tests
|
||||
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
@@ -55,11 +65,23 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Patch dependencies
|
||||
run: .ci/patch_for_twisted_trunk.sh
|
||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||
# but the sytest-synapse container expects it to be in /venv/.
|
||||
# We symlink it before running poetry so that poetry actually
|
||||
# ends up installing to `/venv`.
|
||||
run: |
|
||||
ln -s -T /venv /src/.venv
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
working-directory: /src
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
env:
|
||||
# Use offline mode to avoid reinstalling the pinned version of
|
||||
# twisted.
|
||||
OFFLINE: 1
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
|
||||
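Each of the jobs above performs the same dependency swap before testing; the equivalent commands for reproducing it locally (they rewrite `pyproject.toml` and `poetry.lock` in your working copy) are taken directly from the workflow:

```sh
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
poetry run trial --jobs 2 tests
```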
@@ -15,6 +15,9 @@ _trial_temp*/
|
||||
.DS_Store
|
||||
__pycache__/
|
||||
|
||||
# We do want the poetry lockfile.
|
||||
!poetry.lock
|
||||
|
||||
# stuff that is likely to exist when you run a server locally
|
||||
/*.db
|
||||
/*.log
|
||||
@@ -30,6 +33,9 @@ __pycache__/
|
||||
/media_store/
|
||||
/uploads
|
||||
|
||||
# For direnv users
|
||||
/.envrc
|
||||
|
||||
# IDEs
|
||||
/.idea/
|
||||
/.ropeproject/
|
||||
|
||||
+249
-9395
File diff suppressed because it is too large
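GitHub does not render diffs past a size threshold, but they can still be inspected locally. A generic sketch, where `BASE` and `HEAD` are placeholders for the two commits being compared on this page rather than values taken from it:

```sh
git fetch origin
git diff --stat BASE..HEAD                        # locate the file with the large change
git diff BASE..HEAD -- path/to/that/file | less
```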
-54
@@ -1,54 +0,0 @@
|
||||
include LICENSE
|
||||
include VERSION
|
||||
include *.rst
|
||||
include *.md
|
||||
include demo/README
|
||||
include demo/demo.tls.dh
|
||||
include demo/*.py
|
||||
include demo/*.sh
|
||||
|
||||
include synapse/py.typed
|
||||
recursive-include synapse/storage *.sql
|
||||
recursive-include synapse/storage *.sql.postgres
|
||||
recursive-include synapse/storage *.sql.sqlite
|
||||
recursive-include synapse/storage *.py
|
||||
recursive-include synapse/storage *.txt
|
||||
recursive-include synapse/storage *.md
|
||||
|
||||
recursive-include docs *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.py
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.p8
|
||||
recursive-include tests *.crt
|
||||
recursive-include tests *.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
recursive-include synapse/static *.gif
|
||||
recursive-include synapse/static *.html
|
||||
recursive-include synapse/static *.js
|
||||
|
||||
exclude .codecov.yml
|
||||
exclude .coveragerc
|
||||
exclude .dockerignore
|
||||
exclude .editorconfig
|
||||
exclude Dockerfile
|
||||
exclude mypy.ini
|
||||
exclude sytest-blacklist
|
||||
exclude test_postgresql.sh
|
||||
|
||||
include book.toml
|
||||
include pyproject.toml
|
||||
recursive-include changelog.d *
|
||||
|
||||
include .flake8
|
||||
prune .circleci
|
||||
prune .github
|
||||
prune .ci
|
||||
prune contrib
|
||||
prune debian
|
||||
prune demo/etc
|
||||
prune docker
|
||||
prune stubs
|
||||
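With `MANIFEST.in` gone, source distributions and wheels are driven entirely by `pyproject.toml` via poetry-core (per the changelog entry about building wheels with poetry-core). One common way to exercise that locally, assuming the separate `build` package rather than anything added by this change:

```sh
pip install build
python -m build --sdist --wheel   # uses the poetry-core backend declared in pyproject.toml
ls dist/
```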
+18
-15
@@ -293,39 +293,42 @@ directory of your choice::
git clone https://github.com/matrix-org/synapse.git
cd synapse

Synapse has a number of external dependencies, that are easiest
to install using pip and a virtualenv::
Synapse has a number of external dependencies. We maintain a fixed development
environment using [poetry](https://python-poetry.org/). First, install poetry. We recommend

python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,dev]"
pip install --user pipx
pipx install poetry

as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::

poetry install --extras "all test"

This will run a process of downloading and installing all the needed
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::

pip install -e "module-name"
dependencies into a virtual env.

We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`

./demo/start.sh
poetry run ./demo/start.sh

(to stop, you can use `./demo/stop.sh`)
(to stop, you can use `poetry run ./demo/stop.sh`)

See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.

If you just want to start a single instance of the app and run it directly::

# Create the homeserver.yaml config once
python -m synapse.app.homeserver \
poetry run synapse_homeserver \
--server-name my.domain.name \
--config-path homeserver.yaml \
--generate-config \
--report-stats=[yes|no]

# Start the app
python -m synapse.app.homeserver --config-path homeserver.yaml
poetry run synapse_homeserver --config-path homeserver.yaml


Running the unit tests
@@ -334,7 +337,7 @@ Running the unit tests
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::

trial tests
poetry run trial tests

This should end with a 'PASSED' result (note that exact numbers will
differ)::
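Condensed into one place, the development workflow the updated README describes looks roughly like this (assuming `pipx` is available, as in the instructions above):

```sh
pipx install poetry
poetry install --extras "all test"
poetry run ./demo/start.sh    # three federated dev instances on ports 8080-8082
poetry run trial tests        # unit tests; should end with PASSED
poetry run ./demo/stop.sh
```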
@@ -0,0 +1 @@
|
||||
Implement [MSC3383](https://github.com/matrix-org/matrix-spec-proposals/pull/3383) for including the destination in server-to-server authentication headers. Contributed by @Bubu and @jcgruenhage for Famedly GmbH.
|
||||
@@ -1 +0,0 @@
|
||||
Add tests for database transaction callbacks.
|
||||
@@ -1 +0,0 @@
|
||||
Handle cancellation in `DatabasePool.runInteraction()`.
|
||||
@@ -0,0 +1 @@
|
||||
Prevent a sync request from removing a user's busy presence status.
|
||||
@@ -1 +0,0 @@
|
||||
Add missing type hints for cache storage.
|
||||
@@ -1 +0,0 @@
|
||||
Clean-up logic around rebasing URLs for URL image previews.
|
||||
@@ -1 +0,0 @@
|
||||
Add type hints to tests files.
|
||||
@@ -1 +0,0 @@
|
||||
Use the `ignored_users` table in additional places instead of re-parsing the account data.
|
||||
@@ -1 +0,0 @@
|
||||
Refactor the relations endpoints to add a `RelationsHandler`.
|
||||
@@ -1 +0,0 @@
|
||||
Fix the link to the module documentation in the legacy spam checker warning message.
|
||||
@@ -1 +0,0 @@
|
||||
Refactor relations tests to improve code re-use.
|
||||
@@ -0,0 +1 @@
|
||||
Fix bug with incremental sync missing events when rejoining/backfilling. Contributed by Nick @ Beeper.
|
||||
@@ -0,0 +1 @@
|
||||
Use poetry to manage Synapse's dependencies.
|
||||
@@ -0,0 +1 @@
|
||||
Fix rendering of the documentation site when using the 'print' feature.
|
||||
@@ -0,0 +1 @@
|
||||
The groups/communities feature in Synapse has been disabled by default.
|
||||
@@ -0,0 +1 @@
|
||||
Enable processing of device list updates asynchronously.
|
||||
@@ -0,0 +1 @@
|
||||
Add a manual documenting config file options.
|
||||
@@ -0,0 +1 @@
|
||||
Remove unstable identifiers from [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440).
|
||||
@@ -0,0 +1 @@
|
||||
Preparation for faster-room-join work: start a background process to resynchronise the room state after a room join.
|
||||
@@ -0,0 +1 @@
|
||||
Remove an unstable identifier from [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083).
|
||||
@@ -0,0 +1 @@
|
||||
Preparation for faster-room-join work: Implement a tracking mechanism to allow functions to wait for full room state to arrive.
|
||||
@@ -0,0 +1 @@
|
||||
Run twisted trunk CI job in the locked poetry environment.
|
||||
@@ -0,0 +1 @@
|
||||
Implement [MSC2815](https://github.com/matrix-org/matrix-spec-proposals/pull/2815) to allow room moderators to view redacted event content. Contributed by @tulir.
|
||||
@@ -0,0 +1 @@
|
||||
Run lints under poetry in CI, and remove corresponding tox lint jobs.
|
||||
@@ -0,0 +1 @@
|
||||
Run "main" trial tests under `poetry`.
|
||||
@@ -0,0 +1 @@
|
||||
Bump twisted version in `poetry.lock` to work around [pip bug #9644](https://github.com/pypa/pip/issues/9644).
|
||||
@@ -0,0 +1 @@
|
||||
Change Mutual Rooms' `unstable_features` flag to `uk.half-shot.msc2666.mutual_rooms` which matches the current MSC iteration.
|
||||
@@ -0,0 +1 @@
|
||||
Use `poetry` to manage the virtualenv in debian packages.
|
||||
@@ -0,0 +1 @@
|
||||
Fix typo in the release script help string.
|
||||
@@ -0,0 +1 @@
|
||||
Update documentation to reflect that both the `run_background_tasks_on` option and the options for moving stream writers off of the main process are no longer experimental.
|
||||
@@ -0,0 +1 @@
|
||||
Limit length of device_id to less than 512 characters.
|
||||
@@ -0,0 +1 @@
|
||||
Reintroduce the list of targets to the linter script, to avoid linting unwanted local-only directories during development.
|
||||
@@ -0,0 +1 @@
|
||||
Update worker documentation and replace old `federation_reader` with `generic_worker`.
|
||||
@@ -0,0 +1 @@
|
||||
Dockerfile-workers: reduce the amount we install in the image.
|
||||
@@ -0,0 +1 @@
|
||||
Enable processing of device list updates asynchronously.
|
||||
@@ -0,0 +1 @@
|
||||
Dockerfile-workers: give the master its own log config.
|
||||
@@ -0,0 +1 @@
|
||||
complement-synapse-workers: factor out separate entry point script.
|
||||
@@ -0,0 +1 @@
|
||||
Update `delay_cancellation` to accept any awaitable, rather than just `Deferred`s.
|
||||
@@ -0,0 +1 @@
|
||||
Add a CI job which tests Synapse against the latest version of all dependencies.
|
||||
@@ -0,0 +1 @@
|
||||
Back out experimental implementation of [MSC2314](https://github.com/matrix-org/matrix-spec-proposals/pull/2314).
|
||||
@@ -0,0 +1 @@
|
||||
Strongly recommend `poetry` for development.
|
||||
@@ -0,0 +1 @@
|
||||
Fix a long-standing bug which incorrectly caused `GET /_matrix/client/r3/rooms/{roomId}/event/{eventId}` to return edited events rather than the original.
|
||||
@@ -0,0 +1 @@
|
||||
Use poetry-core instead of setuptools to build wheels.
|
||||
@@ -0,0 +1 @@
|
||||
Fix grammatical error in federation error response when the room version of a room is unknown.
|
||||
@@ -0,0 +1 @@
|
||||
Fix a broken link in `README.rst`.
|
||||
@@ -0,0 +1 @@
|
||||
Fix bug where the admin API for [deleting forward extremities](https://github.com/matrix-org/synapse/blob/erikj/fix_delete_event_response_count/docs/admin_api/rooms.md#deleting-forward-extremities) would always return a count of 1 no matter how many extremities were deleted. Broke in v1.27.0.
|
||||
@@ -0,0 +1 @@
|
||||
Fix a minor typo in the Debian changelogs generated by the release script.
|
||||
@@ -0,0 +1 @@
|
||||
Fix a long-standing bug where the image thumbnails embedded into email notifications were broken.
|
||||
@@ -0,0 +1 @@
|
||||
Remove unnecessary configuration overrides in tests.
|
||||
@@ -0,0 +1 @@
|
||||
Use poetry-core instead of setuptools to build wheels.
|
||||
@@ -0,0 +1 @@
|
||||
Fix a bug in the implementation of MSC3202 where Synapse would use the field name `device_unused_fallback_keys`, rather than `device_unused_fallback_key_types`.
|
||||
@@ -0,0 +1 @@
|
||||
Add default_power_level_content_override config option.
|
||||
@@ -193,12 +193,15 @@ class TrivialXmppClient:
|
||||
time.sleep(7)
|
||||
print("SSRC spammer started")
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
ssrcMsg = (
|
||||
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||
% {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
)
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print("reply from ssrc announce: ", res)
|
||||
time.sleep(10)
|
||||
|
||||
Vendored
+15
-3
@@ -30,9 +30,19 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
|
||||
;;
|
||||
esac
|
||||
|
||||
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
||||
# than the 2/3 compatible `virtualenv`.
|
||||
# Manually install Poetry and export a pip-compatible `requirements.txt`
|
||||
# We need a Poetry pre-release as the export command is buggy in < 1.2
|
||||
TEMP_VENV="$(mktemp -d)"
|
||||
python3 -m venv "$TEMP_VENV"
|
||||
source "$TEMP_VENV/bin/activate"
|
||||
pip install -U pip
|
||||
pip install poetry==1.2.0b1
|
||||
poetry export --extras all --extras test -o exported_requirements.txt
|
||||
deactivate
|
||||
rm -rf "$TEMP_VENV"
|
||||
|
||||
# Use --no-deps to only install pinned versions in exported_requirements.txt,
|
||||
# and to avoid https://github.com/pypa/pip/issues/9644
|
||||
dh_virtualenv \
|
||||
--install-suffix "matrix-synapse" \
|
||||
--builtin-venv \
|
||||
@@ -41,9 +51,11 @@ dh_virtualenv \
|
||||
--preinstall="lxml" \
|
||||
--preinstall="mock" \
|
||||
--preinstall="wheel" \
|
||||
--extra-pip-arg="--no-deps" \
|
||||
--extra-pip-arg="--no-cache-dir" \
|
||||
--extra-pip-arg="--compile" \
|
||||
--extras="all,systemd,test"
|
||||
--extras="all,systemd,test" \
|
||||
--requirements="exported_requirements.txt"
|
||||
|
||||
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
||||
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
||||
|
||||
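The export step above can be reproduced outside the Debian build to inspect what `dh_virtualenv` will install; this sketch uses the same pinned poetry pre-release and must be run from a Synapse checkout:

```sh
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
"$TEMP_VENV/bin/pip" install -U pip poetry==1.2.0b1
"$TEMP_VENV/bin/poetry" export --extras all --extras test -o exported_requirements.txt
grep -c '==' exported_requirements.txt   # every entry should carry an exact pin
rm -rf "$TEMP_VENV"
```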
Vendored
+54
@@ -1,3 +1,57 @@
|
||||
matrix-synapse-py3 (1.58.0+nmu1) UNRELEASED; urgency=medium
|
||||
|
||||
* Use poetry to manage the bundled virtualenv included with this package.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Mar 2022 12:21:43 +0100
|
||||
|
||||
matrix-synapse-py3 (1.57.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.57.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 20 Apr 2022 15:27:21 +0100
|
||||
|
||||
matrix-synapse-py3 (1.57.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.57.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Apr 2022 10:58:42 +0100
|
||||
|
||||
matrix-synapse-py3 (1.57.0~rc1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.57.0~rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Apr 2022 13:36:25 +0100
|
||||
|
||||
matrix-synapse-py3 (1.56.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.56.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Apr 2022 12:38:39 +0100
|
||||
|
||||
matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.56.0~rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Mar 2022 10:40:50 +0100
|
||||
|
||||
matrix-synapse-py3 (1.55.2) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.55.2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 24 Mar 2022 19:07:11 +0000
|
||||
|
||||
matrix-synapse-py3 (1.55.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.55.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 24 Mar 2022 17:44:23 +0000
|
||||
|
||||
matrix-synapse-py3 (1.55.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.55.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Mar 2022 13:59:26 +0000
|
||||
|
||||
matrix-synapse-py3 (1.55.0~rc1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.55.0~rc1.
|
||||
|
||||
Vendored
+1
@@ -0,0 +1 @@
|
||||
exported_requirements.txt
|
||||
@@ -38,6 +38,7 @@ for port in 8080 8081 8082; do
|
||||
printf '\n\n# Customisation made by demo/start.sh\n\n'
|
||||
echo "public_baseurl: http://localhost:$port/"
|
||||
echo 'enable_registration: true'
|
||||
echo 'enable_registration_without_verification: true'
|
||||
echo ''
|
||||
|
||||
# Warning, this heredoc depends on the interaction of tabs and spaces.
|
||||
|
||||
+57
-21
@@ -14,20 +14,61 @@
|
||||
# DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
|
||||
#
|
||||
|
||||
# Irritatingly, there is no blessed guide on how to distribute an application with its
|
||||
# poetry-managed environment in a docker image. We have opted for
|
||||
# `poetry export | pip install -r /dev/stdin`, but there are known bugs in
|
||||
# in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
|
||||
# In case we get bitten by those bugs in the future, the recommendations here might
|
||||
# be useful:
|
||||
# https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
|
||||
# https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
|
||||
|
||||
|
||||
|
||||
ARG PYTHON_VERSION=3.9
|
||||
|
||||
###
|
||||
### Stage 0: builder
|
||||
### Stage 0: generate requirements.txt
|
||||
###
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
|
||||
|
||||
# RUN --mount is specific to buildkit and is documented at
|
||||
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||
# Here we use it to set up a cache for apt (and below for pip), to improve
|
||||
# rebuild speeds on slow connections.
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && apt-get install -y git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
||||
# synapse's dependencies.
|
||||
# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
|
||||
# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
|
||||
# https://github.com/python-poetry/poetry/pull/5156 and
|
||||
# https://github.com/python-poetry/poetry/issues/5141 ;
|
||||
# without it, we generate a requirements.txt with incorrect environment markers,
|
||||
# which causes necessary packages to be omitted when we `pip install`.
|
||||
#
|
||||
# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
|
||||
# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
|
||||
|
||||
WORKDIR /synapse
|
||||
|
||||
# Copy just what we need to run `poetry export`...
|
||||
COPY pyproject.toml poetry.lock /synapse/
|
||||
|
||||
RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
|
||||
|
||||
###
|
||||
### Stage 1: builder
|
||||
###
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
|
||||
|
||||
# install the OS build deps
|
||||
#
|
||||
# RUN --mount is specific to buildkit and is documented at
|
||||
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||
# Here we use it to set up a cache for apt, to improve rebuild speeds on
|
||||
# slow connections.
|
||||
#
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
@@ -45,30 +86,25 @@ RUN \
|
||||
zlib1g-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy just what we need to pip install
|
||||
COPY MANIFEST.in README.rst setup.py /synapse/
|
||||
COPY synapse/__init__.py /synapse/synapse/__init__.py
|
||||
COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
|
||||
|
||||
# To speed up rebuilds, install all of the dependencies before we copy over
|
||||
# the whole synapse project so that we this layer in the Docker cache can be
|
||||
# the whole synapse project, so that this layer in the Docker cache can be
|
||||
# used while you develop on the source
|
||||
#
|
||||
# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
|
||||
# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
|
||||
COPY --from=requirements /synapse/requirements.txt /synapse/
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
pip install --prefix="/install" --no-warn-script-location \
|
||||
/synapse[all]
|
||||
pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
|
||||
|
||||
# Copy over the rest of the project
|
||||
# Copy over the rest of the synapse source code.
|
||||
COPY synapse /synapse/synapse/
|
||||
# ... and what we need to `pip install`.
|
||||
COPY pyproject.toml README.rst /synapse/
|
||||
|
||||
# Install the synapse package itself and all of its children packages.
|
||||
#
|
||||
# This is aiming at installing only the `packages=find_packages(...)` from `setup.py
|
||||
# Install the synapse package itself.
|
||||
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
|
||||
|
||||
###
|
||||
### Stage 1: runtime
|
||||
### Stage 2: runtime
|
||||
###
|
||||
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim
|
||||
|
||||
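The comment at the top of this Dockerfile describes the chosen approach as `poetry export | pip install -r /dev/stdin`. Collapsed out of its build stages, the essence is just two commands, shown here as an illustration rather than a drop-in replacement for the staged build:

```sh
# Stage 0 equivalent: freeze the locked environment to a requirements file.
poetry export --extras all -o requirements.txt

# Stage 1 equivalent: install exactly those pins, nothing else.
pip install --prefix="/install" --no-deps --no-warn-script-location -r requirements.txt
```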
@@ -1,30 +0,0 @@
|
||||
# Use the Sytest image that comes with a lot of the build dependencies
|
||||
# pre-installed
|
||||
FROM matrixdotorg/sytest:focal
|
||||
|
||||
# The Sytest image doesn't come with python, so install that
|
||||
RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
|
||||
|
||||
# We need tox to run the tests in run_pg_tests.sh
|
||||
RUN python3 -m pip install tox
|
||||
|
||||
# Initialise the db
|
||||
RUN su -c '/usr/lib/postgresql/10/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="C.UTF-8" --lc-ctype="C.UTF-8" --username=postgres' postgres
|
||||
|
||||
# Add a user with our UID and GID so that files get created on the host owned
|
||||
# by us, not root.
|
||||
ARG UID
|
||||
ARG GID
|
||||
RUN groupadd --gid $GID user
|
||||
RUN useradd --uid $UID --gid $GID --groups sudo --no-create-home user
|
||||
|
||||
# Ensure we can start postgres by sudo-ing as the postgres user.
|
||||
RUN apt-get update && apt-get -qq install -y sudo
|
||||
RUN echo "user ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
ADD run_pg_tests.sh /run_pg_tests.sh
|
||||
# Use the "exec form" of ENTRYPOINT (https://docs.docker.com/engine/reference/builder/#entrypoint)
|
||||
# so that we can `docker run` this container and pass arguments to pg_tests.sh
|
||||
ENTRYPOINT ["/run_pg_tests.sh"]
|
||||
|
||||
USER user
|
||||
@@ -2,10 +2,19 @@
|
||||
FROM matrixdotorg/synapse
|
||||
|
||||
# Install deps
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y supervisor redis nginx
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
|
||||
redis-server nginx-light
|
||||
|
||||
# Remove the default nginx sites
|
||||
# Install supervisord with pip instead of apt, to avoid installing a second
|
||||
# copy of python.
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
pip install supervisor~=4.2
|
||||
|
||||
# Disable the default nginx sites
|
||||
RUN rm /etc/nginx/sites-enabled/default
|
||||
|
||||
# Copy Synapse worker, nginx and supervisord configuration template files
|
||||
@@ -14,13 +23,12 @@ COPY ./docker/conf-workers/* /conf/
|
||||
# Expose nginx listener port
|
||||
EXPOSE 8080/tcp
|
||||
|
||||
# Volume for user-editable config files, logs etc.
|
||||
VOLUME ["/data"]
|
||||
|
||||
# A script to read environment variables and create the necessary
|
||||
# files to run the desired worker configuration. Will start supervisord.
|
||||
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
|
||||
ENTRYPOINT ["/configure_workers_and_start.py"]
|
||||
|
||||
# Replace the healthcheck with one which checks *all* the workers. The script
|
||||
# is generated by configure_workers_and_start.py.
|
||||
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
|
||||
CMD /bin/sh /healthcheck.sh
|
||||
|
||||
+22
-24
@@ -10,10 +10,10 @@ Note that running Synapse's unit tests from within the docker image is not suppo
|
||||
|
||||
## Testing with SQLite and single-process Synapse
|
||||
|
||||
> Note that `scripts-dev/complement.sh` is a script that will automatically build
|
||||
> Note that `scripts-dev/complement.sh` is a script that will automatically build
|
||||
> and run an SQLite-based, single-process of Synapse against Complement.
|
||||
|
||||
The instructions below will set up Complement testing for a single-process,
|
||||
The instructions below will set up Complement testing for a single-process,
|
||||
SQLite-based Synapse deployment.
|
||||
|
||||
Start by building the base Synapse docker image. If you wish to run tests with the latest
|
||||
@@ -26,23 +26,22 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .
|
||||
|
||||
This will build an image with the tag `matrixdotorg/synapse`.
|
||||
|
||||
Next, build the Synapse image for Complement. You will need a local checkout
|
||||
of Complement. Change to the root of your Complement checkout and run:
|
||||
Next, build the Synapse image for Complement.
|
||||
|
||||
```sh
|
||||
docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles
|
||||
docker build -t complement-synapse -f "docker/complement/Dockerfile" docker/complement
|
||||
```
|
||||
|
||||
This will build an image with the tag `complement-synapse`, which can be handed to
|
||||
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
|
||||
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
|
||||
This will build an image with the tag `complement-synapse`, which can be handed to
|
||||
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
|
||||
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
|
||||
how to run the tests, as well as the various available command line flags.
|
||||
|
||||
## Testing with PostgreSQL and single or multi-process Synapse
|
||||
|
||||
The above docker image only supports running Synapse with SQLite and in a
single-process topology. The following instructions are used to build a Synapse image for
Complement that supports either single or multi-process topology with a PostgreSQL
|
||||
database backend.
|
||||
|
||||
As with the single-process image, build the base Synapse docker image. If you wish to run
|
||||
@@ -55,7 +54,7 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .
|
||||
|
||||
This will build an image with the tag `matrixdotorg/synapse`.
|
||||
|
||||
Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`.
|
||||
Again, from the root of the repository:
|
||||
|
||||
```sh
|
||||
@@ -64,21 +63,20 @@ docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .
|
||||
|
||||
This will build an image with the tag `matrixdotorg/synapse-workers`.
|
||||
|
||||
It's worth noting at this point that this image is fully functional, and
can be used for testing against locally. See instructions for using the container
|
||||
under
|
||||
[Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone)
|
||||
below.
|
||||
|
||||
Finally, build the Synapse image for Complement, which is based on
|
||||
`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to
|
||||
the root of your Complement checkout and run:
|
||||
`matrixdotorg/synapse-workers`.
|
||||
|
||||
```sh
|
||||
docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles
|
||||
docker build -t matrixdotorg/complement-synapse-workers -f docker/complement/SynapseWorkers.Dockerfile docker/complement
|
||||
```
|
||||
|
||||
This will build an image with the tag `complement-synapse`, which can be handed to
|
||||
This will build an image with the tag `complement-synapse-workers`, which can be handed to
|
||||
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
|
||||
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
|
||||
how to run the tests, as well as the various available command line flags.
|
||||
@@ -91,10 +89,10 @@ bundling all necessary components together for a workerised homeserver instance.
|
||||
|
||||
This includes any desired Synapse worker processes, a nginx to route traffic accordingly,
|
||||
a redis for worker communication and a supervisord instance to start up and monitor all
|
||||
processes. You will need to provide your own postgres container to connect to, and TLS
|
||||
is not handled by the container.
|
||||
|
||||
Once you've built the image using the above instructions, you can run it. Be sure
|
||||
you've set up a volume according to the [usual Synapse docker instructions](README.md).
|
||||
Then run something along the lines of:
|
||||
|
||||
@@ -112,7 +110,7 @@ docker run -d --name synapse \
|
||||
matrixdotorg/synapse-workers
|
||||
```
|
||||
|
||||
...substituting `POSTGRES*` variables for those that match a postgres host you have
|
||||
available (usually a running postgres docker container).
|
||||
|
||||
The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to
|
||||
@@ -130,11 +128,11 @@ Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no
|
||||
(leaving only the main process). The container is configured to use redis-based worker
|
||||
mode.
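
To make the `SYNAPSE_WORKER_TYPES` usage concrete, a sketch of the `docker run`
invocation above with a couple of worker types might look like the following.
The values are illustrative only; the volume path, server name and `POSTGRES_*`
settings must match your own environment, as described above:

```sh
docker run -d --name synapse \
    -v /path/to/synapse/data:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=no \
    -e POSTGRES_HOST=postgres \
    -e POSTGRES_USER=postgres \
    -e POSTGRES_PASSWORD=somesecret \
    -e SYNAPSE_WORKER_TYPES="synchrotron, federation_sender, federation_inbound" \
    matrixdotorg/synapse-workers
```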
|
||||
|
||||
Logs for workers and the main process are logged to stdout and can be viewed with
standard `docker logs` tooling. Worker logs contain their worker name
|
||||
after the timestamp.
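
For instance, assuming the container was started with `--name synapse` as in the
example above, all worker and main-process logs can be followed with:

```sh
docker logs -f synapse
```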
|
||||
|
||||
Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to
|
||||
`<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day at
00:00, according to the container's clock. Logging for the main process must still be
|
||||
configured by modifying the homeserver's log config in your Synapse data volume.
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
# A dockerfile which builds an image suitable for testing Synapse under
|
||||
# complement.
|
||||
|
||||
ARG SYNAPSE_VERSION=latest
|
||||
|
||||
FROM matrixdotorg/synapse:${SYNAPSE_VERSION}
|
||||
|
||||
ENV SERVER_NAME=localhost
|
||||
|
||||
COPY conf/* /conf/
|
||||
|
||||
# generate a signing key
|
||||
RUN generate_signing_key -o /conf/server.signing.key
|
||||
|
||||
WORKDIR /data
|
||||
|
||||
EXPOSE 8008 8448
|
||||
|
||||
ENTRYPOINT ["/conf/start.sh"]
|
||||
|
||||
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
||||
@@ -0,0 +1 @@
|
||||
Stuff for building the docker image used for testing under complement.
|
||||
@@ -0,0 +1,47 @@
|
||||
# This dockerfile builds on top of 'docker/Dockerfile-worker' in matrix-org/synapse
|
||||
# by including a built-in postgres instance, as well as setting up the homeserver so
|
||||
# that it is ready for testing via Complement.
|
||||
#
|
||||
# Instructions for building this image from those it depends on is detailed in this guide:
|
||||
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
|
||||
FROM matrixdotorg/synapse-workers
|
||||
|
||||
# Download a caddy server to stand in front of nginx and terminate TLS using Complement's
|
||||
# custom CA.
|
||||
# We include this near the top of the file in order to cache the result.
|
||||
RUN curl -OL "https://github.com/caddyserver/caddy/releases/download/v2.3.0/caddy_2.3.0_linux_amd64.tar.gz" && \
|
||||
tar xzf caddy_2.3.0_linux_amd64.tar.gz && rm caddy_2.3.0_linux_amd64.tar.gz && mv caddy /root
|
||||
|
||||
# Install postgresql
|
||||
RUN apt-get update && \
|
||||
DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y postgresql-13
|
||||
|
||||
# Configure a user and create a database for Synapse
|
||||
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
|
||||
\"ALTER USER postgres PASSWORD 'somesecret'; \
|
||||
CREATE DATABASE synapse \
|
||||
ENCODING 'UTF8' \
|
||||
LC_COLLATE='C' \
|
||||
LC_CTYPE='C' \
|
||||
template=template0;\" | psql" && pg_ctlcluster 13 main stop
|
||||
|
||||
# Modify the shared homeserver config with postgres support, certificate setup
|
||||
# and the disabling of rate-limiting
|
||||
COPY conf-workers/workers-shared.yaml /conf/workers/shared.yaml
|
||||
|
||||
WORKDIR /data
|
||||
|
||||
# Copy the caddy config
|
||||
COPY conf-workers/caddy.complement.json /root/caddy.json
|
||||
|
||||
# Copy the entrypoint
|
||||
COPY conf-workers/start-complement-synapse-workers.sh /
|
||||
|
||||
# Expose caddy's listener ports
|
||||
EXPOSE 8008 8448
|
||||
|
||||
ENTRYPOINT /start-complement-synapse-workers.sh
|
||||
|
||||
# Update the healthcheck to have a shorter check interval
|
||||
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
||||
CMD /bin/sh /healthcheck.sh
|
||||
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"apps": {
|
||||
"http": {
|
||||
"servers": {
|
||||
"srv0": {
|
||||
"listen": [
|
||||
":8448"
|
||||
],
|
||||
"routes": [
|
||||
{
|
||||
"match": [
|
||||
{
|
||||
"host": [
|
||||
"{{ server_name }}"
|
||||
]
|
||||
}
|
||||
],
|
||||
"handle": [
|
||||
{
|
||||
"handler": "subroute",
|
||||
"routes": [
|
||||
{
|
||||
"handle": [
|
||||
{
|
||||
"handler": "reverse_proxy",
|
||||
"upstreams": [
|
||||
{
|
||||
"dial": "localhost:8008"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"terminal": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"tls": {
|
||||
"automation": {
|
||||
"policies": [
|
||||
{
|
||||
"subjects": [
|
||||
"{{ server_name }}"
|
||||
],
|
||||
"issuers": [
|
||||
{
|
||||
"module": "internal"
|
||||
}
|
||||
],
|
||||
"on_demand": true
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"pki": {
|
||||
"certificate_authorities": {
|
||||
"local": {
|
||||
"name": "Complement CA",
|
||||
"root": {
|
||||
"certificate": "/complement/ca/ca.crt",
|
||||
"private_key": "/complement/ca/ca.key"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,50 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement
|
||||
|
||||
set -e
|
||||
|
||||
function log {
|
||||
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
|
||||
echo "$d $@"
|
||||
}
|
||||
|
||||
# Replace the server name in the caddy config
|
||||
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json
|
||||
|
||||
log "starting postgres"
|
||||
pg_ctlcluster 13 main start
|
||||
|
||||
log "starting caddy"
|
||||
/root/caddy start --config /root/caddy.json
|
||||
|
||||
# Set the server name of the homeserver
|
||||
export SYNAPSE_SERVER_NAME=${SERVER_NAME}
|
||||
|
||||
# No need to report stats here
|
||||
export SYNAPSE_REPORT_STATS=no
|
||||
|
||||
# Set postgres authentication details which will be placed in the homeserver config file
|
||||
export POSTGRES_PASSWORD=somesecret
|
||||
export POSTGRES_USER=postgres
|
||||
export POSTGRES_HOST=localhost
|
||||
|
||||
# Specify the workers to test with
|
||||
export SYNAPSE_WORKER_TYPES="\
|
||||
event_persister, \
|
||||
event_persister, \
|
||||
background_worker, \
|
||||
frontend_proxy, \
|
||||
event_creator, \
|
||||
user_dir, \
|
||||
media_repository, \
|
||||
federation_inbound, \
|
||||
federation_reader, \
|
||||
federation_sender, \
|
||||
synchrotron, \
|
||||
appservice, \
|
||||
pusher"
|
||||
|
||||
# Run the script that writes the necessary config files and starts supervisord, which in turn
|
||||
# starts everything else
|
||||
exec /configure_workers_and_start.py
|
||||
@@ -0,0 +1,72 @@
|
||||
## Server ##
|
||||
report_stats: False
|
||||
trusted_key_servers: []
|
||||
enable_registration: true
|
||||
enable_registration_without_verification: true
|
||||
bcrypt_rounds: 4
|
||||
|
||||
## Federation ##
|
||||
|
||||
# trust certs signed by Complement's CA
|
||||
federation_custom_ca_list:
|
||||
- /complement/ca/ca.crt
|
||||
|
||||
# unblacklist RFC1918 addresses
|
||||
federation_ip_range_blacklist: []
|
||||
|
||||
# Disable server rate-limiting
|
||||
rc_federation:
|
||||
window_size: 1000
|
||||
sleep_limit: 10
|
||||
sleep_delay: 500
|
||||
reject_limit: 99999
|
||||
concurrent: 3
|
||||
|
||||
rc_message:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_registration:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_login:
|
||||
address:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
account:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
failed_attempts:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_admin_redaction:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_joins:
|
||||
local:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
remote:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
federation_rr_transactions_per_room_per_second: 9999
|
||||
|
||||
## Experimental Features ##
|
||||
|
||||
experimental_features:
|
||||
# Enable history backfilling support
|
||||
msc2716_enabled: true
|
||||
# Enable spaces support
|
||||
spaces_enabled: true
|
||||
# Enable jump to date endpoint
|
||||
msc3030_enabled: true
|
||||
|
||||
server_notices:
|
||||
system_mxid_localpart: _server
|
||||
system_mxid_display_name: "Server Alert"
|
||||
system_mxid_avatar_url: ""
|
||||
room_name: "Server Alert"
|
||||
@@ -0,0 +1,117 @@
|
||||
## Server ##
|
||||
|
||||
server_name: SERVER_NAME
|
||||
log_config: /conf/log_config.yaml
|
||||
report_stats: False
|
||||
signing_key_path: /conf/server.signing.key
|
||||
trusted_key_servers: []
|
||||
enable_registration: true
|
||||
enable_registration_without_verification: true
|
||||
|
||||
## Listeners ##
|
||||
|
||||
tls_certificate_path: /conf/server.tls.crt
|
||||
tls_private_key_path: /conf/server.tls.key
|
||||
bcrypt_rounds: 4
|
||||
registration_shared_secret: complement
|
||||
|
||||
listeners:
|
||||
- port: 8448
|
||||
bind_addresses: ['::']
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [federation]
|
||||
|
||||
- port: 8008
|
||||
bind_addresses: ['::']
|
||||
type: http
|
||||
|
||||
resources:
|
||||
- names: [client]
|
||||
|
||||
## Database ##
|
||||
|
||||
database:
|
||||
name: "sqlite3"
|
||||
args:
|
||||
# We avoid /data, as it is a volume and is not transferred when the container is committed,
|
||||
# which is a fundamental necessity in complement.
|
||||
database: "/conf/homeserver.db"
|
||||
|
||||
## Federation ##
|
||||
|
||||
# trust certs signed by the complement CA
|
||||
federation_custom_ca_list:
|
||||
- /complement/ca/ca.crt
|
||||
|
||||
# unblacklist RFC1918 addresses
|
||||
ip_range_blacklist: []
|
||||
|
||||
# Disable server rate-limiting
|
||||
rc_federation:
|
||||
window_size: 1000
|
||||
sleep_limit: 10
|
||||
sleep_delay: 500
|
||||
reject_limit: 99999
|
||||
concurrent: 3
|
||||
|
||||
rc_message:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_registration:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_login:
|
||||
address:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
account:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
failed_attempts:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_admin_redaction:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
rc_joins:
|
||||
local:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
remote:
|
||||
per_second: 9999
|
||||
burst_count: 9999
|
||||
|
||||
federation_rr_transactions_per_room_per_second: 9999
|
||||
|
||||
## API Configuration ##
|
||||
|
||||
# A list of application service config files to use
|
||||
#
|
||||
app_service_config_files:
|
||||
AS_REGISTRATION_FILES
|
||||
|
||||
## Experimental Features ##
|
||||
|
||||
experimental_features:
|
||||
# Enable spaces support
|
||||
spaces_enabled: true
|
||||
# Enable history backfilling support
|
||||
msc2716_enabled: true
|
||||
# server-side support for partial state in /send_join responses
|
||||
msc3706_enabled: true
|
||||
# client-side support for partial state in /send_join responses
|
||||
faster_joins: true
|
||||
# Enable jump to date endpoint
|
||||
msc3030_enabled: true
|
||||
|
||||
server_notices:
|
||||
system_mxid_localpart: _server
|
||||
system_mxid_display_name: "Server Alert"
|
||||
system_mxid_avatar_url: ""
|
||||
room_name: "Server Alert"
|
||||
@@ -0,0 +1,24 @@
|
||||
version: 1
|
||||
|
||||
formatters:
|
||||
precise:
|
||||
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
|
||||
|
||||
filters:
|
||||
context:
|
||||
(): synapse.logging.context.LoggingContextFilter
|
||||
request: ""
|
||||
|
||||
handlers:
|
||||
console:
|
||||
class: logging.StreamHandler
|
||||
formatter: precise
|
||||
filters: [context]
|
||||
# log to stdout, for easier use with 'docker logs'
|
||||
stream: 'ext://sys.stdout'
|
||||
|
||||
root:
|
||||
level: INFO
|
||||
handlers: [console]
|
||||
|
||||
disable_existing_loggers: false
|
||||
Executable
+30
@@ -0,0 +1,30 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -e
|
||||
|
||||
sed -i "s/SERVER_NAME/${SERVER_NAME}/g" /conf/homeserver.yaml
|
||||
|
||||
# Add the application service registration files to the homeserver.yaml config
|
||||
for filename in /complement/appservice/*.yaml; do
|
||||
[ -f "$filename" ] || break
|
||||
|
||||
as_id=$(basename "$filename" .yaml)
|
||||
|
||||
# Insert the path to the registration file and the AS_REGISTRATION_FILES marker after
|
||||
# so we can add the next application service in the next iteration of this for loop
|
||||
sed -i "s/AS_REGISTRATION_FILES/ - \/complement\/appservice\/${as_id}.yaml\nAS_REGISTRATION_FILES/g" /conf/homeserver.yaml
|
||||
done
|
||||
# Remove the AS_REGISTRATION_FILES entry
|
||||
sed -i "s/AS_REGISTRATION_FILES//g" /conf/homeserver.yaml
|
||||
|
||||
# generate an ssl key and cert for the server, signed by the complement CA
|
||||
openssl genrsa -out /conf/server.tls.key 2048
|
||||
|
||||
openssl req -new -key /conf/server.tls.key -out /conf/server.tls.csr \
|
||||
-subj "/CN=${SERVER_NAME}"
|
||||
openssl x509 -req -in /conf/server.tls.csr \
|
||||
-CA /complement/ca/ca.crt -CAkey /complement/ca/ca.key -set_serial 1 \
|
||||
-out /conf/server.tls.crt
|
||||
|
||||
exec python -m synapse.app.homeserver -c /conf/homeserver.yaml "$@"
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
nodaemon=true
|
||||
user=root
|
||||
|
||||
[include]
|
||||
files = /etc/supervisor/conf.d/*.conf
|
||||
|
||||
[program:nginx]
|
||||
command=/usr/sbin/nginx -g "daemon off;"
|
||||
priority=500
|
||||
|
||||
@@ -29,6 +29,7 @@
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any, Dict, Mapping, Set
|
||||
|
||||
import jinja2
|
||||
import yaml
|
||||
@@ -36,7 +37,7 @@ import yaml
|
||||
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
|
||||
|
||||
|
||||
WORKERS_CONFIG = {
|
||||
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"pusher": {
|
||||
"app": "synapse.app.pusher",
|
||||
"listener_resources": [],
|
||||
@@ -307,7 +308,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
|
||||
Args:
|
||||
environ: _Environ[str]
|
||||
config_path: Where to output the generated Synapse main worker config file.
|
||||
config_path: The location of the generated Synapse main worker config file.
|
||||
data_dir: The location of the synapse data directory. Where log and
|
||||
user-facing config files live.
|
||||
"""
|
||||
@@ -320,7 +321,8 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# and adding a replication listener.
|
||||
|
||||
# First read the original config file and extract the listeners block. Then we'll add
|
||||
# another listener for replication. Later we'll write out the result.
|
||||
# another listener for replication. Later we'll write out the result to the shared
|
||||
# config file.
|
||||
listeners = [
|
||||
{
|
||||
"port": 9093,
|
||||
@@ -339,7 +341,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# base shared worker jinja2 template.
|
||||
#
|
||||
# This config file will be passed to all workers, including Synapse's main process.
|
||||
shared_config = {"listeners": listeners}
|
||||
shared_config: Dict[str, Any] = {"listeners": listeners}
|
||||
|
||||
# The supervisord config. The contents of which will be inserted into the
|
||||
# base supervisord jinja2 template.
|
||||
@@ -355,7 +357,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# worker_type: {1234, 1235, ...}}
|
||||
# }
|
||||
# and will be used to construct 'upstream' nginx directives.
|
||||
nginx_upstreams = {}
|
||||
nginx_upstreams: Dict[str, Set[int]] = {}
|
||||
|
||||
# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
|
||||
# placed after the proxy_pass directive. The main benefit to representing this data as a
|
||||
@@ -384,7 +386,11 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# A counter of worker_type -> int. Used for determining the name for a given
|
||||
# worker type when generating its config file, as each worker's name is just
|
||||
# worker_type + instance #
|
||||
worker_type_counter = {}
|
||||
worker_type_counter: Dict[str, int] = {}
|
||||
|
||||
# A list of internal endpoints to healthcheck, starting with the main process
|
||||
# which exists even if no workers do.
|
||||
healthcheck_urls = ["http://localhost:8080/health"]
|
||||
|
||||
# For each worker type specified by the user, create config values
|
||||
for worker_type in worker_types:
|
||||
@@ -404,12 +410,14 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# e.g. federation_reader1
|
||||
worker_name = worker_type + str(new_worker_count)
|
||||
worker_config.update(
|
||||
{"name": worker_name, "port": worker_port, "config_path": config_path}
|
||||
{"name": worker_name, "port": str(worker_port), "config_path": config_path}
|
||||
)
|
||||
|
||||
# Update the shared config with any worker-type specific options
|
||||
shared_config.update(worker_config["shared_extra_conf"])
|
||||
|
||||
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
|
||||
|
||||
# Check if more than one instance of this worker type has been specified
|
||||
worker_type_total_count = worker_types.count(worker_type)
|
||||
if worker_type_total_count > 1:
|
||||
@@ -438,21 +446,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
|
||||
# Write out the worker's logging config file
|
||||
|
||||
# Check whether we should write worker logs to disk, in addition to the console
|
||||
extra_log_template_args = {}
|
||||
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
|
||||
extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
|
||||
dir=data_dir, name=worker_name
|
||||
)
|
||||
|
||||
# Render and write the file
|
||||
log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
|
||||
convert(
|
||||
"/conf/log.config",
|
||||
log_config_filepath,
|
||||
worker_name=worker_name,
|
||||
**extra_log_template_args,
|
||||
)
|
||||
log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
|
||||
|
||||
# Then a worker config file
|
||||
convert(
|
||||
@@ -475,15 +469,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
# Determine the load-balancing upstreams to configure
|
||||
nginx_upstream_config = ""
|
||||
|
||||
# At the same time, prepare a list of internal endpoints to healthcheck
|
||||
# starting with the main process which exists even if no workers do.
|
||||
healthcheck_urls = ["http://localhost:8080/health"]
|
||||
|
||||
for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
|
||||
body = ""
|
||||
for port in upstream_worker_ports:
|
||||
body += " server localhost:%d;\n" % (port,)
|
||||
healthcheck_urls.append("http://localhost:%d/health" % (port,))
|
||||
|
||||
# Add to the list of configured upstreams
|
||||
nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
|
||||
@@ -493,6 +482,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
|
||||
# Finally, we'll write out the config files.
|
||||
|
||||
# log config for the master process
|
||||
master_log_config = generate_worker_log_config(environ, "master", data_dir)
|
||||
shared_config["log_config"] = master_log_config
|
||||
|
||||
# Shared homeserver config
|
||||
convert(
|
||||
"/conf/shared.yaml.j2",
|
||||
@@ -509,9 +502,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
)
|
||||
|
||||
# Supervisord config
|
||||
os.makedirs("/etc/supervisor", exist_ok=True)
|
||||
convert(
|
||||
"/conf/supervisord.conf.j2",
|
||||
"/etc/supervisor/conf.d/supervisord.conf",
|
||||
"/etc/supervisor/supervisord.conf",
|
||||
main_config_path=config_path,
|
||||
worker_config=supervisord_config,
|
||||
)
|
||||
@@ -529,12 +523,28 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
|
||||
os.mkdir(log_dir)
|
||||
|
||||
|
||||
def start_supervisord():
|
||||
"""Starts up supervisord which then starts and monitors all other necessary processes
|
||||
def generate_worker_log_config(
|
||||
environ: Mapping[str, str], worker_name: str, data_dir: str
|
||||
) -> str:
|
||||
"""Generate a log.config file for the given worker.
|
||||
|
||||
Raises: CalledProcessError if calling start.py return a non-zero exit code.
|
||||
Returns: the path to the generated file
|
||||
"""
|
||||
subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)
|
||||
# Check whether we should write worker logs to disk, in addition to the console
|
||||
extra_log_template_args = {}
|
||||
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
|
||||
extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
|
||||
dir=data_dir, name=worker_name
|
||||
)
|
||||
# Render and write the file
|
||||
log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
|
||||
convert(
|
||||
"/conf/log.config",
|
||||
log_config_filepath,
|
||||
worker_name=worker_name,
|
||||
**extra_log_template_args,
|
||||
)
|
||||
return log_config_filepath
|
||||
|
||||
|
||||
def main(args, environ):
|
||||
@@ -564,7 +574,13 @@ def main(args, environ):
|
||||
|
||||
# Start supervisord, which will start Synapse, all of the configured worker
|
||||
# processes, redis, nginx etc. according to the config we created above.
|
||||
start_supervisord()
|
||||
log("Starting supervisord")
|
||||
os.execl(
|
||||
"/usr/local/bin/supervisord",
|
||||
"supervisord",
|
||||
"-c",
|
||||
"/etc/supervisor/supervisord.conf",
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# This script runs the PostgreSQL tests inside a Docker container. It expects
|
||||
# the relevant source files to be mounted into /src (done automatically by the
|
||||
# caller script). It will set up the database, run it, and then use the tox
|
||||
# configuration to run the tests.
|
||||
|
||||
set -e
|
||||
|
||||
# Set PGUSER so Synapse's tests know what user to connect to the database with
|
||||
export PGUSER=postgres
|
||||
|
||||
# Start the database
|
||||
sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/data start
|
||||
|
||||
# Run the tests
|
||||
cd /src
|
||||
export TRIAL_FLAGS="-j 4"
|
||||
tox --workdir=./.tox-pg-container -e py37-postgres "$@"
|
||||
+5
-5
@@ -108,7 +108,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
|
||||
|
||||
# Hopefully we already have a signing key, but generate one if not.
|
||||
args = [
|
||||
"python",
|
||||
sys.executable,
|
||||
"-m",
|
||||
"synapse.app.homeserver",
|
||||
"--config-path",
|
||||
@@ -158,7 +158,7 @@ def run_generate_config(environ, ownership):
|
||||
|
||||
# generate the main config file, and a signing key.
|
||||
args = [
|
||||
"python",
|
||||
sys.executable,
|
||||
"-m",
|
||||
"synapse.app.homeserver",
|
||||
"--server-name",
|
||||
@@ -175,7 +175,7 @@ def run_generate_config(environ, ownership):
|
||||
"--open-private-ports",
|
||||
]
|
||||
# log("running %s" % (args, ))
|
||||
os.execv("/usr/local/bin/python", args)
|
||||
os.execv(sys.executable, args)
|
||||
|
||||
|
||||
def main(args, environ):
|
||||
@@ -254,12 +254,12 @@ running with 'migrate_config'. See the README for more details.
|
||||
|
||||
log("Starting synapse with args " + " ".join(args))
|
||||
|
||||
args = ["python"] + args
|
||||
args = [sys.executable] + args
|
||||
if ownership is not None:
|
||||
args = ["gosu", ownership] + args
|
||||
os.execve("/usr/sbin/gosu", args, environ)
|
||||
else:
|
||||
os.execve("/usr/local/bin/python", args, environ)
|
||||
os.execve(sys.executable, args, environ)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
# Usage
|
||||
- [Federation](federate.md)
|
||||
- [Configuration](usage/configuration/README.md)
|
||||
- [Configuration Manual](usage/configuration/config_documentation.md)
|
||||
- [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
|
||||
- [Logging Sample Config File](usage/configuration/logging_sample_config.md)
|
||||
- [Structured Logging](structured_logging.md)
|
||||
@@ -45,6 +46,7 @@
|
||||
- [Account validity callbacks](modules/account_validity_callbacks.md)
|
||||
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
|
||||
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
|
||||
- [Account data callbacks](modules/account_data_callbacks.md)
|
||||
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
|
||||
- [Workers](workers.md)
|
||||
- [Using `synctl` with Workers](synctl_workers.md)
|
||||
|
||||
File diffs suppressed because they are too large.
@@ -0,0 +1 @@
|
||||
This directory contains changelogs for previous years.
|
||||
+15
-41
@@ -6,62 +6,36 @@ The Synapse codebase uses a number of code formatting tools in order to
|
||||
quickly and automatically check for formatting (and sometimes logical)
|
||||
errors in code.
|
||||
|
||||
The necessary tools are detailed below.
|
||||
The necessary tools are:
|
||||
|
||||
First install them with:
|
||||
- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
|
||||
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
|
||||
- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
|
||||
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.
|
||||
|
||||
Install them with:
|
||||
|
||||
```sh
|
||||
pip install -e ".[lint,mypy]"
|
||||
```
|
||||
|
||||
- **black**
|
||||
The easiest way to run the lints is to invoke the linter script as follows.
|
||||
|
||||
The Synapse codebase uses [black](https://pypi.org/project/black/)
|
||||
as an opinionated code formatter, ensuring all committed code is
|
||||
properly formatted.
|
||||
|
||||
Have `black` auto-format your code (it shouldn't change any
|
||||
functionality) with:
|
||||
|
||||
```sh
|
||||
black . --exclude="\.tox|build|env"
|
||||
```
|
||||
|
||||
- **flake8**
|
||||
|
||||
`flake8` is a code checking tool. We require code to pass `flake8`
|
||||
before being merged into the codebase.
|
||||
|
||||
Check all application and test code with:
|
||||
|
||||
```sh
|
||||
flake8 synapse tests
|
||||
```
|
||||
|
||||
- **isort**
|
||||
|
||||
`isort` ensures imports are nicely formatted, and can suggest and
|
||||
auto-fix issues such as double-importing.
|
||||
|
||||
Auto-fix imports with:
|
||||
|
||||
```sh
|
||||
isort -rc synapse tests
|
||||
```
|
||||
|
||||
`-rc` means to recursively search the given directories.
|
||||
```sh
|
||||
scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
It's worth noting that modern IDEs and text editors can run these tools
|
||||
automatically on save. It may be worth looking into whether this
|
||||
functionality is supported in your editor for a more convenient
|
||||
development workflow. It is not, however, recommended to run `flake8` on
|
||||
save as it takes a while and is very resource intensive.
|
||||
development workflow. It is not, however, recommended to run `flake8` or `mypy`
|
||||
on save as they take a while and can be very resource intensive.
|
||||
|
||||
## General rules
|
||||
|
||||
- **Naming**:
|
||||
- Use camel case for class and type names
|
||||
- Use underscores for functions and variables.
|
||||
- Use `CamelCase` for class and type names
|
||||
- Use underscores for `function_names` and `variable_names`.
|
||||
- **Docstrings**: should follow the [google code
|
||||
style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
|
||||
See the
|
||||
|
||||
@@ -48,19 +48,28 @@ can find many good git tutorials on the web.
|
||||
|
||||
# 4. Install the dependencies
|
||||
|
||||
Once you have installed Python 3 and added the source, please open a terminal and
|
||||
setup a *virtualenv*, as follows:
|
||||
Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
|
||||
and development environment. Once you have installed Python 3 and added the
|
||||
source, you should install `poetry`.
|
||||
Of their installation methods, we recommend
|
||||
[installing `poetry` using `pipx`](https://python-poetry.org/docs/#installing-with-pipx),
|
||||
|
||||
```shell
|
||||
pip install --user pipx
|
||||
pipx install poetry
|
||||
```
|
||||
|
||||
but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
|
||||
for other installation methods.
|
||||
|
||||
Next, open a terminal and install dependencies as follows:
|
||||
|
||||
```sh
|
||||
cd path/where/you/have/cloned/the/repository
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install wheel
|
||||
pip install -e ".[all,dev]"
|
||||
pip install tox
|
||||
poetry install --extras all
|
||||
```
|
||||
|
||||
This will install the developer dependencies for the project.
|
||||
This will install the runtime and developer dependencies for the project.
|
||||
|
||||
|
||||
# 5. Get in touch.
|
||||
@@ -117,11 +126,10 @@ The linters look at your code and do two things:
|
||||
- ensure that your code follows the coding style adopted by the project;
|
||||
- catch a number of errors in your code.
|
||||
|
||||
The linter takes no time at all to run as soon as you've [downloaded the dependencies into your python virtual environment](#4-install-the-dependencies).
|
||||
The linter takes no time at all to run as soon as you've [downloaded the dependencies](#4-install-the-dependencies).
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh
|
||||
poetry run ./scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
Note that this script *will modify your files* to fix styling errors.
|
||||
@@ -131,15 +139,13 @@ If you wish to restrict the linters to only the files changed since the last com
|
||||
(much faster!), you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh -d
|
||||
poetry run ./scripts-dev/lint.sh -d
|
||||
```
|
||||
|
||||
Or if you know exactly which files you wish to lint, you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
poetry run ./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
```
|
||||
|
||||
## Run the unit tests (Twisted trial).
|
||||
@@ -148,16 +154,14 @@ The unit tests run parts of Synapse, including your changes, to see if anything
|
||||
was broken. They are slower than the linters but will typically catch more errors.
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests
|
||||
poetry run trial tests
|
||||
```
|
||||
|
||||
If you wish to only run *some* unit tests, you may specify
|
||||
another module instead of `tests` - or a test class or a method:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
poetry run trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
```
|
||||
|
||||
If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
|
||||
@@ -169,7 +173,7 @@ less _trial_temp/test.log
|
||||
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:
|
||||
|
||||
```sh
|
||||
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
|
||||
SYNAPSE_TEST_LOG_LEVEL=DEBUG poetry run trial tests
|
||||
```
|
||||
|
||||
By default, tests will use an in-memory SQLite database for test data. For additional
|
||||
@@ -180,7 +184,7 @@ database state to be stored in a file named `test.db` under the trial process'
|
||||
working directory. Typically, this ends up being `_trial_temp/test.db`. For example:
|
||||
|
||||
```sh
|
||||
SYNAPSE_TEST_PERSIST_SQLITE_DB=1 trial tests
|
||||
SYNAPSE_TEST_PERSIST_SQLITE_DB=1 poetry run trial tests
|
||||
```
|
||||
|
||||
The database file can then be inspected with:
|
||||
@@ -206,9 +210,10 @@ To do so, [configure Postgres](../postgres.md) and run `trial` with the
|
||||
following environment variables matching your configuration:
|
||||
|
||||
- `SYNAPSE_POSTGRES` to anything nonempty
|
||||
- `SYNAPSE_POSTGRES_HOST`
|
||||
- `SYNAPSE_POSTGRES_USER`
|
||||
- `SYNAPSE_POSTGRES_PASSWORD`
|
||||
- `SYNAPSE_POSTGRES_HOST` (optional if it's the default: UNIX socket)
|
||||
- `SYNAPSE_POSTGRES_PORT` (optional if it's the default: 5432)
|
||||
- `SYNAPSE_POSTGRES_USER` (optional if using a UNIX socket)
|
||||
- `SYNAPSE_POSTGRES_PASSWORD` (optional if using a UNIX socket)
|
||||
|
||||
For example:
|
||||
|
||||
@@ -220,26 +225,12 @@ export SYNAPSE_POSTGRES_PASSWORD=mydevenvpassword
|
||||
trial
|
||||
```
|
||||
|
||||
#### Prebuilt container
|
||||
You don't need to specify the host, user, port or password if your Postgres
|
||||
server is set to authenticate you over the UNIX socket (i.e. if the `psql` command
|
||||
works without further arguments).
|
||||
|
||||
Since configuring PostgreSQL can be fiddly, we can make use of a pre-made
|
||||
Docker container to set up PostgreSQL and run our tests for us. To do so, run
|
||||
Your Postgres account needs to be able to create databases.
|
||||
|
||||
```shell
|
||||
scripts-dev/test_postgresql.sh
|
||||
```
|
||||
|
||||
Any extra arguments to the script will be passed to `tox` and then to `trial`,
|
||||
so we can run a specific test in this container with e.g.
|
||||
|
||||
```shell
|
||||
scripts-dev/test_postgresql.sh tests.replication.test_sharded_event_persister.EventPersisterShardTestCase
|
||||
```
|
||||
|
||||
The container creates a folder in your Synapse checkout called
|
||||
`.tox-pg-container` and uses this as a tox environment. The output of any
|
||||
`trial` runs goes into `_trial_temp` in your synapse source directory — the same
|
||||
as running `trial` directly on your host machine.
|
||||
|
||||
## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).
|
||||
|
||||
@@ -254,8 +245,14 @@ configuration:
|
||||
```sh
|
||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
|
||||
```
|
||||
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
|
||||
|
||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
This configuration should generally cover your needs.
|
||||
|
||||
- To run with Postgres, supply the `-e POSTGRES=1 -e MULTI_POSTGRES=1` environment flags.
|
||||
- To run with Synapse in worker mode, supply the `-e WORKERS=1 -e REDIS=1` environment flags (in addition to the Postgres flags).
|
||||
|
||||
For more details about other configurations, see the [Docker-specific documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
|
||||
|
||||
## Run the integration tests ([Complement](https://github.com/matrix-org/complement)).
|
||||
|
||||
@@ -0,0 +1,239 @@
|
||||
# Managing dependencies with Poetry
|
||||
|
||||
This is a quick cheat sheet for developers on how to use [`poetry`](https://python-poetry.org/).
|
||||
|
||||
# Background
|
||||
|
||||
Synapse uses a variety of third-party Python packages to function as a homeserver.
|
||||
Some of these are direct dependencies, listed in `pyproject.toml` under the
|
||||
`[tool.poetry.dependencies]` section. The rest are transitive dependencies (the
|
||||
things that our direct dependencies themselves depend on, and so on recursively.)
|
||||
|
||||
We maintain a locked list of all our dependencies (transitive included) so that
|
||||
we can track exactly which version of each dependency appears in a given release.
|
||||
See [here](https://github.com/matrix-org/synapse/issues/11537#issue-1074469665)
|
||||
for discussion of why we wanted this for Synapse. We chose to use
|
||||
[`poetry`](https://python-poetry.org/) to manage this locked list; see
|
||||
[this comment](https://github.com/matrix-org/synapse/issues/11537#issuecomment-1015975819)
|
||||
for the reasoning.
|
||||
|
||||
The locked dependencies get included in our "self-contained" releases: namely,
|
||||
our docker images and our debian packages. We also use the locked dependencies
|
||||
in development and our continuous integration.
|
||||
|
||||
Separately, our "broad" dependencies—the version ranges specified in
|
||||
`pyproject.toml`—are included as metadata in our "sdists" and "wheels" [uploaded
|
||||
to PyPI](https://pypi.org/project/matrix-synapse). Installing from PyPI or from
|
||||
the Synapse source tree directly will _not_ use the locked dependencies; instead,
|
||||
they'll pull in the latest version of each package available at install time.
|
||||
|
||||
## Example dependency
|
||||
|
||||
An example may help. We have a broad dependency on
|
||||
[`phonenumbers`](https://pypi.org/project/phonenumbers/), as declared in
|
||||
this snippet from pyproject.toml [as of Synapse 1.57](
|
||||
https://github.com/matrix-org/synapse/blob/release-v1.57/pyproject.toml#L133
|
||||
):
|
||||
|
||||
```toml
|
||||
[tool.poetry.dependencies]
|
||||
# ...
|
||||
phonenumbers = ">=8.2.0"
|
||||
```
|
||||
|
||||
In our lockfile this is
|
||||
[pinned]( https://github.com/matrix-org/synapse/blob/dfc7646504cef3e4ff396c36089e1c6f1b1634de/poetry.lock#L679-L685)
|
||||
to version 8.12.44, even though
|
||||
[newer versions are available](https://pypi.org/project/phonenumbers/#history).
|
||||
|
||||
```toml
|
||||
[[package]]
|
||||
name = "phonenumbers"
|
||||
version = "8.12.44"
|
||||
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
```
|
||||
|
||||
The lockfile also includes a
|
||||
[cryptographic checksum](https://github.com/matrix-org/synapse/blob/release-v1.57/poetry.lock#L2178-L2181)
|
||||
of the sdists and wheels provided for this version of `phonenumbers`.
|
||||
|
||||
```toml
|
||||
[metadata.files]
|
||||
# ...
|
||||
phonenumbers = [
|
||||
{file = "phonenumbers-8.12.44-py2.py3-none-any.whl", hash = "sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6"},
|
||||
{file = "phonenumbers-8.12.44.tar.gz", hash = "sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce"},
|
||||
]
|
||||
```
|
||||
|
||||
We can see this pinned version inside the docker image for that release:
|
||||
|
||||
```
|
||||
$ docker pull matrixdotorg/synapse:v1.57.0
|
||||
...
|
||||
$ docker run --entrypoint pip matrixdotorg/synapse:v1.57.0 show phonenumbers
|
||||
Name: phonenumbers
|
||||
Version: 8.12.44
|
||||
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
|
||||
Home-page: https://github.com/daviddrysdale/python-phonenumbers
|
||||
Author: David Drysdale
|
||||
Author-email: dmd@lurklurk.org
|
||||
License: Apache License 2.0
|
||||
Location: /usr/local/lib/python3.9/site-packages
|
||||
Requires:
|
||||
Required-by: matrix-synapse
|
||||
```
|
||||
|
||||
Whereas the wheel metadata just contains the broad dependencies:
|
||||
|
||||
```
|
||||
$ cd /tmp
|
||||
$ wget https://files.pythonhosted.org/packages/ca/5e/d722d572cc5b3092402b783d6b7185901b444427633bd8a6b00ea0dd41b7/matrix_synapse-1.57.0rc1-py3-none-any.whl
|
||||
...
|
||||
$ unzip -c matrix_synapse-1.57.0rc1-py3-none-any.whl matrix_synapse-1.57.0rc1.dist-info/METADATA | grep phonenumbers
|
||||
Requires-Dist: phonenumbers (>=8.2.0)
|
||||
```
|
||||
|
||||
# Tooling recommendation: direnv
|
||||
|
||||
[`direnv`](https://direnv.net/) is a tool for activating environments in your
|
||||
shell inside a given directory. Its support for poetry is unofficial (a
|
||||
community wiki recipe only), but works solidly in our experience. We thoroughly
|
||||
recommend it for daily use. To use it:
|
||||
|
||||
1. [Install `direnv`](https://direnv.net/docs/installation.html) - it's likely
|
||||
packaged for your system already.
|
||||
2. Teach direnv about poetry. The [shell config here](https://github.com/direnv/direnv/wiki/Python#poetry)
|
||||
needs to be added to `~/.config/direnv/direnvrc` (or more generally `$XDG_CONFIG_HOME/direnv/direnvrc`).
|
||||
3. Mark the synapse checkout as a poetry project: `echo layout poetry > .envrc`.
|
||||
4. Convince yourself that you trust this `.envrc` configuration and project.
|
||||
Then formally confirm this to `direnv` by running `direnv allow`.
|
||||
|
||||
Then whenever you navigate to the synapse checkout, you should be able to run
|
||||
e.g. `mypy` instead of `poetry run mypy`; `python` instead of
|
||||
`poetry run python`; and your shell commands will automatically run in the
|
||||
context of poetry's venv, without having to run `poetry shell` beforehand.
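
For instance, once the steps above are done for your checkout (a sketch, assuming
direnv is hooked into your shell):

```shell
cd path/where/you/have/cloned/the/repository
# direnv activates poetry's virtualenv automatically on entering the directory,
# so the usual tools run without the `poetry run` prefix:
mypy
trial tests
```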
|
||||
|
||||
|
||||
# How do I...
|
||||
|
||||
## ...reset my venv to the locked environment?
|
||||
|
||||
```shell
|
||||
poetry install --extras all --remove-untracked
|
||||
```
|
||||
|
||||
## ...run a command in the `poetry` virtualenv?
|
||||
|
||||
Use `poetry run cmd args` when you need the python virtualenv context.
|
||||
To avoid typing `poetry run` all the time, you can run `poetry shell`
|
||||
to start a new shell in the poetry virtualenv context. Within `poetry shell`,
|
||||
`python`, `pip`, `mypy`, `trial`, etc. are all run inside the project virtualenv
|
||||
and isolated from the rest of the system.
|
||||
|
||||
Roughly speaking, the translation from a traditional virtualenv is:
|
||||
- `env/bin/activate` -> `poetry shell`, and
|
||||
- `deactivate` -> close the terminal (Ctrl-D, `exit`, etc.)
|
||||
|
||||
See also the direnv recommendation above, which makes `poetry run` and
|
||||
`poetry shell` unnecessary.
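
As a concrete sketch (the test module below is just an example), the two styles
look like:

```shell
# one-off command, prefixed with `poetry run`
poetry run trial tests.rest.admin.test_room

# or activate a shell in the virtualenv first
poetry shell
trial tests.rest.admin.test_room
exit   # leave the poetry shell when done
```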
|
||||
|
||||
|
||||
## ...inspect the `poetry` virtualenv?
|
||||
|
||||
Some suggestions:
|
||||
|
||||
```shell
|
||||
# Current env only
|
||||
poetry env info
|
||||
# All envs: this allows you to have e.g. a poetry managed venv for Python 3.7,
|
||||
# and another for Python 3.10.
|
||||
poetry env list --full-path
|
||||
poetry run pip list
|
||||
```
|
||||
|
||||
Note that `poetry show` describes the abstract *lock file* rather than your
|
||||
on-disk environment. With that said, `poetry show --tree` can sometimes be
|
||||
useful.
|
||||
|
||||
|
||||
## ...add a new dependency?
|
||||
|
||||
Either:
|
||||
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
|
||||
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
|
||||
`--extras` and `--optional` flags in particular.
|
||||
- **NB**: this specifies the new package with a version given by a "caret bound". This won't get forced to its lowest version in the old deps CI job: see [this TODO](https://github.com/matrix-org/synapse/blob/4e1374373857f2f7a911a31c50476342d9070681/.ci/scripts/test_old_deps.sh#L35-L39).
|
||||
|
||||
Include the updated `pyproject.toml` and `poetry.lock` files in your commit.
|
||||
|
||||
## ...remove a dependency?
|
||||
|
||||
This is not done often and is untested, but
|
||||
|
||||
```shell
|
||||
poetry remove packagename
|
||||
```
|
||||
|
||||
ought to do the trick. Alternatively, manually update `pyproject.toml` and
|
||||
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
|
||||
files in your commit.
|
||||
|
||||
## ...update the version range for an existing dependency?
|
||||
|
||||
Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
|
||||
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
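
For example (the package and new bound below are purely illustrative):

```shell
# 1. Edit pyproject.toml by hand, e.g. change
#        phonenumbers = ">=8.2.0"
#    to
#        phonenumbers = ">=8.2.0,<9"
# 2. Recompute the lockfile without upgrading other packages:
poetry lock --no-update
```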
|
||||
|
||||
## ...update a dependency in the locked environment?
|
||||
|
||||
Use
|
||||
|
||||
```shell
|
||||
poetry update packagename
|
||||
```
|
||||
|
||||
to use the latest version of `packagename` in the locked environment, without
|
||||
affecting the broad dependencies listed in the wheel.
|
||||
|
||||
There doesn't seem to be a way to do this whilst locking a _specific_ version of
|
||||
`packagename`. We can workaround this (crudely) as follows:
|
||||
|
||||
```shell
|
||||
poetry add packagename==1.2.3
|
||||
# This should update poetry.lock.
|
||||
|
||||
# Now undo the changes to pyproject.toml. For example
|
||||
# git restore pyproject.toml
|
||||
|
||||
# Get poetry to recompute the content-hash of pyproject.toml without changing
|
||||
# the locked package versions.
|
||||
poetry lock --no-update
|
||||
```
|
||||
|
||||
Either way, include the updated `poetry.lock` file in your commit.
|
||||
|
||||
## ...export a `requirements.txt` file?
|
||||
|
||||
```shell
|
||||
poetry export --extras all
|
||||
```
|
||||
|
||||
Be wary of bugs in `poetry export` and `pip install -r requirements.txt`.
|
||||
|
||||
Note: `poetry export` will be made a plugin in Poetry 1.2. Additional config may
|
||||
be required.
|
||||
|
||||
## ...build a test wheel?
|
||||
|
||||
I usually use
|
||||
|
||||
```shell
|
||||
poetry run pip install build && poetry run python -m build
|
||||
```
|
||||
|
||||
because [`build`](https://github.com/pypa/build) is a standardish tool which
|
||||
doesn't require poetry. (It's what we use in CI too). However, you could try
|
||||
`poetry build` too.
|
||||
@@ -39,7 +39,8 @@ yet correlated to the DAG.
|
||||
Outliers typically arise when we fetch the auth chain or state for a given
|
||||
event. When that happens, we just grab the events in the state/auth chain,
|
||||
without calculating the state at those events, or backfilling their
|
||||
`prev_events`.
|
||||
`prev_events`. Since we don't have the state at any events fetched in that
|
||||
way, we mark them as outliers.
|
||||
|
||||
So, typically, we won't have the `prev_events` of an `outlier` in the database,
|
||||
(though it's entirely possible that we *might* have them for some other
|
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
# Account data callbacks
|
||||
|
||||
Account data callbacks allow module developers to react to changes of the account data
|
||||
of local users. Account data callbacks can be registered using the module API's
|
||||
`register_account_data_callbacks` method.
|
||||
|
||||
## Callbacks
|
||||
|
||||
The available account data callbacks are:
|
||||
|
||||
### `on_account_data_updated`
|
||||
|
||||
_First introduced in Synapse v1.57.0_
|
||||
|
||||
```python
|
||||
async def on_account_data_updated(
|
||||
user_id: str,
|
||||
room_id: Optional[str],
|
||||
account_data_type: str,
|
||||
content: "synapse.module_api.JsonDict",
|
||||
) -> None:
|
||||
```
|
||||
|
||||
Called after user's account data has been updated. The module is given the
|
||||
Matrix ID of the user whose account data is changing, the room ID the data is associated
|
||||
with, the type associated with the change, as well as the new content. If the account
|
||||
data is not associated with a specific room, then the room ID is `None`.
|
||||
|
||||
This callback is triggered when new account data is added or when the data associated with
|
||||
a given type (and optionally room) changes. This includes deletion, since in Matrix,
|
||||
deleting account data consists of replacing the data associated with a given type
|
||||
(and optionally room) with an empty dictionary (`{}`).
|
||||
|
||||
Note that this doesn't trigger when changing the tags associated with a room, as these are
|
||||
processed separately by Synapse.
|
||||
|
||||
If multiple modules implement this callback, Synapse runs them all in order.
|
||||
|
||||
## Example
|
||||
|
||||
The example below is a module that implements the `on_account_data_updated` callback, and
|
||||
sends an event to an audit room when a user changes their account data.
|
||||
|
||||
```python
|
||||
import json
|
||||
import attr
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from synapse.module_api import JsonDict, ModuleApi
|
||||
from synapse.module_api.errors import ConfigError
|
||||
|
||||
|
||||
@attr.s(auto_attribs=True)
|
||||
class CustomAccountDataConfig:
|
||||
audit_room: str
|
||||
sender: str
|
||||
|
||||
|
||||
class CustomAccountDataModule:
|
||||
def __init__(self, config: CustomAccountDataConfig, api: ModuleApi):
|
||||
self.api = api
|
||||
self.config = config
|
||||
|
||||
self.api.register_account_data_callbacks(
|
||||
on_account_data_updated=self.log_new_account_data,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def parse_config(config: Dict[str, Any]) -> CustomAccountDataConfig:
|
||||
def check_in_config(param: str):
|
||||
if param not in config:
|
||||
raise ConfigError(f"'{param}' is required")
|
||||
|
||||
check_in_config("audit_room")
|
||||
check_in_config("sender")
|
||||
|
||||
return CustomAccountDataConfig(
|
||||
audit_room=config["audit_room"],
|
||||
sender=config["sender"],
|
||||
)
|
||||
|
||||
async def log_new_account_data(
|
||||
self,
|
||||
user_id: str,
|
||||
room_id: Optional[str],
|
||||
account_data_type: str,
|
||||
content: JsonDict,
|
||||
) -> None:
|
||||
content_raw = json.dumps(content)
|
||||
msg_content = f"{user_id} has changed their account data for type {account_data_type} to: {content_raw}"
|
||||
|
||||
if room_id is not None:
|
||||
msg_content += f" (in room {room_id})"
|
||||
|
||||
await self.api.create_and_send_event_into_room(
|
||||
{
|
||||
"room_id": self.config.audit_room,
|
||||
"sender": self.config.sender,
|
||||
"type": "m.room.message",
|
||||
"content": {
|
||||
"msgtype": "m.text",
|
||||
"body": msg_content
|
||||
}
|
||||
}
|
||||
)
|
||||
```
|
||||
@@ -172,7 +172,7 @@ any of the subsequent implementations of this callback.
|
||||
_First introduced in Synapse v1.37.0_
|
||||
|
||||
```python
|
||||
async def check_username_for_spam(user_profile: Dict[str, str]) -> bool
|
||||
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
|
||||
```
|
||||
|
||||
Called when computing search results in the user directory. The module must return a
|
||||
@@ -182,9 +182,11 @@ search results; otherwise return `False`.
|
||||
|
||||
The profile is represented as a dictionary with the following keys:
|
||||
|
||||
* `user_id`: The Matrix ID for this user.
|
||||
* `display_name`: The user's display name.
|
||||
* `avatar_url`: The `mxc://` URL to the user's avatar.
|
||||
* `user_id: str`. The Matrix ID for this user.
|
||||
* `display_name: Optional[str]`. The user's display name, or `None` if this user
|
||||
has not set a display name.
|
||||
* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
|
||||
if this user has not set an avatar.
|
||||
|
||||
The module is given a copy of the original dictionary, so modifying it from within the
|
||||
module cannot modify a user's profile when included in user directory search results.
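
As an illustrative sketch only (this module is not part of the documentation above),
these keys might be used to hide users who have not set an avatar from the search
results:

```python
from typing import Any, Dict

from synapse.module_api import ModuleApi, UserProfile


class HideAvatarlessUsersModule:
    def __init__(self, config: Dict[str, Any], api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_username_for_spam=self.check_username_for_spam,
        )

    async def check_username_for_spam(self, user_profile: UserProfile) -> bool:
        # Returning True excludes this profile from user directory search results.
        return user_profile["avatar_url"] is None
```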
Some files were not shown because too many files have changed in this diff.