Compare commits
2 Commits
erikj/tree ... erikj/work

| Author | SHA1 | Date |
|---|---|---|
| | a48296dd86 | |
| | 13f9422e38 | |
13 .buildkite/.env Normal file
@@ -0,0 +1,13 @@
CI
BUILDKITE
BUILDKITE_BUILD_NUMBER
BUILDKITE_BRANCH
BUILDKITE_BUILD_NUMBER
BUILDKITE_JOB_ID
BUILDKITE_BUILD_URL
BUILDKITE_PROJECT_SLUG
BUILDKITE_COMMIT
BUILDKITE_PULL_REQUEST
BUILDKITE_TAG
CODECOV_TOKEN
TRIAL_FLAGS
35 .buildkite/merge_base_branch.sh Executable file
@@ -0,0 +1,35 @@
#!/usr/bin/env bash

set -e

if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
echo "Not merging forward, as this is a release branch"
exit 0
fi

if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
echo "Not a pull request, or hasn't had a PR opened yet..."

# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
# can probably assume it's based on it and will be merged into it.
GITBASE="develop"
else
# Get the reference, using the GitHub API
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi

echo "--- merge_base_branch $GITBASE"

# Show what we are before
git --no-pager show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit --no-commit origin/$GITBASE

# Show what we are after.
git --no-pager show -s
@@ -3,7 +3,7 @@
# CI's Docker setup at the point where this file is considered.
server_name: "localhost:8800"

signing_key_path: ".ci/test.signing.key"
signing_key_path: ".buildkite/test.signing.key"

report_stats: false

@@ -11,7 +11,7 @@ database:
name: "psycopg2"
args:
user: postgres
host: localhost
host: postgres
password: postgres
database: synapse
@@ -23,7 +23,7 @@ import psycopg2
# We use "postgres" as a database because it's bound to exist and the "synapse" one
# doesn't exist yet.
db_conn = psycopg2.connect(
user="postgres", host="localhost", password="postgres", dbname="postgres"
user="postgres", host="postgres", password="postgres", dbname="postgres"
)
db_conn.autocommit = True
cur = db_conn.cursor()
16 .buildkite/scripts/test_old_deps.sh Executable file
@@ -0,0 +1,16 @@
#!/usr/bin/env bash

# this script is run by buildkite in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.

set -ex

apt-get update
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

exec tox -e py3-old,combine
57 .buildkite/scripts/test_synapse_port_db.sh Executable file
@@ -0,0 +1,57 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - sets up synapse and deps
# - runs the port script on a prepopulated test sqlite db
# - also runs it against an new sqlite db


set -xe
cd `dirname $0`/../..

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml

# Create the PostgreSQL database.
./.buildkite/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml

#####

# Now do the same again, on an empty database.

echo "--- Prepare empty SQLite database"

# we do this by deleting the sqlite db, and then doing the same again.
rm .buildkite/test_db.db

scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml

# re-create the PostgreSQL database.
./.buildkite/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
@@ -3,14 +3,14 @@
# schema and run background updates on it.
server_name: "localhost:8800"

signing_key_path: ".ci/test.signing.key"
signing_key_path: ".buildkite/test.signing.key"

report_stats: false

database:
name: "sqlite3"
args:
database: ".ci/test_db.db"
database: ".buildkite/test_db.db"

# Suppress the key server warning.
trusted_key_servers: []
10 .buildkite/worker-blacklist Normal file
@@ -0,0 +1,10 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.

Can re-join room if re-invited

# new failures as of https://github.com/matrix-org/sytest/pull/732
Device list doesn't change if remote server is down

# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
Server correctly handles incoming m.device_list_update
@@ -1,93 +0,0 @@
{{- /*gotype: github.com/haveyoudebuggedit/gotestfmt/parser.Package*/ -}}
{{- /*
This template contains the format for an individual package. GitHub actions does not currently support nested groups so
we are creating a stylized header for each package.

This template is based on https://github.com/haveyoudebuggedit/gotestfmt/blob/f179b0e462a9dcf7101515d87eec4e4d7e58b92a/.gotestfmt/github/package.gotpl
which is under the Unlicense licence.
*/ -}}
{{- $settings := .Settings -}}
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
{{- if eq .Result "PASS" -}}
{{ "\033" }}[0;32m
{{- else if eq .Result "SKIP" -}}
{{ "\033" }}[0;33m
{{- else -}}
{{ "\033" }}[0;31m
{{- end -}}
📦 {{ .Name }}{{- "\033" }}[0m
{{- with .Coverage -}}
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
{{- end -}}
{{- "\n" -}}
{{- with .Reason -}}
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
{{- end -}}
{{- with .Output -}}
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Failing tests are first */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}


{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
::group::{{ "\033" }}[0;33m🚧{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}


{{- /* Then passing tests are last */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}

{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}

::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- "\n" -}}
{{- end -}}
@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
@@ -1,25 +0,0 @@
#!/bin/bash
#
# Fetches a version of complement which best matches the current build.
#
# The tarball is unpacked into `./complement`.

set -e
mkdir -p complement

# Pick an appropriate version of complement. Depending on whether this is a PR or release,
# etc. we need to use different fallbacks:
#
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
# for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
# (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
# Skip empty branch names and merge commits.
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
continue
fi

(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
@@ -1,36 +0,0 @@
#!/bin/sh
#
# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
#
# Must be called after Synapse has been checked out to `synapse/`.
#
set -eu

alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

block Set Go Version
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path

# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
endblock

block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
mkdir .gotestfmt/github -p
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
endblock

block Check out Complement
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fallback to HEAD.
synapse/.ci/scripts/checkout_complement.sh
endblock
@@ -1,52 +0,0 @@
#!/usr/bin/env bash

# Test for the export-data admin command against sqlite and postgres

# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
dir="/tmp/export_data/rooms"
if [ -d "$dir" ]; then
echo "Command successful, this test passes"
else
echo "No output directories found, the command fails against a sqlite database."
exit 1
fi

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite databse to postgres so we can check command works against postgres
echo "+++ Port SQLite3 databse to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
dir2="/tmp/export_data2/rooms"
if [ -d "$dir2" ]; then
echo "Command successful, this test passes"
else
echo "No output directories found, the command fails against a postgres database."
exit 1
fi
@@ -1,83 +0,0 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive

set -ex

apt-get update
apt-get install -y \
python3 python3-dev python3-pip python3-venv pipx \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7", which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
@@ -1,53 +0,0 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - runs the port script on a prepopulated test sqlite db
# - also runs it against an new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

#####

# Now do the same again, on an empty database.

echo "--- Prepare empty SQLite database"

# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db

poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
@@ -1,4 +0,0 @@
---
title: CI run against Twisted trunk is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
@@ -1,2 +0,0 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.
@@ -3,9 +3,11 @@

# things to include
!docker
!scripts
!synapse
!MANIFEST.in
!README.rst
!pyproject.toml
!poetry.lock
!setup.py
!synctl

**/__pycache__
@@ -7,4 +7,3 @@ root = true
[*.py]
indent_style = space
indent_size = 4
max_line_length = 88
11 .flake8
@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
@@ -6,6 +6,3 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56

# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1

# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
2 .github/CODEOWNERS vendored
@@ -1,2 +0,0 @@
# Automatically request reviews from the synapse-core team when a pull request comes in.
* @matrix-org/synapse-core
72 .github/ISSUE_TEMPLATE/BUG_REPORT.md vendored Normal file
@@ -0,0 +1,72 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
|
||||
---
|
||||
|
||||
<!--
|
||||
|
||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
|
||||
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
|
||||
|
||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
||||
|
||||
This is a bug report template. By following the instructions below and
|
||||
filling out the sections with your information, you will help the us to get all
|
||||
the necessary data to fix your issue.
|
||||
|
||||
You can also preview your report before submitting it. You may remove sections
|
||||
that aren't relevant to your particular case.
|
||||
|
||||
Text between <!-- and --> marks will be invisible in the report.
|
||||
|
||||
-->
|
||||
|
||||
### Description
|
||||
|
||||
<!-- Describe here the problem that you are experiencing -->
|
||||
|
||||
### Steps to reproduce
|
||||
|
||||
- list the steps
|
||||
- that reproduce the bug
|
||||
- using hyphens as bullet points
|
||||
|
||||
<!--
|
||||
Describe how what happens differs from what you expected.
|
||||
|
||||
If you can identify any relevant log snippets from _homeserver.log_, please include
|
||||
those (please be careful to remove any personal or private data). Please surround them with
|
||||
``` (three backticks, on a line on their own), so that they are formatted legibly.
|
||||
-->
|
||||
|
||||
### Version information
|
||||
|
||||
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
||||
|
||||
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
||||
- **Homeserver**:
|
||||
|
||||
If not matrix.org:
|
||||
|
||||
<!--
|
||||
What version of Synapse is running?
|
||||
|
||||
You can find the Synapse version with this command:
|
||||
|
||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
||||
|
||||
(You may need to replace `localhost:8008` if Synapse is not configured to
|
||||
listen on that port.)
|
||||
-->
|
||||
- **Version**:
|
||||
|
||||
- **Install method**:
|
||||
<!-- examples: package manager/git clone/pip -->
|
||||
|
||||
- **Platform**:
|
||||
<!--
|
||||
Tell us about the environment in which your homeserver is operating
|
||||
distro, hardware, if it's running in a vm/container, etc.
|
||||
-->
|
||||
103 .github/ISSUE_TEMPLATE/BUG_REPORT.yml vendored
@@ -1,103 +0,0 @@
|
||||
name: Bug report
|
||||
description: Create a report to help us improve
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
|
||||
|
||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
||||
|
||||
This is a bug report form. By following the instructions below and completing the sections with your information, you will help the us to get all the necessary data to fix your issue.
|
||||
|
||||
You can also preview your report before submitting it.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
description: Describe the problem that you are experiencing
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: reproduction_steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: |
|
||||
Describe the series of steps that leads you to the problem.
|
||||
|
||||
Describe how what happens differs from what you expected.
|
||||
placeholder: Tell us what you see!
|
||||
value: |
|
||||
- list the steps
|
||||
- that reproduce the bug
|
||||
- using hyphens as bullet points
|
||||
validations:
|
||||
required: true
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
---
|
||||
|
||||
**IMPORTANT**: please answer the following questions, to help us narrow down the problem.
|
||||
- type: input
|
||||
id: homeserver
|
||||
attributes:
|
||||
label: Homeserver
|
||||
description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Synapse Version
|
||||
description: |
|
||||
What version of Synapse is this homeserver running?
|
||||
|
||||
You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version
|
||||
|
||||
or with this command:
|
||||
|
||||
```
|
||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
||||
```
|
||||
|
||||
(You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install_method
|
||||
attributes:
|
||||
label: Installation Method
|
||||
options:
|
||||
- Docker (matrixdotorg/synapse)
|
||||
- Debian packages from packages.matrix.org
|
||||
- pip (from PyPI)
|
||||
- Other (please mention below)
|
||||
- type: textarea
|
||||
id: platform
|
||||
attributes:
|
||||
label: Platform
|
||||
description: |
|
||||
Tell us about the environment in which your homeserver is operating...
|
||||
e.g. distro, hardware, if it's running in a vm/container, etc.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: |
|
||||
Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
|
||||
This will be automatically formatted into code, so there is no need for backticks.
|
||||
|
||||
Please be careful to remove any personal or private data.
|
||||
|
||||
**Bug reports are usually very difficult to diagnose without logging.**
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: anything_else
|
||||
attributes:
|
||||
label: Anything else that would be useful to know?
|
||||
10 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,14 +1,12 @@
### Pull Request Checklist

<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
<!-- Please read CONTRIBUTING.md before submitting your pull request -->

* [ ] Pull request is based on the develop branch
* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
  - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
  - Use markdown where necessary, mostly for `code blocks`.
  - End with either a period (.) or an exclamation mark (!).
  - Start with a capital letter.
  - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
  (run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
37 .github/workflows/docker.yml vendored
@@ -5,7 +5,7 @@ name: Build docker images
|
||||
on:
|
||||
push:
|
||||
tags: ["v*"]
|
||||
branches: [ master, main, develop ]
|
||||
branches: [ master, main ]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
@@ -36,22 +36,37 @@ jobs:
|
||||
|
||||
- name: Calculate docker image tag
|
||||
id: set-tag
|
||||
uses: docker/metadata-action@master
|
||||
run: |
|
||||
case "${GITHUB_REF}" in
|
||||
refs/heads/master|refs/heads/main)
|
||||
tag=latest
|
||||
;;
|
||||
refs/tags/*)
|
||||
tag=${GITHUB_REF#refs/tags/}
|
||||
;;
|
||||
*)
|
||||
tag=${GITHUB_SHA}
|
||||
;;
|
||||
esac
|
||||
echo "::set-output name=tag::$tag"
|
||||
|
||||
# for release builds, we want to get the amd64 image out asap, so first
|
||||
# we do an amd64-only build, before following up with a multiarch build.
|
||||
- name: Build and push amd64
|
||||
uses: docker/build-push-action@v2
|
||||
if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
|
||||
with:
|
||||
images: matrixdotorg/synapse
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
|
||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
|
||||
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
|
||||
type=pep440,pattern={{raw}}
|
||||
push: true
|
||||
labels: "gitsha1=${{ github.sha }}"
|
||||
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
|
||||
file: "docker/Dockerfile"
|
||||
platforms: linux/amd64
|
||||
|
||||
- name: Build and push all platforms
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
labels: "gitsha1=${{ github.sha }}"
|
||||
tags: "${{ steps.set-tag.outputs.tags }}"
|
||||
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
|
||||
file: "docker/Dockerfile"
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
||||
3 .github/workflows/docs.yaml vendored
@@ -22,7 +22,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
with:
mdbook-version: '0.4.17'
mdbook-version: '0.4.9'

- name: Build the documentation
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
@@ -61,5 +61,6 @@ jobs:
uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
keep_files: true
publish_dir: ./book
destination_dir: ./${{ steps.vars.outputs.branch-version }}
159 .github/workflows/latest_deps.yml vendored
@@ -1,159 +0,0 @@
|
||||
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
|
||||
# dependencies which match the broad requirements. Since most CI runs are against
|
||||
# the locked poetry environment, run specifically against the latest dependencies to
|
||||
# know if there's an upcoming breaking change.
|
||||
#
|
||||
# As an overview this workflow:
|
||||
# - checks out develop,
|
||||
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
|
||||
# - runs mypy and test suites in that checkout.
|
||||
#
|
||||
# Based on the twisted trunk CI job.
|
||||
|
||||
name: Latest dependencies
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 7 * * *
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||
# poetry-core versions), so we install with poetry.
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "1.2.0b1"
|
||||
extras: "all"
|
||||
# Dump installed versions for debugging.
|
||||
- run: poetry run pip list > before.txt
|
||||
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
|
||||
# `pip install matrix-synapse[all]` as closely as possible.
|
||||
- run: poetry update --no-dev
|
||||
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
||||
- name: Remove warn_unused_ignores from mypy config
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- database: "sqlite"
|
||||
- database: "postgres"
|
||||
postgres-version: "14"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install .[all,test]
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: python -m twisted.trial --jobs=2 tests
|
||||
env:
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
|
||||
sytest:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:testing
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sytest-tag: focal
|
||||
|
||||
- sytest-tag: focal
|
||||
postgres: postgres
|
||||
workers: workers
|
||||
redis: redis
|
||||
env:
|
||||
POSTGRES: ${{ matrix.postgres && 1}}
|
||||
WORKERS: ${{ matrix.workers && 1 }}
|
||||
REDIS: ${{ matrix.redis && 1 }}
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Ensure sytest runs `pip install`
|
||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
||||
run: rm /src/poetry.lock
|
||||
working-directory: /src
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
|
||||
# TODO: run complement (as with twisted trunk, see #12473).
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure()
|
||||
needs:
|
||||
# TODO: should mypy be included here? It feels more brittle than the other two.
|
||||
- mypy
|
||||
- trial
|
||||
- sytest
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
update_existing: true
|
||||
filename: .ci/latest_deps_build_failed_issue_template.md
|
||||
|
||||
21 .github/workflows/release-artifacts.yml vendored
@@ -7,7 +7,7 @@ on:
|
||||
# of things breaking (but only build one set of debs)
|
||||
pull_request:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
branches: ["develop"]
|
||||
|
||||
# we do the full build on tags.
|
||||
tags: ["v*"]
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
||||
dists='["debian:sid"]'
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
|
||||
dists=$(scripts-dev/build_debian_packages --show-dists-json)
|
||||
fi
|
||||
echo "::set-output name=distros::$dists"
|
||||
# map the step outputs to job outputs
|
||||
@@ -74,7 +74,7 @@ jobs:
|
||||
# see https://github.com/docker/build-push-action/issues/252
|
||||
# for the cache magic here
|
||||
run: |
|
||||
./src/scripts-dev/build_debian_packages.py \
|
||||
./src/scripts-dev/build_debian_packages \
|
||||
--docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
|
||||
--docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
|
||||
--docker-build-arg=--progress=plain \
|
||||
@@ -91,7 +91,17 @@ jobs:
|
||||
|
||||
build-sdist:
|
||||
name: "Build pypi distribution files"
|
||||
uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install wheel
|
||||
- run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: python-dist
|
||||
path: dist/*
|
||||
|
||||
# if it's a tag, create a release and attach the artifacts to it
|
||||
attach-assets:
|
||||
@@ -112,8 +122,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
files: |
|
||||
Sdist/*
|
||||
Wheel/*
|
||||
python-dist/*
|
||||
debs.tar.xz
|
||||
# if it's not already published, keep the release as a draft.
|
||||
draft: true
|
||||
|
||||
287 .github/workflows/tests.yml vendored
@@ -8,29 +8,24 @@ on:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
|
||||
jobs:
|
||||
check-sampleconfig:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install .
|
||||
- run: scripts-dev/generate_sample_config.sh --check
|
||||
- run: scripts-dev/config-lint.sh
|
||||
|
||||
check-schema-delta:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
lint:
|
||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
||||
with:
|
||||
typechecking-extras: "all"
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
toxenv:
|
||||
- "check-sampleconfig"
|
||||
- "check_codestyle"
|
||||
- "check_isort"
|
||||
- "mypy"
|
||||
- "packaging"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install tox
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -43,20 +38,39 @@ jobs:
|
||||
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Note: This and the script can be simplified once we drop Buildkite. See:
|
||||
# https://github.com/actions/checkout/issues/266#issuecomment-638346893
|
||||
# https://github.com/actions/checkout/issues/416
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v2
|
||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||
- run: scripts-dev/check-newsfragment.sh
|
||||
env:
|
||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||
- run: pip install tox
|
||||
- name: Patch Buildkite-specific test script
|
||||
run: |
|
||||
sed -i -e 's/\$BUILDKITE_PULL_REQUEST/${{ github.event.number }}/' \
|
||||
scripts-dev/check-newsfragment
|
||||
- run: scripts-dev/check-newsfragment
|
||||
|
||||
lint-sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install wheel
|
||||
- run: python setup.py sdist bdist_wheel
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: Python Distributions
|
||||
path: dist/*
|
||||
|
||||
# Dummy step to gate other tests on without repeating the whole list
|
||||
linting-done:
|
||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||
needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
|
||||
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: "true"
|
||||
@@ -67,25 +81,22 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
python-version: ["3.6", "3.7", "3.8", "3.9"]
|
||||
database: ["sqlite"]
|
||||
extras: ["all"]
|
||||
include:
|
||||
# Newest Python without optional deps
|
||||
- python-version: "3.10"
|
||||
extras: ""
|
||||
- python-version: "3.9"
|
||||
toxenv: "py-noextras,combine"
|
||||
|
||||
# Oldest Python with PostgreSQL
|
||||
- python-version: "3.7"
|
||||
- python-version: "3.6"
|
||||
database: "postgres"
|
||||
postgres-version: "10"
|
||||
extras: "all"
|
||||
postgres-version: "9.6"
|
||||
|
||||
# Newest Python with newest PostgreSQL
|
||||
- python-version: "3.10"
|
||||
# Newest Python with PostgreSQL
|
||||
- python-version: "3.9"
|
||||
database: "postgres"
|
||||
postgres-version: "14"
|
||||
extras: "all"
|
||||
postgres-version: "13"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
@@ -97,23 +108,22 @@ jobs:
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: pip install tox
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- run: tox -e py,combine
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
@@ -125,22 +135,19 @@ jobs:
|
||||
|| true
|
||||
|
||||
trial-olddeps:
|
||||
# Note: sqlite only; no postgres
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Test with old deps
|
||||
uses: docker://ubuntu:focal # For old python and sqlite
|
||||
# Note: focal seems to be using 3.8, but the oldest is 3.7?
|
||||
# See https://github.com/matrix-org/synapse/issues/12343
|
||||
uses: docker://ubuntu:bionic # For old python and sqlite
|
||||
with:
|
||||
workdir: /github/workspace
|
||||
entrypoint: .ci/scripts/test_old_deps.sh
|
||||
entrypoint: .buildkite/scripts/test_old_deps.sh
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
@@ -153,27 +160,24 @@ jobs:
|
||||
|
||||
trial-pypy:
|
||||
# Very slow; only run if the branch name includes 'pypy'
|
||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["pypy-3.7"]
|
||||
extras: ["all"]
|
||||
python-version: ["pypy-3.6"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- run: pip install tox
|
||||
- run: tox -e py,combine
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
@@ -193,27 +197,26 @@ jobs:
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
env:
|
||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
||||
BUILDKITE_BRANCH: ${{ github.head_ref }}
|
||||
POSTGRES: ${{ matrix.postgres && 1}}
|
||||
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
|
||||
WORKERS: ${{ matrix.workers && 1 }}
|
||||
REDIS: ${{ matrix.redis && 1 }}
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
TOP: ${{ github.workspace }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sytest-tag: focal
|
||||
- sytest-tag: bionic
|
||||
|
||||
- sytest-tag: focal
|
||||
- sytest-tag: bionic
|
||||
postgres: postgres
|
||||
|
||||
- sytest-tag: testing
|
||||
postgres: postgres
|
||||
|
||||
- sytest-tag: focal
|
||||
- sytest-tag: bionic
|
||||
postgres: multi-postgres
|
||||
workers: workers
|
||||
|
||||
@@ -229,7 +232,7 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
run: cat sytest-blacklist .buildkite/worker-blacklist > synapse-blacklist-with-workers
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
@@ -245,50 +248,18 @@ jobs:
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
export-data:
|
||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: [linting-done, portdb]
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TOP: ${{ github.workspace }}
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: "postgres"
|
||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_export_data_command.sh
|
||||
|
||||
portdb:
|
||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TOP: ${{ github.workspace }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- python-version: "3.7"
|
||||
postgres-version: "10"
|
||||
- python-version: "3.6"
|
||||
postgres-version: "9.6"
|
||||
|
||||
- python-version: "3.10"
|
||||
postgres-version: "14"
|
||||
- python-version: "3.9"
|
||||
postgres-version: "13"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
@@ -307,26 +278,30 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_synapse_port_db.sh
|
||||
- name: Patch Buildkite-specific test scripts
|
||||
run: |
|
||||
sed -i -e 's/host="postgres"/host="localhost"/' .buildkite/scripts/postgres_exec.py
|
||||
sed -i -e 's/host: postgres/host: localhost/' .buildkite/postgres-config.yaml
|
||||
sed -i -e 's|/src/||' .buildkite/{sqlite,postgres}-config.yaml
|
||||
sed -i -e 's/\$TOP/\$GITHUB_WORKSPACE/' .coveragerc
|
||||
- run: .buildkite/scripts/test_synapse_port_db.sh
|
||||
|
||||
complement:
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arrangement: monolith
|
||||
database: SQLite
|
||||
|
||||
- arrangement: monolith
|
||||
database: Postgres
|
||||
container:
|
||||
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
|
||||
image: matrixdotorg/complement:latest
|
||||
env:
|
||||
CI: true
|
||||
ports:
|
||||
- 8448:8448
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
steps:
|
||||
- name: Run actions/checkout@v2 for synapse
|
||||
@@ -334,59 +309,71 @@ jobs:
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
- run: |
|
||||
set -o pipefail
|
||||
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
# Attempt to check out the same branch of Complement as the PR. If it
|
||||
# doesn't exist, fallback to master.
|
||||
- name: Checkout complement
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
run: |
|
||||
mkdir -p complement
|
||||
# Attempt to use the version of complement which best matches the current
|
||||
# build. Depending on whether this is a PR or release, etc. we need to
|
||||
# use different fallbacks.
|
||||
#
|
||||
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
|
||||
# for pull requests, otherwise GITHUB_REF).
|
||||
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
||||
# (GITHUB_BASE_REF for pull requests).
|
||||
# 3. Use the default complement branch ("master").
|
||||
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
|
||||
# Skip empty branch names and merge commits.
|
||||
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# XXX When complement with workers is stable, move this back into the standard
|
||||
# "complement" matrix above.
|
||||
#
|
||||
# See https://github.com/matrix-org/synapse/issues/13161
|
||||
complement-workers:
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||
done
|
||||
|
||||
steps:
|
||||
- name: Run actions/checkout@v2 for synapse
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
path: synapse
|
||||
# Build initial Synapse image
|
||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
||||
working-directory: synapse
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
# Build a ready-to-run Synapse image based on the initial image above.
|
||||
# This new image includes a config file, keys for signing and TLS, and
|
||||
# other settings to make it suitable for testing under Complement.
|
||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
||||
working-directory: complement/dockerfiles
|
||||
|
||||
- run: |
|
||||
set -o pipefail
|
||||
POSTGRES=1 WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
# Run Complement
|
||||
- run: go test -v -tags synapse_blacklist,msc2403,msc2946,msc3083 ./tests/...
|
||||
env:
|
||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
||||
working-directory: complement
|
||||
|
||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||
tests-done:
|
||||
if: ${{ always() }}
|
||||
needs:
|
||||
- check-sampleconfig
|
||||
- lint
|
||||
- lint-crlf
|
||||
- lint-newsfile
|
||||
- lint-sdist
|
||||
- trial
|
||||
- trial-olddeps
|
||||
- sytest
|
||||
- export-data
|
||||
- portdb
|
||||
- complement
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: matrix-org/done-action@v2
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
|
||||
# The newsfile lint may be skipped on non PR builds
|
||||
skippable:
|
||||
lint-newsfile
|
||||
- name: Set build result
|
||||
env:
|
||||
NEEDS_CONTEXT: ${{ toJSON(needs) }}
|
||||
# the `jq` incantation dumps out a series of "<job> <result>" lines
|
||||
run: |
|
||||
set -o pipefail
|
||||
jq -r 'to_entries[] | [.key,.value.result] | join(" ")' \
|
||||
<<< $NEEDS_CONTEXT |
|
||||
while read job result; do
|
||||
if [ "$result" != "success" ]; then
|
||||
echo "::set-failed ::Job $job returned $result"
|
||||
fi
|
||||
done
|
||||
|
||||
162
.github/workflows/twisted_trunk.yml
vendored
162
.github/workflows/twisted_trunk.yml
vendored
@@ -1,162 +0,0 @@
|
||||
name: Twisted Trunk
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 8 * * *
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
extras: "all"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- name: Remove warn_unused_ignores from mypy config
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
extras: "all test"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- run: poetry run trial --jobs 2 tests
|
||||
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
sytest:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:buster
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Patch dependencies
|
||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||
# but the sytest-synapse container expects it to be in /venv/.
|
||||
# We symlink it before running poetry so that poetry actually
|
||||
# ends up installing to `/venv`.
|
||||
run: |
|
||||
ln -s -T /venv /src/.venv
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
working-directory: /src
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
env:
|
||||
# Use offline mode to avoid reinstalling the pinned version of
|
||||
# twisted.
|
||||
OFFLINE: 1
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
complement:
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- arrangement: monolith
|
||||
database: SQLite
|
||||
|
||||
- arrangement: monolith
|
||||
database: Postgres
|
||||
|
||||
- arrangement: workers
|
||||
database: Postgres
|
||||
|
||||
steps:
|
||||
- name: Run actions/checkout@v2 for synapse
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
# This step is specific to the 'Twisted trunk' test run:
|
||||
- name: Patch dependencies
|
||||
run: |
|
||||
set -x
|
||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
||||
pipx install poetry==1.1.12
|
||||
|
||||
poetry remove -n twisted
|
||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry lock --no-update
|
||||
# NOT IN 1.1.12 poetry lock --check
|
||||
working-directory: synapse
|
||||
|
||||
- run: |
|
||||
set -o pipefail
|
||||
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
name: Run Complement Tests
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure()
|
||||
needs:
|
||||
- mypy
|
||||
- trial
|
||||
- sytest
|
||||
- complement
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
update_existing: true
|
||||
filename: .ci/twisted_trunk_build_failed_issue_template.md
|
||||
11
.gitignore
vendored
11
.gitignore
vendored
@@ -15,9 +15,6 @@ _trial_temp*/
|
||||
.DS_Store
|
||||
__pycache__/
|
||||
|
||||
# We do want the poetry lockfile.
|
||||
!poetry.lock
|
||||
|
||||
# stuff that is likely to exist when you run a server locally
|
||||
/*.db
|
||||
/*.log
|
||||
@@ -33,9 +30,6 @@ __pycache__/
|
||||
/media_store/
|
||||
/uploads
|
||||
|
||||
# For direnv users
|
||||
/.envrc
|
||||
|
||||
# IDEs
|
||||
/.idea/
|
||||
/.ropeproject/
|
||||
@@ -46,7 +40,6 @@ __pycache__/
|
||||
/.coverage*
|
||||
/.mypy_cache/
|
||||
/.tox
|
||||
/.tox-pg-container
|
||||
/build/
|
||||
/coverage.*
|
||||
/dist/
|
||||
@@ -56,7 +49,3 @@ __pycache__/
|
||||
|
||||
# docs
|
||||
book/
|
||||
|
||||
# complement
|
||||
/complement-*
|
||||
/master.tar.gz
|
||||
|
||||
9168
CHANGES.md
9168
CHANGES.md
File diff suppressed because it is too large
405
CONTRIBUTING.md
405
CONTRIBUTING.md
@@ -1,3 +1,404 @@
|
||||
# Welcome to Synapse
|
||||
Welcome to Synapse
|
||||
|
||||
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
||||
This document aims to get you started with contributing to this repo!
|
||||
|
||||
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
|
||||
- [2. What do I need?](#2-what-do-i-need)
|
||||
- [3. Get the source.](#3-get-the-source)
|
||||
- [4. Install the dependencies](#4-install-the-dependencies)
|
||||
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
|
||||
* [Under Windows](#under-windows)
|
||||
- [5. Get in touch.](#5-get-in-touch)
|
||||
- [6. Pick an issue.](#6-pick-an-issue)
|
||||
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
|
||||
- [8. Test, test, test!](#8-test-test-test)
|
||||
* [Run the linters.](#run-the-linters)
|
||||
* [Run the unit tests.](#run-the-unit-tests)
|
||||
* [Run the integration tests.](#run-the-integration-tests)
|
||||
- [9. Submit your patch.](#9-submit-your-patch)
|
||||
* [Changelog](#changelog)
|
||||
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
|
||||
+ [Debian changelog](#debian-changelog)
|
||||
* [Sign off](#sign-off)
|
||||
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
|
||||
- [11. Find a new issue.](#11-find-a-new-issue)
|
||||
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
|
||||
- [Conclusion](#conclusion)
|
||||
|
||||
# 1. Who can contribute to Synapse?
|
||||
|
||||
Everyone is welcome to contribute code to [matrix.org
|
||||
projects](https://github.com/matrix-org), provided that they are willing to
|
||||
license their contributions under the same license as the project itself. We
|
||||
follow a simple 'inbound=outbound' model for contributions: the act of
|
||||
submitting an 'inbound' contribution means that the contributor agrees to
|
||||
license the code under the same terms as the project's overall 'outbound'
|
||||
license - in our case, this is almost always Apache Software License v2 (see
|
||||
[LICENSE](LICENSE)).
|
||||
|
||||
# 2. What do I need?
|
||||
|
||||
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
||||
|
||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||
|
||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||
|
||||
|
||||
# 3. Get the source.
|
||||
|
||||
The preferred and easiest way to contribute changes is to fork the relevant
|
||||
project on GitHub, and then [create a pull request](
|
||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
||||
changes into our repo.
|
||||
|
||||
Please base your changes on the `develop` branch.
|
||||
|
||||
```sh
|
||||
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
|
||||
git checkout develop
|
||||
```
|
||||
|
||||
If you need help getting started with git, this is beyond the scope of the document, but you
|
||||
can find many good git tutorials on the web.
|
||||
|
||||
# 4. Install the dependencies
|
||||
|
||||
## Under Unix (macOS, Linux, BSD, ...)
|
||||
|
||||
Once you have installed Python 3 and obtained the source, please open a terminal and
|
||||
set up a *virtualenv*, as follows:
|
||||
|
||||
```sh
|
||||
cd path/where/you/have/cloned/the/repository
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,lint,mypy,test]"
|
||||
pip install tox
|
||||
```
|
||||
|
||||
This will install the developer dependencies for the project.
|
||||
|
||||
## Under Windows
|
||||
|
||||
TBD
|
||||
|
||||
|
||||
# 5. Get in touch.
|
||||
|
||||
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
||||
|
||||
|
||||
# 6. Pick an issue.
|
||||
|
||||
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
||||
to work on.
|
||||
|
||||
|
||||
# 7. Turn coffee and documentation into code and documentation!
|
||||
|
||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
||||
it, including the conventions for the [sample configuration
|
||||
file](docs/code_style.md#configuration-file-format).
|
||||
|
||||
There is a growing amount of documentation located in the [docs](docs)
|
||||
directory. This documentation is intended primarily for sysadmins running their
|
||||
own Synapse instance, as well as developers interacting externally with
|
||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
||||
service developers.
|
||||
|
||||
If you add new files to either of these folders, please use [GitHub-Flavoured
|
||||
Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||
|
||||
Some documentation also exists in [Synapse's GitHub
|
||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
||||
contributed to by community authors.
|
||||
|
||||
|
||||
# 8. Test, test, test!
|
||||
<a name="test-test-test"></a>
|
||||
|
||||
While you're developing and before submitting a patch, you'll
|
||||
want to test your code.
|
||||
|
||||
## Run the linters.
|
||||
|
||||
The linters look at your code and do two things:
|
||||
|
||||
- ensure that your code follows the coding style adopted by the project;
|
||||
- catch a number of errors in your code.
|
||||
|
||||
They're pretty fast, don't hesitate!
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
Note that this script *will modify your files* to fix styling errors.
|
||||
Make sure that you have saved all your files.
|
||||
|
||||
If you wish to restrict the linters to only the files changed since the last commit
|
||||
(much faster!), you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh -d
|
||||
```
|
||||
|
||||
Or if you know exactly which files you wish to lint, you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
```
|
||||
|
||||
## Run the unit tests.
|
||||
|
||||
The unit tests run parts of Synapse, including your changes, to see if anything
|
||||
was broken. They are slower than the linters but will typically catch more errors.
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests
|
||||
```
|
||||
|
||||
If you wish to only run *some* unit tests, you may specify
|
||||
another module instead of `tests` - or a test class or a method:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
```
|
||||
|
||||
If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
|
||||
|
||||
```sh
|
||||
less _trial_temp/test.log
|
||||
```
|
||||
|
||||
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:
|
||||
|
||||
```sh
|
||||
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
|
||||
```
|
||||
|
||||
|
||||
## Run the integration tests.
|
||||
|
||||
The integration tests are a more comprehensive suite of tests. They
|
||||
run a full version of Synapse, including your changes, to check if
|
||||
anything was broken. They are slower than the unit tests but will
|
||||
typically catch more errors.
|
||||
|
||||
The following command will let you run the integration test with the most common
|
||||
configuration:
|
||||
|
||||
```sh
|
||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:py37
|
||||
```
|
||||
|
||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
|
||||
|
||||
# 9. Submit your patch.
|
||||
|
||||
Once you're happy with your patch, it's time to prepare a Pull Request.
|
||||
|
||||
To prepare a Pull Request, please:
|
||||
|
||||
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
||||
2. [sign off](#sign-off) your contribution;
|
||||
3. `git push` your commit to your fork of Synapse;
|
||||
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
||||
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
||||
6. for most contributors, that's all - however, if you are a member of the `matrix-org` organization on GitHub, please request a review from `matrix.org / Synapse Core`.
|
||||
|
||||
|
||||
## Changelog
|
||||
|
||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
||||
|
||||
To create a changelog entry, make a new file in the `changelog.d` directory named
|
||||
in the format of `PRnumber.type`. The type can be one of the following:
|
||||
|
||||
* `feature`
|
||||
* `bugfix`
|
||||
* `docker` (for updates to the Docker image)
|
||||
* `doc` (for updates to the documentation)
|
||||
* `removal` (also used for deprecations)
|
||||
* `misc` (for internal-only changes)
|
||||
|
||||
This file will become part of our [changelog](
|
||||
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
||||
release, so the content of the file should be a short description of your
|
||||
change in the same style as the rest of the changelog. The file can contain Markdown
|
||||
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
||||
consistency.
|
||||
|
||||
Adding credits to the changelog is encouraged; we value your
|
||||
contributions and would like to have you shouted out in the release notes!
|
||||
|
||||
For example, a fix in PR #1234 would have its changelog entry in
|
||||
`changelog.d/1234.bugfix`, and contain content like:
|
||||
|
||||
> The security levels of Florbs are now validated when received
|
||||
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
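For instance, a minimal way to create that entry from the command line (reusing the hypothetical PR #1234 and wording from the example above) could be:

```sh
# Write the changelog entry for the (hypothetical) PR #1234.
# Single quotes keep the backticks literal instead of running a command.
echo 'The security levels of Florbs are now validated when received via the `/federation/florb` endpoint. Contributed by Jane Matrix.' > changelog.d/1234.bugfix
```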
|
||||
|
||||
If there are multiple pull requests involved in a single bugfix/feature/etc,
|
||||
then the content for each `changelog.d` file should be the same. Towncrier will
|
||||
merge the matching files together into a single changelog entry when we come to
|
||||
release.
|
||||
|
||||
### How do I know what to call the changelog file before I create the PR?
|
||||
|
||||
Obviously, you don't know if you should call your newsfile
|
||||
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
|
||||
chicken-and-egg problem.
|
||||
|
||||
There are two options for solving this:
|
||||
|
||||
1. Open the PR without a changelog file, see what number you got, and *then*
|
||||
add the changelog file to your branch (see [Updating your pull
|
||||
request](#updating-your-pull-request)), or:
|
||||
|
||||
1. Look at the [list of all
|
||||
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
|
||||
highest number you see, and quickly open the PR before somebody else claims
|
||||
your number.
|
||||
|
||||
[This
|
||||
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
|
||||
might be helpful if you find yourself doing this a lot.
|
||||
|
||||
Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
|
||||
to put together a release!
|
||||
|
||||
### Debian changelog
|
||||
|
||||
Changes which affect the debian packaging files (in `debian`) are an
|
||||
exception to the rule that all changes require a `changelog.d` file.
|
||||
|
||||
In this case, you will need to add an entry to the debian changelog for the
|
||||
next release. For this, run the following command:
|
||||
|
||||
```
|
||||
dch
|
||||
```
|
||||
|
||||
This will make up a new version number (if there isn't already an unreleased
|
||||
version in flight), and open an editor where you can add a new changelog entry.
|
||||
(Our release process will ensure that the version number and maintainer name are
|
||||
corrected for the release.)
|
||||
|
||||
If your change affects both the debian packaging *and* files outside the debian
|
||||
directory, you will need both a regular newsfragment *and* an entry in the
|
||||
debian changelog. (Though typically such changes should be submitted as two
|
||||
separate pull requests.)
|
||||
|
||||
## Sign off
|
||||
|
||||
In order to have a concrete record that your contribution is intentional
|
||||
and you agree to license it under the same terms as the project's license, we've adopted the
|
||||
same lightweight approach that the Linux Kernel
|
||||
[submitting patches process](
|
||||
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
|
||||
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
||||
projects use: the DCO (Developer Certificate of Origin:
|
||||
http://developercertificate.org/). This is a simple declaration that you wrote
|
||||
the contribution or otherwise have the right to contribute it to Matrix:
|
||||
|
||||
```
|
||||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
660 York Street, Suite 102,
|
||||
San Francisco, CA 94110 USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
```
|
||||
|
||||
If you agree to this for your contribution, then all that's needed is to
|
||||
include the line in your commit or pull request comment:
|
||||
|
||||
```
|
||||
Signed-off-by: Your Name <your@email.example.org>
|
||||
```
|
||||
|
||||
We accept contributions under a legally identifiable name, such as
|
||||
your name on government documentation or common-law names (names
|
||||
claimed by legitimate usage or repute). Unfortunately, we cannot
|
||||
accept anonymous contributions at this time.
|
||||
|
||||
Git allows you to add this signoff automatically when using the `-s`
|
||||
flag to `git commit`, which uses the name and email set in your
|
||||
`user.name` and `user.email` git configs.
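As a minimal sketch of that workflow (the commit message below is just a placeholder):

```sh
# Configure the identity used for the Signed-off-by line (once).
git config --global user.name "Your Name"
git config --global user.email "your@email.example.org"

# -s appends a "Signed-off-by: Your Name <your@email.example.org>" trailer.
git commit -s -m "Fix the florb validation bug"
```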
|
||||
|
||||
|
||||
# 10. Turn feedback into better code.
|
||||
|
||||
Once the Pull Request is opened, you will see a few things:
|
||||
|
||||
1. our automated CI (Continuous Integration) pipeline will run the linters, the unit tests, the integration tests and more (again);
|
||||
2. one or more of the developers will take a look at your Pull Request and offer feedback.
|
||||
|
||||
From this point, you should:
|
||||
|
||||
1. Look at the results of the CI pipeline.
|
||||
- If there is any error, fix the error.
|
||||
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
|
||||
3. Create a new commit with the changes.
|
||||
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
|
||||
- Push these commits to your Pull Request.
|
||||
4. Back to 1.
|
||||
|
||||
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
|
||||
|
||||
# 11. Find a new issue.
|
||||
|
||||
By now, you know the drill!
|
||||
|
||||
# Notes for maintainers on merging PRs etc
|
||||
|
||||
There are some notes for those with commit access to the project on how we
|
||||
manage git [here](docs/development/git.md).
|
||||
|
||||
# Conclusion
|
||||
|
||||
That's it! Matrix is a very open and collaborative project as you might expect
|
||||
given our obsession with open communication. If we're going to successfully
|
||||
matrix together all the fragmented communication technologies out there, we are
|
||||
reliant on contributions and collaboration from the community to do so. So
|
||||
please get involved - and we hope you have as much fun hacking on Matrix as we
|
||||
do!
|
||||
|
||||
55
MANIFEST.in
Normal file
55
MANIFEST.in
Normal file
@@ -0,0 +1,55 @@
|
||||
include synctl
|
||||
include LICENSE
|
||||
include VERSION
|
||||
include *.rst
|
||||
include *.md
|
||||
include demo/README
|
||||
include demo/demo.tls.dh
|
||||
include demo/*.py
|
||||
include demo/*.sh
|
||||
|
||||
recursive-include synapse/storage *.sql
|
||||
recursive-include synapse/storage *.sql.postgres
|
||||
recursive-include synapse/storage *.sql.sqlite
|
||||
recursive-include synapse/storage *.py
|
||||
recursive-include synapse/storage *.txt
|
||||
recursive-include synapse/storage *.md
|
||||
|
||||
recursive-include docs *
|
||||
recursive-include scripts *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.py
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.p8
|
||||
recursive-include tests *.crt
|
||||
recursive-include tests *.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
recursive-include synapse/static *.gif
|
||||
recursive-include synapse/static *.html
|
||||
recursive-include synapse/static *.js
|
||||
|
||||
exclude .codecov.yml
|
||||
exclude .coveragerc
|
||||
exclude .dockerignore
|
||||
exclude .editorconfig
|
||||
exclude Dockerfile
|
||||
exclude mypy.ini
|
||||
exclude sytest-blacklist
|
||||
exclude test_postgresql.sh
|
||||
|
||||
include book.toml
|
||||
include pyproject.toml
|
||||
recursive-include changelog.d *
|
||||
|
||||
prune .buildkite
|
||||
prune .circleci
|
||||
prune .github
|
||||
prune contrib
|
||||
prune debian
|
||||
prune demo/etc
|
||||
prune docker
|
||||
prune snap
|
||||
prune stubs
|
||||
82
README.rst
82
README.rst
@@ -1,6 +1,6 @@
|
||||
=========================================================================
|
||||
Synapse |support| |development| |documentation| |license| |pypi| |python|
|
||||
=========================================================================
|
||||
=========================================================
|
||||
Synapse |support| |development| |license| |pypi| |python|
|
||||
=========================================================
|
||||
|
||||
.. contents::
|
||||
|
||||
@@ -55,8 +55,11 @@ solutions. The hope is for Matrix to act as the building blocks for a new
|
||||
generation of fully open and interoperable messaging and VoIP apps for the
|
||||
internet.
|
||||
|
||||
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
|
||||
team, written in Python 3/Twisted.
|
||||
Synapse is a reference "homeserver" implementation of Matrix from the core
|
||||
development team at matrix.org, written in Python/Twisted. It is intended to
|
||||
showcase the concept of Matrix and let folks see the spec in the context of a
|
||||
codebase and let you run your own homeserver and generally help bootstrap the
|
||||
ecosystem.
|
||||
|
||||
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||
a Matrix homeserver. The homeserver stores all their personal chat history and
|
||||
@@ -82,14 +85,9 @@ For support installing or managing Synapse, please join |room|_ (from a matrix.o
|
||||
account if necessary) and ask questions there. We do not use GitHub issues for
|
||||
support requests, only for bug reports and feature requests.
|
||||
|
||||
Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
|
||||
with its source available in |docs|_.
|
||||
|
||||
.. |room| replace:: ``#synapse:matrix.org``
|
||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
||||
|
||||
.. |docs| replace:: ``docs``
|
||||
.. _docs: docs
|
||||
|
||||
Synapse Installation
|
||||
====================
|
||||
@@ -246,7 +244,7 @@ Password reset
|
||||
==============
|
||||
|
||||
Users can reset their password through their client. Alternatively, a server admin
|
||||
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
|
||||
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
|
||||
or by directly editing the database as shown below.
|
||||
|
||||
First calculate the hash of the new password::
|
||||
@@ -265,27 +263,11 @@ Then update the ``users`` table in the database::
|
||||
Synapse Development
|
||||
===================
|
||||
|
||||
The best place to get started is our
|
||||
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
||||
information for synapse developers as well as synapse administrators.
|
||||
|
||||
Developers might be particularly interested in:
|
||||
|
||||
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
||||
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
|
||||
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.
|
||||
|
||||
Alongside all that, join our developer community on Matrix:
|
||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||
|
||||
|
||||
Quick start
|
||||
-----------
|
||||
Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
|
||||
|
||||
Before setting up a development environment for synapse, make sure you have the
|
||||
system dependencies (such as the python header files) installed - see
|
||||
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
|
||||
`Installing from source <https://matrix-org.github.io/synapse/latest/setup/installation.html#installing-from-source>`_.
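As a rough illustration, on Debian/Ubuntu those system dependencies usually amount to something like the following (this package list is an assumption - check the linked page for your platform)::

    sudo apt install build-essential python3-dev libffi-dev \
                     python3-pip python3-setuptools sqlite3 \
                     libssl-dev virtualenv libjpeg-dev libxslt1-dev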
|
||||
|
||||
To check out a synapse for development, clone the git repo into a working
|
||||
directory of your choice::
|
||||
@@ -293,51 +275,45 @@ directory of your choice::
|
||||
git clone https://github.com/matrix-org/synapse.git
|
||||
cd synapse
|
||||
|
||||
Synapse has a number of external dependencies. We maintain a fixed development
|
||||
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
|
||||
Synapse has a number of external dependencies that are easiest
|
||||
to install using pip and a virtualenv::
|
||||
|
||||
pip install --user pipx
|
||||
pipx install poetry
|
||||
|
||||
as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
|
||||
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
|
||||
for other installation methods.) Then ask poetry to create a virtual environment
|
||||
from the project and install Synapse's dependencies::
|
||||
|
||||
poetry install --extras "all test"
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,test]"
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env.
|
||||
dependencies into a virtual env. If any dependencies fail to install,
|
||||
try installing the failing modules individually::
|
||||
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
|
||||
pip install -e "module-name"
|
||||
|
||||
poetry run ./demo/start.sh
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
||||
|
||||
(to stop, you can use ``poetry run ./demo/stop.sh``)
|
||||
./demo/start.sh
|
||||
|
||||
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
|
||||
for more information.
|
||||
(to stop, you can use `./demo/stop.sh`)
|
||||
|
||||
If you just want to start a single instance of the app and run it directly::
|
||||
|
||||
# Create the homeserver.yaml config once
|
||||
poetry run synapse_homeserver \
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
--config-path homeserver.yaml \
|
||||
--generate-config \
|
||||
--report-stats=[yes|no]
|
||||
|
||||
# Start the app
|
||||
poetry run synapse_homeserver --config-path homeserver.yaml
|
||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||
|
||||
|
||||
Running the unit tests
|
||||
----------------------
|
||||
======================
|
||||
|
||||
After getting up and running, you may wish to run Synapse's unit tests to
|
||||
check that everything is installed correctly::
|
||||
|
||||
poetry run trial tests
|
||||
trial tests
|
||||
|
||||
This should end with a 'PASSED' result (note that exact numbers will
|
||||
differ)::
|
||||
@@ -351,7 +327,7 @@ to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-un
|
||||
|
||||
|
||||
Running the Integration Tests
|
||||
-----------------------------
|
||||
=============================
|
||||
|
||||
Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
|
||||
a Matrix homeserver integration testing suite, which uses HTTP requests to
|
||||
@@ -469,10 +445,6 @@ This is normally caused by a misconfiguration in your reverse-proxy. See
|
||||
:alt: (discuss development on #synapse-dev:matrix.org)
|
||||
:target: https://matrix.to/#/#synapse-dev:matrix.org
|
||||
|
||||
.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
|
||||
:alt: (Rendered documentation on GitHub Pages)
|
||||
:target: https://matrix-org.github.io/synapse/latest/
|
||||
|
||||
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
||||
:alt: (check license in LICENSE file)
|
||||
:target: LICENSE
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
Upgrading Synapse
|
||||
=================
|
||||
|
||||
This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrade>`_.
|
||||
This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrading>`_.
|
||||
Please update your links.
|
||||
|
||||
The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
|
||||
|
||||
1
changelog.d/10254.feature
Normal file
1
changelog.d/10254.feature
Normal file
@@ -0,0 +1 @@
|
||||
Update support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083) to consider changes in the MSC around which servers can issue join events.
|
||||
1
changelog.d/10283.feature
Normal file
1
changelog.d/10283.feature
Normal file
@@ -0,0 +1 @@
|
||||
Initial support for MSC3244, Room version capabilities over the /capabilities API.
|
||||
1
changelog.d/10407.feature
Normal file
1
changelog.d/10407.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add a buffered logging handler which periodically flushes itself.
|
||||
1
changelog.d/10408.misc
Normal file
1
changelog.d/10408.misc
Normal file
@@ -0,0 +1 @@
|
||||
Add type hints to `synapse.federation.transport.client` module.
|
||||
1
changelog.d/10426.feature
Normal file
1
changelog.d/10426.feature
Normal file
@@ -0,0 +1 @@
|
||||
Email notifications now state whether an invitation is to a room or a space.
|
||||
1
changelog.d/10429.misc
Normal file
1
changelog.d/10429.misc
Normal file
@@ -0,0 +1 @@
|
||||
Drop backwards-compatibility code that was required to support Ubuntu Xenial.
|
||||
1
changelog.d/10431.misc
Normal file
1
changelog.d/10431.misc
Normal file
@@ -0,0 +1 @@
|
||||
Use a docker image cache for the prerequisites for the debian package build.
|
||||
1
changelog.d/10432.misc
Normal file
1
changelog.d/10432.misc
Normal file
@@ -0,0 +1 @@
|
||||
Connect historical chunks together with chunk events instead of a content field (MSC2716).
|
||||
1
changelog.d/10437.misc
Normal file
1
changelog.d/10437.misc
Normal file
@@ -0,0 +1 @@
|
||||
Improve servlet type hints.
|
||||
1
changelog.d/10438.misc
Normal file
1
changelog.d/10438.misc
Normal file
@@ -0,0 +1 @@
|
||||
Improve servlet type hints.
|
||||
1
changelog.d/10442.misc
Normal file
1
changelog.d/10442.misc
Normal file
@@ -0,0 +1 @@
|
||||
Replace usage of `or_ignore` in `simple_insert` with `simple_upsert` usage, to stop spamming postgres logs with spurious ERROR messages.
|
||||
1
changelog.d/10444.misc
Normal file
1
changelog.d/10444.misc
Normal file
@@ -0,0 +1 @@
|
||||
Update the `tests-done` Github Actions status.
|
||||
1
changelog.d/10445.doc
Normal file
1
changelog.d/10445.doc
Normal file
@@ -0,0 +1 @@
|
||||
Fix hierarchy of providers on the OpenID page.
|
||||
1
changelog.d/10446.misc
Normal file
1
changelog.d/10446.misc
Normal file
@@ -0,0 +1 @@
|
||||
Update type annotations to work with forthcoming Twisted 21.7.0 release.
|
||||
1
changelog.d/10448.feature
Normal file
1
changelog.d/10448.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add `creation_ts` to list users admin API.
|
||||
1
changelog.d/10451.misc
Normal file
1
changelog.d/10451.misc
Normal file
@@ -0,0 +1 @@
|
||||
Cancel redundant GHA workflows when a new commit is pushed.
|
||||
1
changelog.d/10453.doc
Normal file
1
changelog.d/10453.doc
Normal file
@@ -0,0 +1 @@
|
||||
Consolidate development documentation to `docs/development/`.
|
||||
1
changelog.d/10461.misc
Normal file
1
changelog.d/10461.misc
Normal file
@@ -0,0 +1 @@
|
||||
Fix an error which prevented the Github Actions workflow that builds the docker images from running.
|
||||
1
changelog.d/10463.misc
Normal file
1
changelog.d/10463.misc
Normal file
@@ -0,0 +1 @@
|
||||
Disable `msc2716` Complement tests until Complement updates are merged.
|
||||
1
changelog.d/10468.misc
Normal file
1
changelog.d/10468.misc
Normal file
@@ -0,0 +1 @@
|
||||
Mitigate media repo XSS attacks on IE11 via the non-standard X-Content-Security-Policy header.
|
||||
1
changelog.d/10482.misc
Normal file
1
changelog.d/10482.misc
Normal file
@@ -0,0 +1 @@
|
||||
Additional type hints in the state handler.
|
||||
1
changelog.d/10488.misc
Normal file
1
changelog.d/10488.misc
Normal file
@@ -0,0 +1 @@
|
||||
Update syntax used to run complement tests.
|
||||
@@ -1 +0,0 @@
|
||||
Remove code which incorrectly attempted to reconcile state with remote servers when processing incoming events.
|
||||
@@ -1 +0,0 @@
|
||||
Drop tables used for groups/communities.
|
||||
@@ -1 +0,0 @@
|
||||
Make the AS login method call `Auth.get_user_by_req` for checking the AS token.
|
||||
@@ -1 +0,0 @@
|
||||
Add prometheus counters for ephemeral events and to device messages pushed to app services. Contributed by Brad @ Beeper.
|
||||
@@ -1 +0,0 @@
|
||||
Drop support for delegating email verification to an external server.
|
||||
@@ -1 +0,0 @@
|
||||
Refactor receipts servlet logic to avoid duplicated code.
|
||||
@@ -1 +0,0 @@
|
||||
Add a `room_type` field in the responses for the list room and room details admin API. Contributed by @andrewdoh.
|
||||
@@ -1 +0,0 @@
|
||||
Preparation for database schema simplifications: populate `state_key` and `rejection_reason` for existing rows in the `events` table.
|
||||
@@ -1 +0,0 @@
|
||||
Remove unused database table `event_reference_hashes`.
|
||||
@@ -1 +0,0 @@
|
||||
Add support for room version 10.
|
||||
@@ -1 +0,0 @@
|
||||
Further reduce queries used sending events when creating new rooms. Contributed by Nick @ Beeper (@fizzadar).
|
||||
@@ -1 +0,0 @@
|
||||
Provide an example of using the Admin API. Contributed by @jejo86.
|
||||
@@ -1 +0,0 @@
|
||||
Move the documentation for how URL previews work to the URL preview module.
|
||||
@@ -1 +0,0 @@
|
||||
Drop support for calling `/_matrix/client/v3/account/3pid/bind` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu.
|
||||
@@ -1 +0,0 @@
|
||||
Call the v2 identity service `/3pid/unbind` endpoint, rather than v1.
|
||||
@@ -1 +0,0 @@
|
||||
Use an asynchronous cache wrapper for the get event cache. Contributed by Nick @ Beeper (@fizzadar).
|
||||
@@ -1 +0,0 @@
|
||||
Optimise federation sender and appservice pusher event stream processing queries. Contributed by Nick @ Beeper (@fizzadar).
|
||||
@@ -1 +0,0 @@
|
||||
Preparatory work for a per-room rate limiter on joins.
|
||||
@@ -1 +0,0 @@
|
||||
Preparatory work for a per-room rate limiter on joins.
|
||||
@@ -1 +0,0 @@
|
||||
Preparatory work for a per-room rate limiter on joins.
|
||||
@@ -1 +0,0 @@
|
||||
Log the stack when waiting for an entire room to be un-partial stated.
|
||||
@@ -1 +0,0 @@
|
||||
Clean-up tests for notifications.
|
||||
@@ -1 +0,0 @@
|
||||
Move the documentation for how URL previews work to the URL preview module.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug introduced in Synapse 1.15.0 where adding a user through the Synapse Admin API with a phone number would fail if the "enable_email_notifs" and "email_notifs_for_new_users" options were enabled. Contributed by @thomasweston12.
|
||||
@@ -1 +0,0 @@
|
||||
Do not fail build if complement with workers fails.
|
||||
@@ -1 +0,0 @@
|
||||
Don't pull out state in `compute_event_context` for unconflicted state.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug introduced in Synapse 1.40 where a user invited to a restricted room would be briefly unable to join.
|
||||
@@ -1 +0,0 @@
|
||||
Don't pull out state in `compute_event_context` for unconflicted state.
|
||||
@@ -1 +0,0 @@
|
||||
Fix long-standing bug where in rare instances Synapse could store the incorrect state for a room after a state resolution.
|
||||
@@ -1 +0,0 @@
|
||||
Reduce the rebuild time for the complement-synapse docker image.
|
||||
@@ -1 +0,0 @@
|
||||
Update locked version of `frozendict` to 2.3.2, which has a fix for a memory leak.
|
||||
@@ -1 +0,0 @@
|
||||
Make `DictionaryCache` expire full entries if they haven't been queried in a while, even if specific keys have been queried recently.
|
||||
1
changelog.d/9918.feature
Normal file
1
changelog.d/9918.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add support for [MSC2033](https://github.com/matrix-org/matrix-doc/pull/2033): `device_id` on `/account/whoami`.
|
||||
@@ -16,7 +16,6 @@
|
||||
|
||||
""" Starts a synapse client console. """
|
||||
import argparse
|
||||
import binascii
|
||||
import cmd
|
||||
import getpass
|
||||
import json
|
||||
@@ -27,8 +26,9 @@ import urllib
|
||||
from http import TwistedHttpClient
|
||||
from typing import Optional
|
||||
|
||||
import nacl.encoding
|
||||
import nacl.signing
|
||||
import urlparse
|
||||
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
||||
|
||||
from twisted.internet import defer, reactor, threads
|
||||
@@ -41,6 +41,7 @@ TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||
|
||||
|
||||
class SynapseCmd(cmd.Cmd):
|
||||
|
||||
"""Basic synapse command-line processor.
|
||||
|
||||
This processes commands from the user and calls the relevant HTTP methods.
|
||||
@@ -419,8 +420,8 @@ class SynapseCmd(cmd.Cmd):
|
||||
pubKey = None
|
||||
pubKeyObj = yield self.http_client.do_request("GET", url)
|
||||
if "public_key" in pubKeyObj:
|
||||
pubKey = decode_verify_key_bytes(
|
||||
NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
|
||||
pubKey = nacl.signing.VerifyKey(
|
||||
pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
|
||||
)
|
||||
else:
|
||||
print("No public key found in pubkey response!")
|
||||
|
||||
@@ -14,7 +14,6 @@ services:
|
||||
# failure
|
||||
restart: unless-stopped
|
||||
# See the readme for a full documentation of the environment settings
|
||||
# NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
|
||||
environment:
|
||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
||||
volumes:
|
||||
|
||||
@@ -1,125 +0,0 @@
|
||||
# Setting up Synapse with Workers using Docker Compose
|
||||
|
||||
This directory describes how to deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).
|
||||
|
||||
Example worker configuration files can be found [here](workers).
|
||||
|
||||
All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.
|
||||
|
||||
An example Docker Compose file can be found [here](docker-compose.yaml).
|
||||
|
||||
## Worker Service Examples in Docker Compose
|
||||
|
||||
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER`, or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` (after `"/start.py", "run"`).
|
||||
|
||||
### Generic Worker Example
|
||||
|
||||
```yaml
|
||||
synapse-generic-worker-1:
|
||||
image: matrixdotorg/synapse:latest
|
||||
container_name: synapse-generic-worker-1
|
||||
restart: unless-stopped
|
||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
|
||||
start_period: "5s"
|
||||
interval: "15s"
|
||||
timeout: "5s"
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
||||
environment:
|
||||
SYNAPSE_WORKER: synapse.app.generic_worker
|
||||
# Expose port if required so your reverse proxy can send requests to this worker
|
||||
# Port configuration will depend on how the http listener is defined in the worker configuration file
|
||||
ports:
|
||||
- 8081:8081
|
||||
depends_on:
|
||||
- synapse
|
||||
```
|
||||
|
||||
### Federation Sender Example
|
||||
|
||||
Please note: The federation sender does not receive REST API calls so no exposed ports are required.
|
||||
|
||||
```yaml
|
||||
synapse-federation-sender-1:
|
||||
image: matrixdotorg/synapse:latest
|
||||
container_name: synapse-federation-sender-1
|
||||
restart: unless-stopped
|
||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
|
||||
healthcheck:
|
||||
disable: true
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
||||
environment:
|
||||
SYNAPSE_WORKER: synapse.app.federation_sender
|
||||
depends_on:
|
||||
- synapse
|
||||
```
|
||||
|
||||
## `homeserver.yaml` Configuration
|
||||
|
||||
### Enable Redis
|
||||
|
||||
Locate the `redis` section of your `homeserver.yaml` and enable and configure it:
|
||||
|
||||
```yaml
|
||||
redis:
|
||||
enabled: true
|
||||
host: redis
|
||||
port: 6379
|
||||
# password: <secret_password>
|
||||
```
|
||||
|
||||
This assumes that your Redis service is called `redis` in your Docker Compose file.
|
||||
|
||||
### Add a replication Listener
|
||||
|
||||
Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:
|
||||
|
||||
```yaml
|
||||
listeners:
|
||||
# Other listeners
|
||||
|
||||
- port: 9093
|
||||
type: http
|
||||
resources:
|
||||
- names: [replication]
|
||||
```
|
||||
|
||||
This listener is used by the workers for replication and is referred to in worker config files using the following settings:
|
||||
|
||||
```yaml
|
||||
worker_replication_host: synapse
|
||||
worker_replication_http_port: 9093
|
||||
```
|
||||
|
||||
### Add Workers to `instance_map`
|
||||
|
||||
Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:
|
||||
|
||||
```yaml
|
||||
instance_map:
|
||||
synapse-generic-worker-1: # The worker_name setting in your worker configuration file
|
||||
host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
|
||||
port: 8034 # The port assigned to the replication listener in your worker config file
|
||||
synapse-federation-sender-1:
|
||||
host: synapse-federation-sender-1
|
||||
port: 8034
|
||||
```
|
||||
|
||||
### Configure Federation Senders
|
||||
|
||||
This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
|
||||
|
||||
```yaml
|
||||
# This will disable federation sending on the main Synapse instance
|
||||
send_federation: false
|
||||
|
||||
federation_sender_instances:
|
||||
- synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
|
||||
```
|
||||
|
||||
## Other Worker types
|
||||
|
||||
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
|
||||
@@ -1,77 +0,0 @@
|
||||
networks:
|
||||
backend:
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
POSTGRES_DB: synapse
|
||||
POSTGRES_USER: synapse_user
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C
|
||||
|
||||
redis:
|
||||
image: redis:latest
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- backend
|
||||
|
||||
synapse:
|
||||
image: matrixdotorg/synapse:latest
|
||||
container_name: synapse
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/data:/data:rw
|
||||
ports:
|
||||
- 8008:8008
|
||||
networks:
|
||||
- backend
|
||||
environment:
|
||||
SYNAPSE_CONFIG_DIR: /data
|
||||
SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
|
||||
depends_on:
|
||||
- postgres
|
||||
|
||||
synapse-generic-worker-1:
|
||||
image: matrixdotorg/synapse:latest
|
||||
container_name: synapse-generic-worker-1
|
||||
restart: unless-stopped
|
||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
|
||||
start_period: "5s"
|
||||
interval: "15s"
|
||||
timeout: "5s"
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
||||
environment:
|
||||
SYNAPSE_WORKER: synapse.app.generic_worker
|
||||
# Expose port if required so your reverse proxy can send requests to this worker
|
||||
# Port configuration will depend on how the http listener is defined in the worker configuration file
|
||||
ports:
|
||||
- 8081:8081
|
||||
depends_on:
|
||||
- synapse
|
||||
|
||||
synapse-federation-sender-1:
|
||||
image: matrixdotorg/synapse:latest
|
||||
container_name: synapse-federation-sender-1
|
||||
restart: unless-stopped
|
||||
entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
|
||||
healthcheck:
|
||||
disable: true
|
||||
networks:
|
||||
- backend
|
||||
volumes:
|
||||
- ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
|
||||
environment:
|
||||
SYNAPSE_WORKER: synapse.app.federation_sender
|
||||
depends_on:
|
||||
- synapse
|
||||
@@ -1,14 +0,0 @@
|
||||
worker_app: synapse.app.federation_sender
|
||||
worker_name: synapse-federation-sender-1
|
||||
|
||||
# The replication listener on the main synapse process.
|
||||
worker_replication_host: synapse
|
||||
worker_replication_http_port: 9093
|
||||
|
||||
worker_listeners:
|
||||
- type: http
|
||||
port: 8034
|
||||
resources:
|
||||
- names: [replication]
|
||||
|
||||
worker_log_config: /data/federation_sender.log.config
|
||||
@@ -1,19 +0,0 @@
|
||||
worker_app: synapse.app.generic_worker
|
||||
worker_name: synapse-generic-worker-1
|
||||
|
||||
# The replication listener on the main synapse process.
|
||||
worker_replication_host: synapse
|
||||
worker_replication_http_port: 9093
|
||||
|
||||
worker_listeners:
|
||||
- type: http
|
||||
port: 8034
|
||||
resources:
|
||||
- names: [replication]
|
||||
- type: http
|
||||
port: 8081
|
||||
x_forwarded: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
|
||||
worker_log_config: /data/worker.log.config
|
||||
165
contrib/experiments/cursesio.py
Normal file
165
contrib/experiments/cursesio.py
Normal file
@@ -0,0 +1,165 @@
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import curses
|
||||
import curses.wrapper
|
||||
from curses.ascii import isprint
|
||||
|
||||
from twisted.internet import reactor
|
||||
|
||||
|
||||
class CursesStdIO:
|
||||
def __init__(self, stdscr, callback=None):
|
||||
self.statusText = "Synapse test app -"
|
||||
self.searchText = ""
|
||||
self.stdscr = stdscr
|
||||
|
||||
self.logLine = ""
|
||||
|
||||
self.callback = callback
|
||||
|
||||
self._setup()
|
||||
|
||||
def _setup(self):
|
||||
self.stdscr.nodelay(1) # Make non blocking
|
||||
|
||||
self.rows, self.cols = self.stdscr.getmaxyx()
|
||||
self.lines = []
|
||||
|
||||
curses.use_default_colors()
|
||||
|
||||
self.paintStatus(self.statusText)
|
||||
self.stdscr.refresh()
|
||||
|
||||
def set_callback(self, callback):
|
||||
self.callback = callback
|
||||
|
||||
def fileno(self):
|
||||
"""We want to select on FD 0"""
|
||||
return 0
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def print_line(self, text):
|
||||
"""add a line to the internal list of lines"""
|
||||
|
||||
self.lines.append(text)
|
||||
self.redraw()
|
||||
|
||||
def print_log(self, text):
|
||||
self.logLine = text
|
||||
self.redraw()
|
||||
|
||||
def redraw(self):
|
||||
"""method for redisplaying lines based on internal list of lines"""
|
||||
|
||||
self.stdscr.clear()
|
||||
self.paintStatus(self.statusText)
|
||||
i = 0
|
||||
index = len(self.lines) - 1
|
||||
while i < (self.rows - 3) and index >= 0:
|
||||
self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
|
||||
i = i + 1
|
||||
index = index - 1
|
||||
|
||||
self.printLogLine(self.logLine)
|
||||
|
||||
self.stdscr.refresh()
|
||||
|
||||
def paintStatus(self, text):
|
||||
if len(text) > self.cols:
|
||||
raise RuntimeError("TextTooLongError")
|
||||
|
||||
self.stdscr.addstr(
|
||||
self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def printLogLine(self, text):
|
||||
self.stdscr.addstr(
|
||||
0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
|
||||
)
|
||||
|
||||
def doRead(self):
|
||||
"""Input is ready!"""
|
||||
curses.noecho()
|
||||
c = self.stdscr.getch() # read a character
|
||||
|
||||
if c == curses.KEY_BACKSPACE:
|
||||
self.searchText = self.searchText[:-1]
|
||||
|
||||
elif c == curses.KEY_ENTER or c == 10:
|
||||
text = self.searchText
|
||||
self.searchText = ""
|
||||
|
||||
self.print_line(">> %s" % text)
|
||||
|
||||
try:
|
||||
if self.callback:
|
||||
self.callback.on_line(text)
|
||||
except Exception as e:
|
||||
self.print_line(str(e))
|
||||
|
||||
self.stdscr.refresh()
|
||||
|
||||
elif isprint(c):
|
||||
if len(self.searchText) == self.cols - 2:
|
||||
return
|
||||
self.searchText = self.searchText + chr(c)
|
||||
|
||||
self.stdscr.addstr(
|
||||
self.rows - 1,
|
||||
0,
|
||||
self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
|
||||
)
|
||||
|
||||
self.paintStatus(self.statusText + " %d" % len(self.searchText))
|
||||
self.stdscr.move(self.rows - 1, len(self.searchText))
|
||||
self.stdscr.refresh()
|
||||
|
||||
def logPrefix(self):
|
||||
return "CursesStdIO"
|
||||
|
||||
def close(self):
|
||||
"""clean up"""
|
||||
|
||||
curses.nocbreak()
|
||||
self.stdscr.keypad(0)
|
||||
curses.echo()
|
||||
curses.endwin()
|
||||
|
||||
|
||||
class Callback:
|
||||
def __init__(self, stdio):
|
||||
self.stdio = stdio
|
||||
|
||||
def on_line(self, text):
|
||||
self.stdio.print_line(text)
|
||||
|
||||
|
||||
def main(stdscr):
|
||||
screen = CursesStdIO(stdscr) # create Screen object
|
||||
|
||||
callback = Callback(screen)
|
||||
|
||||
screen.set_callback(callback)
|
||||
|
||||
stdscr.refresh()
|
||||
reactor.addReader(screen)
|
||||
reactor.run()
|
||||
screen.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
curses.wrapper(main)
|
||||
Some files were not shown because too many files have changed in this diff.