Compare commits
2 Commits
erikj/tree ... shay/ratel

| Author | SHA1 | Date |
|---|---|---|
| | 1ecb40d90d | |
| | 44b3c05f74 | |
@@ -27,10 +27,10 @@ which is under the Unlicense licence.
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Passing tests are first */ -}}
{{- /* Failing tests are first */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
@@ -47,6 +47,7 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}

{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
@@ -67,10 +68,11 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}

{{- /* and failing tests are last */ -}}

{{- /* Then passing tests are last */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
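For context: the `::group::`/`::endgroup::` markers emitted by this template are GitHub Actions workflow commands that fold log lines into collapsible sections, and the `{{ "\033" }}[0;32m` fragments are ANSI colour escapes. A minimal shell equivalent (the group name is illustrative):

```bash
# Fold a noisy block of CI log output into a named, collapsible group,
# colouring the heading green with an ANSI escape.
printf '::group::\033[0;32m✅ TestExample\n'
echo "...per-test output hidden until the group is expanded..."
echo "::endgroup::"
```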
@@ -1,132 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
# compatible wheel, if so rename the wheel before repairing it.

import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename
from packaging.version import Version


def check_is_abi3_compatible(wheel_file: str) -> None:
    """Check the contents of the built wheel for any `.so` files that are *not*
    abi3 compatible.
    """

    with ZipFile(wheel_file, "r") as wheel:
        for file in wheel.namelist():
            if not file.endswith(".so"):
                continue

            if not file.endswith(".abi3.so"):
                raise Exception(f"Found non-abi3 lib: {file}")


def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    """Replaces the cpython wheel file with a ABI3 compatible wheel"""

    if tag.abi == "abi3":
        # Nothing to do.
        return wheel_file

    check_is_abi3_compatible(wheel_file)

    abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)

    dirname = os.path.dirname(wheel_file)
    new_wheel_file = os.path.join(
        dirname,
        f"{name}-{version}-{abi3_tag}.whl",
    )

    os.rename(wheel_file, new_wheel_file)

    print("Renamed wheel to", new_wheel_file)

    return new_wheel_file


def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
    """Entry point"""

    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
    # normalizes the package name (i.e. it converts matrix_synapse ->
    # matrix-synapse), which is not what we want.
    _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
    name = os.path.basename(wheel_file).split("-")[0]

    if len(tags) != 1:
        # We expect only a wheel file with only a single tag
        raise Exception(f"Unexpectedly found multiple tags: {tags}")

    tag = next(iter(tags))

    if build:
        # We don't use build tags in Synapse
        raise Exception(f"Unexpected build tag: {build}")

    # If the wheel is for cpython then convert it into an abi3 wheel.
    if tag.interpreter.startswith("cp"):
        wheel_file = cpython(wheel_file, name, version, tag)

    # Finally, repair the wheel.
    if archs is not None:
        # If we are given archs then we are on macos and need to use
        # `delocate-listdeps`.
        subprocess.run(["delocate-listdeps", wheel_file], check=True)
        subprocess.run(
            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
            check=True,
        )
    else:
        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")

    parser.add_argument(
        "--wheel-dir",
        "-w",
        metavar="WHEEL_DIR",
        help="Directory to store delocated wheels",
        required=True,
    )

    parser.add_argument(
        "--require-archs",
        metavar="archs",
        default=None,
    )

    parser.add_argument(
        "wheel_file",
        metavar="WHEEL_FILE",
    )

    args = parser.parse_args()

    wheel_file = args.wheel_file
    wheel_dir = args.wheel_dir
    archs = args.require_archs

    main(wheel_file, wheel_dir, archs)
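For reference, a sketch of how the wrapper above would be invoked; the script path and wheel filenames are illustrative assumptions, but the flags match its argparse definition:

```bash
# Linux: tag a cpython wheel as abi3 (if its .so files allow it), then
# hand it to `auditwheel repair`.
python .ci/scripts/auditwheel_wrap.py -w wheelhouse \
  matrix_synapse-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.whl

# macOS: passing --require-archs switches to the delocate code path instead.
python .ci/scripts/auditwheel_wrap.py -w wheelhouse --require-archs x86_64 \
  matrix_synapse-1.0.0-cp37-cp37m-macosx_10_15_x86_64.whl
```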
@@ -1,135 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Calculate the trial jobs to run based on if we're in a PR or not.

import json
import os


def set_output(key: str, value: str):
    # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
    with open(os.environ["GITHUB_OUTPUT"], "at") as f:
        print(f"{key}={value}", file=f)


IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

# First calculate the various trial jobs.
#
# For each type of test we only run on Py3.7 on PRs

trial_sqlite_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "all",
    }
]

if not IS_PR:
    trial_sqlite_tests.extend(
        {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
        for version in ("3.8", "3.9", "3.10", "3.11")
    )


trial_postgres_tests = [
    {
        "python-version": "3.7",
        "database": "postgres",
        "postgres-version": "11",
        "extras": "all",
    }
]

if not IS_PR:
    trial_postgres_tests.append(
        {
            "python-version": "3.11",
            "database": "postgres",
            "postgres-version": "15",
            "extras": "all",
        }
    )

trial_no_extra_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "",
    }
]

print("::group::Calculated trial jobs")
print(
    json.dumps(
        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
    )
)
print("::endgroup::")

test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
set_output("trial_test_matrix", test_matrix)


# First calculate the various sytest jobs.
#
# For each type of test we only run on focal on PRs


sytest_tests = [
    {
        "sytest-tag": "focal",
    },
    {
        "sytest-tag": "focal",
        "postgres": "postgres",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
    },
]

if not IS_PR:
    sytest_tests.extend(
        [
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
            },
            {
                "sytest-tag": "buster",
                "postgres": "multi-postgres",
                "workers": "workers",
            },
        ]
    )


print("::group::Calculated sytest jobs")
print(json.dumps(sytest_tests, indent=4))
print("::endgroup::")

test_matrix = json.dumps(sytest_tests)
set_output("sytest_test_matrix", test_matrix)
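A rough sketch of what the script publishes on a PR build (values derived from the defaults above, not captured from a real run; the script path matches the `run:` step in tests.yml later in this comparison):

```bash
# Simulate a PR build and inspect the outputs file the script appends to.
GITHUB_REF=refs/pull/1234/merge GITHUB_OUTPUT=/tmp/out \
  python .ci/scripts/calculate_jobs.py
grep sytest_test_matrix /tmp/out
# sytest_test_matrix=[{"sytest-tag": "focal"}, {"sytest-tag": "focal", "postgres": "postgres"}, ...]
```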
@@ -1,21 +0,0 @@
#!/bin/bash
#
# wraps `gotestfmt`, hiding output from successful packages unless
# all tests passed.

set -o pipefail
set -e

# tee the test results to a log, whilst also piping them into gotestfmt,
# telling it to hide successful results, so that we can clearly see
# unsuccessful results.
tee complement.log | gotestfmt -hide successful-packages

# gotestfmt will exit non-zero if there were any failures, so if we got to this
# point, we must have had a successful result.
echo "All tests successful; showing all test results"

# Pipe the test results back through gotestfmt, showing all results.
# The log file consists of JSON lines giving the test results, interspersed
# with regular stdout lines (including reports of downloaded packages).
grep '^{"Time":' complement.log | gotestfmt
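How this wrapper is invoked elsewhere in the comparison (see the Complement job in latest_deps.yml below): the `go test -json` stream from the Complement run is piped straight into it.

```bash
# The wrapper reads test JSON on stdin; pipefail makes the pipeline fail
# if the test run itself fails.
set -o pipefail
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 \
  | synapse/.ci/scripts/gotestfmt
```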
31  .ci/scripts/postgres_exec.py (Executable file)
@@ -0,0 +1,31 @@
#!/usr/bin/env python
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import psycopg2

# a very simple replacement for `psql`, to make up for the lack of the postgres client
# libraries in the synapse docker image.

# We use "postgres" as a database because it's bound to exist and the "synapse" one
# doesn't exist yet.
db_conn = psycopg2.connect(
    user="postgres", host="localhost", password="postgres", dbname="postgres"
)
db_conn.autocommit = True
cur = db_conn.cursor()
for c in sys.argv[1:]:
    cur.execute(c)
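Usage examples for the script above, both of which appear verbatim in the port-db test script later in this comparison; each positional argument is executed as a separate statement by the final loop.

```bash
# Create the database in one statement...
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# ...or pass several statements, executed in order.
poetry run .ci/scripts/postgres_exec.py \
  "DROP DATABASE synapse" \
  "CREATE DATABASE synapse"
```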
@@ -1,36 +0,0 @@
#!/bin/sh
#
# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
#
# Must be called after Synapse has been checked out to `synapse/`.
#
set -eu

alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

block Set Go Version
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path

# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
endblock

block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
mkdir .gotestfmt/github -p
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
endblock

block Check out Complement
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fallback to HEAD.
synapse/.ci/scripts/checkout_complement.sh
endblock
@@ -32,7 +32,7 @@ else
fi

# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite database to postgres so we can check command works against postgres
echo "+++ Port SQLite3 database to postgres"
@@ -5,8 +5,18 @@
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive

set -ex

apt-get update
apt-get install -y \
    python3 python3-dev python3-pip python3-venv pipx \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

@@ -23,6 +33,12 @@ export VIRTUALENV_NO_DOWNLOAD=1
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7", which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
   -e "s/[~>]=/==/g" \
   -e '/^python = "^/!s/\^/==/g' \
@@ -39,7 +55,7 @@ sed -i \
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install toml wheel
pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
@@ -53,8 +69,8 @@ with open('pyproject.toml', 'w') as f:
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pip install poetry==1.2.0
poetry lock
pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
@@ -62,3 +78,6 @@ echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
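To make the sed rewrite above concrete, here is a small self-contained demonstration; the package names in the demo file are made up, but the sed expressions are copied from the script:

```bash
# Pin ">=" / "~=" bounds to exact versions, and "^" bounds too, while
# leaving the `python = "^..."` constraint untouched.
printf 'attrs = ">=19.2.0"\njsonschema = "^4.0"\npython = "^3.7"\n' > demo.toml
sed -i -e "s/[~>]=/==/g" -e '/^python = "^/!s/\^/==/g' demo.toml
cat demo.toml
# attrs = "==19.2.0"
# jsonschema = "==4.0"
# python = "^3.7"   (excluded by the address guard)
```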
@@ -2,27 +2,27 @@
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - runs the port script on a prepopulated test sqlite db. Checks that the
#   return code is zero.
# - reruns the port script on the same sqlite db, targeting the same postgres db.
#   Checks that the return code is zero.
# - runs the port script against a new sqlite db. Checks the return code is zero.
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe -o pipefail
set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background updates.

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
@@ -45,23 +45,9 @@ rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
psql \
  -c "DROP DATABASE synapse" \
  -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py \
  "DROP DATABASE synapse" \
  "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

echo "--- Create a brand new postgres database from schema"
cp .ci/postgres-config.yaml .ci/postgres-config-unported.yaml
sed -i -e 's/database: synapse/database: synapse_unported/' .ci/postgres-config-unported.yaml
psql -c "CREATE DATABASE synapse_unported"
poetry run update_synapse_database --database-config .ci/postgres-config-unported.yaml --run-background-updates

echo "+++ Comparing ported schema with unported schema"
# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
psql synapse -c "DROP TABLE port_from_sqlite3;"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
# By default, `diff` returns zero if there are no changes and nonzero otherwise
diff -u unported.sql ported.sql | tee schema_diff
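One detail worth noting in the hunk above: the new `set -xe -o pipefail` is what makes the final `diff | tee` line able to fail CI at all. A sketch of the failure mode:

```bash
# Without pipefail, the pipeline's exit status is tee's (always 0), so a
# schema difference would be silently swallowed. With it, diff's non-zero
# status (files differ) propagates and `set -e` aborts the script.
set -e -o pipefail
diff -u unported.sql ported.sql | tee schema_diff
echo "schemas match"   # only reached when the two dumps are identical
```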
@@ -4,15 +4,8 @@
# things to include
!docker
!synapse
!rust
!README.rst
!pyproject.toml
!poetry.lock
!Cargo.lock
!Cargo.toml
!build_rust.py

rust/target
synapse/*.so

**/__pycache__
@@ -4,7 +4,6 @@
root = true

# 4 space indentation
[*.{py,pyi}]
[*.py]
indent_style = space
indent_size = 4
max_line_length = 88
9  .flake8
@@ -8,11 +8,4 @@
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
#
# flake8-bugbear runs extra checks. Its error codes are described at
# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
# B023: Functions defined inside a loop must not use variables redefined in the loop
# B024: Abstract base class with no abstract method.

ignore=W503,W504,E203,E731,E501,B019,B023,B024
ignore=W503,W504,E203,E731,E501
@@ -1,16 +1,3 @@
# Commits in this file will be removed from GitHub blame results.
#
# To use this file locally, use:
#   git blame --ignore-revs-file="path/to/.git-blame-ignore-revs" <files>
#
# or configure the `blame.ignoreRevsFile` option in your git config.
#
# If ignoring a pull request that was not squash merged, only the merge
# commit needs to be put here. Child commits will be resolved from it.

# Run black (#3679).
8b3d9b6b199abb87246f982d5db356f1966db925

# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
45  .github/ISSUE_TEMPLATE/BUG_REPORT.yml (vendored)
@@ -74,36 +74,6 @@ body:
        - Debian packages from packages.matrix.org
        - pip (from PyPI)
        - Other (please mention below)
        - I don't know
    validations:
      required: true
  - type: input
    id: database
    attributes:
      label: Database
      description: |
        Are you using SQLite or PostgreSQL? What's the version of your database?

        If PostgreSQL, please also answer the following:
         - are you using a single PostgreSQL server
           or [separate servers for `main` and `state`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#databases)?
         - have you previously ported from SQLite using the Synapse "portdb" script?
         - have you previously restored from a backup?
    validations:
      required: true
  - type: dropdown
    id: workers
    attributes:
      label: Workers
      description: |
        Are you running a single Synapse process, or are you running
        [2 or more workers](https://matrix-org.github.io/synapse/latest/workers.html)?
      options:
        - Single process
        - Multiple workers
        - I don't know
    validations:
      required: true
  - type: textarea
    id: platform
    attributes:
@@ -113,28 +83,17 @@ body:
        e.g. distro, hardware, if it's running in a vm/container, etc.
    validations:
      required: true
  - type: textarea
    id: config
    attributes:
      label: Configuration
      description: |
        Do you have any unusual config options turned on? If so, please provide details.

        - Experimental or undocumented features
        - [Presence](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#presence)
        - [Message retention](https://matrix-org.github.io/synapse/latest/message_retention_policies.html)
        - [Synapse modules](https://matrix-org.github.io/synapse/latest/modules/index.html)
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
        This will be automatically formatted into code, so there is no need for backticks (`\``).
        This will be automatically formatted into code, so there is no need for backticks.

        Please be careful to remove any personal or private data.

        **Bug reports are usually impossible to diagnose without logging.**
        **Bug reports are usually very difficult to diagnose without logging.**
      render: shell
    validations:
      required: true
23  .github/dependabot.yml (vendored)
@@ -1,23 +0,0 @@
version: 2
updates:
  - # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
    package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "docker"
    directory: "/docker"
    schedule:
      interval: "weekly"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "cargo"
    directory: "/"
    versioning-strategy: "lockfile-only"
    schedule:
      interval: "weekly"
46  .github/workflows/dependabot_changelog.yml (vendored)
@@ -1,46 +0,0 @@
name: Write changelog for dependabot PR
on:
  pull_request:
    types:
      - opened
      - reopened  # For debugging!

permissions:
  # Needed to be able to push the commit. See
  # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
  # for a similar example
  contents: write

jobs:
  add-changelog:
    runs-on: 'ubuntu-latest'
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
      - name: Write, commit and push changelog
        run: |
          echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
          git add changelog.d
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "GitHub Actions"
          git commit -m "Changelog"
          git push
        shell: bash
      # The `git push` above does not trigger CI on the dependabot PR.
      #
      # By default, workflows can't trigger other workflows when they're just using the
      # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
      # recursive workflow loops by accident, because that'll get very expensive very
      # quickly.) Instead, you have to manually call out to another workflow, or else
      # make your changes (i.e. the `git push` above) using a personal access token.
      # See
      # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
      #
      # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
      # See git commit history for previous attempts. If anyone desperately wants to try
      # again in the future, make a matrix-bot account and use its access token to git push.

# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
# are sufficiently locked down to dependabot only as above.
15  .github/workflows/docker.yml (vendored)
@@ -17,19 +17,19 @@ jobs:
    steps:
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v2
        uses: docker/setup-qemu-action@v1
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1

      - name: Inspect builder
        run: docker buildx inspect

      - name: Log in to DockerHub
        uses: docker/login-action@v2
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -48,15 +48,10 @@ jobs:
            type=pep440,pattern={{raw}}

      - name: Build and push all platforms
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v2
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "${{ steps.set-tag.outputs.tags }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64

          # arm64 builds OOM without the git fetch setting. c.f.
          # https://github.com/rust-lang/cargo/issues/10583
          build-args: |
            CARGO_NET_GIT_FETCH_WITH_CLI=true
34  .github/workflows/docs-pr-netlify.yaml (vendored)
@@ -1,34 +0,0 @@
name: Deploy documentation PR preview

on:
  workflow_run:
    workflows: [ "Prepare documentation PR preview" ]
    types:
      - completed

jobs:
  netlify:
    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
    runs-on: ubuntu-latest
    steps:
      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
      - name: 📥 Download artifact
        uses: dawidd6/action-download-artifact@e6e25ac3a2b93187502a8be1ef9e9603afc34925 # v2.24.2
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
          name: book
          path: book

      - name: 📤 Deploy to Netlify
        uses: matrix-org/netlify-pr-preview@v1
        with:
          path: book
          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
          branch: ${{ github.event.workflow_run.head_branch }}
          revision: ${{ github.event.workflow_run.head_sha }}
          token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
          site_id: ${{ secrets.NETLIFY_SITE_ID }}
          desc: Documentation preview
          deployment_env: PR Documentation Preview
34  .github/workflows/docs-pr.yaml (vendored)
@@ -1,34 +0,0 @@
name: Prepare documentation PR preview

on:
  pull_request:
    paths:
      - docs/**

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      - name: Upload Artifact
        uses: actions/upload-artifact@v3
        with:
          name: book
          path: book
          # We'll only use this in a workflow_run, then we're done with it
          retention-days: 1
8  .github/workflows/docs.yaml (vendored)
@@ -17,10 +17,10 @@ jobs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
        with:
          mdbook-version: '0.4.17'

@@ -54,11 +54,11 @@ jobs:
          esac

          # finally, set the 'branch-version' var.
          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
          echo "::set-output name=branch-version::$branch"

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
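A recurring theme in this comparison is the migration from the deprecated `::set-output` workflow command to the `$GITHUB_OUTPUT` file; both forms appear in the hunk above, shown side by side here:

```bash
# Deprecated: emit a workflow command on stdout.
echo "::set-output name=branch-version::$branch"

# Replacement: append key=value to the file GitHub exposes as $GITHUB_OUTPUT.
echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
```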
85  .github/workflows/latest_deps.yml (vendored)
@@ -5,7 +5,7 @@
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.
@@ -25,19 +25,13 @@ jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.2.0"
          poetry-version: "1.2.0b1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
@@ -58,14 +52,7 @@ jobs:
        postgres-version: "14"

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
@@ -74,7 +61,7 @@ jobs:
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v4
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
@@ -82,12 +69,6 @@ jobs:
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
@@ -131,14 +112,7 @@ jobs:
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
@@ -152,7 +126,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -161,56 +135,25 @@ jobs:
            /logs/**/*.log*


  complement:
    if: "${{ !failure() && !cancelled() }}"
    runs-on: ubuntu-latest
  # TODO: run complement (as with twisted trunk, see #12473).

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - run: |
          set -o pipefail
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # Open an issue if the build fails, so we know about it.
  # Only do this if we're not experimenting with this action in a PR.
  # open an issue if the build fails, so we know about it.
  open-issue:
    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
    if: failure()
    needs:
      # TODO: should mypy be included here? It feels more brittle than the others.
      # TODO: should mypy be included here? It feels more brittle than the other two.
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
74  .github/workflows/push_complement_image.yml (vendored)
@@ -1,74 +0,0 @@
# This task does not run complement tests, see tests.yaml instead.
# This task does not build docker images for synapse for use on docker hub, see docker.yaml instead

name: Store complement-synapse image in ghcr.io
on:
  push:
    branches: [ "master" ]
  schedule:
    - cron: '0 5 * * *'
  workflow_dispatch:
    inputs:
      branch:
        required: true
        default: 'develop'
        type: choice
        options:
          - develop
          - master

# Only run this action once per pull request/branch; restart if a new commit arrives.
# C.f. https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
# and https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build:
    name: Build and push complement image
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout specific branch (debug build)
        uses: actions/checkout@v3
        if: github.event_name == 'workflow_dispatch'
        with:
          ref: ${{ inputs.branch }}
      - name: Checkout clean copy of develop (scheduled build)
        uses: actions/checkout@v3
        if: github.event_name == 'schedule'
        with:
          ref: develop
      - name: Checkout clean copy of master (on-push)
        uses: actions/checkout@v3
        if: github.event_name == 'push'
        with:
          ref: master
      - name: Login to registry
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Work out labels for complement image
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ghcr.io/${{ github.repository }}/complement-synapse
          tags: |
            type=schedule,pattern=nightly,enable=${{ github.event_name == 'schedule'}}
            type=raw,value=develop,enable=${{ github.event_name == 'schedule' || inputs.branch == 'develop' }}
            type=raw,value=latest,enable=${{ github.event_name == 'push' || inputs.branch == 'master' }}
            type=sha,format=long
      - name: Run scripts-dev/complement.sh to generate complement-synapse:latest image.
        run: scripts-dev/complement.sh --build-only
      - name: Tag and push generated image
        run: |
          for TAG in ${{ join(fromJson(steps.meta.outputs.json).tags, ' ') }}; do
            echo "tag and push $TAG"
            docker tag complement-synapse $TAG
            docker push $TAG
          done
112  .github/workflows/release-artifacts.yml (vendored)
@@ -11,12 +11,11 @@ on:

  # we do the full build on tags.
    tags: ["v*"]
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

@@ -25,10 +24,8 @@ jobs:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -36,7 +33,7 @@ jobs:
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "distros=$dists" >> "$GITHUB_OUTPUT"
          echo "::set-output name=distros::$dists"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}
@@ -52,18 +49,18 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@v3
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -71,9 +68,7 @@ jobs:
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'
        uses: actions/setup-python@v2

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
@@ -89,96 +84,14 @@ jobs:
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        with:
          name: debs
          path: debs/*

  build-wheels:
    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-20.04, macos-11]
        arch: [x86_64, aarch64]
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
        is_pr:
          - ${{ startsWith(github.ref, 'refs/pull/') }}

        exclude:
          # Don't build macos wheels on PR CI.
          - is_pr: true
            os: "macos-11"
          # Don't build aarch64 wheels on mac.
          - os: "macos-11"
            arch: aarch64
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
            arch: aarch64

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0

      - name: Set up QEMU to emulate aarch64
        if: matrix.arch == 'aarch64'
        uses: docker/setup-qemu-action@v2
        with:
          platforms: arm64

      - name: Build aarch64 wheels
        if: matrix.arch == 'aarch64'
        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV

      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
        run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        env:
          # Skip testing for platforms which various libraries don't have wheels
          # for, and so need extra build deps.
          CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
          CARGO_NET_GIT_FETCH_WITH_CLI: true
          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI

      - uses: actions/upload-artifact@v3
        with:
          name: Wheel
          path: ./wheelhouse/*.whl

  build-sdist:
    name: Build sdist
    runs-on: ubuntu-latest
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - run: pip install build

      - name: Build sdist
        run: python -m build --sdist

      - uses: actions/upload-artifact@v3
        with:
          name: Sdist
          path: dist/*.tar.gz

    name: "Build pypi distribution files"
    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
@@ -186,12 +99,11 @@ jobs:
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-wheels
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@v3
        uses: actions/download-artifact@v2
      - name: Build a tarball for the debs
        run: tar -cvJf debs.tar.xz debs
      - name: Attach to release
454
.github/workflows/tests.yml
vendored
454
.github/workflows/tests.yml
vendored
@@ -4,51 +4,26 @@ on:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
||||
# don't modify Rust code.
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
||||
steps:
|
||||
- uses: dorny/paths-filter@v2
|
||||
id: filter
|
||||
# We only check on PRs
|
||||
if: startsWith(github.ref, 'refs/pull/')
|
||||
with:
|
||||
filters: |
|
||||
rust:
|
||||
- 'rust/**'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
|
||||
check-sampleconfig:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||
- run: poetry run scripts-dev/config-lint.sh
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install .
|
||||
- run: scripts-dev/generate_sample_config.sh --check
|
||||
- run: scripts-dev/config-lint.sh
|
||||
|
||||
check-schema-delta:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
@@ -60,169 +35,79 @@ jobs:
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- name: Check line endings
|
||||
run: scripts-dev/check_line_terminators.sh
|
||||
|
||||
lint-newsfile:
|
||||
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
||||
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- uses: actions/setup-python@v2
|
||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||
- run: scripts-dev/check-newsfragment.sh
|
||||
env:
|
||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||
|
||||
lint-pydantic:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/check_pydantic_models.py
|
||||
|
||||
lint-clippy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
components: clippy
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: cargo clippy -- -D warnings
|
||||
|
||||
# We also lint against a nightly rustc so that we can lint the benchmark
|
||||
# suite, which requires a nightly compiler.
|
||||
lint-clippy-nightly:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: nightly-2022-12-01
|
||||
components: clippy
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: cargo clippy --all-features -- -D warnings
|
||||
|
||||
lint-rustfmt:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
components: rustfmt
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: cargo fmt --check
|
||||
|
||||
# Dummy step to gate other tests on without repeating the whole list
|
||||
linting-done:
|
||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||
needs:
|
||||
- lint
|
||||
- lint-crlf
|
||||
- lint-newsfile
|
||||
- lint-pydantic
|
||||
- check-sampleconfig
|
||||
- check-schema-delta
|
||||
- lint-clippy
|
||||
- lint-rustfmt
|
||||
needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: "true"
|
||||
|
||||
calculate-test-jobs:
|
||||
trial:
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- id: get-matrix
|
||||
run: .ci/scripts/calculate_jobs.py
|
||||
outputs:
|
||||
trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
|
||||
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
||||
|
||||
trial:
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: calculate-test-jobs
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
database: ["sqlite"]
|
||||
extras: ["all"]
|
||||
include:
|
||||
# Newest Python without optional deps
|
||||
- python-version: "3.10"
|
||||
extras: ""
|
||||
|
||||
# Oldest Python with PostgreSQL
|
||||
- python-version: "3.7"
|
||||
database: "postgres"
|
||||
postgres-version: "10"
|
||||
extras: "all"
|
||||
|
||||
# Newest Python with newest PostgreSQL
|
||||
- python-version: "3.10"
|
||||
database: "postgres"
|
||||
postgres-version: "14"
|
||||
extras: "all"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||
if: ${{ matrix.job.postgres-version }}
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.job.postgres-version }}
|
||||
|
||||
- name: Install Rust
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.job.python-version }}
|
||||
extras: ${{ matrix.job.extras }}
|
||||
python-version: ${{ matrix.python-version }}
|
||||
extras: ${{ matrix.extras }}
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.job.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
env:
|
||||
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
@@ -243,56 +128,16 @@ jobs:
    # Note: sqlite only; no postgres
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
      - uses: actions/checkout@v2
      - name: Test with old deps
        uses: docker://ubuntu:focal # For old python and sqlite
        # Note: focal seems to be using 3.8, but the oldest is 3.7?
        # See https://github.com/matrix-org/synapse/issues/12343
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2

      # There aren't wheels for some of the older deps, so we need to install
      # their build dependencies
      - run: |
          sudo apt-get -qq install build-essential libffi-dev python-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

      - uses: actions/setup-python@v4
        with:
          python-version: '3.7'

      # Calculating the old-deps actually takes a bunch of time, so we cache the
      # pyproject.toml / poetry.lock. We need to cache pyproject.toml as
      # otherwise the `poetry install` step will error due to the poetry.lock
      # file being outdated.
      #
      # This caches the output of `Prepare old deps`, which should generate the
      # same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
      - uses: actions/cache@v3
        id: cache-poetry-old-deps
        name: Cache poetry.lock
        with:
          path: |
            poetry.lock
            pyproject.toml
          key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
      - name: Prepare old deps
        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
        run: .ci/scripts/prepare_old_deps.sh

      # We only now install poetry so that `setup-python-poetry` caches the
      # right poetry.lock's dependencies.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: '3.7'
          extras: "all test"

      - run: poetry run trial -j2 tests
        workdir: /github/workspace
        entrypoint: .ci/scripts/test_old_deps.sh
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
@@ -318,7 +163,7 @@ jobs:
        extras: ["all"]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@v1
@@ -341,39 +186,50 @@ jobs:

  sytest:
    if: ${{ !failure() && !cancelled() }}
    needs: calculate-test-jobs
    needs: linting-done
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        POSTGRES: ${{ matrix.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
        WORKERS: ${{ matrix.workers && 1 }}
        REDIS: ${{ matrix.redis && 1 }}
        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres

          - sytest-tag: testing
            postgres: postgres

          - sytest-tag: focal
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: postgres
            workers: workers
            redis: redis

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2

      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
@@ -381,10 +237,10 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*
@@ -411,31 +267,28 @@ jobs:
          --health-retries 5

    steps:
      - uses: actions/checkout@v3
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_export_data_command.sh
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres

  portdb:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}
    strategy:
      matrix:
        include:
          - python-version: "3.7"
            postgres-version: "11"
            postgres-version: "10"

          - python-version: "3.11"
            postgres-version: "15"
          - python-version: "3.10"
            postgres-version: "14"

    services:
      postgres:
@@ -452,37 +305,13 @@ jobs:
          --health-retries 5

    steps:
      - uses: actions/checkout@v3
      - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
        # behind new releases, so we have to use the PostgreSQL apt repository.
        # Steps taken from https://www.postgresql.org/download/linux/ubuntu/
        run: |
          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
          sudo apt-get update
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh
        id: run_tester_script
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres
      - name: "Upload schema differences"
        uses: actions/upload-artifact@v3
        if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
        with:
          name: Schema dumps
          path: |
            unported.sql
            ported.sql
            schema_diff

  complement:
    if: "${{ !failure() && !cancelled() }}"
@@ -499,65 +328,102 @@ jobs:
          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      - name: Install Rust
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
      - name: "Install custom gotestfmt template"
        run: |
          mkdir .gotestfmt/github -p
          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh
      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fall back to HEAD.
      - name: Checkout complement
        run: synapse/.ci/scripts/checkout_complement.sh

      - run: |
          set -o pipefail
          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

  cargo-test:
    if: ${{ needs.changes.outputs.rust == 'true' }}
  # We only run the workers tests on `develop` for now, because they're too slow to wait for on PRs.
  # Sadly, you can't have an `if` condition on the value of a matrix, so this is a temporary, separate job for now.
  # GitHub Actions doesn't support YAML anchors, so it's full-on duplication for now.
  complement-developonly:
    if: "${{ !failure() && !cancelled() && (github.ref == 'refs/heads/develop') }}"
    needs: linting-done
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    name: "Complement Workers (develop only)"

    steps:
      - uses: actions/checkout@v3
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: Install Rust
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2
          path: synapse

      - run: cargo test
      - name: "Install custom gotestfmt template"
        run: |
          mkdir .gotestfmt/github -p
          cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl

      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fall back to HEAD.
      - name: Checkout complement
        run: synapse/.ci/scripts/checkout_complement.sh

      - run: |
          set -o pipefail
          WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

  # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - check-sampleconfig
      - lint
      - lint-crlf
      - lint-newsfile
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
      - cargo-test
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@v2
@@ -565,7 +431,5 @@ jobs:
          needs: ${{ toJSON(needs) }}

          # The newsfile lint may be skipped on non PR builds
          # Cargo test is skipped if there are no changes to Rust code
          skippable: |
          skippable:
            lint-newsfile
            cargo-test

15 .github/workflows/triage-incoming.yml vendored
@@ -1,15 +0,0 @@
name: Move new issues into the issue triage board

on:
  issues:
    types: [ opened ]

jobs:
  triage:
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
    secrets:
      github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}

44 .github/workflows/triage_labelled.yml vendored
@@ -1,44 +0,0 @@
name: Move labelled issues to correct projects

on:
  issues:
    types: [ labeled ]

jobs:
  move_needs_info:
    name: Move X-Needs-Info on the triage board
    runs-on: ubuntu-latest
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
    steps:
      - uses: actions/add-to-project@main
        id: add_project
        with:
          project-url: "https://github.com/orgs/matrix-org/projects/67"
          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
      - name: Set status
        env:
          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
        run: |
          gh api graphql -f query='
            mutation(
              $project: ID!
              $item: ID!
              $fieldid: ID!
              $columnid: String!
            ) {
              updateProjectV2ItemFieldValue(
                input: {
                  projectId: $project
                  itemId: $item
                  fieldId: $fieldid
                  value: {
                    singleSelectOptionId: $columnid
                  }
                }
              ) {
                projectV2Item {
                  id
                }
              }
            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
78 .github/workflows/twisted_trunk.yml vendored
@@ -15,14 +15,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
@@ -39,15 +32,8 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
@@ -79,14 +65,7 @@ jobs:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: stable
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        # but the sytest-synapse container expects it to be in /venv/.
@@ -109,7 +88,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -117,50 +96,6 @@ jobs:
            /logs/results.tap
            /logs/**/*.log*

  complement:
    if: "${{ !failure() && !cancelled() }}"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      # This step is specific to the 'Twisted trunk' test run:
      - name: Patch dependencies
        run: |
          set -x
          DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
          pipx install poetry==1.2.0

          poetry remove -n twisted
          poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry lock --no-update
        working-directory: synapse

      - run: |
          set -o pipefail
          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
@@ -168,13 +103,12 @@ jobs:
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:

10 .gitignore vendored
@@ -15,9 +15,8 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want the poetry and cargo lockfile.
# We do want the poetry lockfile.
!poetry.lock
!Cargo.lock

# stuff that is likely to exist when you run a server locally
/*.db
@@ -61,10 +60,3 @@ book/
# complement
/complement-*
/master.tar.gz

# rust
/target/
/synapse/*.so

# Poetry will create a setup.py, which we don't want to include.
/setup.py

@@ -1 +0,0 @@
group_imports = "StdExternalCrate"
1256 CHANGES.md
File diff suppressed because it is too large
466 Cargo.lock generated
@@ -1,466 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "aho-corasick"
version = "0.7.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
dependencies = [
 "memchr",
]

[[package]]
name = "anyhow"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"

[[package]]
name = "arc-swap"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164"

[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

[[package]]
name = "blake2"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
dependencies = [
 "digest",
]

[[package]]
name = "block-buffer"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
 "generic-array",
]

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
 "generic-array",
 "typenum",
]

[[package]]
name = "digest"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
dependencies = [
 "block-buffer",
 "crypto-common",
 "subtle",
]

[[package]]
name = "generic-array"
version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
dependencies = [
 "typenum",
 "version_check",
]

[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "indoc"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"

[[package]]
name = "itoa"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"

[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

[[package]]
name = "libc"
version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"

[[package]]
name = "lock_api"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
dependencies = [
 "autocfg",
 "scopeguard",
]

[[package]]
name = "log"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
 "cfg-if",
]

[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"

[[package]]
name = "memoffset"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
 "autocfg",
]

[[package]]
name = "once_cell"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"

[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
 "lock_api",
 "parking_lot_core",
]

[[package]]
name = "parking_lot_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
dependencies = [
 "cfg-if",
 "libc",
 "redox_syscall",
 "smallvec",
 "windows-sys",
]

[[package]]
name = "proc-macro2"
version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
 "unicode-ident",
]

[[package]]
name = "pyo3"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
dependencies = [
 "anyhow",
 "cfg-if",
 "indoc",
 "libc",
 "memoffset",
 "parking_lot",
 "pyo3-build-config",
 "pyo3-ffi",
 "pyo3-macros",
 "unindent",
]

[[package]]
name = "pyo3-build-config"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
dependencies = [
 "once_cell",
 "target-lexicon",
]

[[package]]
name = "pyo3-ffi"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
dependencies = [
 "libc",
 "pyo3-build-config",
]

[[package]]
name = "pyo3-log"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
dependencies = [
 "arc-swap",
 "log",
 "pyo3",
]

[[package]]
name = "pyo3-macros"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
 "quote",
 "syn",
]

[[package]]
name = "pyo3-macros-backend"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "pythonize"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
dependencies = [
 "pyo3",
 "serde",
]

[[package]]
name = "quote"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
 "proc-macro2",
]

[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
 "bitflags",
]

[[package]]
name = "regex"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-syntax",
]

[[package]]
name = "regex-syntax"
version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"

[[package]]
name = "ryu"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"

[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

[[package]]
name = "serde"
version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.148"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "serde_json"
version = "1.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
dependencies = [
 "itoa",
 "ryu",
 "serde",
]

[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"

[[package]]
name = "subtle"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"

[[package]]
name = "syn"
version = "1.0.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce"
dependencies = [
 "proc-macro2",
 "quote",
 "unicode-ident",
]

[[package]]
name = "synapse"
version = "0.1.0"
dependencies = [
 "anyhow",
 "blake2",
 "hex",
 "lazy_static",
 "log",
 "pyo3",
 "pyo3-log",
 "pythonize",
 "regex",
 "serde",
 "serde_json",
]

[[package]]
name = "target-lexicon"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"

[[package]]
name = "typenum"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"

[[package]]
name = "unicode-ident"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"

[[package]]
name = "unindent"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"

[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

[[package]]
name = "windows-sys"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
dependencies = [
 "windows_aarch64_msvc",
 "windows_i686_gnu",
 "windows_i686_msvc",
 "windows_x86_64_gnu",
 "windows_x86_64_msvc",
]

[[package]]
name = "windows_aarch64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"

[[package]]
name = "windows_i686_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"

[[package]]
name = "windows_i686_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"

[[package]]
name = "windows_x86_64_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"

[[package]]
name = "windows_x86_64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
@@ -1,9 +0,0 @@
# We make the whole Synapse folder a workspace so that we can run `cargo`
# commands from the root (rather than having to cd into rust/).

[workspace]
members = ["rust"]

[profile.dbgrelease]
inherits = "release"
debug = true
476 README.rst
@@ -2,70 +2,152 @@
Synapse |support| |development| |documentation| |license| |pypi| |python|
=========================================================================

Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
maintained by the Matrix.org Foundation. We began rapid development in 2014,
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
in earnest today.

Briefly, Matrix is an open standard for communications on the internet, supporting
federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
<https://spec.matrix.org/>`_ describes the technical details.

.. contents::

Installing and configuration
============================
Introduction
============

The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
Matrix is an ambitious new ecosystem for open federated Instant Messaging and
VoIP. The basics you need to know to get up and running are:

- Everything in Matrix happens in a room. Rooms are distributed and do not
  exist on any single server. Rooms can be located using convenience aliases
  like ``#matrix:matrix.org`` or ``#test:localhost:8448``.

- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
  you will normally refer to yourself and others using a third party identifier
  (3PID): email address, phone number, etc rather than manipulating Matrix user IDs)

The overall architecture is::

    client <----> homeserver <=====================> homeserver <----> client
           https://somewhere.org/_matrix      https://elsewhere.net/_matrix

``#matrix:matrix.org`` is the official support room for Matrix, and can be
accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
via IRC bridge at irc://irc.libera.chat/matrix.

Synapse is currently in rapid development, but as of version 0.5 we believe it
is sufficiently stable to be run as an internet-facing service for real usage!

About Matrix
============

Matrix specifies a set of pragmatic RESTful HTTP JSON APIs as an open standard,
which handle:

- Creating and managing fully distributed chat rooms with no
  single points of control or failure
- Eventually-consistent cryptographically secure synchronisation of room
  state across a global open network of federated servers and services
- Sending and receiving extensible messages in a room with (optional)
  end-to-end encryption
- Inviting, joining, leaving, kicking, banning room members
- Managing user accounts (registration, login, logout)
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
  Facebook accounts to authenticate, identify and discover users on Matrix.
- Placing 1:1 VoIP and Video calls

These APIs are intended to be implemented on a wide range of servers, services
and clients, letting developers build messaging and VoIP functionality on top
of the entirely open Matrix ecosystem rather than using closed or proprietary
solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.

Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.

In Matrix, every user runs one or more Matrix clients, which connect through to
a Matrix homeserver. The homeserver stores all their personal chat history and
user account information - much as a mail client connects through to an
IMAP/SMTP server. Just like email, you can either run your own Matrix
homeserver and control and own your own communications and history or use one
hosted by someone else (e.g. matrix.org) - there is no single point of control
or mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts,
etc.

We'd like to invite you to join #matrix:matrix.org (via
https://matrix.org/docs/projects/try-matrix-now.html), run a homeserver, take a look
at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
`APIs <https://matrix.org/docs/api>`_ and `Client SDKs
<https://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.

Thanks for using Matrix!

Support
=======

For support installing or managing Synapse, please join |room|_ (from a matrix.org
account if necessary) and ask questions there. We do not use GitHub issues for
support requests, only for bug reports and feature requests.

Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
with its source available in |docs|_.

.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org

.. |docs| replace:: ``docs``
.. _docs: docs

Synapse Installation
====================

.. _federation:

Synapse has a variety of `config options
<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
which can be used to customise its behaviour after installation.
There are additional details on how to `configure Synapse for federation here
<https://matrix-org.github.io/synapse/latest/federate.html>`_.

.. _reverse-proxy:

Using a reverse proxy with Synapse
----------------------------------

It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
For information on configuring one, see `the reverse proxy docs
<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
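
By way of illustration, a minimal nginx ``server`` block of the kind those
docs describe might look like the following. This is a sketch rather than the
documented configuration: the hostname, certificate paths and the backend port
(8008, Synapse's default plain-HTTP listener) are assumptions to adapt to your
deployment. ::

    server {
        listen 443 ssl http2;
        server_name matrix.example.com;  # placeholder hostname

        # TLS certificate paths are deployment-specific (assumption).
        ssl_certificate     /etc/letsencrypt/live/matrix.example.com/fullchain.pem;
        ssl_certificate_key /etc/letsencrypt/live/matrix.example.com/privkey.pem;

        location ~ ^(/_matrix|/_synapse/client) {
            # Forward Matrix traffic to Synapse's HTTP listener.
            proxy_pass http://localhost:8008;
            proxy_set_header X-Forwarded-For $remote_addr;
            proxy_set_header X-Forwarded-Proto $scheme;
            proxy_set_header Host $host;
            # Media uploads can be large; raise nginx's default body limit.
            client_max_body_size 50M;
        }
    }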

Upgrading an existing Synapse
-----------------------------

The instructions for upgrading Synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of Synapse.

.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
* For details on how to install synapse, see
  `Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_


Platform dependencies
---------------------
Connecting to Synapse from a client
===================================

Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
for more details.
The easiest way to try out your new Synapse installation is by connecting to it
from a web client.

Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.

An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.

If all goes well you should at least be able to log in, create a room, and
start sending messages.

.. _`client-user-reg`:

Registering a new user from a client
------------------------------------

By default, registration of new users via Matrix clients is disabled. To enable
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)

Once ``enable_registration`` is set to ``true``, it is possible to register a
user via a Matrix client.

Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::

    @localpart:my.domain.name

(pronounced "at localpart on my dot domain dot name").

As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.

Security note
-------------
=============

Matrix serves raw, user-supplied data in some APIs -- specifically the `content
repository endpoints`_.
@@ -105,76 +187,30 @@ Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.


Testing a new installation
==========================
Upgrading an existing Synapse
=============================

The easiest way to try out your new Synapse installation is by connecting to it
from a web client.
The instructions for upgrading synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of synapse.

Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html

An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
.. _reverse-proxy:

If all goes well you should at least be able to log in, create a room, and
start sending messages.
Using a reverse proxy with Synapse
==================================

.. _`client-user-reg`:
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.

Registering a new user from a client
------------------------------------

By default, registration of new users via Matrix clients is disabled. To enable
it:

1. In the
   `registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
   set ``enable_registration: true`` in ``homeserver.yaml``.
2. Then **either**:

   a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
   b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.

We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.
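
For illustration, the relevant ``homeserver.yaml`` fragment might look like
the sketch below. The reCAPTCHA keys are placeholders you would obtain
yourself, and you would keep only one of the two verification routes. ::

    enable_registration: true

    # Option (a): require a CAPTCHA on registration (strongly recommended).
    enable_registration_captcha: true
    recaptcha_public_key: "YOUR_SITE_KEY"      # placeholder
    recaptcha_private_key: "YOUR_SECRET_KEY"   # placeholder

    # Option (b): allow registration with no verification at all
    # (not recommended on the public internet):
    # enable_registration_without_verification: true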

Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::

    @localpart:my.domain.name

(pronounced "at localpart on my dot domain dot name").

As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.

Troubleshooting and support
===========================

The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
includes tips on dealing with some common problems. For more details, see
`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.

For additional support installing or managing Synapse, please ask in the community
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
issues for support requests, only for bug reports and feature requests.

.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org

.. |docs| replace:: ``docs``
.. _docs: docs
For information on configuring one, see `<docs/reverse_proxy.md>`_.

Identity Servers
================
@@ -206,15 +242,34 @@ an email address with your account, or send an invite to another user via their
email address.


Development
===========
Password reset
==============

Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
or by directly editing the database as shown below.

First calculate the hash of the new password::

    $ ~/synapse/env/bin/hash_password
    Password:
    Confirm password:
    $2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx

Then update the ``users`` table in the database::

    UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
        WHERE name='@test:test.com';


Synapse Development
===================

We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
information for synapse developers as well as synapse administrators.

information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:

* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
@@ -225,6 +280,187 @@ Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!


Quick start
-----------

Before setting up a development environment for synapse, make sure you have the
system dependencies (such as the python header files) installed - see
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.

To check out a synapse for development, clone the git repo into a working
directory of your choice::

    git clone https://github.com/matrix-org/synapse.git
    cd synapse

Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::

    pip install --user pipx
    pipx install poetry

as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::

    poetry install --extras "all test"

This will run a process of downloading and installing all the needed
dependencies into a virtual env.

We recommend using the demo which starts 3 federated instances running on ports ``8080`` - ``8082``::

    poetry run ./demo/start.sh

(to stop, you can use ``poetry run ./demo/stop.sh``)

See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.

If you just want to start a single instance of the app and run it directly::

    # Create the homeserver.yaml config once
    poetry run synapse_homeserver \
      --server-name my.domain.name \
      --config-path homeserver.yaml \
      --generate-config \
      --report-stats=[yes|no]

    # Start the app
    poetry run synapse_homeserver --config-path homeserver.yaml


Running the unit tests
----------------------

After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::

    poetry run trial tests

This should end with a 'PASSED' result (note that exact numbers will
differ)::

    Ran 1337 tests in 716.064s

    PASSED (skips=15, successes=1322)

For more tips on running the unit tests, like running a specific test or
to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.


Running the Integration Tests
-----------------------------

Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
a Matrix homeserver integration testing suite, which uses HTTP requests to
access the API as a Matrix client would. It is able to run Synapse directly from
the source tree, so installation of the server is not required.

Testing with SyTest is recommended for verifying that changes related to the
Client-Server API are functioning correctly. See the `SyTest installation
instructions <https://github.com/matrix-org/sytest#installing>`_ for details.


Platform dependencies
=====================

Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`<docs/deprecation_policy.md>`_ document for more details.


Troubleshooting
===============

Need help? Join our community support room on Matrix:
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_

Running out of File Handles
---------------------------

If synapse runs out of file handles, it typically fails badly - live-locking
at 100% CPU, and/or failing to accept new TCP connections (blocking the
connecting client). Matrix currently can legitimately use a lot of file handles,
thanks to busy rooms like #matrix:matrix.org containing hundreds of participating
servers. The first time a server talks in a room it will try to connect
simultaneously to all participating servers, which could exhaust the available
file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
to respond. (We need to improve the routing algorithm used to be better than
full mesh, but as of March 2019 this hasn't happened yet).

If you hit this failure mode, we recommend increasing the maximum number of
open file handles to be at least 4096 (assuming a default of 1024 or 256).
This is typically done by editing ``/etc/security/limits.conf``.
|
||||
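
A minimal sketch of the relevant lines (assuming Synapse runs as a
``matrix-synapse`` user; adjust for your setup)::

    matrix-synapse  soft  nofile  4096
    matrix-synapse  hard  nofile  4096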

Separately, Synapse may leak file handles if inbound HTTP requests get stuck
during processing - e.g. blocked behind a lock or talking to a remote server.
This is best diagnosed by matching up the 'Received request' and 'Processed request'
log lines and looking for any 'Processed request' lines which take more than
a few seconds to execute. If you see this failure mode, please let us know at
#synapse:matrix.org so we can help debug it.
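
One rough way to pull out the relevant lines for comparison (assuming your
log file is ``homeserver.log``; the filename is an assumption)::

    grep -E "(Received|Processed) request" homeserver.log | less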

Help!! Synapse is slow and eats all my RAM/CPU!
-----------------------------------------------

First, ensure you are running the latest version of Synapse, using Python 3
with a PostgreSQL database.

Synapse's architecture is quite RAM hungry currently - we deliberately
cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in the future, but for now the easiest
way to reduce the RAM usage (at the risk of slowing things down)
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
variable. The default is 0.5, which can be decreased to reduce RAM usage
in memory-constrained environments, or increased if performance starts to
degrade.

However, degraded performance due to a low cache factor, common on
machines with slow disks, often leads to explosions in memory use due to
backlogged requests. In this case, reducing the cache factor will make
things worse. Instead, try increasing it drastically. 2.0 is a good
starting value.
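
For the Debian package, the variable can be set in ``/etc/default/matrix-synapse``
(for other setups, export it in whatever environment launches Synapse); a
minimal sketch::

    SYNAPSE_CACHE_FACTOR=2.0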

Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
improvement in overall memory use, and especially in terms of giving back
RAM to the OS. To use it, the library must simply be put in the
LD_PRELOAD environment variable when launching Synapse. On Debian, this
can be done by installing the ``libjemalloc1`` package and adding this
line to ``/etc/default/matrix-synapse``::

    LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1

This can make a significant difference on Python 2.7 - it's unclear how
much of an improvement it provides on Python 3.x.
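
To confirm that jemalloc was actually picked up, one option (a sketch,
assuming a systemd-managed service) is to look for the library in the
running process's memory maps::

    grep jemalloc /proc/$(systemctl show -p MainPID --value matrix-synapse)/maps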

If you're encountering high CPU use by the Synapse process itself, you
may be affected by a bug with presence tracking that leads to a
massive excess of outgoing federation requests (see `discussion
<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
the following in the Synapse config file:

.. code-block:: yaml

   presence:
     enabled: false

People can't accept room invitations from me
--------------------------------------------

The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::

    2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>

This is normally caused by a misconfiguration in your reverse-proxy. See
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
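
As a quick sanity check (substituting your own server name for
``my.domain.name``), you can try fetching your server's signing keys through
the reverse proxy; a JSON response suggests requests are reaching Synapse
rather than failing at the proxy::

    curl https://my.domain.name/_matrix/key/v2/server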

.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
    :alt: (get support on #synapse:matrix.org)
    :target: https://matrix.to/#/#synapse:matrix.org
@@ -1,23 +0,0 @@
# A build script for poetry that adds the rust extension.

import os
from typing import Any, Dict

from setuptools_rust import Binding, RustExtension


def build(setup_kwargs: Dict[str, Any]) -> None:
    original_project_dir = os.path.dirname(os.path.realpath(__file__))
    cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

    extension = RustExtension(
        target="synapse.synapse_rust",
        path=cargo_toml_path,
        binding=Binding.PyO3,
        py_limited_api=True,
        # We force always building in release mode, as we can't tell the
        # difference between using `poetry` in development vs production.
        debug=False,
    )
    setup_kwargs.setdefault("rust_extensions", []).append(extension)
    setup_kwargs["zip_safe"] = False
changelog.d/13029.doc
Normal file
@@ -0,0 +1 @@
Add an explanation of the `--report-stats` argument to the docs.

changelog.d/13031.feature
Normal file
@@ -0,0 +1 @@
Implement [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827): Filtering of /publicRooms by room type.

changelog.d/13116.doc
Normal file
@@ -0,0 +1 @@
Fix wrong section header for `allow_public_rooms_over_federation` in the homeserver config documentation.

changelog.d/13119.misc
Normal file
@@ -0,0 +1 @@
Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room.

changelog.d/13134.misc
Normal file
@@ -0,0 +1 @@
Apply ratelimiting earlier in processing of /send request.

@@ -1 +0,0 @@
Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar).

@@ -1 +0,0 @@
Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.

@@ -1 +0,0 @@
Update worker settings for `pusher` and `federation_sender` functionality.

@@ -1 +0,0 @@
Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages.

@@ -1 +0,0 @@
Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.

@@ -1 +0,0 @@
Share the `ClientRestResource` for both workers and the main process.

@@ -1 +0,0 @@
Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room.

@@ -1 +0,0 @@
Add new `push.enabled` config option to allow opting out of push notification calculation.

@@ -1 +0,0 @@
Modernize unit tests configuration related to workers.

@@ -1 +0,0 @@
Advertise support for Matrix 1.5 on `/_matrix/client/versions`.

@@ -1 +0,0 @@
Bump jsonschema from 4.17.0 to 4.17.3.

@@ -1 +0,0 @@
Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances.

@@ -1 +0,0 @@
Add missing type hints.

@@ -1 +0,0 @@
Fix Rust lint CI.

@@ -1 +0,0 @@
Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1.
@@ -94,6 +94,20 @@ worker_replication_host: synapse
worker_replication_http_port: 9093
```

### Add Workers to `instance_map`

Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:

```yaml
instance_map:
  synapse-generic-worker-1: # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034 # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```

### Configure Federation Senders

This section is applicable if you are using Federation senders (`synapse.app.federation_sender`). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them, as sketched below:
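
(The diff hunk below elides the actual settings; as a minimal sketch of what they typically look like - the worker name comes from the `instance_map` example above, and the exact values are illustrative:)

```yaml
send_federation: false
federation_sender_instances:
  - synapse-federation-sender-1
```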

@@ -108,4 +122,4 @@ federation_sender_instances:

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
@@ -5,4 +5,10 @@ worker_name: synapse-federation-sender-1
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

worker_log_config: /data/federation_sender.log.config

@@ -6,6 +6,10 @@ worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true
File diff suppressed because it is too large.

contrib/prometheus/synapse-v1.rules
Normal file
@@ -0,0 +1,21 @@
synapse_federation_transaction_queue_pendingEdus:total = sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)
synapse_federation_transaction_queue_pendingPdus:total = sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)

synapse_http_server_request_count:method{servlet=""} = sum(synapse_http_server_request_count) by (method)
synapse_http_server_request_count:servlet{method=""} = sum(synapse_http_server_request_count) by (servlet)

synapse_http_server_request_count:total{servlet=""} = sum(synapse_http_server_request_count:by_method) by (servlet)

synapse_cache:hit_ratio_5m = rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])
synapse_cache:hit_ratio_30s = rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])

synapse_federation_client_sent{type="EDU"} = synapse_federation_client_sent_edus + 0
synapse_federation_client_sent{type="PDU"} = synapse_federation_client_sent_pdu_destinations:count + 0
synapse_federation_client_sent{type="Query"} = sum(synapse_federation_client_sent_queries) by (job)

synapse_federation_server_received{type="EDU"} = synapse_federation_server_received_edus + 0
synapse_federation_server_received{type="PDU"} = synapse_federation_server_received_pdus + 0
synapse_federation_server_received{type="Query"} = sum(synapse_federation_server_received_queries) by (job)

synapse_federation_transaction_queue_pending{type="EDU"} = synapse_federation_transaction_queue_pending_edus + 0
synapse_federation_transaction_queue_pending{type="PDU"} = synapse_federation_transaction_queue_pending_pdus + 0
@@ -1,20 +1,37 @@
groups:
- name: synapse
  rules:
  - record: "synapse_federation_transaction_queue_pendingEdus:total"
    expr: "sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)"
  - record: "synapse_federation_transaction_queue_pendingPdus:total"
    expr: "sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)"
  - record: 'synapse_http_server_request_count:method'
    labels:
      servlet: ""
    expr: "sum(synapse_http_server_request_count) by (method)"
  - record: 'synapse_http_server_request_count:servlet'
    labels:
      method: ""
    expr: 'sum(synapse_http_server_request_count) by (servlet)'

  - record: 'synapse_http_server_request_count:total'
    labels:
      servlet: ""
    expr: 'sum(synapse_http_server_request_count:by_method) by (servlet)'

  - record: 'synapse_cache:hit_ratio_5m'
    expr: 'rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])'
  - record: 'synapse_cache:hit_ratio_30s'
    expr: 'rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])'

  ###
  ### Prometheus Console Only
  ### The following rules are only needed if you use the Prometheus Console
  ### in contrib/prometheus/consoles/synapse.html
  ###
  - record: 'synapse_federation_client_sent'
    labels:
      type: "EDU"
    expr: 'synapse_federation_client_sent_edus_total + 0'
    expr: 'synapse_federation_client_sent_edus + 0'
  - record: 'synapse_federation_client_sent'
    labels:
      type: "PDU"
    expr: 'synapse_federation_client_sent_pdu_destinations_count_total + 0'
    expr: 'synapse_federation_client_sent_pdu_destinations:count + 0'
  - record: 'synapse_federation_client_sent'
    labels:
      type: "Query"
@@ -23,11 +40,11 @@ groups:
  - record: 'synapse_federation_server_received'
    labels:
      type: "EDU"
    expr: 'synapse_federation_server_received_edus_total + 0'
    expr: 'synapse_federation_server_received_edus + 0'
  - record: 'synapse_federation_server_received'
    labels:
      type: "PDU"
    expr: 'synapse_federation_server_received_pdus_total + 0'
    expr: 'synapse_federation_server_received_pdus + 0'
  - record: 'synapse_federation_server_received'
    labels:
      type: "Query"
@@ -41,34 +58,21 @@ groups:
    labels:
      type: "PDU"
    expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
  ###
  ### End of 'Prometheus Console Only' rules block
  ###


  ###
  ### Grafana Only
  ### The following rules are only needed if you use the Grafana dashboard
  ### in contrib/grafana/synapse.json
  ###
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
    labels:
      type: remote
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
    labels:
      type: local
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
    labels:
      type: bridges

  - record: synapse_storage_events_persisted_by_event_type
    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)

    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
  - record: synapse_storage_events_persisted_by_origin
    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
  ###
  ### End of 'Grafana Only' rules block
  ###
    expr: sum without(type) (synapse_storage_events_persisted_events_sep)
@@ -1,33 +0,0 @@
# Creating multiple generic workers with a bash script

Setting up multiple worker configuration files manually can be time-consuming.
You can alternatively create multiple worker configuration files with a simple `bash` script. For example:

```sh
#!/bin/bash
for i in {1..5}
do
  cat << EOF > generic_worker$i.yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker$i

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_main_http_uri: http://localhost:8008/

worker_listeners:
  - type: http
    port: 808$i
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
EOF
done
```

This would create five generic workers, each with a unique `worker_name` field in its file, listening on ports 8081-8085.

Customise the script to your needs.
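
As with the stream-writer script below, you might save this as e.g. `create_generic_workers.sh` (the filename is illustrative) and run it:

```console
$ chmod +x create_generic_workers.sh
$ ./create_generic_workers.sh
```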

@@ -1,145 +0,0 @@
# Creating multiple stream writers with a bash script

This script creates multiple [stream writer](https://github.com/matrix-org/synapse/blob/develop/docs/workers.md#stream-writers) workers.

Stream writers require both replication and HTTP listeners.

It also prints out the example lines for the Synapse main configuration file.

Remember to route the necessary endpoints directly to the worker associated with them.

If you run the script as-is, it will create workers with the replication listener starting from port 8034 and another, regular http listener starting from 8044. If you don't need all of the stream writers listed in the script, just remove them from the `STREAM_WRITERS` array.

```sh
#!/bin/bash

# Start with these replication and http ports.
# The script loop starts with the exact port and then increments it by one.
REP_START_PORT=8034
HTTP_START_PORT=8044

# Stream writer workers to generate. Feel free to add or remove them as you wish.
# Event persister ("events") isn't included here as it does not require its
# own HTTP listener.

STREAM_WRITERS+=( "presence" "typing" "receipts" "to_device" "account_data" )

NUM_WRITERS=$(expr ${#STREAM_WRITERS[@]})

i=0

while [ $i -lt "$NUM_WRITERS" ]
do
  cat << EOF > ${STREAM_WRITERS[$i]}_stream_writer.yaml
worker_app: synapse.app.generic_worker
worker_name: ${STREAM_WRITERS[$i]}_stream_writer

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: $(expr $REP_START_PORT + $i)
    resources:
      - names: [replication]

  - type: http
    port: $(expr $HTTP_START_PORT + $i)
    resources:
      - names: [client]

worker_log_config: /etc/matrix-synapse/stream-writer-log.yaml
EOF
  HOMESERVER_YAML_INSTANCE_MAP+=$"  ${STREAM_WRITERS[$i]}_stream_writer:
    host: 127.0.0.1
    port: $(expr $REP_START_PORT + $i)
"

  HOMESERVER_YAML_STREAM_WRITERS+=$"  ${STREAM_WRITERS[$i]}: ${STREAM_WRITERS[$i]}_stream_writer
"

  ((i++))
done

cat << EXAMPLECONFIG
# Add these lines to your homeserver.yaml.
# Don't forget to configure your reverse proxy and
# necessary endpoints to their respective worker.

# See https://github.com/matrix-org/synapse/blob/develop/docs/workers.md
# for more information.

# Remember: Under NO circumstances should the replication
# listener be exposed to the public internet;
# it has no authentication and is unencrypted.

instance_map:
$HOMESERVER_YAML_INSTANCE_MAP
stream_writers:
$HOMESERVER_YAML_STREAM_WRITERS
EXAMPLECONFIG
```

Copy the code above and save it to a file, e.g. `create_stream_writers.sh`.

Make the script executable by running `chmod +x create_stream_writers.sh`.

## Run the script to create workers and print out a sample configuration

Simply run the script to create YAML files in the current folder and print out the required configuration for `homeserver.yaml`.

```console
$ ./create_stream_writers.sh

# Add these lines to your homeserver.yaml.
# Don't forget to configure your reverse proxy and
# necessary endpoints to their respective worker.

# See https://github.com/matrix-org/synapse/blob/develop/docs/workers.md
# for more information

# Remember: Under NO circumstances should the replication
# listener be exposed to the public internet;
# it has no authentication and is unencrypted.

instance_map:
  presence_stream_writer:
    host: 127.0.0.1
    port: 8034
  typing_stream_writer:
    host: 127.0.0.1
    port: 8035
  receipts_stream_writer:
    host: 127.0.0.1
    port: 8036
  to_device_stream_writer:
    host: 127.0.0.1
    port: 8037
  account_data_stream_writer:
    host: 127.0.0.1
    port: 8038

stream_writers:
  presence: presence_stream_writer
  typing: typing_stream_writer
  receipts: receipts_stream_writer
  to_device: to_device_stream_writer
  account_data: account_data_stream_writer
```

Simply copy-and-paste the output to an appropriate place in your Synapse main configuration file.

## Write directly to the Synapse configuration file

You could also write the output directly to the homeserver's main configuration file. **This, however, is not recommended**, as even a small typo (such as replacing `>>` with `>`) can erase the entire `homeserver.yaml`.

If you do this, back up your original configuration file first:

```console
# Back up homeserver.yaml first
cp /etc/matrix-synapse/homeserver.yaml /etc/matrix-synapse/homeserver.yaml.bak

# Create workers and write output to your homeserver.yaml
./create_stream_writers.sh >> /etc/matrix-synapse/homeserver.yaml
```
debian/build_virtualenv
vendored
@@ -36,7 +36,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0
pip install poetry==1.2.0b1
poetry export \
    --extras all \
    --extras test \
@@ -61,7 +61,7 @@ dh_virtualenv \
    --extras="all,systemd,test" \
    --requirements="exported_requirements.txt"

PACKAGE_BUILD_DIR="$(pwd)/debian/matrix-synapse-py3"
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"

@@ -78,14 +78,9 @@ case "$DEB_BUILD_OPTIONS" in

        cp -r tests "$tmpdir"

        # To avoid pulling in the unbuilt Synapse in the local directory
        pushd /

        PYTHONPATH="$tmpdir" \
            "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests

        popd

        ;;
esac
debian/changelog
vendored
@@ -1,240 +1,3 @@
matrix-synapse-py3 (1.73.0~rc2) stable; urgency=medium

  * New Synapse release 1.73.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 01 Dec 2022 10:02:19 +0000

matrix-synapse-py3 (1.73.0~rc1) stable; urgency=medium

  * New Synapse release 1.73.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Nov 2022 12:28:13 +0000

matrix-synapse-py3 (1.72.0) stable; urgency=medium

  * New Synapse release 1.72.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Nov 2022 10:57:30 +0000

matrix-synapse-py3 (1.72.0~rc1) stable; urgency=medium

  * New Synapse release 1.72.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 16 Nov 2022 15:10:59 +0000

matrix-synapse-py3 (1.71.0) stable; urgency=medium

  * New Synapse release 1.71.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Nov 2022 10:38:10 +0000

matrix-synapse-py3 (1.71.0~rc2) stable; urgency=medium

  * New Synapse release 1.71.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 04 Nov 2022 12:00:33 +0000

matrix-synapse-py3 (1.71.0~rc1) stable; urgency=medium

  * New Synapse release 1.71.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Nov 2022 12:10:17 +0000

matrix-synapse-py3 (1.70.1) stable; urgency=medium

  * New Synapse release 1.70.1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 28 Oct 2022 12:10:21 +0100

matrix-synapse-py3 (1.70.0) stable; urgency=medium

  * New Synapse release 1.70.0.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Oct 2022 11:11:50 +0100

matrix-synapse-py3 (1.70.0~rc2) stable; urgency=medium

  * New Synapse release 1.70.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Oct 2022 10:59:47 +0100

matrix-synapse-py3 (1.70.0~rc1) stable; urgency=medium

  * New Synapse release 1.70.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 19 Oct 2022 14:11:57 +0100

matrix-synapse-py3 (1.69.0) stable; urgency=medium

  * New Synapse release 1.69.0.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 17 Oct 2022 11:31:03 +0100

matrix-synapse-py3 (1.69.0~rc4) stable; urgency=medium

  * New Synapse release 1.69.0rc4.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 14 Oct 2022 15:04:47 +0100

matrix-synapse-py3 (1.69.0~rc3) stable; urgency=medium

  * New Synapse release 1.69.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 12 Oct 2022 13:24:04 +0100

matrix-synapse-py3 (1.69.0~rc2) stable; urgency=medium

  * New Synapse release 1.69.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 06 Oct 2022 14:45:00 +0100

matrix-synapse-py3 (1.69.0~rc1) stable; urgency=medium

  * The man page for the hash_password script has been updated to reflect
    the correct default value of 'bcrypt_rounds'.
  * New Synapse release 1.69.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Oct 2022 11:17:16 +0100

matrix-synapse-py3 (1.68.0) stable; urgency=medium

  * New Synapse release 1.68.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 27 Sep 2022 12:02:09 +0100

matrix-synapse-py3 (1.68.0~rc2) stable; urgency=medium

  * New Synapse release 1.68.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 23 Sep 2022 09:40:10 +0100

matrix-synapse-py3 (1.68.0~rc1) stable; urgency=medium

  * New Synapse release 1.68.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Sep 2022 11:18:20 +0100

matrix-synapse-py3 (1.67.0) stable; urgency=medium

  * New Synapse release 1.67.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Sep 2022 09:19:56 +0100

matrix-synapse-py3 (1.67.0~rc1) stable; urgency=medium

  [ Erik Johnston ]
  * Use stable poetry 1.2.0 version, rather than a prerelease.

  [ Synapse Packaging team ]
  * New Synapse release 1.67.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Sep 2022 09:01:06 +0100

matrix-synapse-py3 (1.66.0) stable; urgency=medium

  * New Synapse release 1.66.0.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 31 Aug 2022 11:20:17 +0100

matrix-synapse-py3 (1.66.0~rc2+nmu1) UNRELEASED; urgency=medium

  [ Jörg Behrmann ]
  * Update debhelper to compatibility level 12.
  * Drop the preinst script stopping synapse.
  * Allocate a group for the system user.
  * Change dpkg-statoverride to --force-statoverride-add.

  [ Erik Johnston ]
  * Disable `dh_auto_configure` as it broke during Rust build.

 -- Jörg Behrmann <behrmann@physik.fu-berlin.de>  Tue, 23 Aug 2022 17:17:00 +0100

matrix-synapse-py3 (1.66.0~rc2) stable; urgency=medium

  * New Synapse release 1.66.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Aug 2022 12:25:19 +0100

matrix-synapse-py3 (1.66.0~rc1) stable; urgency=medium

  * New Synapse release 1.66.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Aug 2022 09:48:55 +0100

matrix-synapse-py3 (1.65.0) stable; urgency=medium

  * New Synapse release 1.65.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Aug 2022 16:51:26 +0100

matrix-synapse-py3 (1.65.0~rc2) stable; urgency=medium

  * New Synapse release 1.65.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 11 Aug 2022 11:38:18 +0100

matrix-synapse-py3 (1.65.0~rc1) stable; urgency=medium

  * New Synapse release 1.65.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Aug 2022 11:39:29 +0100

matrix-synapse-py3 (1.64.0) stable; urgency=medium

  * New Synapse release 1.64.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Aug 2022 10:32:30 +0100

matrix-synapse-py3 (1.64.0~rc2) stable; urgency=medium

  * New Synapse release 1.64.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 29 Jul 2022 12:22:53 +0100

matrix-synapse-py3 (1.64.0~rc1) stable; urgency=medium

  * New Synapse release 1.64.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Jul 2022 12:11:49 +0100

matrix-synapse-py3 (1.63.1) stable; urgency=medium

  * New Synapse release 1.63.1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Jul 2022 13:36:52 +0100

matrix-synapse-py3 (1.63.0) stable; urgency=medium

  * Clarify that homeserver server names are included in the data reported
    by opt-in server stats reporting (`report_stats` homeserver config option).
  * New Synapse release 1.63.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Jul 2022 14:42:24 +0200

matrix-synapse-py3 (1.63.0~rc1) stable; urgency=medium

  * New Synapse release 1.63.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Jul 2022 11:26:02 +0100

matrix-synapse-py3 (1.62.0) stable; urgency=medium

  * New Synapse release 1.62.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Jul 2022 11:14:15 +0100

matrix-synapse-py3 (1.62.0~rc3) stable; urgency=medium

  * New Synapse release 1.62.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Jul 2022 16:07:01 +0100

matrix-synapse-py3 (1.62.0~rc2) stable; urgency=medium

  * New Synapse release 1.62.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 01 Jul 2022 11:42:41 +0100

matrix-synapse-py3 (1.62.0~rc1) stable; urgency=medium

  * New Synapse release 1.62.0rc1.
debian/compat
vendored
Normal file
@@ -0,0 +1 @@
10
debian/control
vendored
@@ -4,7 +4,7 @@ Priority: extra
Maintainer: Synapse Packaging team <packages@matrix.org>
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
Build-Depends:
 debhelper-compat (= 12),
 debhelper (>= 10),
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,
debian/hash_password.1
vendored
@@ -10,7 +10,7 @@
.P
\fBhash_password\fR takes a password as a parameter either on the command line or the \fBSTDIN\fR if not supplied\.
.P
It accepts a YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB12\fR\.
It accepts a YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB10\fR\.
.P
The hashed password is written on the \fBSTDOUT\fR\.
.SH "FILES"
debian/hash_password.ronn
vendored
@@ -14,7 +14,7 @@ or the `STDIN` if not supplied.

It accepts a YAML file which can be used to specify parameters like the
number of rounds for bcrypt and password_config section having the pepper
value used for the hashing. By default `bcrypt_rounds` is set to **12**.
value used for the hashing. By default `bcrypt_rounds` is set to **10**.

The hashed password is written on the `STDOUT`.
debian/matrix-synapse-py3.postinst
vendored
@@ -31,7 +31,7 @@ EOF
# This file is autogenerated, and will be recreated on upgrade if it is deleted.
# Any changes you make will be preserved.

# Whether to report homeserver usage statistics.
# Whether to report anonymized homeserver usage statistics.
report_stats: false
EOF
fi
@@ -40,12 +40,12 @@ EOF
/opt/venvs/matrix-synapse/lib/manage_debconf.pl update

if ! getent passwd $USER >/dev/null; then
    adduser --quiet --system --group --no-create-home --home /var/lib/matrix-synapse $USER
    adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER
fi

for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do
    if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then
        dpkg-statoverride --force-statoverride-add --quiet --update --add $USER "$(id -gn $USER)" 0755 $DIR
        dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR
    fi
done
debian/matrix-synapse-py3.preinst
vendored
Normal file
@@ -0,0 +1,31 @@
#!/bin/sh -e

# Attempt to undo some of the braindamage caused by
# https://github.com/matrix-org/package-synapse-debian/issues/18.
#
# Due to reasons [1], the old python2 matrix-synapse package will not stop the
# service when the package is uninstalled. Our maintainer scripts will do the
# right thing in terms of ensuring the service is enabled and unmasked, but
# then do a `systemctl start matrix-synapse`, which of course does nothing -
# leaving the old (py2) service running.
#
# There should normally be no reason for the service to be running during our
# preinst, so we assume that if it *is* running, it's due to that situation,
# and stop it.
#
# [1] dh_systemd_start doesn't do anything because it sees that there is an
# init.d script with the same name, so leaves it to dh_installinit.
#
# dh_installinit doesn't do anything because somebody gave it a --no-start
# for unknown reasons.

if [ -x /bin/systemctl ]; then
    if /bin/systemctl --quiet is-active -- matrix-synapse; then
        echo >&2 "stopping existing matrix-synapse service"
        /bin/systemctl stop matrix-synapse || true
    fi
fi

#DEBHELPER#

exit 0
debian/matrix-synapse.default
vendored
Normal file
@@ -0,0 +1,2 @@
# Specify environment variables used when running Synapse
# SYNAPSE_CACHE_FACTOR=0.5 (default)
debian/matrix-synapse.service
vendored
@@ -5,6 +5,7 @@ Description=Synapse Matrix homeserver
Type=notify
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=-/etc/default/matrix-synapse
ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
ExecReload=/bin/kill -HUP $MAINPID
@@ -12,10 +13,5 @@ Restart=always
RestartSec=3
SyslogIdentifier=matrix-synapse

# The environment file is not shipped by default anymore and the below directive
# is for backwards compatibility only. Please use your homeserver.yaml if
# possible.
EnvironmentFile=-/etc/default/matrix-synapse

[Install]
WantedBy=multi-user.target
debian/po/templates.pot
vendored
@@ -37,7 +37,7 @@ msgstr ""
#. Type: boolean
#. Description
#: ../templates:2001
msgid "Report homeserver usage statistics?"
msgid "Report anonymous statistics?"
msgstr ""

#. Type: boolean
@@ -45,11 +45,11 @@ msgstr ""
#: ../templates:2001
msgid ""
"Developers of Matrix and Synapse really appreciate helping the project out "
"by reporting homeserver usage statistics from this homeserver. Your "
"homeserver's server name, along with very basic aggregate data (e.g. "
"number of users) will be reported. But it helps track the growth of the "
"Matrix community, and helps in making Matrix a success, as well as to "
"convince other networks that they should peer with Matrix."
"by reporting anonymized usage statistics from this homeserver. Only very "
"basic aggregate data (e.g. number of users) will be reported, but it helps "
"track the growth of the Matrix community, and helps in making Matrix a "
"success, as well as to convince other networks that they should peer with "
"Matrix."
msgstr ""

#. Type: boolean
debian/rules
vendored
@@ -6,19 +6,15 @@
# assume we only have one package
PACKAGE_NAME:=`dh_listpackages`

override_dh_installsystemd:
	dh_installsystemd --name=matrix-synapse
override_dh_systemd_enable:
	dh_systemd_enable --name=matrix-synapse

override_dh_installinit:
	dh_installinit --name=matrix-synapse

# we don't really want to strip the symbols from our object files.
override_dh_strip:

override_dh_auto_configure:

# many libraries pulled from PyPI have allocatable sections after
# non-allocatable ones on which dwz errors out. For those without the issue the
# gains are only marginal
override_dh_dwz:

# dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
# (executables and shared libs) in the package, and looks for the shared
# libraries that they depend on. It then adds a dependency on the package that
debian/templates
vendored
@@ -10,13 +10,12 @@ _Description: Name of the server:
Template: matrix-synapse/report-stats
Type: boolean
Default: false
_Description: Report homeserver usage statistics?
_Description: Report anonymous statistics?
 Developers of Matrix and Synapse really appreciate helping the
 project out by reporting homeserver usage statistics from this
 homeserver. Your homeserver's server name, along with very basic
 aggregate data (e.g. number of users) will be reported. But it
 helps track the growth of the Matrix community, and helps in
 making Matrix a success, as well as to convince other networks
 that they should peer with Matrix.
 project out by reporting anonymized usage statistics from this
 homeserver. Only very basic aggregate data (e.g. number of users)
 will be reported, but it helps track the growth of the Matrix
 community, and helps in making Matrix a success, as well as to
 convince other networks that they should peer with Matrix.
 .
 Thank you.
@@ -31,9 +31,7 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
FROM docker.io/python:${PYTHON_VERSION}-slim as requirements

# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -42,43 +40,34 @@ FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update -qq && apt-get install -yqq \
        build-essential cargo git libffi-dev libssl-dev \
    apt-get update -qq && apt-get install -yqq git \
    && rm -rf /var/lib/apt/lists/*

# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
# https://github.com/python-poetry/poetry/pull/5156 and
# https://github.com/python-poetry/poetry/issues/5141 ;
# without it, we generate a requirements.txt with incorrect environment markers,
# which causes necessary packages to be omitted when we `pip install`.
#
# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install --user "poetry==1.2.0"
    pip install --user "poetry-core==1.1.0a7" "git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5"

WORKDIR /synapse

# Copy just what we need to run `poetry export`...
COPY pyproject.toml poetry.lock /synapse/

# If specified, we won't verify the hashes of dependencies.
# This is only needed if the hashes of dependencies cannot be checked for some
# reason, such as when a git repository is used directly as a dependency.
ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION

# If specified, we won't use the Poetry lockfile.
# Instead, we'll just install what a regular `pip install` would from PyPI.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE

# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
# Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed.
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
        /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
    else \
        touch /synapse/requirements.txt; \
    fi
RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt

###
### Stage 1: builder
###
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
FROM docker.io/python:${PYTHON_VERSION}-slim as builder

# install the OS build deps
RUN \
@@ -94,26 +83,10 @@ RUN \
    libxml++2.6-dev \
    libxslt1-dev \
    openssl \
    rustc \
    zlib1g-dev \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*


# Install rust and ensure its in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI

# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be
# used while you develop on the source
@@ -125,29 +98,17 @@ RUN --mount=type=cache,target=/root/.cache/pip \

# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml README.rst build_rust.py Cargo.toml Cargo.lock /synapse/

# Repeat of earlier build argument declaration, as this is a new build stage.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
COPY pyproject.toml README.rst /synapse/

# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
    --mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
    if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
        pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
    else \
        pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
    fi
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse

###
### Stage 2: runtime
###

FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
FROM docker.io/python:${PYTHON_VERSION}-slim

LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
@@ -72,7 +72,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
    && env DEBIAN_FRONTEND=noninteractive apt-get install \
    -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
    build-essential \
    curl \
    debhelper \
    devscripts \
    libsystemd-dev \
@@ -86,15 +85,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
    libpq-dev \
    xmlsec1

# Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /

# install dhvirtualenv. Update the apt cache again first, in case we got a
@@ -1,66 +1,38 @@
# syntax=docker/dockerfile:1

# Inherit from the official Synapse docker image
ARG SYNAPSE_VERSION=latest

# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.

FROM debian:bullseye-slim AS deps_base
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update -qq && \
    DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
    redis-server nginx-light

# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM redis:6-bullseye AS redis_base

# now build the final image, based on the regular Synapse docker image
FROM matrixdotorg/synapse:$SYNAPSE_VERSION

# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install supervisor~=4.2
RUN mkdir -p /etc/supervisor/conf.d
# Install deps
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update -qq && \
    DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
    redis-server nginx-light

# Copy over redis and nginx
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install supervisor~=4.2

COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/lib/nginx
# Disable the default nginx sites
RUN rm /etc/nginx/sites-enabled/default

# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/

# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/
# Expose nginx listener port
EXPOSE 8080/tcp

# Expose nginx listener port
EXPOSE 8080/tcp
# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD /bin/sh /healthcheck.sh
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD /bin/sh /healthcheck.sh
@@ -22,10 +22,6 @@ Consult the [contributing guide][guideComplementSh] for instructions on how to u
Under some circumstances, you may wish to build the images manually.
The instructions below will lead you through doing that.

Note that these images can only be built using [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/),
therefore BuildKit needs to be enabled when calling `docker build`. This can be done by
setting `DOCKER_BUILDKIT=1` in your environment, as in the example below.
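
For example (using the tag and Dockerfile path that appear elsewhere in these docs):

```sh
DOCKER_BUILDKIT=1 docker build -t matrixdotorg/synapse -f docker/Dockerfile .
```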

Start by building the base Synapse docker image. If you wish to run tests with the latest
release of Synapse, instead of your current checkout, you can skip this step. From the
root of the repository:
@@ -67,13 +67,6 @@ The following environment variables are supported in `generate` mode:
* `UID`, `GID`: the user id and group id to use for creating the data
  directories. If unset, and no user is set via `docker run --user`, defaults
  to `991`, `991`.
* `SYNAPSE_LOG_LEVEL`: the log level to use (one of `DEBUG`, `INFO`, `WARNING` or `ERROR`).
  Defaults to `INFO`.
* `SYNAPSE_LOG_SENSITIVE`: if set and the log level is set to `DEBUG`, Synapse
  will log sensitive information such as access tokens.
  This should not be needed unless you are a developer attempting to debug something
  particularly tricky. (See the sketch after this list.)
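
A sketch of passing these when generating a config (the image name, volume path and server name are illustrative, and `SYNAPSE_SERVER_NAME`/`SYNAPSE_REPORT_STATS` are assumed to be the usual `generate`-mode variables):

```sh
docker run -it --rm \
    -v /path/to/data:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=no \
    -e SYNAPSE_LOG_LEVEL=DEBUG \
    matrixdotorg/synapse:latest generate
```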
|
||||
|
||||
|
||||
## Postgres
|
||||
|
||||
@@ -191,7 +184,7 @@ If you need to build the image from a Synapse checkout, use the following `docker
build` command from the repo's root:

```
DOCKER_BUILDKIT=1 docker build -t matrixdotorg/synapse -f docker/Dockerfile .
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
```

You can choose to build a different docker image by changing the value of the `-f` flag to
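For instance, to build the worker-capable image referenced elsewhere in this diff instead (a sketch; the tag is illustrative, and the base image above must be built first):

```
docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .
```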
@@ -241,4 +234,4 @@ healthcheck:

Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read more about jemalloc in the Synapse
[Admin FAQ](https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).
[README](https://github.com/matrix-org/synapse/blob/HEAD/README.rst#help-synapse-is-slow-and-eats-all-my-ram-cpu).
@@ -1,56 +1,45 @@

# syntax=docker/dockerfile:1
# This dockerfile builds on top of 'docker/Dockerfile-workers' in matrix-org/synapse
# by including a built-in postgres instance, as well as setting up the homeserver so
# that it is ready for testing via Complement.
#
# Instructions for building this image from those it depends on are detailed in this guide:
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse

ARG SYNAPSE_VERSION=latest

FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION

# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

# Install postgresql
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -yqq postgresql-13

# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
RUN gosu postgres initdb --locale=C --encoding=UTF-8 --auth-host password

# Configure a user and create a database for Synapse
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
 \"ALTER USER postgres PASSWORD 'somesecret'; \
 CREATE DATABASE synapse \
  ENCODING 'UTF8' \
  LC_COLLATE='C' \
  LC_CTYPE='C' \
  template=template0;\" | psql" && pg_ctlcluster 13 main stop

# Configure a password and create a database for Synapse
RUN echo "ALTER USER postgres PASSWORD 'somesecret'" | gosu postgres postgres --single
RUN echo "CREATE DATABASE synapse" | gosu postgres postgres --single

# Extend the shared homeserver config to disable rate-limiting,
# set Complement's static shared secret, enable registration, amongst other
# tweaks to get Synapse ready for testing.
# To do this, we copy the old template out of the way and then include it
# with Jinja2.
RUN mv /conf/shared.yaml.j2 /conf/shared-orig.yaml.j2
COPY conf/workers-shared-extra.yaml.j2 /conf/shared.yaml.j2

WORKDIR /data

COPY conf/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf

# Copy the entrypoint
COPY conf/start_for_complement.sh /

# Expose nginx's listener ports
EXPOSE 8008 8448

ENTRYPOINT ["/start_for_complement.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD /bin/sh /healthcheck.sh
@@ -1,5 +1,5 @@
[program:postgres]
command=/usr/local/bin/prefix-log gosu postgres postgres
command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground

# Only start if START_POSTGRES=1
autostart=%(ENV_START_POSTGRES)s
@@ -45,12 +45,7 @@ esac

if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
  # Specify the workers to test with
  # Allow overriding by explicitly setting SYNAPSE_WORKER_TYPES outside, while still
  # utilizing WORKERS=1 for backwards compatibility.
  # -n True if the length of string is non-zero.
  # -z True if the length of string is zero.
  if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
    export SYNAPSE_WORKER_TYPES="\
  export SYNAPSE_WORKER_TYPES="\
      event_persister, \
      event_persister, \
      background_worker, \
@@ -62,14 +57,8 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
      federation_reader, \
      federation_sender, \
      synchrotron, \
      client_reader, \
      appservice, \
      pusher"

  fi
  log "Workers requested: $SYNAPSE_WORKER_TYPES"
  # Improve startup times by using a launcher based on fork()
  export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1
else
  # Empty string here means 'main process only'
  export SYNAPSE_WORKER_TYPES=""
@@ -12,8 +12,6 @@ trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4
url_preview_enabled: true
url_preview_ip_range_blacklist: []

## Registration ##

@@ -69,10 +67,6 @@ rc_joins:
    per_second: 9999
    burst_count: 9999

rc_joins_per_room:
  per_second: 9999
  burst_count: 9999

rc_3pid_validation:
  per_second: 1000
  burst_count: 1000
@@ -87,11 +81,11 @@ rc_invites:

federation_rr_transactions_per_room_per_second: 9999

allow_device_name_lookup_over_federation: true

## Experimental Features ##

experimental_features:
  # Enable spaces support
  spaces_enabled: true
  # Enable history backfilling support
  msc2716_enabled: true
  # server-side support for partial state in /send_join responses
@@ -100,8 +94,8 @@ experimental_features:
  # client-side support for partial state in /send_join responses
  faster_joins: true
  {% endif %}
  # Filtering /messages by relation type.
  msc3874_enabled: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
@@ -19,7 +19,7 @@ username=www-data
autorestart=true

[program:redis]
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
command=/usr/local/bin/prefix-log /usr/bin/redis-server /etc/redis/redis.conf --daemonize no
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
@@ -1,24 +1,3 @@
{% if use_forking_launcher %}
[program:synapse_fork]
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
  {{ main_config_path }}
  synapse.app.homeserver
  --config-path="{{ main_config_path }}"
  --config-path=/conf/workers/shared.yaml
  {%- for worker in workers %}
  -- {{ worker.app }}
  --config-path="{{ main_config_path }}"
  --config-path=/conf/workers/shared.yaml
  --config-path=/conf/workers/{{ worker.name }}.yaml
  {%- endfor %}
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
autorestart=unexpected
exitcodes=0

{% else %}
[program:synapse_main]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
  --config-path="{{ main_config_path }}"
@@ -34,7 +13,7 @@ autorestart=unexpected
exitcodes=0


{% for worker in workers %}
{% for worker in workers %}
[program:synapse_{{ worker.name }}]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
  --config-path="{{ main_config_path }}"
@@ -48,5 +27,4 @@ stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

{% endfor %}
{% endif %}
{% endfor %}
@@ -2,11 +2,7 @@ version: 1

formatters:
  precise:
{% if include_worker_name_in_log_line %}
    format: '{{ worker_name }} | %(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% else %}
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% endif %}

handlers:
{% if LOG_FILE_PATH %}
@@ -49,17 +45,11 @@ handlers:
    class: logging.StreamHandler
    formatter: precise

{% if not SYNAPSE_LOG_SENSITIVE %}
{#
  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
  so that DEBUG entries (containing sensitive information) are not emitted.
#}
loggers:
  synapse.storage.SQL:
    # beware: increasing this to DEBUG will make synapse log sensitive
    # information such as access tokens.
    level: INFO
{% endif %}

root:
  level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
@@ -20,48 +20,35 @@
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
# * SYNAPSE_REPORT_STATS: Whether to report stats.
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
#   below. Leave empty for no workers.
#   below. Leave empty for no workers, or set to '*' for all possible workers.
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
#   will be treated as Application Service registration files.
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
# * SYNAPSE_TLS_KEY: Path to a TLS key. If this and SYNAPSE_TLS_CERT are specified,
#   Nginx will be configured to serve TLS on port 8448.
# * SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER: Whether to use the forking launcher,
#   only intended for usage in Complement at the moment.
#   No stability guarantees are provided.
# * SYNAPSE_LOG_LEVEL: Set this to DEBUG, INFO, WARNING or ERROR to change the
#   log level. INFO is the default.
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
#   regardless of the SYNAPSE_LOG_LEVEL setting.
#
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
# in the project's README), this script may be run multiple times, and functionality should
# continue to work if so.

import os
import platform
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Set
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set

import yaml
from jinja2 import Environment, FileSystemLoader

MAIN_PROCESS_HTTP_LISTENER_PORT = 8080

# Workers with exposed endpoints need either "client", "federation", or "media" listener_resources
# Watching /_matrix/client needs a "client" listener
# Watching /_matrix/federation needs a "federation" listener
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.generic_worker",
        "app": "synapse.app.pusher",
        "listener_resources": [],
        "endpoint_patterns": [],
        "shared_extra_conf": {},
        "shared_extra_conf": {"start_pushers": False},
        "worker_extra_conf": "",
    },
    "user_dir": {
@@ -84,11 +71,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_synapse/admin/v1/media/.*$",
            "^/_synapse/admin/v1/quarantine_media/.*$",
        ],
        # The first configured media worker will run the media background jobs
        "shared_extra_conf": {
            "enable_media_repo": False,
            "media_instance_running_background_jobs": "media_repository1",
        },
        "shared_extra_conf": {"enable_media_repo": False},
        "worker_extra_conf": "enable_media_repo: true",
    },
    "appservice": {
@@ -99,10 +82,10 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "worker_extra_conf": "",
    },
    "federation_sender": {
        "app": "synapse.app.generic_worker",
        "app": "synapse.app.federation_sender",
        "listener_resources": [],
        "endpoint_patterns": [],
        "shared_extra_conf": {},
        "shared_extra_conf": {"send_federation": False},
        "worker_extra_conf": "",
    },
    "synchrotron": {
@@ -117,35 +100,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "client_reader": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client"],
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/publicRooms$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/joined_members$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/context/.*$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$",
            "^/_matrix/client/v1/rooms/.*/hierarchy$",
            "^/_matrix/client/(v1|unstable)/rooms/.*/relations/",
            "^/_matrix/client/v1/rooms/.*/threads$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
            "^/_matrix/client/versions$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
            "^/_matrix/client/(r0|v3|unstable)/register$",
            "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms",
            "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
            "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "federation_reader": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["federation"],
@@ -164,7 +118,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
            "^/_matrix/federation/(v1|v2)/invite/",
            "^/_matrix/federation/(v1|v2)/query_auth/",
            "^/_matrix/federation/(v1|v2)/event_auth/",
            "^/_matrix/federation/v1/timestamp_to_event/",
            "^/_matrix/federation/(v1|v2)/exchange_third_party_invite/",
            "^/_matrix/federation/(v1|v2)/user/devices/",
            "^/_matrix/federation/(v1|v2)/get_groups_publicised$",
@@ -211,54 +164,14 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "worker_extra_conf": "",
    },
    "frontend_proxy": {
        "app": "synapse.app.generic_worker",
        "app": "synapse.app.frontend_proxy",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "account_data": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": [
            "^/_matrix/client/(r0|v3|unstable)/.*/tags",
            "^/_matrix/client/(r0|v3|unstable)/.*/account_data",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "presence": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/presence/"],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "receipts": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": [
            "^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt",
            "^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "to_device": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": ["^/_matrix/client/(r0|v3|unstable)/sendToDevice/"],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "typing": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/typing"
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
        "worker_extra_conf": (
            "worker_main_http_uri: http://127.0.0.1:%d"
            % (MAIN_PROCESS_HTTP_LISTENER_PORT,)
        ),
    },
}
@@ -281,19 +194,24 @@ upstream {upstream_worker_type} {{

# Utility functions
def log(txt: str) -> None:
    """Log something to the stdout.

    Args:
        txt: The text to log.
    """
    print(txt)


def error(txt: str) -> NoReturn:
    print(txt, file=sys.stderr)
    """Log something and exit with an error code.

    Args:
        txt: The text to log in error.
    """
    log(txt)
    sys.exit(2)


def flush_buffers() -> None:
    sys.stdout.flush()
    sys.stderr.flush()


def convert(src: str, dst: str, **template_vars: object) -> None:
    """Generate a file from a template
@@ -322,14 +240,14 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
        outfile.write(rendered)


def add_worker_roles_to_shared_config(
def add_sharding_to_shared_config(
    shared_config: dict,
    worker_type: str,
    worker_name: str,
    worker_port: int,
) -> None:
    """Given a dictionary representing a config file shared across all workers,
    append appropriate worker information to it for the current worker_type instance.
    append sharded worker information to it for the current worker_type instance.

    Args:
        shared_config: The config dict that all worker instances share (after being converted to YAML)
@@ -360,19 +278,9 @@ def add_worker_roles_to_shared_config(
            "port": worker_port,
        }

    elif worker_type in ["account_data", "presence", "receipts", "to_device", "typing"]:
        # Update the list of stream writers
        # It's convenient that the name of the worker type is the same as the stream to write
        shared_config.setdefault("stream_writers", {}).setdefault(
            worker_type, []
        ).append(worker_name)

        # Map of stream writer instance names to host/ports combos
        # For now, all stream writers need http replication ports
        instance_map[worker_name] = {
            "host": "localhost",
            "port": worker_port,
        }
    elif worker_type == "media_repository":
        # The first configured media worker will run the media background jobs
        shared_config.setdefault("media_instance_running_background_jobs", worker_name)
def generate_base_homeserver_config() -> None:
@@ -384,7 +292,7 @@ def generate_base_homeserver_config() -> None:
    # start.py already does this for us, so just call that.
    # note that this script is copied in in the official, monolith dockerfile
    os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
    subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])
def generate_worker_files(
@@ -458,8 +366,8 @@ def generate_worker_files(
        # No workers, just the main process
        worker_types = []
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = [x.strip() for x in worker_types_env.split(",")]
        # Split type names by comma
        worker_types = worker_types_env.split(",")

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -478,11 +386,14 @@ def generate_worker_files(

    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
        worker_type = worker_type.strip()

        worker_config = WORKERS_CONFIG.get(worker_type)
        if worker_config:
            worker_config = worker_config.copy()
        else:
            error(worker_type + " is an unknown worker type! Please fix!")
            log(worker_type + " is an unknown worker type! It will be ignored")
            continue

        new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
        worker_type_counter[worker_type] = new_worker_count
@@ -501,11 +412,11 @@ def generate_worker_files(

        # Check if more than one instance of this worker type has been specified
        worker_type_total_count = worker_types.count(worker_type)

        # Update the shared config with sharding-related options if necessary
        add_worker_roles_to_shared_config(
            shared_config, worker_type, worker_name, worker_port
        )
        if worker_type_total_count > 1:
            # Update the shared config with sharding-related options if necessary
            add_sharding_to_shared_config(
                shared_config, worker_type, worker_name, worker_port
            )

        # Enable the worker in supervisord
        worker_descriptors.append(worker_config)
@@ -614,7 +525,6 @@ def generate_worker_files(
        "/etc/supervisor/conf.d/synapse.conf",
        workers=worker_descriptors,
        main_config_path=config_path,
        use_forking_launcher=environ.get("SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER"),
    )

    # healthcheck config
@@ -638,25 +548,18 @@ def generate_worker_log_config(
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
    extra_log_template_args: Dict[str, Optional[str]] = {}
    extra_log_template_args = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

    extra_log_template_args["SYNAPSE_LOG_LEVEL"] = environ.get("SYNAPSE_LOG_LEVEL")
    extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
        "SYNAPSE_LOG_SENSITIVE"
    )

        extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
            dir=data_dir, name=worker_name
        )
    # Render and write the file
    log_config_filepath = f"/conf/workers/{worker_name}.log.config"
    log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
    convert(
        "/conf/log.config",
        log_config_filepath,
        worker_name=worker_name,
        **extra_log_template_args,
        include_worker_name_in_log_line=environ.get(
            "SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER"
        ),
    )
    return log_config_filepath
@@ -686,24 +589,14 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
        with open(mark_filepath, "w") as f:
            f.write("")

    # Lifted right out of start.py
    jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)

    if os.path.isfile(jemallocpath):
        environ["LD_PRELOAD"] = jemallocpath
    else:
        log("Could not find %s, will not use" % (jemallocpath,))

    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    log("Starting supervisord")
    flush_buffers()
    os.execle(
    os.execl(
        "/usr/local/bin/supervisord",
        "supervisord",
        "-c",
        "/etc/supervisor/supervisord.conf",
        environ,
    )
@@ -13,19 +13,14 @@ import jinja2

# Utility functions
def log(txt: str) -> None:
    print(txt)
    print(txt, file=sys.stderr)


def error(txt: str) -> NoReturn:
    print(txt, file=sys.stderr)
    log(txt)
    sys.exit(2)


def flush_buffers() -> None:
    sys.stdout.flush()
    sys.stderr.flush()


def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
    """Generate a file from a template
@@ -115,11 +110,7 @@ def generate_config_from_template(

    log_config_file = environ["SYNAPSE_LOG_CONFIG"]
    log("Generating log config file " + log_config_file)
    convert(
        "/conf/log.config",
        log_config_file,
        {**environ, "include_worker_name_in_log_line": False},
    )
    convert("/conf/log.config", log_config_file, environ)

    # Hopefully we already have a signing key, but generate one if not.
    args = [
@@ -136,10 +127,10 @@ def generate_config_from_template(

    if ownership is not None:
        log(f"Setting ownership on /data to {ownership}")
        subprocess.run(["chown", "-R", ownership, "/data"], check=True)
        subprocess.check_output(["chown", "-R", ownership, "/data"])
        args = ["gosu", ownership] + args

    subprocess.run(args, check=True)
    subprocess.check_output(args)
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
@@ -163,7 +154,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    if ownership is not None:
        # make sure that synapse has perms to write to the data dir.
        log(f"Setting ownership on {data_dir} to {ownership}")
        subprocess.run(["chown", ownership, data_dir], check=True)
        subprocess.check_output(["chown", ownership, data_dir])

    # create a suitable log config from our template
    log_config_file = "%s/%s.log.config" % (config_dir, server_name)
@@ -190,7 +181,6 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
    flush_buffers()
    os.execv(sys.executable, args)

@@ -273,10 +263,8 @@ running with 'migrate_config'. See the README for more details.
    args = [sys.executable] + args
    if ownership is not None:
        args = ["gosu", ownership] + args
        flush_buffers()
        os.execve("/usr/sbin/gosu", args, environ)
    else:
        flush_buffers()
        os.execve(sys.executable, args, environ)
@@ -9,8 +9,6 @@
- [Configuring a Reverse Proxy](reverse_proxy.md)
- [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md)
- [Configuring a Turn Server](turn-howto.md)
  - [coturn TURN server](setup/turn/coturn.md)
  - [eturnal TURN server](setup/turn/eturnal.md)
- [Delegation](delegate.md)

# Upgrading
@@ -37,6 +35,7 @@
- [Application Services](application_services.md)
- [Server Notices](server_notices.md)
- [Consent Tracking](consent_tracking.md)
- [URL Previews](development/url_previews.md)
- [User Directory](user_directory.md)
- [Message Retention Policies](message_retention_policies.md)
- [Pluggable Modules](modules/index.md)
@@ -70,8 +69,6 @@
- [Federation](usage/administration/admin_api/federation.md)
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
  - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
- [Monthly Active Users](usage/administration/monthly_active_users.md)
- [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md)
- [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md)
- [Database Maintenance Tools](usage/administration/database_maintenance_tools.md)
@@ -83,7 +80,6 @@
# Development
- [Contributing Guide](development/contributing_guide.md)
- [Code Style](code_style.md)
- [Reviewing Code](development/reviews.md)
- [Release Cycle](development/releases.md)
- [Git Usage](development/git.md)
- [Testing]()
@@ -5,9 +5,9 @@ non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts.

To authenticate yourself to the server, you will need both the shared secret
([`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret)
in the homeserver configuration), and a one-time nonce. If the registration
shared secret is not configured, this API is not enabled.
(`registration_shared_secret` in the homeserver configuration), and a
one-time nonce. If the registration shared secret is not configured, this API
is not enabled.

To fetch the nonce, you need to request one from the API:
@@ -46,24 +46,7 @@ As an example:

The MAC is the hex digest output of the HMAC-SHA1 algorithm, with the key being
the shared secret and the content being the nonce, user, password, either the
string "admin" or "notadmin", and optionally the user_type,
each separated by NULs.

Here is an easy way to generate the HMAC digest if you have Bash and OpenSSL:

```bash
# Update these values and then paste this code block into a bash terminal
nonce='thisisanonce'
username='pepper_roni'
password='pizza'
admin='admin'
secret='shared_secret'

printf '%s\0%s\0%s\0%s' "$nonce" "$username" "$password" "$admin" |
    openssl sha1 -hmac "$secret" |
    awk '{print $2}'
```

For an example of generation in Python:
each separated by NULs. For an example of generation in Python:

```python
import hmac, hashlib
@@ -87,4 +70,4 @@ def generate_mac(nonce, user, password, admin=False, user_type=None):
        mac.update(user_type.encode('utf8'))

    return mac.hexdigest()
```
```
@@ -59,7 +59,6 @@ The following fields are possible in the JSON response body:
- `guest_access` - Whether guests can join the room. One of: ["can_join", "forbidden"].
- `history_visibility` - Who can see the room history. One of: ["invited", "joined", "shared", "world_readable"].
- `state_events` - Total number of state_events of a room. Complexity of the room.
- `room_type` - The type of the room taken from the room's creation event; for example "m.space" if the room is a space. If the room does not define a type, the value will be `null`.
* `offset` - The current pagination offset in rooms. This parameter should be
  used instead of `next_token` for room offset as `next_token` is
  not intended to be parsed.
@@ -102,8 +101,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 93534,
      "room_type": "m.space"
      "state_events": 93534
    },
    ... (8 hidden items) ...
    {
@@ -120,8 +118,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 8345,
      "room_type": null
      "state_events": 8345
    }
  ],
  "offset": 0,
@@ -154,8 +151,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 8,
      "room_type": null
      "state_events": 8
    }
  ],
  "offset": 0,
@@ -188,8 +184,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 93534,
      "room_type": null
      "state_events": 93534
    },
    ... (98 hidden items) ...
    {
@@ -206,8 +201,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 8345,
      "room_type": "m.space"
      "state_events": 8345
    }
  ],
  "offset": 0,
@@ -244,9 +238,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 93534,
      "room_type": "m.space"

      "state_events": 93534
    },
    ... (48 hidden items) ...
    {
@@ -263,9 +255,7 @@ A response body like the following is returned:
      "join_rules": "invite",
      "guest_access": null,
      "history_visibility": "shared",
      "state_events": 8345,
      "room_type": null

      "state_events": 8345
    }
  ],
  "offset": 100,
@@ -300,10 +290,6 @@ The following fields are possible in the JSON response body:
* `guest_access` - Whether guests can join the room. One of: ["can_join", "forbidden"].
* `history_visibility` - Who can see the room history. One of: ["invited", "joined", "shared", "world_readable"].
* `state_events` - Total number of state_events of a room. Complexity of the room.
* `room_type` - The type of the room taken from the room's creation event; for example "m.space" if the room is a space.
  If the room does not define a type, the value will be `null`.
* `forgotten` - Whether all local users have
  [forgotten](https://spec.matrix.org/latest/client-server-api/#leaving-rooms) the room.

The API is:
@@ -331,14 +317,10 @@ A response body like the following is returned:
  "join_rules": "invite",
  "guest_access": null,
  "history_visibility": "shared",
  "state_events": 93534,
  "room_type": "m.space",
  "forgotten": false
  "state_events": 93534
}
```

_Changed in Synapse 1.66:_ Added the `forgotten` key to the response body.

# Room Members API

The Room Members admin API allows server admins to get a list of all members of a room.
@@ -393,151 +375,6 @@ A response body like the following is returned:
}
```
# Room Messages API

The Room Messages admin API allows server admins to get all messages
sent to a room in a given timeframe. There are various parameters available
that allow for filtering and ordering the returned list. This API supports pagination.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).

This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).

The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/messages
```

**Parameters**

The following path parameters are required:

* `room_id` - The ID of the room you wish to fetch messages from.

The following query parameters are available:

* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
  or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
* `to` - The token to stop returning events at.
* `limit` - The maximum number of events to return. Defaults to `10`.
* `filter` - A JSON RoomEventFilter to filter returned events with.
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
  this value to `b` will reverse the above sort order. Defaults to `f`.
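For example, a request paginating backwards from a known token might look like this (a sketch; the server address, room ID and token are placeholders):

```bash
curl --header "Authorization: Bearer <access_token>" \
    "http://localhost:8008/_synapse/admin/v1/rooms/!636q39766251:example.com/messages?from=<token>&dir=b"
```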

**Response**

The following fields are possible in the JSON response body:

* `chunk` - A list of room events. The order depends on the dir parameter.
  Note that an empty chunk does not necessarily imply that no more events are available. Clients should continue to paginate until no end property is returned.
* `end` - A token corresponding to the end of chunk. This token can be passed back to this endpoint to request further events.
  If no further events are available, this property is omitted from the response.
* `start` - A token corresponding to the start of chunk.
* `state` - A list of state events relevant to showing the chunk.

**Example**

For more details on each chunk, read [the Matrix specification](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).
```json
{
  "chunk": [
    {
      "content": {
        "body": "This is an example text message",
        "format": "org.matrix.custom.html",
        "formatted_body": "<b>This is an example text message</b>",
        "msgtype": "m.text"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "type": "m.room.message",
      "unsigned": {
        "age": 1234
      }
    },
    {
      "content": {
        "name": "The room name"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "state_key": "",
      "type": "m.room.name",
      "unsigned": {
        "age": 1234
      }
    },
    {
      "content": {
        "body": "Gangnam Style",
        "info": {
          "duration": 2140786,
          "h": 320,
          "mimetype": "video/mp4",
          "size": 1563685,
          "thumbnail_info": {
            "h": 300,
            "mimetype": "image/jpeg",
            "size": 46144,
            "w": 300
          },
          "thumbnail_url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe",
          "w": 480
        },
        "msgtype": "m.video",
        "url": "mxc://example.org/a526eYUSFFxlgbQYZmo442"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "type": "m.room.message",
      "unsigned": {
        "age": 1234
      }
    }
  ],
  "end": "t47409-4357353_219380_26003_2265",
  "start": "t47429-4392820_219380_26003_2265"
}
```
# Room Timestamp to Event API

The Room Timestamp to Event API endpoint fetches the `event_id` of the closest event to the given
timestamp (`ts` query parameter) in the given direction (`dir` query parameter).

Useful for cases like jump to date so you can start paginating messages from
a given date in the archive.

The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/timestamp_to_event
```

**Parameters**

The following path parameters are required:

* `room_id` - The ID of the room you wish to check.

The following query parameters are available:

* `ts` - a timestamp in milliseconds where we will find the closest event in
  the given direction.
* `dir` - can be `f` or `b` to indicate forwards and backwards in time from the
  given timestamp. Defaults to `f`.

**Response**

* `event_id` - converted from timestamp
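As a sketch, finding the first event on or after a given date might look like this (the server address, room ID and timestamp are placeholders):

```bash
curl --header "Authorization: Bearer <access_token>" \
    "http://localhost:8008/_synapse/admin/v1/rooms/<room_id>/timestamp_to_event?ts=1656210000000&dir=f"
```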

# Block Room API
The Block Room admin API allows server admins to block and unblock rooms,
and query to see if a given room is blocked.
@@ -37,13 +37,11 @@ It returns a JSON body like the following:
    "is_guest": 0,
    "admin": 0,
    "deactivated": 0,
    "erased": false,
    "shadow_banned": 0,
    "creation_ts": 1560432506,
    "appservice_id": null,
    "consent_server_notice_sent": null,
    "consent_version": null,
    "consent_ts": null,
    "external_ids": [
        {
            "auth_provider": "<provider1>",
@@ -126,8 +124,9 @@ Body parameters:
  - `address` - string. Value of third-party ID.
    belonging to a user.
- `external_ids` - array, optional. Allow setting the identifier of the external identity
  provider for SSO (Single sign-on). Details in the configuration manual under the
  sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
  provider for SSO (Single sign-on). Details in
  [Sample Configuration File](../usage/configuration/homeserver_sample_config.html)
  section `sso` and `oidc_providers`.
  - `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
    in the homeserver configuration. Note that no error is raised if the provided
    value is not in the homeserver configuration.
@@ -168,7 +167,6 @@ A response body like the following is returned:
            "admin": 0,
            "user_type": null,
            "deactivated": 0,
            "erased": false,
            "shadow_banned": 0,
            "displayname": "<User One>",
            "avatar_url": null,
@@ -179,7 +177,6 @@ A response body like the following is returned:
            "admin": 1,
            "user_type": null,
            "deactivated": 0,
            "erased": false,
            "shadow_banned": 0,
            "displayname": "<User Two>",
            "avatar_url": "<avatar_url>",
@@ -250,7 +247,6 @@ The following fields are returned in the JSON response body:
- `user_type` - string - Type of the user. Normal users are type `None`.
  This allows user type specific behaviour. There are also types `support` and `bot`.
- `deactivated` - bool - Status if that user has been marked as deactivated.
- `erased` - bool - Status if that user has been marked as erased.
- `shadow_banned` - bool - Status if that user has been marked as shadow banned.
- `displayname` - string - The user's display name if they have set one.
- `avatar_url` - string - The user's avatar URL if they have set one.
@@ -369,7 +365,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove the user's creation (registration) timestamp
- [Remove rate limit overrides](#override-ratelimiting-for-users)
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)

## Reset password
@@ -550,7 +545,7 @@ Gets a list of all local media that a specific `user_id` has created.
These are media that the user has uploaded themselves
([local media](../media_repository.md#local-media)), as well as
[URL preview images](../media_repository.md#url-previews) requested by the user if the
[feature is enabled](../usage/configuration/config_documentation.md#url_preview_enabled).
[feature is enabled](../development/url_previews.md).

By default, the response is ordered by descending creation date and ascending media ID.
The newest media is on top. You can change the order with parameters
@@ -759,7 +754,6 @@ A response body like the following is returned:
      "device_id": "QBUAZIFURK",
      "display_name": "android",
      "last_seen_ip": "1.2.3.4",
      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
      "last_seen_ts": 1474491775024,
      "user_id": "<user_id>"
    },
@@ -767,7 +761,6 @@ A response body like the following is returned:
      "device_id": "AUIECTSRND",
      "display_name": "ios",
      "last_seen_ip": "1.2.3.5",
      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
      "last_seen_ts": 1474491775025,
      "user_id": "<user_id>"
    }
@@ -794,8 +787,6 @@ The following fields are returned in the JSON response body:
  Absent if no name has been set.
- `last_seen_ip` - The IP address where this device was last seen.
  (May be a few minutes out of date, for efficiency reasons).
- `last_seen_user_agent` - The user agent of the device when it was last seen.
  (May be a few minutes out of date, for efficiency reasons).
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
  device was last seen. (May be a few minutes out of date, for efficiency reasons).
- `user_id` - Owner of device.
@@ -847,7 +838,6 @@ A response body like the following is returned:
  "device_id": "<device_id>",
  "display_name": "android",
  "last_seen_ip": "1.2.3.4",
  "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
  "last_seen_ts": 1474491775024,
  "user_id": "<user_id>"
}
@@ -869,8 +859,6 @@ The following fields are returned in the JSON response body:
  Absent if no name has been set.
- `last_seen_ip` - The IP address where this device was last seen.
  (May be a few minutes out of date, for efficiency reasons).
- `last_seen_user_agent` - The user agent of the device when it was last seen.
  (May be a few minutes out of date, for efficiency reasons).
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
  device was last seen. (May be a few minutes out of date, for efficiency reasons).
- `user_id` - Owner of device.
@@ -1159,80 +1147,3 @@ GET /_synapse/admin/v1/username_available?username=$localpart

The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
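For instance (a sketch; the server address and localpart are placeholders):

```bash
curl --header "Authorization: Bearer <access_token>" \
    "http://localhost:8008/_synapse/admin/v1/username_available?username=new_user"
```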
### Find a user based on their ID in an auth provider

The API is:

```
GET /_synapse/admin/v1/auth_providers/$provider/users/$external_id
```

When a user matches the given ID for the given provider, an HTTP code `200` with a response body like the following is returned:

```json
{
    "user_id": "@hello:example.org"
}
```

**Parameters**

The following parameters should be set in the URL:

- `provider` - The ID of the authentication provider, as advertised by the [`GET /_matrix/client/v3/login`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login) API in the `m.login.sso` authentication method.
- `external_id` - The user ID from the authentication provider. Usually corresponds to the `sub` claim for OIDC providers, or to the `uid` attestation for SAML2 providers.

The `external_id` may have characters that are not URL-safe (typically `/`, `:` or `@`), so it is advised to URL-encode those parameters.
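As an illustration, looking up an OIDC subject containing a `:` might look like this (a sketch; the provider ID and subject are placeholders, with `:` URL-encoded as `%3A`):

```bash
curl --header "Authorization: Bearer <access_token>" \
    "http://localhost:8008/_synapse/admin/v1/auth_providers/oidc/users/subject%3A12345"
```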
**Errors**

Returns a `404` HTTP status code if no user was found, with a response body like this:

```json
{
    "errcode":"M_NOT_FOUND",
    "error":"User not found"
}
```

_Added in Synapse 1.68.0._


### Find a user based on their Third Party ID (ThreePID or 3PID)

The API is:

```
GET /_synapse/admin/v1/threepid/$medium/users/$address
```

When a user matches the given address for the given medium, an HTTP code `200` with a response body like the following is returned:

```json
{
    "user_id": "@hello:example.org"
}
```

**Parameters**

The following parameters should be set in the URL:

- `medium` - Kind of third-party ID, either `email` or `msisdn`.
- `address` - Value of the third-party ID.

The `address` may have characters that are not URL-safe, so it is advised to URL-encode those parameters.

**Errors**

Returns a `404` HTTP status code if no user was found, with a response body like this:

```json
{
    "errcode":"M_NOT_FOUND",
    "error":"User not found"
}
```

_Added in Synapse 1.72.0._
@@ -34,45 +34,13 @@ the process of indexing it).

## Chain Cover Index

Synapse computes auth chain differences by pre-computing a "chain cover" index
for the auth chain in a room, allowing us to efficiently make reachability queries
like "is event `A` in the auth chain of event `B`?". We could do this with an index
that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
would be prohibitively large, scaling poorly as the room accumulates more state
events.
for the auth chain in a room, allowing efficient reachability queries like "is
event A in the auth chain of event B". This is done by assigning every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
is in the auth chain of `B`) if and only if either:

Instead, we break down the graph into *chains*. A chain is a subset of a DAG
with the following property: for any pair of events `E` and `F` in the chain,
the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
is given a *sequence number* local to that chain. The oldest event `E` in the
chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
sequence number 3; and so on.

Synapse ensures that each persisted event belongs to exactly one chain, and
tracks how the chains are connected to one another. This allows us to
efficiently answer reachability queries. Doing so uses less storage than
tracking reachability on an event-by-event basis, particularly when we have
fewer and longer chains. See

> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.

for the original idea or

> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
> pp. 893–902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)

for a more modern take.

In practical terms, the chain cover assigns every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
(i.e. `A` is in the auth chain of `B`) if and only if either:

1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
   sequence number; or
2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
   `L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
|
||||
@@ -81,9 +49,8 @@ There are actually two potential implementations, one where we store links from
|
||||
each chain to every other reachable chain (the transitive closure of the links
|
||||
graph), and one where we remove redundant links (the transitive reduction of the
|
||||
links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
|
||||
would not be stored. Synapse uses the former implementation so that it doesn't
|
||||
need to recurse to test reachability between chains. This trades-off extra storage
|
||||
in order to save CPU cycles and DB queries.
|
||||
would not be stored. Synapse uses the former implementations so that it doesn't
|
||||
need to recurse to test reachability between chains.
|
||||
|
||||
### Example
|
||||
|
||||
|
||||
@@ -70,61 +70,82 @@ on save as they take a while and can be very resource intensive.
|
||||
- Avoid wildcard imports (`from synapse.types import *`) and
|
||||
relative imports (`from .types import UserID`).
|
||||
|
||||
## Configuration code and documentation format
|
||||
## Configuration file format
|
||||
|
||||
When adding a configuration option to the code, if several settings are grouped into a single dict, ensure that your code
|
||||
correctly handles the top-level option being set to `None` (as it will be if no sub-options are enabled).
|
||||
|
||||
The [configuration manual](usage/configuration/config_documentation.md) acts as a
|
||||
The [sample configuration file](./sample_config.yaml) acts as a
|
||||
reference to Synapse's configuration options for server administrators.
|
||||
Remember that many readers will be unfamiliar with YAML and server
|
||||
administration in general, so it is important that when you add
|
||||
a configuration option the documentation be as easy to understand as possible, which
|
||||
includes following a consistent format.
|
||||
administration in general, so that it is important that the file be as
|
||||
easy to understand as possible, which includes following a consistent
|
||||
format.
|
||||
|
||||
Some guidelines follow:

- Each option should be listed in the config manual with the following format:

  - The name of the option, prefixed by `###`.

  - A comment which describes the default behaviour (i.e. what
    happens if the setting is omitted), as well as what the effect
    will be if the setting is changed.

  - An example setting, using backticks to define the code block.

    For boolean (on/off) options, convention is that this example
    should be the *opposite* to the default. For other options, the example should give
    some non-default value which is likely to be useful to the reader.

- There should be a horizontal rule between each option, which can be achieved by adding `---` before and
  after the option.
- `true` and `false` are spelt thus (as opposed to `True`, etc.)
- Use single quotes (`'`) rather than double-quotes (`"`) or backticks
  (`` ` ``) to refer to configuration options.

Example:

---
### `modules`

Use the `module` sub-option to add a module under `modules` to extend functionality.
The `module` setting then has a sub-option, `config`, which can be used to define some configuration
for the `module`.

Defaults to none.

Example configuration:
```yaml
modules:
  - module: my_super_module.MySuperClass
    config:
      do_thing: true
  - module: my_other_super_module.SomeClass
    config: {}
```
---

Note that the sample configuration is generated from the synapse code
and is maintained by a script, `scripts-dev/generate_sample_config.sh`.
Making sure that the output from this script matches the desired format
is left as an exercise for the reader!

@@ -1,9 +1,9 @@
Deprecation Policy for Platform Dependencies
============================================

Synapse has a number of platform dependencies, including Python, Rust,
PostgreSQL and SQLite. This document outlines the policy towards which versions
we support, and when we drop support for versions in the future.

Policy
------

@@ -17,14 +17,6 @@ Details on the upstream support life cycles for Python and PostgreSQL are
documented at [https://endoflife.date/python](https://endoflife.date/python) and
[https://endoflife.date/postgresql](https://endoflife.date/postgresql).

A Rust compiler is required to build Synapse from source. For any given release
the minimum required version may be bumped up to a recent Rust version, and so
people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/buster/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).

Context
-------

@@ -39,15 +31,3 @@ long process.
By following the upstream support life cycles Synapse can ensure that its
dependencies continue to get security patches, while not requiring system admins
to constantly update their platform dependencies to the latest versions.

For Rust, the situation is a bit different given that a) the Rust foundation
does not generally support older Rust versions, and b) the library ecosystem
generally bumps its minimum supported Rust version frequently. In general, the
Synapse team will try to avoid updating the dependency on Rust to the absolute
latest version, but introducing a formal policy is hard given the constraints of
the ecosystem.

On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.

@@ -28,9 +28,6 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver

For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).

A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).

# 3. Get the source.

@@ -65,8 +62,6 @@ pipx install poetry
but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
for other installation methods.

Synapse requires Poetry version 1.2.0 or later.

Next, open a terminal and install dependencies as follows:

```sh
poetry install --extras all
```

@@ -117,11 +112,6 @@ Some documentation also exists in [Synapse's GitHub
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.

When changes are made to any Rust code, you must call either `poetry install`
or `maturin develop` (if installed) to rebuild the Rust code. Using [`maturin`](https://github.com/PyO3/maturin)
is quicker than `poetry install`, so is recommended when making frequent
changes to the Rust code.

# 8. Test, test, test!
<a name="test-test-test"></a>

@@ -167,12 +157,6 @@ was broken. They are slower than the linters but will typically catch more error

```sh
poetry run trial tests
```

You can run unit tests in parallel by specifying the `-jX` argument to `trial`, where `X` is the number of parallel runners you want. To use 4 CPU cores, you would run them like:

```sh
poetry run trial -j4 tests
```

If you wish to only run *some* unit tests, you may specify
another module instead of `tests` - or a test class or a method:

@@ -209,7 +193,7 @@ The database file can then be inspected with:

```sh
sqlite3 _trial_temp/test.db
```

Note that the database file is cleared at the beginning of each test run. Thus it
will always only contain the data generated by the *last run test*. Though generally
when debugging, one is only running a single test anyway.

@@ -324,22 +308,12 @@ The above will run a monolithic (single-process) Synapse with SQLite as the data

- Passing `POSTGRES=1` as an environment variable to use the Postgres database instead.
- Passing `WORKERS=1` as an environment variable to use a workerised setup instead. This option implies the use of Postgres.
  - If setting `WORKERS=1`, optionally set `WORKER_TYPES=` to declare which worker
    types you wish to test. A simple comma-delimited string containing the worker types
    defined in the `WORKERS_CONFIG` template
    [here](https://github.com/matrix-org/synapse/blob/develop/docker/configure_workers_and_start.py#L54).
    A safe example would be `WORKER_TYPES="federation_inbound, federation_sender, synchrotron"`.
    See the [worker documentation](../workers.md) for additional information on workers.

To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`, e.g.:
```sh
SYNAPSE_TEST_LOG_LEVEL=DEBUG COMPLEMENT_DIR=../complement ./scripts-dev/complement.sh -run TestImportHistoricalMessages
```

### Prettier formatting with `gotestfmt`

If you want to format the output of the tests the same way as it looks in CI,
install [gotestfmt](https://github.com/GoTestTools/gotestfmt).

You can then use this incantation to format the tests appropriately:

@@ -373,7 +347,7 @@ To prepare a Pull Request, please:
3. `git push` your commit to your fork of Synapse;
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
5. add a [changelog entry](#changelog) and push it to your Pull Request;
6. that's it for now, a non-draft pull request will automatically request review from the team;
7. if you need to update your PR, please avoid rebasing and just add new commits to your branch.

@@ -396,7 +370,7 @@ This file will become part of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
release, so the content of the file should be a short description of your
change in the same style as the rest of the changelog. The file can contain Markdown
formatting, and must end with a full stop (.) or an exclamation mark (!) for
consistency.
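
For instance, a bug-fix entry might live in a file such as `changelog.d/1234.bugfix` (the PR number and wording here are hypothetical) and contain a single line like:

```
Fix a bug where remote media thumbnails were occasionally not fetched.
```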

Adding credits to the changelog is encouraged; we value your

@@ -549,13 +523,10 @@ From this point, you should:
1. Look at the results of the CI pipeline.
   - If there is any error, fix the error.
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
   - A pull request is a conversation; if you disagree with the suggestions, please respond and discuss it.
3. Create a new commit with the changes.
   - Please do NOT overwrite the history. New commits make the reviewer's life easier.
   - Push these commits to your Pull Request.
4. Back to 1.
5. Once the pull request is ready for review again please re-request review from whichever developer did your initial
   review (or leave a comment in the pull request that you believe all required changes have been done).

Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!

@@ -191,28 +191,3 @@ There are three separate aspects to this:
flavour will be accepted by SQLite 3.22, but will give a column whose
default value is the **string** `"FALSE"` - which, when cast back to a boolean
in Python, evaluates to `True`.
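
A small demonstration of the Python side of that gotcha (the table and column names are hypothetical; the quoted `'FALSE'` default simulates what the old SQLite behaviour ends up storing):

```python
# A default stored as the *string* "FALSE" is truthy when read back in Python.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (name TEXT, is_admin TEXT DEFAULT 'FALSE')")
conn.execute("INSERT INTO users (name) VALUES ('alice')")

(value,) = conn.execute("SELECT is_admin FROM users").fetchone()
print(repr(value))  # 'FALSE' -- a string, not a boolean
print(bool(value))  # True   -- any non-empty string is truthy in Python
```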

## `event_id` global uniqueness

`event_id`s can be considered globally unique, although there has been a lot of
debate on this topic in places like
[MSC2779](https://github.com/matrix-org/matrix-spec-proposals/issues/2779) and
[MSC2848](https://github.com/matrix-org/matrix-spec-proposals/pull/2848), which
have no resolution yet (as of 2022-09-01). There are several places in Synapse
and even in the Matrix APIs like [`GET
/_matrix/federation/v1/event/{eventId}`](https://spec.matrix.org/v1.1/server-server-api/#get_matrixfederationv1eventeventid)
where we assume that event IDs are globally unique.

When scoping `event_id` in a database schema, it is often nice to accompany it
with `room_id` (`PRIMARY KEY (room_id, event_id)` and a `FOREIGN KEY(room_id)
REFERENCES rooms(room_id)`) which makes flexible lookups easy. For example, it
makes it very easy to find and clean up everything in a room when it needs to be
purged (no need to use a sub-`select` query or join from the `events` table).
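
A sketch of that schema shape (illustrative table and column names, not Synapse's actual schema):

```python
# Scoping event_id by room_id, per the paragraph above.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE rooms (room_id TEXT PRIMARY KEY);

    CREATE TABLE room_events (
        room_id  TEXT NOT NULL,
        event_id TEXT NOT NULL,
        json     TEXT,
        PRIMARY KEY (room_id, event_id),
        FOREIGN KEY (room_id) REFERENCES rooms(room_id)
    );
    """
)

# Purging a room is a single indexed delete on the leading primary-key
# column -- no sub-select or join against an `events` table is needed.
conn.execute("DELETE FROM room_events WHERE room_id = ?", ("!room:example.org",))
```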

A note on collisions: in room versions `1` and `2` it's possible to end up with
two events with the same `event_id` (in the same or different rooms). After room
version `3`, that can only happen with a hash collision, which we basically hope
will never happen (SHA256 has a massive key space).

@@ -126,23 +126,6 @@ context of poetry's venv, without having to run `poetry shell` beforehand.

```shell
poetry install --extras all --remove-untracked
```

## ...delete everything and start over from scratch?

```shell
# Stop the current virtualenv if active
$ deactivate

# Remove all of the files from the current environment.
# Don't worry, even though it says "all", this will only
# remove the Poetry virtualenvs for the current project.
$ poetry env remove --all

# Reactivate Poetry shell to create the virtualenv again
$ poetry shell
# Install everything again
$ poetry install --extras all
```

## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.

@@ -254,35 +237,3 @@ poetry run pip install build && poetry run python -m build
because [`build`](https://github.com/pypa/build) is a standardish tool which
doesn't require poetry. (It's what we use in CI too). However, you could try
`poetry build` too.

# Troubleshooting

## Check the version of poetry with `poetry --version`.

The minimum version of poetry supported by Synapse is 1.2.

It can also be useful to check the version of `poetry-core` in use. If you've
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep
poetry-core`.

## Clear caches: `poetry cache clear --all pypi`.

Poetry caches a bunch of information about packages that isn't readily available
from PyPI. (This is what makes poetry seem slow when doing the first
`poetry install`.) Try `poetry cache list` and `poetry cache clear --all
<name of cache>` to see if that fixes things.

## Remove outdated egg-info

Delete the `matrix_synapse.egg-info/` directory from the root of your Synapse
install.

This stores some cached information about dependencies and often conflicts with
letting Poetry do the right thing.

## Try `--verbose` or `--dry-run` arguments.

Sometimes useful to see what poetry's internal logic is.

@@ -1,41 +0,0 @@
Some notes on how we do reviews
===============================

The Synapse team works off a shared review queue -- any new pull request for
Synapse (or related projects) has a review requested from the entire team. Team
members should process this queue using the following rules:

* Any high urgency pull requests (e.g. fixes for broken continuous integration
  or fixes for release blockers);
* Follow-up reviews for pull requests which have previously received reviews;
* Any remaining pull requests.

For the latter two categories above, older pull requests should be prioritised.

It is explicit that there is no priority given to pull requests from the team
(vs from the community). If a pull request requires a quick turnaround, please
explicitly communicate this via [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)
or as a comment on the pull request.

Once an initial review has been completed and the author has made additional changes,
follow-up reviews should go back to the same reviewer. This helps build a shared
context and conversation between author and reviewer.

As a team we aim to keep the number of in-flight pull requests to a minimum to ensure
that ongoing work is finished before starting new work.

Performing a review
-------------------

To communicate to the rest of the team the status of each pull request, team
members should do the following:

* Assign themselves to the pull request (they should be left assigned to the
  pull request until it is merged or closed, or they are no longer the reviewer);
* Review the pull request by leaving comments, questions, and suggestions;
* Mark the pull request appropriately (as needing changes or accepted).

If you are unsure about a particular part of the pull request (or are not confident
in your understanding of part of the code) then ask questions or request review
from the team again. When requesting review from the team be sure to leave a comment
with the rationale on why you're putting it back in the queue.