
Compare commits


4 Commits

Author SHA1 Message Date
David Robertson
4d343db081 Get rid of my home dir, whoops 2021-11-16 16:37:43 +00:00
David Robertson
a1367dcf8c Require networkx 2021-11-16 16:34:33 +00:00
David Robertson
9e361c8550 Changelog 2021-11-16 13:52:59 +00:00
David Robertson
51fec1a534 Commit hacky script to visualise store inheritance
Use e.g. with `scripts-dev/storage_inheritance.py DataStore --show`.
2021-11-16 13:51:50 +00:00
922 changed files with 52818 additions and 78762 deletions


@@ -1,93 +0,0 @@
{{- /*gotype: github.com/haveyoudebuggedit/gotestfmt/parser.Package*/ -}}
{{- /*
This template contains the format for an individual package. GitHub Actions does not currently support nested groups, so
we are creating a stylized header for each package.
This template is based on https://github.com/haveyoudebuggedit/gotestfmt/blob/f179b0e462a9dcf7101515d87eec4e4d7e58b92a/.gotestfmt/github/package.gotpl
which is under the Unlicense licence.
*/ -}}
{{- $settings := .Settings -}}
{{- if and (or (not $settings.HideSuccessfulPackages) (ne .Result "PASS")) (or (not $settings.HideEmptyPackages) (ne .Result "SKIP") (ne (len .TestCases) 0)) -}}
{{- if eq .Result "PASS" -}}
{{ "\033" }}[0;32m
{{- else if eq .Result "SKIP" -}}
{{ "\033" }}[0;33m
{{- else -}}
{{ "\033" }}[0;31m
{{- end -}}
📦 {{ .Name }}{{- "\033" }}[0m
{{- with .Coverage -}}
{{- "\033" -}}[0;37m ({{ . }}% coverage){{- "\033" -}}[0m
{{- end -}}
{{- "\n" -}}
{{- with .Reason -}}
{{- " " -}}🛑 {{ . -}}{{- "\n" -}}
{{- end -}}
{{- with .Output -}}
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Failing tests are first */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}
{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}
::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
::group::{{ "\033" }}[0;33m🚧{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}
{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}
::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- /* Then passing tests are last */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
{{- end -}})
{{- "\033" -}}[0m
{{- "\n" -}}
{{- with .Output -}}
{{- formatTestOutput . $settings -}}
{{- "\n" -}}
{{- end -}}
::endgroup::{{- "\n" -}}
{{- end -}}
{{- end -}}
{{- end -}}
{{- "\n" -}}
{{- end -}}
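This template is the formatter `gotestfmt` applies to Complement's `go test -json` output in CI. A minimal sketch of how it gets wired up (paths as in `setup_complement_prerequisites.sh` further down this diff; the exact `go test` invocation is illustrative):

```sh
# Copy the custom template to where gotestfmt looks for it, then pipe JSON test
# output through gotestfmt so each package gets the stylised header defined above.
mkdir -p .gotestfmt/github
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
go test -json ./tests/... 2>&1 | gotestfmt
```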


@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}

.ci/patch_for_twisted_trunk.sh Executable file

@@ -0,0 +1,8 @@
#!/bin/sh
# replaces the dependency on Twisted in `python_dependencies` with trunk.
set -e
cd "$(dirname "$0")"/..
sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
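As a dry run of the substitution above on a sample line (the real pin in `python_dependencies.py` may be formatted differently; this line is an assumption for illustration):

```sh
echo '    "Twisted[tls]>=18.9.0",' \
    | sed -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#'
# prints:    "Twisted @ git+https://github.com/twisted/twisted",
```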


@@ -1,25 +0,0 @@
#!/bin/bash
#
# Fetches a version of complement which best matches the current build.
#
# The tarball is unpacked into `./complement`.
set -e
mkdir -p complement
# Pick an appropriate version of complement. Depending on whether this is a PR or release,
# etc. we need to use different fallbacks:
#
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
# for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
# (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
# Skip empty branch names and merge commits.
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
continue
fi
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
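A dry-run sketch of the fallback order above, using hypothetical pull-request values (nothing is downloaded):

```sh
# Hypothetical values for a PR from branch `my-feature` targeting `release-v1.72`.
GITHUB_HEAD_REF="my-feature" GITHUB_BASE_REF="release-v1.72" GITHUB_REF="refs/pull/1234/merge"
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
    # Empty names and merge refs (refs/pull/...) are skipped, as in the script above.
    if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
        continue
    fi
    echo "would try https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz"
done
```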


@@ -1,36 +0,0 @@
#!/bin/sh
#
# Common commands to set up Complement's prerequisites in a GitHub Actions CI run.
#
# Must be called after Synapse has been checked out to `synapse/`.
#
set -eu
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
block Set Go Version
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
endblock
block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
endblock
block Install custom gotestfmt template
mkdir .gotestfmt/github -p
cp synapse/.ci/complement_package.gotpl .gotestfmt/github/package.gotpl
endblock
block Check out Complement
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fall back to HEAD.
synapse/.ci/scripts/checkout_complement.sh
endblock
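For reference, the `block`/`endblock` aliases above simply wrap a run of commands in a collapsible GitHub Actions log group; stripped of the `set -x` toggling, one block expands to roughly:

```sh
echo "::group::Install Complement Dependencies"
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
echo "::endgroup::"
```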


@@ -2,24 +2,29 @@
# Test for the export-data admin command against sqlite and postgres
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
set -xe
cd "$(dirname "$0")/../.."
echo "--- Install dependencies"
# Install dependencies for this test.
pip install psycopg2
# Install Synapse itself. This won't update any libraries.
pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data
# Test that the output directory exists and contains the rooms directory
@@ -32,14 +37,14 @@ else
fi
# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
# Port the SQLite database to postgres so we can check the command works against postgres
echo "+++ Port SQLite3 database to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# Run the export-data command on postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data2
# Test that the output directory exists and contains the rooms directory


@@ -1,83 +1,16 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.
# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
# this script is run by GitHub Actions in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.
set -ex
apt-get update
apt-get install -y \
python3 python3-dev python3-pip python3-venv pipx \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
export LANG="C.UTF-8"
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.
# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.
# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).
sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml
# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.
pip install --user toml
REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
data = toml.loads(f.read())
del data['tool']['poetry']['dev-dependencies']
with open('pyproject.toml', 'w') as f:
toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"
pipx install poetry==1.1.12
~/.local/bin/poetry lock
echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
exec tox -e py3-old,combine
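A quick way to see what the bound-rewriting `sed` above does is to run it over sample specifiers (illustrative lines, not the real `pyproject.toml` contents):

```sh
# Lower/tilde bounds become exact pins; caret bounds too, except on the python line.
echo 'attrs = ">=19.2.0"'    | sed -e "s/[~>]=/==/g"              # -> attrs = "==19.2.0"
echo 'signedjson = "^1.1.0"' | sed -e '/^python = "^/!s/\^/==/g'  # -> signedjson = "==1.1.0"
echo 'python = "^3.7"'       | sed -e '/^python = "^/!s/\^/==/g'  # unchanged: python = "^3.7"
```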


@@ -1,37 +1,41 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - sets up synapse and deps
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
set -xe
cd "$(dirname "$0")/../.."
echo "--- Install dependencies"
# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess
# Install Synapse itself. This won't update any libraries.
pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
#####
@@ -42,12 +46,12 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py \
.ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml


@@ -3,9 +3,11 @@
# things to include
!docker
!scripts
!synapse
!MANIFEST.in
!README.rst
!pyproject.toml
!poetry.lock
!setup.py
!synctl
**/__pycache__

.flake8

@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501


@@ -6,6 +6,3 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56
# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1
# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7

.github/ISSUE_TEMPLATE/BUG_REPORT.md vendored Normal file

@@ -0,0 +1,72 @@
---
name: Bug report
about: Create a report to help us improve
---
<!--
**THIS IS NOT A SUPPORT CHANNEL!**
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
This is a bug report template. By following the instructions below and
filling out the sections with your information, you will help us to get all
the necessary data to fix your issue.
You can also preview your report before submitting it. You may remove sections
that aren't relevant to your particular case.
Text between <!-- and --> marks will be invisible in the report.
-->
### Description
<!-- Describe here the problem that you are experiencing -->
### Steps to reproduce
- list the steps
- that reproduce the bug
- using hyphens as bullet points
<!--
Describe how what happens differs from what you expected.
If you can identify any relevant log snippets from _homeserver.log_, please include
those (please be careful to remove any personal or private data). Please surround them with
``` (three backticks, on a line on their own), so that they are formatted legibly.
-->
### Version information
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
<!-- Was this issue identified on matrix.org or another homeserver? -->
- **Homeserver**:
If not matrix.org:
<!--
What version of Synapse is running?
You can find the Synapse version with this command:
$ curl http://localhost:8008/_synapse/admin/v1/server_version
(You may need to replace `localhost:8008` if Synapse is not configured to
listen on that port.)
-->
- **Version**:
- **Install method**:
<!-- examples: package manager/git clone/pip -->
- **Platform**:
<!--
Tell us about the environment in which your homeserver is operating
distro, hardware, if it's running in a vm/container, etc.
-->


@@ -1,103 +0,0 @@
name: Bug report
description: Create a report to help us improve
body:
- type: markdown
attributes:
value: |
**THIS IS NOT A SUPPORT CHANNEL!**
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**, please ask in **[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org)** (using a matrix.org account if necessary).
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
This is a bug report form. By following the instructions below and completing the sections with your information, you will help us to get all the necessary data to fix your issue.
You can also preview your report before submitting it.
- type: textarea
id: description
attributes:
label: Description
description: Describe the problem that you are experiencing
validations:
required: true
- type: textarea
id: reproduction_steps
attributes:
label: Steps to reproduce
description: |
Describe the series of steps that leads you to the problem.
Describe how what happens differs from what you expected.
placeholder: Tell us what you see!
value: |
- list the steps
- that reproduce the bug
- using hyphens as bullet points
validations:
required: true
- type: markdown
attributes:
value: |
---
**IMPORTANT**: please answer the following questions, to help us narrow down the problem.
- type: input
id: homeserver
attributes:
label: Homeserver
description: Which homeserver was this issue identified on? (matrix.org, another homeserver, etc)
validations:
required: true
- type: input
id: version
attributes:
label: Synapse Version
description: |
What version of Synapse is this homeserver running?
You can find the Synapse version by visiting https://yourserver.example.com/_matrix/federation/v1/version
or with this command:
```
$ curl http://localhost:8008/_synapse/admin/v1/server_version
```
(You may need to replace `localhost:8008` if Synapse is not configured to listen on that port.)
validations:
required: true
- type: dropdown
id: install_method
attributes:
label: Installation Method
options:
- Docker (matrixdotorg/synapse)
- Debian packages from packages.matrix.org
- pip (from PyPI)
- Other (please mention below)
- type: textarea
id: platform
attributes:
label: Platform
description: |
Tell us about the environment in which your homeserver is operating...
e.g. distro, hardware, if it's running in a vm/container, etc.
validations:
required: true
- type: textarea
id: logs
attributes:
label: Relevant log output
description: |
Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
This will be automatically formatted into code, so there is no need for backticks.
Please be careful to remove any personal or private data.
**Bug reports are usually very difficult to diagnose without logging.**
render: shell
validations:
required: true
- type: textarea
id: anything_else
attributes:
label: Anything else that would be useful to know?


@@ -8,7 +8,6 @@
- Use markdown where necessary, mostly for `code blocks`.
- End with either a period (.) or an exclamation mark (!).
- Start with a capital letter.
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
(run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))


@@ -5,7 +5,7 @@ name: Build docker images
on:
push:
tags: ["v*"]
branches: [ master, main, develop ]
branches: [ master, main ]
workflow_dispatch:
permissions:
@@ -36,22 +36,37 @@ jobs:
- name: Calculate docker image tag
id: set-tag
uses: docker/metadata-action@master
run: |
case "${GITHUB_REF}" in
refs/heads/master|refs/heads/main)
tag=latest
;;
refs/tags/*)
tag=${GITHUB_REF#refs/tags/}
;;
*)
tag=${GITHUB_SHA}
;;
esac
echo "::set-output name=tag::$tag"
# for release builds, we want to get the amd64 image out asap, so first
# we do an amd64-only build, before following up with a multiarch build.
- name: Build and push amd64
uses: docker/build-push-action@v2
if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
with:
images: matrixdotorg/synapse
flavor: |
latest=false
tags: |
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=pep440,pattern={{raw}}
push: true
labels: "gitsha1=${{ github.sha }}"
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
file: "docker/Dockerfile"
platforms: linux/amd64
- name: Build and push all platforms
uses: docker/build-push-action@v2
with:
push: true
labels: "gitsha1=${{ github.sha }}"
tags: "${{ steps.set-tag.outputs.tags }}"
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
file: "docker/Dockerfile"
platforms: linux/amd64,linux/arm64
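The removed `set-tag` logic above derives a single Docker tag from `GITHUB_REF`; a small local sketch (sample refs and SHA, not real CI values) shows how each kind of ref maps to a tag:

```sh
GITHUB_SHA="deadbeef"
for GITHUB_REF in refs/heads/master refs/tags/v1.47.0 refs/heads/some-branch; do
    case "$GITHUB_REF" in
        refs/heads/master|refs/heads/main) tag=latest ;;
        refs/tags/*)                       tag=${GITHUB_REF#refs/tags/} ;;
        *)                                 tag=${GITHUB_SHA} ;;
    esac
    echo "$GITHUB_REF -> matrixdotorg/synapse:$tag"
done
```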


@@ -22,7 +22,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
with:
mdbook-version: '0.4.17'
mdbook-version: '0.4.9'
- name: Build the documentation
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.


@@ -1,159 +0,0 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.
name: Latest dependencies
on:
schedule:
- cron: 0 7 * * *
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "1.2.0b1"
extras: "all"
# Dump installed versions for debugging.
- run: poetry run pip list > before.txt
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
trial:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- database: "sqlite"
- database: "postgres"
postgres-version: "14"
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install .[all,test]
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: python -m twisted.trial --jobs=2 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
sytest:
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:testing
volumes:
- ${{ github.workspace }}:/src
strategy:
fail-fast: false
matrix:
include:
- sytest-tag: focal
- sytest-tag: focal
postgres: postgres
workers: workers
redis: redis
env:
POSTGRES: ${{ matrix.postgres && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@v2
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
run: rm /src/poetry.lock
working-directory: /src
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*
# TODO: run complement (as with twisted trunk, see #12473).
# open an issue if the build fails, so we know about it.
open-issue:
if: failure()
needs:
# TODO: should mypy be included here? It feels more brittle than the other two.
- mypy
- trial
- sytest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
update_existing: true
filename: .ci/latest_deps_build_failed_issue_template.md


@@ -7,7 +7,7 @@ on:
# of things breaking (but only build one set of debs)
pull_request:
push:
branches: ["develop", "release-*"]
branches: ["develop"]
# we do the full build on tags.
tags: ["v*"]
@@ -31,7 +31,7 @@ jobs:
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
dists='["debian:sid"]'
if [[ $GITHUB_REF == refs/tags/* ]]; then
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
dists=$(scripts-dev/build_debian_packages --show-dists-json)
fi
echo "::set-output name=distros::$dists"
# map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
# see https://github.com/docker/build-push-action/issues/252
# for the cache magic here
run: |
./src/scripts-dev/build_debian_packages.py \
./src/scripts-dev/build_debian_packages \
--docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
--docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
--docker-build-arg=--progress=plain \
@@ -91,7 +91,17 @@ jobs:
build-sdist:
name: "Build pypi distribution files"
uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install wheel
- run: |
python setup.py sdist bdist_wheel
- uses: actions/upload-artifact@v2
with:
name: python-dist
path: dist/*
# if it's a tag, create a release and attach the artifacts to it
attach-assets:
@@ -112,8 +122,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
files: |
Sdist/*
Wheel/*
python-dist/*
debs.tar.xz
# if it's not already published, keep the release as a draft.
draft: true


@@ -10,17 +10,328 @@ concurrency:
cancel-in-progress: true
jobs:
complement:
if: "${{ !failure() && !cancelled() }}"
lint:
runs-on: ubuntu-latest
strategy:
matrix:
toxenv:
- "check-sampleconfig"
- "check_codestyle"
- "check_isort"
- "mypy"
- "packaging"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install tox
- run: tox -e ${{ matrix.toxenv }}
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
lint-newsfile:
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- uses: actions/setup-python@v2
- run: pip install tox
- run: scripts-dev/check-newsfragment
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install wheel
- run: python setup.py sdist bdist_wheel
- uses: actions/upload-artifact@v2
with:
name: Python Distributions
path: dist/*
# Dummy step to gate other tests on without repeating the whole list
linting-done:
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
runs-on: ubuntu-latest
steps:
- run: "true"
trial:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
database: ["sqlite"]
toxenv: ["py"]
include:
# Newest Python without optional deps
- python-version: "3.10"
toxenv: "py-noextras"
# Oldest Python with PostgreSQL
- python-version: "3.6"
database: "postgres"
postgres-version: "9.6"
toxenv: "py"
# Newest Python with newest PostgreSQL
- python-version: "3.10"
database: "postgres"
postgres-version: "14"
toxenv: "py"
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: tox -e ${{ matrix.toxenv }}
env:
TRIAL_FLAGS: "--jobs=2"
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
trial-olddeps:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Test with old deps
uses: docker://ubuntu:bionic # For old python and sqlite
with:
workdir: /github/workspace
entrypoint: .ci/scripts/test_old_deps.sh
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.6"]
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
sytest:
if: ${{ !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
volumes:
- ${{ github.workspace }}:/src
env:
SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
TOP: ${{ github.workspace }}
strategy:
fail-fast: false
matrix:
include:
- arrangement: workers
database: Postgres
- sytest-tag: bionic
- sytest-tag: bionic
postgres: postgres
- sytest-tag: testing
postgres: postgres
- sytest-tag: bionic
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: postgres
workers: workers
redis: redis
steps:
- uses: actions/checkout@v2
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*
export-data:
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb]
runs-on: ubuntu-latest
env:
TOP: ${{ github.workspace }}
services:
postgres:
image: postgres
ports:
- 5432:5432
env:
POSTGRES_PASSWORD: "postgres"
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: actions/setup-python@v2
with:
python-version: "3.9"
- run: .ci/scripts/test_export_data_command.sh
portdb:
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
env:
TOP: ${{ github.workspace }}
strategy:
matrix:
include:
- python-version: "3.6"
postgres-version: "9.6"
- python-version: "3.10"
postgres-version: "14"
services:
postgres:
image: postgres:${{ matrix.postgres-version }}
ports:
- 5432:5432
env:
POSTGRES_PASSWORD: "postgres"
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: .ci/scripts/test_synapse_port_db.sh
complement:
if: ${{ !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
container:
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
image: matrixdotorg/complement:latest
env:
CI: true
ports:
- 8448:8448
volumes:
- /var/run/docker.sock:/var/run/docker.sock
steps:
- name: Run actions/checkout@v2 for synapse
@@ -28,33 +339,79 @@ jobs:
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- run: |
set -o pipefail
synapse/scripts-dev/complement.sh --build-only
while :; do
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} SYNAPSE_TEST_LOG_LEVEL=DEBUG COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -f -run TestSendJoinPartialStateResponse -json 2>&1 | gotestfmt | tee /tmp/xxx
if grep ❌ /tmp/xxx; then
break
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fall back to master.
- name: Checkout complement
shell: bash
run: |
mkdir -p complement
# Attempt to use the version of complement which best matches the current
# build. Depending on whether this is a PR or release, etc. we need to
# use different fallbacks.
#
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
# for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
# (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("master").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
# Skip empty branch names and merge commits.
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
continue
fi
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
shell: bash
name: Run Complement Tests
# Build initial Synapse image
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
working-directory: synapse
# Build a ready-to-run Synapse image based on the initial image above.
# This new image includes a config file, keys for signing and TLS, and
# other settings to make it suitable for testing under Complement.
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
working-directory: complement/dockerfiles
# Run Complement
- run: go test -v -tags synapse_blacklist,msc2403,msc2946,msc3083 ./tests/...
env:
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
working-directory: complement
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
tests-done:
if: ${{ always() }}
needs:
- lint
- lint-crlf
- lint-newsfile
- lint-sdist
- trial
- trial-olddeps
- sytest
- portdb
- complement
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@v2
with:
needs: ${{ toJSON(needs) }}
- name: Set build result
env:
NEEDS_CONTEXT: ${{ toJSON(needs) }}
# the `jq` incantation dumps out a series of "<job> <result>" lines.
# we set it to an intermediate variable to avoid a pipe, which makes it
# hard to set $rc.
run: |
rc=0
results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
while read job result ; do
# The newsfile lint may be skipped on non PR builds
if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
continue
fi
# The newsfile lint may be skipped on non PR builds
skippable:
lint-newsfile
if [ "$result" != "success" ]; then
echo "::set-failed ::Job $job returned $result"
rc=1
fi
done <<< $results
exit $rc
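The `jq` incantation above can be tried outside CI with a hand-written needs context (sample data, not real job output):

```sh
NEEDS_CONTEXT='{"lint": {"result": "success"}, "lint-newsfile": {"result": "skipped"}, "trial": {"result": "failure"}}'
# Each needed job comes out as "<job> <result>", which the while-read loop then inspects.
jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< "$NEEDS_CONTEXT"
# prints:
#   lint success
#   lint-newsfile skipped
#   trial failure
```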


@@ -6,27 +6,16 @@ on:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
extras: "all"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
- uses: actions/setup-python@v2
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e mypy
trial:
runs-on: ubuntu-latest
@@ -34,15 +23,14 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: "3.x"
extras: "all test"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- run: poetry run trial --jobs 2 tests
python-version: 3.6
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
@@ -67,23 +55,11 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
# but the sytest-synapse container expects it to be in /venv/.
# We symlink it before running poetry so that poetry actually
# ends up installing to `/venv`.
run: |
ln -s -T /venv /src/.venv
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
run: .ci/patch_for_twisted_trunk.sh
working-directory: /src
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
env:
# Use offline mode to avoid reinstalling the pinned version of
# twisted.
OFFLINE: 1
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
@@ -96,51 +72,6 @@ jobs:
/logs/results.tap
/logs/**/*.log*
complement:
if: "${{ !failure() && !cancelled() }}"
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
- arrangement: monolith
database: SQLite
- arrangement: monolith
database: Postgres
- arrangement: workers
database: Postgres
steps:
- name: Run actions/checkout@v2 for synapse
uses: actions/checkout@v2
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
# This step is specific to the 'Twisted trunk' test run:
- name: Patch dependencies
run: |
set -x
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
pipx install poetry==1.1.12
poetry remove -n twisted
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry lock --no-update
# NOT IN 1.1.12 poetry lock --check
working-directory: synapse
- run: |
set -o pipefail
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
shell: bash
name: Run Complement Tests
# open an issue if the build fails, so we know about it.
open-issue:
if: failure()
@@ -148,7 +79,6 @@ jobs:
- mypy
- trial
- sytest
- complement
runs-on: ubuntu-latest

.gitignore vendored

@@ -15,9 +15,6 @@ _trial_temp*/
.DS_Store
__pycache__/
# We do want the poetry lockfile.
!poetry.lock
# stuff that is likely to exist when you run a server locally
/*.db
/*.log
@@ -33,9 +30,6 @@ __pycache__/
/media_store/
/uploads
# For direnv users
/.envrc
# IDEs
/.idea/
/.ropeproject/
@@ -56,7 +50,3 @@ __pycache__/
# docs
book/
# complement
/complement-*
/master.tar.gz

CHANGES.md

File diff suppressed because it is too large.

MANIFEST.in Normal file

@@ -0,0 +1,56 @@
include synctl
include LICENSE
include VERSION
include *.rst
include *.md
include demo/README
include demo/demo.tls.dh
include demo/*.py
include demo/*.sh
include synapse/py.typed
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md
recursive-include docs *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
recursive-include tests *.p8
recursive-include tests *.crt
recursive-include tests *.key
recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js
exclude .codecov.yml
exclude .coveragerc
exclude .dockerignore
exclude .editorconfig
exclude Dockerfile
exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh
include book.toml
include pyproject.toml
recursive-include changelog.d *
prune .circleci
prune .github
prune .ci
prune contrib
prune debian
prune demo/etc
prune docker
prune snap
prune stubs
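To check what the MANIFEST rules above actually ship, one option is to build a source distribution locally and list its contents (a quick sanity check, not part of CI; the archive name depends on the package version):

```sh
python setup.py sdist
tar -tzf dist/*.tar.gz | head -n 20
```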


@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.
In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -246,7 +246,7 @@ Password reset
==============
Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
or by directly editing the database as shown below.
First calculate the hash of the new password::
@@ -293,42 +293,36 @@ directory of your choice::
git clone https://github.com/matrix-org/synapse.git
cd synapse
Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
Synapse has a number of external dependencies that are easiest
to install using pip and a virtualenv::
pip install --user pipx
pipx install poetry
as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::
poetry install --extras "all test"
python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,dev]"
This will run a process of downloading and installing all the needed
dependencies into a virtual env.
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
pip install -e "module-name"
poetry run ./demo/start.sh
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
(to stop, you can use ``poetry run ./demo/stop.sh``)
./demo/start.sh
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.
(to stop, you can use `./demo/stop.sh`)
If you just want to start a single instance of the app and run it directly::
# Create the homeserver.yaml config once
poetry run synapse_homeserver \
python -m synapse.app.homeserver \
--server-name my.domain.name \
--config-path homeserver.yaml \
--generate-config \
--report-stats=[yes|no]
# Start the app
poetry run synapse_homeserver --config-path homeserver.yaml
python -m synapse.app.homeserver --config-path homeserver.yaml
Running the unit tests
@@ -337,7 +331,7 @@ Running the unit tests
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::
poetry run trial tests
trial tests
This should end with a 'PASSED' result (note that exact numbers will
differ)::


@@ -0,0 +1 @@
Add a new version of delete room admin API `DELETE /_synapse/admin/v2/rooms/<room_id>` to run it in background. Contributed by @dklimpel.


@@ -0,0 +1 @@
Allow the admin [Delete Room API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#delete-room-api) to block a room without the need to join it.

changelog.d/11230.bugfix Normal file

@@ -0,0 +1,2 @@
Fix a long-standing bug wherein display names or avatar URLs containing null bytes cause an internal server error
when stored in the DB.


@@ -0,0 +1 @@
Support filtering by relation senders & types per [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440).

changelog.d/11242.misc Normal file

@@ -0,0 +1 @@
Split out federated PDU retrieval function into a non-cached version.

changelog.d/11247.misc Normal file

@@ -0,0 +1 @@
Clean up code relating to to-device messages and sending ephemeral events to application services.

changelog.d/11278.misc Normal file

@@ -0,0 +1 @@
Fix a small typo in the error response when a relation type other than 'm.annotation' is passed to `GET /rooms/{room_id}/aggregations/{event_id}`.

changelog.d/11280.misc Normal file

@@ -0,0 +1 @@
Drop unused db tables `room_stats_historical` and `user_stats_historical`.

changelog.d/11281.doc Normal file

@@ -0,0 +1 @@
Suggest users of the Debian packages add configuration to `/etc/matrix-synapse/conf.d/` to prevent, upon upgrade, being asked to choose between their configuration and the maintainer's.

changelog.d/11282.misc Normal file

@@ -0,0 +1 @@
Require all files in synapse/ and tests/ to pass mypy unless specifically excluded.

changelog.d/11285.misc Normal file

@@ -0,0 +1 @@
Require all files in synapse/ and tests/ to pass mypy unless specifically excluded.

changelog.d/11286.doc Normal file

@@ -0,0 +1 @@
Fix typo in the word `available` and fix HTTP method (should be `GET`) for the `username_available` admin API. Contributed by Stanislav Motylkov.

changelog.d/11287.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to `synapse.app`.

changelog.d/11288.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug where uploading extremely thin images (e.g. 1000x1) would fail. Contributed by @Neeeflix.

changelog.d/11292.misc Normal file

@@ -0,0 +1 @@
Remove unused parameters on `FederationEventHandler._check_event_auth`.

changelog.d/11297.misc Normal file

@@ -0,0 +1 @@
Add type hints to `synapse._scripts`.

changelog.d/11298.doc Normal file

@@ -0,0 +1 @@
Add Single Sign-On, SAML and CAS pages to the documentation.

changelog.d/11303.misc Normal file

@@ -0,0 +1 @@
Fix an issue which prevented the 'remove deleted devices from device_inbox column' background process from running when updating from a recent Synapse version.

changelog.d/11307.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11310.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11311.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11312.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11313.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11314.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11316.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11321.misc Normal file

@@ -0,0 +1 @@
Add type hints to `synapse.util`.

changelog.d/11322.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11323.misc Normal file

@@ -0,0 +1 @@
Improve type annotations in Synapse's test suite.

changelog.d/11327.misc Normal file

@@ -0,0 +1 @@
Test that room alias deletion works as intended.

changelog.d/11332.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.


@@ -0,0 +1 @@
Support the stable version of [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778): the `m.login.application_service` login type. Contributed by @tulir.

changelog.d/11339.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11342.misc Normal file

@@ -0,0 +1 @@
Add type hints to storage classes.

changelog.d/11357.misc Normal file

@@ -0,0 +1 @@
Add a development script for visualising the storage class inheritance hierarchy.


@@ -1 +0,0 @@
Add type annotations to `tests.utils`.


@@ -1 +0,0 @@
Add an explanation of the `--report-stats` argument to the docs.


@@ -1 +0,0 @@
Implement [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827): Filtering of /publicRooms by room type.


@@ -1,3 +0,0 @@
Clean up references to sample configuration and redirect users to the configuration manual instead.


@@ -1 +0,0 @@
Enable Complement testing in the 'Twisted Trunk' CI runs.


@@ -1 +0,0 @@
Add documentation for anonymised homeserver statistics collection.


@@ -1 +0,0 @@
Faster room joins: Handle race between persisting an event and un-partial stating a room.


@@ -1 +0,0 @@
Add missing type hints to `synapse.logging`.


@@ -1 +0,0 @@
Raise a `DependencyError` on missing dependencies instead of a `ConfigError`.


@@ -1 +0,0 @@
Fix wrong section header for `allow_public_rooms_over_federation` in the homeserver config documentation.


@@ -1 +0,0 @@
Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room.


@@ -1 +0,0 @@
Add a rate limit for local users sending invites.


@@ -1 +0,0 @@
Improve startup times in Complement test runs against workers, particularly in CPU-constrained environments.


@@ -1 +0,0 @@
Only one-line SQL statements for logging and tracing.


@@ -1 +0,0 @@
Fix application service not being able to join remote federated room without a profile set.


@@ -1 +0,0 @@
Document how the Synapse team does reviews.


@@ -1 +0,0 @@
Apply ratelimiting earlier in processing of /send request.


@@ -1 +0,0 @@
Enforce type annotations for `tests.test_server`.


@@ -1 +0,0 @@
Add type annotations to `tests.server`.


@@ -1 +0,0 @@
Add a link to the configuration manual from the homeserver sample config documentation.


@@ -1 +0,0 @@
Add support to `complement.sh` for skipping the docker build.


@@ -1 +0,0 @@
Faster joins: skip waiting for full state when processing incoming events over federation.


@@ -1 +0,0 @@
Improve exception handling when processing events received over federation.


@@ -1 +0,0 @@
Improve validation logic in Synapse's REST endpoints.


@@ -1 +0,0 @@
Faster room joins: fix race in recalculation of current room state.


@@ -1 +0,0 @@
Add the ability to set the log level using the `SYNAPSE_TEST_LOG_LEVEL` environment when using `complement.sh`.


@@ -1 +0,0 @@
Reduce DB usage of `/sync` when a large number of unread messages have recently been sent in a room.


@@ -1 +0,0 @@
Enable Complement testing in the 'Twisted Trunk' CI runs.


@@ -1 +0,0 @@
Add support to `complement.sh` for skipping the docker build.


@@ -1 +0,0 @@
Improve and fix type hints.


@@ -1 +0,0 @@
Add missing links to config options.


@@ -1 +0,0 @@
Update config used by Complement to allow device name lookup over federation.


@@ -1 +0,0 @@
Make use of the more robust `get_current_state` in `_get_state_map_for_room` to avoid breakages.


@@ -1 +0,0 @@
Fix bug where rows were not deleted from `event_push_actions` table on large servers. Introduced in v1.62.0.


@@ -1 +0,0 @@
Check that `auto_vacuum` is disabled when porting a SQLite database to Postgres, as `VACUUM`s must not be performed between runs of the script.

View File

@@ -1 +0,0 @@
Remove the obsolete `RoomEventsStoreTestCase`, which has been unused for 8 years. Contributed by @arkamar.

View File

@@ -1 +0,0 @@
Bump the version of `lxml` in matrix.org Docker images and Debian packages from 4.8.0 to 4.9.1.

View File

@@ -1 +0,0 @@
Reduce number of queries used to get profile information. Contributed by Nick @ Beeper (@fizzadar).

View File

@@ -16,7 +16,6 @@
""" Starts a synapse client console. """
import argparse
import binascii
import cmd
import getpass
import json
@@ -27,8 +26,9 @@ import urllib
from http import TwistedHttpClient
from typing import Optional
import nacl.encoding
import nacl.signing
import urlparse
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
from signedjson.sign import SignatureVerifyException, verify_signed_json
from twisted.internet import defer, reactor, threads
@@ -41,6 +41,7 @@ TRUSTED_ID_SERVERS = ["localhost:8001"]
class SynapseCmd(cmd.Cmd):
"""Basic synapse command-line processor.
This processes commands from the user and calls the relevant HTTP methods.
@@ -419,8 +420,8 @@ class SynapseCmd(cmd.Cmd):
pubKey = None
pubKeyObj = yield self.http_client.do_request("GET", url)
if "public_key" in pubKeyObj:
pubKey = decode_verify_key_bytes(
NACL_ED25519, binascii.unhexlify(pubKeyObj["public_key"])
pubKey = nacl.signing.VerifyKey(
pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
)
else:
print("No public key found in pubkey response!")

View File

@@ -14,7 +14,6 @@ services:
# failure
restart: unless-stopped
# See the readme for a full documentation of the environment settings
# NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
environment:
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
volumes:

View File

@@ -1,125 +0,0 @@
# Setting up Synapse with Workers using Docker Compose
This directory describes how to deploy and manage Synapse and workers via [Docker Compose](https://docs.docker.com/compose/).
Example worker configuration files can be found [here](workers).
All examples and snippets assume that your Synapse service is called `synapse` in your Docker Compose file.
An example Docker Compose file can be found [here](docker-compose.yaml).
## Worker Service Examples in Docker Compose
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER`, or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`.
### Generic Worker Example
```yaml
synapse-generic-worker-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-generic-worker-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
  healthcheck:
    test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
    start_period: "5s"
    interval: "15s"
    timeout: "5s"
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.generic_worker
  # Expose port if required so your reverse proxy can send requests to this worker
  # Port configuration will depend on how the http listener is defined in the worker configuration file
  ports:
    - 8081:8081
  depends_on:
    - synapse
```
### Federation Sender Example
Please note: the federation sender does not receive REST API calls, so no exposed ports are required.
```yaml
synapse-federation-sender-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-federation-sender-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
  healthcheck:
    disable: true
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.federation_sender
  depends_on:
    - synapse
```
## `homeserver.yaml` Configuration
### Enable Redis
Locate the `redis` section of your `homeserver.yaml` and enable and configure it:
```yaml
redis:
  enabled: true
  host: redis
  port: 6379
  # password: <secret_password>
```
This assumes that your Redis service is called `redis` in your Docker Compose file.
### Add a replication Listener
Locate the `listeners` section of your `homeserver.yaml` and add the following replication listener:
```yaml
listeners:
  # Other listeners
  - port: 9093
    type: http
    resources:
      - names: [replication]
```
This listener is used by the workers for replication and is referred to in worker config files using the following settings:
```yaml
worker_replication_host: synapse
worker_replication_http_port: 9093
```
### Add Workers to `instance_map`
Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:
```yaml
instance_map:
  synapse-generic-worker-1: # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034 # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```
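For reference, a worker configuration file matching the `instance_map` above could look roughly like the following. This is a minimal sketch, not a file from this repository: the client/federation listener port, the log config path and the exact listener layout are assumptions, so adjust them to your deployment.
```yaml
# /data/workers/synapse-generic-worker-1.yaml (sketch; adjust to your setup)
worker_app: synapse.app.generic_worker
worker_name: synapse-generic-worker-1 # Must match the instance_map entry above

# Points back at the replication listener of the main Synapse process
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  # Replication listener that the instance_map entry (port 8034) refers to
  - type: http
    port: 8034
    resources:
      - names: [replication]
  # HTTP listener for the client/federation requests routed to this worker (assumed port)
  - type: http
    port: 8081
    resources:
      - names: [client, federation]

# Assumed path; point this at your worker's log config
worker_log_config: /data/workers/synapse-generic-worker-1.log.config
```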
### Configure Federation Senders
This section is applicable if you are using federation senders (`synapse.app.federation_sender`). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
```yaml
# This will disable federation sending on the main Synapse instance
send_federation: false
federation_sender_instances:
  - synapse-federation-sender-1 # The worker_name setting in your federation sender worker configuration file
```
## Other Worker types
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
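As an illustration, and not part of the original examples, a pusher worker could be added along the same lines; the service name and worker config path below are assumptions for the sketch:
```yaml
synapse-pusher-1:
  image: matrixdotorg/synapse:latest
  container_name: synapse-pusher-1
  restart: unless-stopped
  entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-pusher-1.yaml"]
  healthcheck:
    disable: true # Like the federation sender, a pusher serves no REST API calls
  volumes:
    - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
  environment:
    SYNAPSE_WORKER: synapse.app.pusher
  depends_on:
    - synapse
```
As with federation senders, the main process would then typically need pushing disabled (`start_pushers: false`) and a `pusher_instances` list in `homeserver.yaml` naming the worker.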

View File

@@ -1,77 +0,0 @@
networks:
  backend:
services:
  postgres:
    image: postgres:latest
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/var/lib/postgresql/data:/var/lib/postgresql/data:rw
    networks:
      - backend
    environment:
      POSTGRES_DB: synapse
      POSTGRES_USER: synapse_user
      POSTGRES_PASSWORD: postgres
      POSTGRES_INITDB_ARGS: --encoding=UTF8 --locale=C
  redis:
    image: redis:latest
    restart: unless-stopped
    networks:
      - backend
  synapse:
    image: matrixdotorg/synapse:latest
    container_name: synapse
    restart: unless-stopped
    volumes:
      - ${VOLUME_PATH}/data:/data:rw
    ports:
      - 8008:8008
    networks:
      - backend
    environment:
      SYNAPSE_CONFIG_DIR: /data
      SYNAPSE_CONFIG_PATH: /data/homeserver.yaml
    depends_on:
      - postgres
  synapse-generic-worker-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-generic-worker-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-generic-worker-1.yaml"]
    healthcheck:
      test: ["CMD-SHELL", "curl -fSs http://localhost:8081/health || exit 1"]
      start_period: "5s"
      interval: "15s"
      timeout: "5s"
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.generic_worker
    # Expose port if required so your reverse proxy can send requests to this worker
    # Port configuration will depend on how the http listener is defined in the worker configuration file
    ports:
      - 8081:8081
    depends_on:
      - synapse
  synapse-federation-sender-1:
    image: matrixdotorg/synapse:latest
    container_name: synapse-federation-sender-1
    restart: unless-stopped
    entrypoint: ["/start.py", "run", "--config-path=/data/homeserver.yaml", "--config-path=/data/workers/synapse-federation-sender-1.yaml"]
    healthcheck:
      disable: true
    networks:
      - backend
    volumes:
      - ${VOLUME_PATH}/data:/data:rw # Replace VOLUME_PATH with the path to your Synapse volume
    environment:
      SYNAPSE_WORKER: synapse.app.federation_sender
    depends_on:
      - synapse

Some files were not shown because too many files have changed in this diff Show More