Compare commits

5 Commits

Author   SHA1        Message                                                       Date
H. Shay  da5c58bb24  run pyupgrade --py37-plus --keep-percent-format on synapse/  2022-01-04 12:50:21 -08:00
H. Shay  6e94f64edf  lint                                                          2022-01-04 11:02:04 -08:00
H. Shay  da1f496487  newsfragment                                                  2022-01-04 10:50:46 -08:00
H. Shay  68882a0251  run pyupgrade on synapse/app and synapse/appservice          2022-01-04 10:24:57 -08:00
H. Shay  56d599e5ac  run pyupgrade on synapse/_scripts and synapse/api             2022-01-04 10:23:16 -08:00
771 changed files with 24607 additions and 47550 deletions

.ci/latest_deps_build_failed_issue_template.md

@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}

.ci/patch_for_twisted_trunk.sh Executable file

@@ -0,0 +1,8 @@
#!/bin/sh
# replaces the dependency on Twisted in `python_dependencies` with trunk.
set -e
cd "$(dirname "$0")"/..
sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
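To make the effect of that substitution concrete, here is a hedged sketch of the rewrite the sed expression performs; the requirement string below is illustrative, not the exact entry in `synapse/python_dependencies.py`:

```python
import re

# Illustrative requirement string; the real entry in python_dependencies.py may differ.
requirement = '"Twisted[tls]>=18.9.0",'

# The same rewrite the sed expression above performs.
patched = re.sub(
    r'"Twisted.*"',
    '"Twisted @ git+https://github.com/twisted/twisted"',
    requirement,
)

print(patched)  # "Twisted @ git+https://github.com/twisted/twisted",
```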


@@ -1,28 +0,0 @@
#!/usr/bin/env python3
# This script will write a json file to $OUTPUT_FILE that contains the name of
# each available Synapse version with documentation.
#
# This script assumes that any top-level directory in the "gh-pages" branch is
# named after a documentation version and contains documentation website files.
import os.path
import json
OUTPUT_FILE = "versions.json"
# Determine the list of Synapse versions that have documentation.
doc_versions = []
for filepath in os.listdir():
if os.path.isdir(filepath):
doc_versions.append(filepath)
# Record the documentation versions in a json file, such that the
# frontend javascript is aware of what versions exist.
to_write = {
"versions": doc_versions,
"default_version": "latest",
}
# Write the file.
with open(OUTPUT_FILE, "w") as f:
f.write(json.dumps(to_write))
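As an illustration, if the gh-pages checkout happened to contain `latest` and `v1.50` directories (hypothetical names), the script above would write a `versions.json` like the one sketched here:

```python
import json

# Hypothetical top-level directory names found in a gh-pages checkout.
doc_versions = ["latest", "v1.50"]

# The structure the script writes to versions.json for the frontend javascript.
expected = {"versions": doc_versions, "default_version": "latest"}
print(json.dumps(expected))
# {"versions": ["latest", "v1.50"], "default_version": "latest"}
```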

.ci/scripts/test_export_data_command.sh

@@ -2,24 +2,29 @@
# Test for the export-data admin command against sqlite and postgres
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
set -xe
cd "$(dirname "$0")/../.."
echo "--- Install dependencies"
# Install dependencies for this test.
pip install psycopg2
# Install Synapse itself. This won't update any libraries.
pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data
# Test that the output directory exists and contains the rooms directory
@@ -32,14 +37,14 @@ else
fi
# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
# Port the SQLite database to postgres so we can check the command works against postgres
echo "+++ Port SQLite3 database to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# Run the export-data command on postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
--output-directory /tmp/export_data2
# Test that the output directory exists and contains the rooms directory

.ci/scripts/test_old_deps.sh

@@ -1,81 +1,16 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.
# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
# this script is run by GitHub Actions in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.
set -ex
apt-get update
apt-get install -y \
python3 python3-dev python3-pip python3-venv pipx \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
export LANG="C.UTF-8"
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.
# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Pin pyOpenSSL to 17.0.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1.
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Omit systemd: we're not logging to journal here.
# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).
sed -i \
-e "s/[~>]=/==/g" \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml
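As a rough sketch of what that patching does, using made-up specifiers rather than Synapse's real pyproject.toml entries:

```python
import re

# Made-up dependency specifiers standing in for pyproject.toml lines.
lines = [
    'attrs = ">=19.2.0"',
    'frozendict = "~=2.0"',
    'psycopg2 = ">=2.8"',
]

patched = []
for line in lines:
    if "psycopg2" in line:
        continue  # dropped entirely: postgres support is not tested here
    # Turn lower/tilde bounds into exact pins, mirroring the sed expression above.
    patched.append(re.sub(r"[~>]=", "==", line))

print(patched)  # ['attrs = "==19.2.0"', 'frozendict = "==2.0"']
```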
# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far as the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.
pip install --user toml
REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
data = toml.loads(f.read())
del data['tool']['poetry']['dev-dependencies']
with open('pyproject.toml', 'w') as f:
toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"
pipx install poetry==1.1.12
~/.local/bin/poetry lock
echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
exec tox -e py3-old,combine

.ci/scripts/test_synapse_port_db.sh

@@ -1,37 +1,41 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - sets up synapse and deps
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
set -xe
cd "$(dirname "$0")/../.."
echo "--- Install dependencies"
# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess
# Install Synapse itself. This won't update any libraries.
pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
#####
@@ -42,12 +46,12 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py \
.ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

.dockerignore

@@ -3,9 +3,11 @@
# things to include
!docker
!scripts
!synapse
!MANIFEST.in
!README.rst
!pyproject.toml
!poetry.lock
!setup.py
!synctl
**/__pycache__

.flake8

@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501


@@ -8,7 +8,6 @@
- Use markdown where necessary, mostly for `code blocks`.
- End with either a period (.) or an exclamation mark (!).
- Start with a capital letter.
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
(run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))


@@ -34,8 +34,6 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
# TODO: consider using https://github.com/docker/metadata-action instead of this
# custom magic
- name: Calculate docker image tag
id: set-tag
run: |
@@ -55,6 +53,18 @@ jobs:
esac
echo "::set-output name=tag::$tag"
# for release builds, we want to get the amd64 image out asap, so first
# we do an amd64-only build, before following up with a multiarch build.
- name: Build and push amd64
uses: docker/build-push-action@v2
if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
with:
push: true
labels: "gitsha1=${{ github.sha }}"
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
file: "docker/Dockerfile"
platforms: linux/amd64
- name: Build and push all platforms
uses: docker/build-push-action@v2
with:


@@ -14,7 +14,7 @@ on:
jobs:
pages:
name: Build and deploy docs
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@@ -22,7 +22,7 @@ jobs:
- name: Setup mdbook
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
with:
mdbook-version: '0.4.17'
mdbook-version: '0.4.9'
- name: Build the documentation
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
@@ -63,29 +63,3 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./book
destination_dir: ./${{ steps.vars.outputs.branch-version }}
list_available_versions:
needs: pages
runs-on: ubuntu-latest
steps:
# Check out the current branch
- uses: actions/checkout@v3
with:
persist-credentials: false
- name: Save the script
run: cp .ci/scripts/record_available_doc_versions.py /
- uses: actions/setup-python@v3
# Check out the gh-pages branch, which we'll be pushing the doc versions to
- uses: actions/checkout@v3
with:
persist-credentials: false
# Check out the gh-pages branch
ref: 'gh-pages'
- name: Record the available documentation versions
run: |
# Run the script saved earlier
/record_available_doc_versions.py


@@ -1,159 +0,0 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.
name: Latest dependencies
on:
schedule:
- cron: 0 7 * * *
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "1.2.0b1"
extras: "all"
# Dump installed versions for debugging.
- run: poetry run pip list > before.txt
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
trial:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- database: "sqlite"
- database: "postgres"
postgres-version: "14"
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install .[all,test]
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: python -m twisted.trial --jobs=2 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
sytest:
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:testing
volumes:
- ${{ github.workspace }}:/src
strategy:
fail-fast: false
matrix:
include:
- sytest-tag: focal
- sytest-tag: focal
postgres: postgres
workers: workers
redis: redis
env:
POSTGRES: ${{ matrix.postgres && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@v2
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
run: rm /src/poetry.lock
working-directory: /src
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*
# TODO: run complement (as with twisted trunk, see #12473).
# open an issue if the build fails, so we know about it.
open-issue:
if: failure()
needs:
# TODO: should mypy be included here? It feels more brittle than the other two.
- mypy
- trial
- sytest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
update_existing: true
filename: .ci/latest_deps_build_failed_issue_template.md


@@ -7,7 +7,7 @@ on:
# of things breaking (but only build one set of debs)
pull_request:
push:
branches: ["develop", "release-*"]
branches: ["develop"]
# we do the full build on tags.
tags: ["v*"]
@@ -31,7 +31,7 @@ jobs:
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
dists='["debian:sid"]'
if [[ $GITHUB_REF == refs/tags/* ]]; then
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
dists=$(scripts-dev/build_debian_packages --show-dists-json)
fi
echo "::set-output name=distros::$dists"
# map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
# see https://github.com/docker/build-push-action/issues/252
# for the cache magic here
run: |
./src/scripts-dev/build_debian_packages.py \
./src/scripts-dev/build_debian_packages \
--docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
--docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
--docker-build-arg=--progress=plain \
@@ -91,7 +91,17 @@ jobs:
build-sdist:
name: "Build pypi distribution files"
uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install wheel
- run: |
python setup.py sdist bdist_wheel
- uses: actions/upload-artifact@v2
with:
name: python-dist
path: dist/*
# if it's a tag, create a release and attach the artifacts to it
attach-assets:
@@ -112,8 +122,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
files: |
Sdist/*
Wheel/*
python-dist/*
debs.tar.xz
# if it's not already published, keep the release as a draft.
draft: true


@@ -10,19 +10,22 @@ concurrency:
cancel-in-progress: true
jobs:
check-sampleconfig:
lint:
runs-on: ubuntu-latest
strategy:
matrix:
toxenv:
- "check-sampleconfig"
- "check_codestyle"
- "check_isort"
- "mypy"
- "packaging"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install .
- run: scripts-dev/generate_sample_config.sh --check
- run: scripts-dev/config-lint.sh
lint:
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
with:
typechecking-extras: "all"
- run: pip install tox
- run: tox -e ${{ matrix.toxenv }}
lint-crlf:
runs-on: ubuntu-latest
@@ -40,15 +43,29 @@ jobs:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- uses: actions/setup-python@v2
- run: "pip install 'towncrier>=18.6.0rc1'"
- run: scripts-dev/check-newsfragment.sh
- run: pip install tox
- run: scripts-dev/check-newsfragment
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install wheel
- run: python setup.py sdist bdist_wheel
- uses: actions/upload-artifact@v2
with:
name: Python Distributions
path: dist/*
# Dummy step to gate other tests on without repeating the whole list
linting-done:
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig]
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
runs-on: ubuntu-latest
steps:
- run: "true"
@@ -61,23 +78,23 @@ jobs:
matrix:
python-version: ["3.7", "3.8", "3.9", "3.10"]
database: ["sqlite"]
extras: ["all"]
toxenv: ["py"]
include:
# Newest Python without optional deps
- python-version: "3.10"
extras: ""
toxenv: "py-noextras"
# Oldest Python with PostgreSQL
- python-version: "3.7"
database: "postgres"
postgres-version: "10"
extras: "all"
toxenv: "py"
# Newest Python with newest PostgreSQL
- python-version: "3.10"
database: "postgres"
postgres-version: "14"
extras: "all"
toxenv: "py"
steps:
- uses: actions/checkout@v2
@@ -89,16 +106,17 @@ jobs:
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
extras: ${{ matrix.extras }}
- run: pip install tox
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: poetry run trial --jobs=2 tests
- run: tox -e ${{ matrix.toxenv }}
env:
TRIAL_FLAGS: "--jobs=2"
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
@@ -117,19 +135,18 @@ jobs:
|| true
trial-olddeps:
# Note: sqlite only; no postgres
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Test with old deps
uses: docker://ubuntu:focal # For old python and sqlite
# Note: focal seems to be using 3.8, but the oldest is 3.7?
# See https://github.com/matrix-org/synapse/issues/12343
uses: docker://ubuntu:bionic # For old python and sqlite
with:
workdir: /github/workspace
entrypoint: .ci/scripts/test_old_deps.sh
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
@@ -145,24 +162,23 @@ jobs:
trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
# Note: sqlite only; no postgres. Completely untested since poetry move.
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.7"]
extras: ["all"]
steps:
- uses: actions/checkout@v2
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
extras: ${{ matrix.extras }}
- run: poetry run trial --jobs=2 tests
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
@@ -197,15 +213,15 @@ jobs:
fail-fast: false
matrix:
include:
- sytest-tag: focal
- sytest-tag: bionic
- sytest-tag: focal
- sytest-tag: bionic
postgres: postgres
- sytest-tag: testing
postgres: postgres
- sytest-tag: focal
- sytest-tag: bionic
postgres: multi-postgres
workers: workers
@@ -261,10 +277,9 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
extras: "postgres"
python-version: "3.9"
- run: .ci/scripts/test_export_data_command.sh
portdb:
@@ -299,39 +314,33 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
extras: "postgres"
- run: .ci/scripts/test_synapse_port_db.sh
complement:
if: ${{ !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
container:
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
image: matrixdotorg/complement:latest
env:
CI: true
ports:
- 8448:8448
volumes:
- /var/run/docker.sock:/var/run/docker.sock
steps:
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
- name: "Set Go Version"
run: |
# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
- name: "Install Complement Dependencies"
run: |
sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
- name: Run actions/checkout@v2 for synapse
uses: actions/checkout@v2
with:
path: synapse
# Attempt to check out the same branch of Complement as the PR. If it
# doesn't exist, fallback to HEAD.
# doesn't exist, fallback to master.
- name: Checkout complement
shell: bash
run: |
@@ -344,8 +353,8 @@ jobs:
# for pull requests, otherwise GITHUB_REF).
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
# (GITHUB_BASE_REF for pull requests).
# 3. Use the default complement branch ("HEAD").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
# 3. Use the default complement branch ("master").
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
# Skip empty branch names and merge commits.
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
continue
@@ -354,32 +363,55 @@ jobs:
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
done
- run: |
set -o pipefail
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
shell: bash
name: Run Complement Tests
# Build initial Synapse image
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
working-directory: synapse
# Build a ready-to-run Synapse image based on the initial image above.
# This new image includes a config file, keys for signing and TLS, and
# other settings to make it suitable for testing under Complement.
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
working-directory: complement/dockerfiles
# Run Complement
- run: go test -v -tags synapse_blacklist,msc2403 ./tests/...
env:
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
working-directory: complement
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
tests-done:
if: ${{ always() }}
needs:
- check-sampleconfig
- lint
- lint-crlf
- lint-newsfile
- lint-sdist
- trial
- trial-olddeps
- sytest
- export-data
- portdb
- complement
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@v2
with:
needs: ${{ toJSON(needs) }}
- name: Set build result
env:
NEEDS_CONTEXT: ${{ toJSON(needs) }}
# the `jq` incantation dumps out a series of "<job> <result>" lines.
# we set it to an intermediate variable to avoid a pipe, which makes it
# hard to set $rc.
run: |
rc=0
results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
while read job result ; do
# The newsfile lint may be skipped on non PR builds
if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
continue
fi
# The newsfile lint may be skipped on non PR builds
skippable:
lint-newsfile
if [ "$result" != "success" ]; then
echo "::set-failed ::Job $job returned $result"
rc=1
fi
done <<< $results
exit $rc
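For comparison, a minimal Python sketch of the same result-checking logic the shell loop above implements; the `needs` payload below is invented for illustration:

```python
import json

# Invented example of the JSON that `toJSON(needs)` exposes to the step.
needs_context = json.dumps({
    "lint": {"result": "success"},
    "lint-newsfile": {"result": "skipped"},
    "trial": {"result": "failure"},
})

rc = 0
for job, info in json.loads(needs_context).items():
    result = info["result"]
    # The newsfile lint may legitimately be skipped on non-PR builds.
    if job == "lint-newsfile" and result == "skipped":
        continue
    if result != "success":
        print(f"Job {job} returned {result}")
        rc = 1

raise SystemExit(rc)
```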


@@ -6,27 +6,16 @@ on:
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
extras: "all"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
- uses: actions/setup-python@v2
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e mypy
trial:
runs-on: ubuntu-latest
@@ -34,15 +23,14 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
- uses: actions/setup-python@v2
with:
python-version: "3.x"
extras: "all test"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
- run: poetry run trial --jobs 2 tests
python-version: 3.6
- run: .ci/patch_for_twisted_trunk.sh
- run: pip install tox
- run: tox -e py
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Logs are most useful when the command fails, always include them.
@@ -67,23 +55,11 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
# but the sytest-synapse container expects it to be in /venv/.
# We symlink it before running poetry so that poetry actually
# ends up installing to `/venv`.
run: |
ln -s -T /venv /src/.venv
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry install --no-interaction --extras "all test"
run: .ci/patch_for_twisted_trunk.sh
working-directory: /src
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
env:
# Use offline mode to avoid reinstalling the pinned version of
# twisted.
OFFLINE: 1
- name: Summarise results.tap
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap

.gitignore vendored

@@ -15,9 +15,6 @@ _trial_temp*/
.DS_Store
__pycache__/
# We do want the poetry lockfile.
!poetry.lock
# stuff that is likely to exist when you run a server locally
/*.db
/*.log
@@ -33,9 +30,6 @@ __pycache__/
/media_store/
/uploads
# For direnv users
/.envrc
# IDEs
/.idea/
/.ropeproject/
@@ -56,7 +50,3 @@ __pycache__/
# docs
book/
# complement
/complement-*
/master.tar.gz

CHANGES.md

File diff suppressed because it is too large

MANIFEST.in Normal file

@@ -0,0 +1,56 @@
include synctl
include LICENSE
include VERSION
include *.rst
include *.md
include demo/README
include demo/demo.tls.dh
include demo/*.py
include demo/*.sh
include synapse/py.typed
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md
recursive-include docs *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
recursive-include tests *.p8
recursive-include tests *.crt
recursive-include tests *.key
recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js
exclude .codecov.yml
exclude .coveragerc
exclude .dockerignore
exclude .editorconfig
exclude Dockerfile
exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh
include book.toml
include pyproject.toml
recursive-include changelog.d *
prune .circleci
prune .github
prune .ci
prune contrib
prune debian
prune demo/etc
prune docker
prune snap
prune stubs

README.rst

@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.
In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -246,7 +246,7 @@ Password reset
==============
Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
or by directly editing the database as shown below.
First calculate the hash of the new password::
@@ -293,42 +293,36 @@ directory of your choice::
git clone https://github.com/matrix-org/synapse.git
cd synapse
Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
Synapse has a number of external dependencies that are easiest
to install using pip and a virtualenv::
pip install --user pipx
pipx install poetry
as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::
poetry install --extras "all test"
python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,dev]"
This will run a process of downloading and installing all the needed
dependencies into a virtual env.
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
pip install -e "module-name"
poetry run ./demo/start.sh
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
(to stop, you can use ``poetry run ./demo/stop.sh``)
./demo/start.sh
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.
(to stop, you can use `./demo/stop.sh`)
If you just want to start a single instance of the app and run it directly::
# Create the homeserver.yaml config once
poetry run synapse_homeserver \
python -m synapse.app.homeserver \
--server-name my.domain.name \
--config-path homeserver.yaml \
--generate-config \
--report-stats=[yes|no]
# Start the app
poetry run synapse_homeserver --config-path homeserver.yaml
python -m synapse.app.homeserver --config-path homeserver.yaml
Running the unit tests
@@ -337,7 +331,7 @@ Running the unit tests
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::
poetry run trial tests
trial tests
This should end with a 'PASSED' result (note that exact numbers will
differ)::

changelog.d/10520.misc Normal file

@@ -0,0 +1 @@
Send and handle cross-signing messages using the stable prefix.

changelog.d/11243.misc Normal file

@@ -0,0 +1 @@
Allow specific, experimental events to be created without `prev_events`. Used by [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716).

changelog.d/11331.misc Normal file

@@ -0,0 +1 @@
A test helper (`wait_for_background_updates`) no longer depends on classes defining a `store` property.

changelog.d/11360.misc Normal file

@@ -0,0 +1 @@
Add type hints to `synapse.appservice`.


@@ -0,0 +1 @@
Allow guests to send state events per [MSC3419](https://github.com/matrix-org/matrix-doc/pull/3419).

changelog.d/11427.doc Normal file

@@ -0,0 +1 @@
Document the usage of refresh tokens.

changelog.d/11480.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to `synapse.config` module.

changelog.d/11487.misc Normal file

@@ -0,0 +1 @@
Add test to ensure we share the same `state_group` across the whole historical batch when using the [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) `/batch_send` endpoint.

changelog.d/11503.misc Normal file

@@ -0,0 +1 @@
Refactor `tests.util.setup_test_homeserver` and `tests.server.setup_test_homeserver`.

changelog.d/11516.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug where relations from other rooms could be included in the bundled aggregations of an event.

changelog.d/11520.misc Normal file

@@ -0,0 +1 @@
Use HTTPStatus constants in place of literals in `tests.rest.client.test_auth`.

changelog.d/11531.misc Normal file

@@ -0,0 +1 @@
Add a receipt types constant for `m.read`.

changelog.d/11535.misc Normal file

@@ -0,0 +1 @@
Clean up `synapse.rest.admin`.

changelog.d/11536.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug which could cause `AssertionError`s to be written to the log when Synapse was restarted after purging events from the database.


@@ -0,0 +1 @@
Add experimental support for MSC3202: allowing application services to masquerade as specific devices.

changelog.d/11541.misc Normal file

@@ -0,0 +1 @@
Support unprefixed versions of fallback key property names.

changelog.d/11542.misc Normal file

@@ -0,0 +1 @@
Add missing `errcode` to `parse_string` and `parse_boolean`.

changelog.d/11543.misc Normal file

@@ -0,0 +1 @@
Use HTTPStatus constants in place of literals in `synapse.http`.

changelog.d/11546.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11547.bugfix Normal file

@@ -0,0 +1 @@
Fix a bug introduced in Synapse 1.17.0 where a pusher created for an email with capital letters would fail to be created.

changelog.d/11549.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11550.misc Normal file

@@ -0,0 +1 @@
Fix an inaccurate and misleading comment in the `/sync` code.

changelog.d/11551.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11553.doc Normal file

@@ -0,0 +1 @@
Add details for how to configure a TURN server when behind a NAT. Contributed by @AndrewFerr.

changelog.d/11555.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11556.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to `synapse.logging.context`.

changelog.d/11558.misc Normal file

@@ -0,0 +1 @@
Stop populating unused database column `state_events.prev_state`.

changelog.d/11560.misc Normal file

@@ -0,0 +1 @@
Minor efficiency improvements in event persistence.

changelog.d/11564.misc Normal file

@@ -0,0 +1 @@
Add some safety checks that storage functions are used correctly.

changelog.d/11565.misc Normal file

@@ -0,0 +1 @@
Make `get_device` return `None` if the device doesn't exist rather than raising an exception.

changelog.d/11566.misc Normal file

@@ -0,0 +1 @@
Split the HTML parsing code from the URL preview resource code.

changelog.d/11570.misc Normal file

@@ -0,0 +1 @@
Remove redundant `COALESCE()`s around `COUNT()`s in database queries.

changelog.d/11571.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to `synapse.http`.

changelog.d/11574.misc Normal file

@@ -0,0 +1 @@
Convert `EventStreamResult` from a `namedtuple` to `attrs` to improve type hints.

changelog.d/11575.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11580.misc Normal file

@@ -0,0 +1 @@
Add some safety checks that storage functions are used correctly.

changelog.d/11582.misc Normal file

@@ -0,0 +1 @@
Add [MSC2716](https://github.com/matrix-org/matrix-doc/pull/2716) and [MSC3030](https://github.com/matrix-org/matrix-doc/pull/3030) to `/versions` -> `unstable_features` to detect server support.


@@ -0,0 +1 @@
Replace `mock` package by its standard library version.

changelog.d/11589.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11590.misc Normal file

@@ -0,0 +1 @@
Add type hints to `synapse/tests/rest/admin`.

changelog.d/11592.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug where responses included bundled aggregations when they should not, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).

changelog.d/11594.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11595.misc Normal file

@@ -0,0 +1 @@
Drop EOL python 3.6 and postgres 9.6 from CI.

changelog.d/11596.misc Normal file

@@ -0,0 +1 @@
Update black version and run it on all the files.

changelog.d/11602.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug that some unknown endpoints would return HTML error pages instead of JSON `M_UNRECOGNIZED` errors.

changelog.d/11603.misc Normal file

@@ -0,0 +1 @@
Add opentracing type stubs and fix associated mypy errors.

changelog.d/11607.misc Normal file

@@ -0,0 +1 @@
Improve opentracing support for requests which use a `ResponseCache`.

changelog.d/11618.misc Normal file

@@ -0,0 +1 @@
Improve opentracing support for incoming HTTP requests.

changelog.d/11619.misc Normal file

@@ -0,0 +1 @@
A number of improvements to opentracing support.

changelog.d/11622.misc Normal file

@@ -0,0 +1 @@
Add opentracing type stubs and fix associated mypy errors.

changelog.d/11623.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug where responses included bundled aggregations when they should not, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).

changelog.d/11632.bugfix Normal file

@@ -0,0 +1 @@
Fix a bug introduced in Synapse 1.19.3 which could sometimes cause `AssertionError`s when backfilling rooms over federation.

changelog.d/11633.misc Normal file

@@ -0,0 +1 @@
Drop support for Python 3.6 and Ubuntu 18.04.

changelog.d/11638.misc Normal file

@@ -0,0 +1 @@
Improve the error messages from `get_create_event_for_room`.

changelog.d/11642.bugfix Normal file

@@ -0,0 +1 @@
Fix a long-standing bug which could cause `AssertionError`s to be written to the log when Synapse was restarted after purging events from the database.

changelog.d/11643.misc Normal file

@@ -0,0 +1 @@
Remove redundant `get_current_events_token` method.

changelog.d/11652.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11653.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11654.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11657.misc Normal file

@@ -0,0 +1 @@
Add missing type hints to storage classes.

changelog.d/11665.misc Normal file

@@ -0,0 +1 @@
Convert `namedtuples` to `attrs`.


@@ -0,0 +1 @@
Include the room topic in the stripped state included with invites and knocking.

changelog.d/11684.misc Normal file

@@ -0,0 +1 @@
Run `pyupgrade --py36-plus` on `synapse/app`, `synapse/appservice`, `synapse/_scripts`, and `synapse/api`.


@@ -1 +0,0 @@
Fix a bug introduced in Synapse v1.48.0 where the latest thread reply provided failed to include the proper bundled aggregations.


@@ -1 +0,0 @@
Fix scripts-dev to pass typechecking.


@@ -1 +0,0 @@
Add a module API to allow modules to change actions for existing push rules of local users.


@@ -1 +0,0 @@
Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time.


@@ -1 +0,0 @@
Add some type hints to datastore.


@@ -1 +0,0 @@
Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests.


@@ -1 +0,0 @@
Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid.


@@ -1 +0,0 @@
Remove unused `# type: ignore`s.


@@ -1 +0,0 @@
Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments.


@@ -1 +0,0 @@
Fix a bug where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper.


@@ -1 +0,0 @@
Release script: confirm the commit to be tagged before tagging.


@@ -1 +0,0 @@
Consistently check if an object is a `frozendict`.


@@ -1 +0,0 @@
Fix a bug introduced in Synapse 1.57 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation.


@@ -1 +0,0 @@
Allow unused `#type: ignore` comments in bleeding edge CI jobs.


@@ -1 +0,0 @@
Add missing linebreak to pipx install instructions.


@@ -1 +0,0 @@
Fix a long-standing bug where status codes would almost always get logged as 200!, irrespective of the actual status code, when clients disconnect before a request has finished processing.


@@ -1 +0,0 @@
Improve docstrings for the receipts store.


@@ -1 +0,0 @@
Use constants for read-receipts in tests.

Some files were not shown because too many files have changed in this diff