
Compare commits


1 Commit

Author: Action Bot
SHA1: ab0f152504
Message: Version picker added for v1.64 docs
Date: 2023-12-11 14:52:14 +00:00
350 changed files with 7559 additions and 15985 deletions


@@ -27,10 +27,10 @@ which is under the Unlicense licence.
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Passing tests are first */ -}}
{{- /* Failing tests are first */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
@@ -47,6 +47,7 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}
{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
@@ -67,10 +68,11 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}
{{- /* and failing tests are last */ -}}
{{- /* Then passing tests are last */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
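
The reordering above also controls how results render in the GitHub Actions log: the template emits `::group::`/`::endgroup::` workflow commands so each test gets its own collapsible section, with ANSI escape codes colouring the group title. A minimal Python sketch of that convention follows; the test data and the skip icon are made up for illustration, and this is not the template itself:

```python
# Minimal sketch of the ::group:: / ANSI-colour convention used by the
# gotestfmt template above. Test names, durations and the SKIP icon are
# invented; only the ordering and the log commands mirror the template.
results = [
    {"name": "TestFederation", "result": "FAIL", "duration": "1.2s"},
    {"name": "TestLogin", "result": "SKIP", "duration": "0s"},
    {"name": "TestSync", "result": "PASS", "duration": "0.4s"},
]

# Failing tests first, then skipped, then passing -- mirroring the template.
order = {"FAIL": 0, "SKIP": 1, "PASS": 2}
icons = {"FAIL": "\033[0;31m❌", "SKIP": "\033[0;33m🚧", "PASS": "\033[0;32m✅"}

for test in sorted(results, key=lambda t: order.get(t["result"], 0)):
    # ::group:: / ::endgroup:: are GitHub Actions log commands that create
    # collapsible sections; the escape codes set the title colour.
    print(f"::group::{icons[test['result']]} {test['name']}\033[0;37m ({test['duration']})")
    print("::endgroup::")
```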


@@ -1,128 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Calculate the trial jobs to run based on whether we're in a PR or not.
import json
import os
IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For each type of test we only run on Py3.7 on PRs
trial_sqlite_tests = [
{
"python-version": "3.7",
"database": "sqlite",
"extras": "all",
}
]
if not IS_PR:
trial_sqlite_tests.extend(
{
"python-version": version,
"database": "sqlite",
"extras": "all",
}
for version in ("3.8", "3.9", "3.10")
)
trial_postgres_tests = [
{
"python-version": "3.7",
"database": "postgres",
"postgres-version": "10",
"extras": "all",
}
]
if not IS_PR:
trial_postgres_tests.append(
{
"python-version": "3.10",
"database": "postgres",
"postgres-version": "14",
"extras": "all",
}
)
trial_no_extra_tests = [
{
"python-version": "3.7",
"database": "sqlite",
"extras": "",
}
]
print("::group::Calculated trial jobs")
print(
json.dumps(
trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
)
)
print("::endgroup::")
test_matrix = json.dumps(
trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
print(f"::set-output name=trial_test_matrix::{test_matrix}")
# Next, calculate the various sytest jobs.
#
# For each type of test we only run on focal on PRs
sytest_tests = [
{
"sytest-tag": "focal",
},
{
"sytest-tag": "focal",
"postgres": "postgres",
},
{
"sytest-tag": "focal",
"postgres": "multi-postgres",
"workers": "workers",
},
]
if not IS_PR:
sytest_tests.extend(
[
{
"sytest-tag": "testing",
"postgres": "postgres",
},
{
"sytest-tag": "buster",
"postgres": "multi-postgres",
"workers": "workers",
},
]
)
print("::group::Calculated sytest jobs")
print(json.dumps(sytest_tests, indent=4))
print("::endgroup::")
test_matrix = json.dumps(sytest_tests)
print(f"::set-output name=sytest_test_matrix::{test_matrix}")


@@ -1,21 +0,0 @@
#!/bin/bash
#
# wraps `gotestfmt`, hiding output from successful packages unless
# all tests passed.
set -o pipefail
set -e
# tee the test results to a log, whilst also piping them into gotestfmt,
# telling it to hide successful results, so that we can clearly see
# unsuccessful results.
tee complement.log | gotestfmt -hide successful-packages
# gotestfmt will exit non-zero if there were any failures, so if we got to this
# point, we must have had a successful result.
echo "All tests successful; showing all test results"
# Pipe the test results back through gotestfmt, showing all results.
# The log file consists of JSON lines giving the test results, interspersed
# with regular stdout lines (including reports of downloaded packages).
grep '^{"Time":' complement.log | gotestfmt
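
The wrapper depends on `set -o pipefail`: if `gotestfmt` exits non-zero on the first pass, the script stops there, and only on success does it re-read the log, keep the JSON result lines (those starting `{"Time":`), and format everything. A rough Python equivalent of that second pass, assuming `gotestfmt` is on the `PATH` and `complement.log` exists:

```python
# Hedged sketch of the wrapper's second pass: re-feed only the JSON result
# lines from complement.log into gotestfmt, this time showing everything.
import subprocess

with open("complement.log") as log:
    json_lines = "".join(line for line in log if line.startswith('{"Time":'))

subprocess.run(["gotestfmt"], input=json_lines, text=True, check=True)
```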

.ci/scripts/postgres_exec.py Executable file

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import psycopg2
# a very simple replacement for `psql`, to make up for the lack of the postgres client
# libraries in the synapse docker image.
# We use "postgres" as a database because it's bound to exist and the "synapse" one
# doesn't exist yet.
db_conn = psycopg2.connect(
user="postgres", host="localhost", password="postgres", dbname="postgres"
)
db_conn.autocommit = True
cur = db_conn.cursor()
for c in sys.argv[1:]:
cur.execute(c)
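
Because the connection runs with autocommit enabled, each positional argument is executed as a standalone statement, so one invocation can run several commands. A hedged usage sketch mirroring the drop-and-recreate call that appears later in this diff:

```python
# Hedged usage sketch: each argv entry becomes one autocommitted statement,
# so the port-db script below can drop and recreate the database in one call.
import subprocess

subprocess.run(
    [
        ".ci/scripts/postgres_exec.py",
        "DROP DATABASE synapse",
        "CREATE DATABASE synapse",
    ],
    check=True,
)
```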


@@ -32,7 +32,7 @@ else
fi
# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
# Port the SQLite database to postgres so we can check the command works against postgres
echo "+++ Port SQLite3 database to postgres"


@@ -5,8 +5,18 @@
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.
# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
set -ex
apt-get update
apt-get install -y \
python3 python3-dev python3-pip python3-venv pipx \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
export LANG="C.UTF-8"
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
@@ -23,6 +33,12 @@ export VIRTUALENV_NO_DOWNLOAD=1
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.
# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).
sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
@@ -39,7 +55,7 @@ sed -i \
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.
pip install toml wheel
pip install --user toml
REMOVE_DEV_DEPENDENCIES="
import toml
@@ -53,8 +69,8 @@ with open('pyproject.toml', 'w') as f:
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"
pip install poetry==1.2.0
poetry lock
pipx install poetry==1.1.14
~/.local/bin/poetry lock
echo "::group::Patched pyproject.toml"
cat pyproject.toml
@@ -62,3 +78,6 @@ echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
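
The `sed` invocation pins loose specifiers to exact versions: `[~>]=` becomes `==`, and caret bounds are pinned too, except on the `python = "^3.7"` line for the reason given in the comment. A hedged Python rendering of the same substitution, for readers less fluent in sed:

```python
# Hedged Python rendering of the sed pinning logic quoted above:
#   s/[~>]=/==/g               pin "~=" / ">=" bounds to exact versions
#   /^python = "^/!s/\^/==/g   pin caret bounds, except the python constraint
import re

def pin_line(line: str) -> str:
    line = re.sub(r"[~>]=", "==", line)
    if not line.startswith('python = "^'):
        line = line.replace("^", "==")
    return line

assert pin_line('attrs = ">=19.2.0"') == 'attrs = "==19.2.0"'
assert pin_line('python = "^3.7"') == 'python = "^3.7"'
```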


@@ -2,27 +2,27 @@
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - runs the port script on a prepopulated test sqlite db. Checks that the
# return code is zero.
# - reruns the port script on the same sqlite db, targeting the same postgres db.
# Checks that the return code is zero.
# - runs the port script against a new sqlite db. Checks the return code is zero.
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.
set -xe -o pipefail
set -xe
cd "$(dirname "$0")/../.."
echo "--- Generate the signing key"
# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background updates.
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
@@ -45,23 +45,9 @@ rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
# re-create the PostgreSQL database.
psql \
-c "DROP DATABASE synapse" \
-c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py \
"DROP DATABASE synapse" \
"CREATE DATABASE synapse"
echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
echo "--- Create a brand new postgres database from schema"
cp .ci/postgres-config.yaml .ci/postgres-config-unported.yaml
sed -i -e 's/database: synapse/database: synapse_unported/' .ci/postgres-config-unported.yaml
psql -c "CREATE DATABASE synapse_unported"
poetry run update_synapse_database --database-config .ci/postgres-config-unported.yaml --run-background-updates
echo "+++ Comparing ported schema with unported schema"
# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
psql synapse -c "DROP TABLE port_from_sqlite3;"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
# By default, `diff` returns zero if there are no changes and nonzero otherwise
diff -u unported.sql ported.sql | tee schema_diff
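
The final check relies on `diff -u` exiting non-zero when the two dumps differ, which fails the CI step, while `tee` keeps the diff around as an artifact. A hedged Python equivalent using `difflib`, assuming both dump files exist in the working directory:

```python
# Hedged sketch of the final comparison step: fail (like `diff -u`) when the
# ported and unported schema dumps are not identical.
import difflib
import sys

with open("unported.sql") as f:
    unported = f.readlines()
with open("ported.sql") as f:
    ported = f.readlines()

diff = list(difflib.unified_diff(unported, ported, "unported.sql", "ported.sql"))
sys.stdout.writelines(diff)
if diff:
    sys.exit(1)
```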


@@ -4,14 +4,8 @@
# things to include
!docker
!synapse
!rust
!README.rst
!pyproject.toml
!poetry.lock
!Cargo.lock
!build_rust.py
rust/target
synapse/*.so
**/__pycache__


@@ -5,7 +5,7 @@
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.
@@ -26,19 +26,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "1.2.0"
poetry-version: "1.2.0b1"
extras: "all"
# Dump installed versions for debugging.
- run: poetry run pip list > before.txt
@@ -60,14 +53,6 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
@@ -84,12 +69,6 @@ jobs:
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
# We nuke the local copy, as we've installed synapse into the virtualenv
# (rather than use an editable install, which we no longer support). If we
# don't do this then python can't find the native lib.
- run: rm -rf synapse/
- run: python -m twisted.trial --jobs=2 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
@@ -134,14 +113,6 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
run: rm /src/poetry.lock
@@ -164,42 +135,11 @@ jobs:
/logs/**/*.log*
complement:
if: "${{ !failure() && !cancelled() }}"
runs-on: ubuntu-latest
# TODO: run complement (as with twisted trunk, see #12473).
strategy:
fail-fast: false
matrix:
include:
- arrangement: monolith
database: SQLite
- arrangement: monolith
database: Postgres
- arrangement: workers
database: Postgres
steps:
- name: Run actions/checkout@v2 for synapse
uses: actions/checkout@v2
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- run: |
set -o pipefail
TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
shell: bash
name: Run Complement Tests
# Open an issue if the build fails, so we know about it.
# Only do this if we're not experimenting with this action in a PR.
# open an issue if the build fails, so we know about it.
open-issue:
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
if: failure()
needs:
# TODO: should mypy be included here? It feels more brittle than the other two.
- mypy
@@ -216,3 +156,4 @@ jobs:
with:
update_existing: true
filename: .ci/latest_deps_build_failed_issue_template.md


@@ -15,7 +15,7 @@ on:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: write
@@ -89,67 +89,9 @@ jobs:
name: debs
path: debs/*
build-wheels:
name: Build wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, macos-10.15]
is_pr:
- ${{ startsWith(github.ref, 'refs/pull/') }}
exclude:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-10.15"
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v3
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0
# Only build a single wheel in CI.
- name: Set env vars.
run: |
echo "CIBW_BUILD="cp37-manylinux_x86_64"" >> $GITHUB_ENV
if: startsWith(github.ref, 'refs/pull/')
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
env:
# Skip testing for platforms which various libraries don't have wheels
# for, and so need extra build deps.
CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
- uses: actions/upload-artifact@v3
with:
name: Wheel
path: ./wheelhouse/*.whl
build-sdist:
name: Build sdist
runs-on: ubuntu-latest
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- run: pip install build
- name: Build sdist
run: python -m build --sdist
- uses: actions/upload-artifact@v2
with:
name: Sdist
path: dist/*.tar.gz
name: "Build pypi distribution files"
uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
# if it's a tag, create a release and attach the artifacts to it
attach-assets:
@@ -157,7 +99,6 @@ jobs:
if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
needs:
- build-debs
- build-wheels
- build-sdist
runs-on: ubuntu-latest
steps:


@@ -10,33 +10,14 @@ concurrency:
cancel-in-progress: true
jobs:
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
# don't modify Rust code.
changes:
runs-on: ubuntu-latest
outputs:
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
steps:
- uses: dorny/paths-filter@v2
id: filter
# We only check on PRs
if: startsWith(github.ref, 'refs/pull/')
with:
filters: |
rust:
- 'rust/**'
- 'Cargo.toml'
check-sampleconfig:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: matrix-org/setup-python-poetry@v1
with:
extras: "all"
- run: poetry run scripts-dev/generate_sample_config.sh --check
- run: poetry run scripts-dev/config-lint.sh
- run: pip install .
- run: scripts-dev/generate_sample_config.sh --check
- run: scripts-dev/config-lint.sh
check-schema-delta:
runs-on: ubuntu-latest
@@ -72,111 +53,61 @@ jobs:
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-pydantic:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.pull_request.head.sha }}
- uses: matrix-org/setup-python-poetry@v1
with:
extras: "all"
- run: poetry run scripts-dev/check_pydantic_models.py
lint-clippy:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
components: clippy
- uses: Swatinem/rust-cache@v2
- run: cargo clippy
lint-rustfmt:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
components: rustfmt
- uses: Swatinem/rust-cache@v2
- run: cargo fmt --check
# Dummy step to gate other tests on without repeating the whole list
linting-done:
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
needs:
- lint
- lint-crlf
- lint-newsfile
- lint-pydantic
- check-sampleconfig
- check-schema-delta
- lint-clippy
- lint-rustfmt
needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig, check-schema-delta]
runs-on: ubuntu-latest
steps:
- run: "true"
calculate-test-jobs:
trial:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- id: get-matrix
run: .ci/scripts/calculate_jobs.py
outputs:
trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
trial:
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: calculate-test-jobs
runs-on: ubuntu-latest
strategy:
matrix:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
python-version: ["3.7", "3.8", "3.9", "3.10"]
database: ["sqlite"]
extras: ["all"]
include:
# Newest Python without optional deps
- python-version: "3.10"
extras: ""
# Oldest Python with PostgreSQL
- python-version: "3.7"
database: "postgres"
postgres-version: "10"
extras: "all"
# Newest Python with newest PostgreSQL
- python-version: "3.10"
database: "postgres"
postgres-version: "14"
extras: "all"
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.job.postgres-version }}
postgres:${{ matrix.postgres-version }}
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.job.python-version }}
extras: ${{ matrix.job.extras }}
python-version: ${{ matrix.python-version }}
extras: ${{ matrix.extras }}
- name: Await PostgreSQL
if: ${{ matrix.job.postgres-version }}
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: poetry run trial --jobs=2 tests
env:
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
@@ -197,54 +128,16 @@ jobs:
# Note: sqlite only; no postgres
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-20.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
- name: Test with old deps
uses: docker://ubuntu:focal # For old python and sqlite
# Note: focal seems to be using 3.8, but the oldest is 3.7?
# See https://github.com/matrix-org/synapse/issues/12343
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
- run: |
sudo apt-get -qq install build-essential libffi-dev python-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@v4
with:
python-version: '3.7'
# Calculating the old-deps actually takes a bunch of time, so we cache the
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
# otherwise the `poetry install` step will error due to the poetry.lock
# file being outdated.
#
# This caches the output of `Prepare old deps`, which should generate the
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
- uses: actions/cache@v3
id: cache-poetry-old-deps
name: Cache poetry.lock
with:
path: |
poetry.lock
pyproject.toml
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
- name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
run: .ci/scripts/prepare_old_deps.sh
# We only now install poetry so that `setup-python-poetry` caches the
# right poetry.lock's dependencies.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: '3.7'
extras: "all test"
- run: poetry run trial -j2 tests
workdir: /github/workspace
entrypoint: .ci/scripts/test_old_deps.sh
- name: Dump logs
# Logs are most useful when the command fails, so always include them.
if: ${{ always() }}
@@ -293,37 +186,50 @@ jobs:
sytest:
if: ${{ !failure() && !cancelled() }}
needs: calculate-test-jobs
needs: linting-done
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
volumes:
- ${{ github.workspace }}:/src
env:
SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.job.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
WORKERS: ${{ matrix.job.workers && 1 }}
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
POSTGRES: ${{ matrix.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
TOP: ${{ github.workspace }}
strategy:
fail-fast: false
matrix:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
include:
- sytest-tag: focal
- sytest-tag: focal
postgres: postgres
- sytest-tag: testing
postgres: postgres
- sytest-tag: focal
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: postgres
workers: workers
redis: redis
steps:
- uses: actions/checkout@v2
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
@@ -334,7 +240,7 @@ jobs:
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*
@@ -362,22 +268,19 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
extras: "postgres"
- run: .ci/scripts/test_export_data_command.sh
env:
PGHOST: localhost
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
portdb:
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
env:
TOP: ${{ github.workspace }}
strategy:
matrix:
include:
@@ -403,27 +306,12 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- run: sudo apt-get -qq install xmlsec1
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
extras: "postgres"
- run: .ci/scripts/test_synapse_port_db.sh
id: run_tester_script
env:
PGHOST: localhost
PGUSER: postgres
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Upload schema differences"
uses: actions/upload-artifact@v3
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
with:
name: Schema dumps
path: |
unported.sql
ported.sql
schema_diff
complement:
if: "${{ !failure() && !cancelled() }}"
@@ -440,8 +328,29 @@ jobs:
- arrangement: monolith
database: Postgres
- arrangement: workers
database: Postgres
steps:
- name: Run actions/checkout@v2 for synapse
uses: actions/checkout@v2
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- run: |
set -o pipefail
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
shell: bash
name: Run Complement Tests
# XXX When complement with workers is stable, move this back into the standard
# "complement" matrix above.
#
# See https://github.com/matrix-org/synapse/issues/13161
complement-workers:
if: "${{ !failure() && !cancelled() }}"
needs: linting-done
runs-on: ubuntu-latest
steps:
- name: Run actions/checkout@v2 for synapse
@@ -449,52 +358,29 @@ jobs:
with:
path: synapse
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- run: |
set -o pipefail
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
POSTGRES=1 WORKERS=1 COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
shell: bash
name: Run Complement Tests
cargo-test:
if: ${{ needs.changes.outputs.rust == 'true' }}
runs-on: ubuntu-latest
needs:
- linting-done
- changes
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.58.1
override: true
- uses: Swatinem/rust-cache@v2
- run: cargo test
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
tests-done:
if: ${{ always() }}
needs:
- check-sampleconfig
- lint
- lint-crlf
- lint-newsfile
- trial
- trial-olddeps
- sytest
- export-data
- portdb
- complement
- cargo-test
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@v2
@@ -502,7 +388,5 @@ jobs:
needs: ${{ toJSON(needs) }}
# The newsfile lint may be skipped on non-PR builds
# Cargo test is skipped if there are no changes to Rust code
skippable: |
skippable:
lint-newsfile
cargo-test


@@ -1,28 +0,0 @@
name: Move new issues into the issue triage board
on:
issues:
types: [ opened ]
jobs:
add_new_issues:
name: Add new issues to the triage board
runs-on: ubuntu-latest
steps:
- uses: octokit/graphql-action@v2.x
id: add_to_project
with:
headers: '{"GraphQL-Features": "projects_next_graphql"}'
query: |
mutation add_to_project($projectid:ID!,$contentid:ID!) {
addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
item {
id
}
}
}
projectid: ${{ env.PROJECT_ID }}
contentid: ${{ github.event.issue.node_id }}
env:
PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}


@@ -1,44 +0,0 @@
name: Move labelled issues to correct projects
on:
issues:
types: [ labeled ]
jobs:
move_needs_info:
name: Move X-Needs-Info on the triage board
runs-on: ubuntu-latest
if: >
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
steps:
- uses: octokit/graphql-action@v2.x
id: add_to_project
with:
headers: '{"GraphQL-Features": "projects_next_graphql"}'
query: |
mutation {
updateProjectV2ItemFieldValue(
input: {
projectId: $projectid
itemId: $contentid
fieldId: $fieldid
value: {
singleSelectOptionId: "Todo"
}
}
) {
projectV2Item {
id
}
}
}
projectid: ${{ env.PROJECT_ID }}
contentid: ${{ github.event.issue.node_id }}
fieldid: ${{ env.FIELD_ID }}
optionid: ${{ env.OPTION_ID }}
env:
PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4"
OPTION_ID: "ba22e43c"


@@ -16,14 +16,6 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
@@ -42,14 +34,6 @@ jobs:
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
@@ -82,14 +66,6 @@ jobs:
steps:
- uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
# but the sytest-synapse container expects it to be in /venv/.
@@ -161,7 +137,7 @@ jobs:
- run: |
set -o pipefail
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
shell: bash
name: Run Complement Tests

.gitignore vendored

@@ -15,9 +15,8 @@ _trial_temp*/
.DS_Store
__pycache__/
# We do want the poetry and cargo lockfile.
# We do want the poetry lockfile.
!poetry.lock
!Cargo.lock
# stuff that is likely to exist when you run a server locally
/*.db
@@ -61,10 +60,3 @@ book/
# complement
/complement-*
/master.tar.gz
# rust
/target/
/synapse/*.so
# Poetry will create a setup.py, which we don't want to include.
/setup.py


@@ -1,422 +1,3 @@
Synapse 1.68.0 (2022-09-27)
===========================
Please note that Synapse will now refuse to start if configured to use a version of SQLite older than 3.27.
In addition, please note that installing Synapse from a source checkout now requires a recent Rust compiler.
Those using packages will not be affected. On most platforms, installing with `pip install matrix-synapse` will not be affected.
See the [upgrade notes](https://matrix-org.github.io/synapse/v1.68/upgrade.html#upgrading-to-v1680).
Bugfixes
--------
- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909))
Synapse 1.68.0rc2 (2022-09-23)
==============================
Bugfixes
--------
- Fix building from packaged sdist. Broken in v1.68.0rc1. ([\#13866](https://github.com/matrix-org/synapse/issues/13866))
Internal Changes
----------------
- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850))
- Lower minimum supported rustc version to 1.58.1. ([\#13857](https://github.com/matrix-org/synapse/issues/13857))
- Lock Rust dependencies' versions. ([\#13858](https://github.com/matrix-org/synapse/issues/13858))
Synapse 1.68.0rc1 (2022-09-20)
==============================
Features
--------
- Keep track of when we fail to process a pulled event over federation so we can intelligently back off in the future. ([\#13589](https://github.com/matrix-org/synapse/issues/13589), [\#13814](https://github.com/matrix-org/synapse/issues/13814))
- Add an [admin API endpoint to fetch messages within a particular window of time](https://matrix-org.github.io/synapse/v1.68/admin_api/rooms.html#room-messages-api). ([\#13672](https://github.com/matrix-org/synapse/issues/13672))
- Add an [admin API endpoint to find a user based on their external ID in an auth provider](https://matrix-org.github.io/synapse/v1.68/admin_api/user_admin_api.html#find-a-user-based-on-their-id-in-an-auth-provider). ([\#13810](https://github.com/matrix-org/synapse/issues/13810))
- Cancel the processing of key query requests when they time out. ([\#13680](https://github.com/matrix-org/synapse/issues/13680))
- Improve validation of request bodies for the following client-server API endpoints: [`/account/3pid/msisdn/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidmsisdnrequesttoken), [`/org.matrix.msc3720/account_status`](https://github.com/matrix-org/matrix-spec-proposals/blob/babolivier/user_status/proposals/3720-account-status.md#post-_matrixclientv1account_status), [`/account/3pid/add`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidadd), [`/account/3pid/bind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidbind), [`/account/3pid/delete`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3piddelete) and [`/account/3pid/unbind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidunbind). ([\#13687](https://github.com/matrix-org/synapse/issues/13687), [\#13736](https://github.com/matrix-org/synapse/issues/13736))
- Document the timestamp when a user accepts the consent, if [consent tracking](https://matrix-org.github.io/synapse/latest/consent_tracking.html) is used. ([\#13741](https://github.com/matrix-org/synapse/issues/13741))
- Add a `listeners[x].request_id_header` configuration option to specify which request header to extract and use as the request ID in order to correlate requests from a reverse proxy. ([\#13801](https://github.com/matrix-org/synapse/issues/13801))
Bugfixes
--------
- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`). ([\#13506](https://github.com/matrix-org/synapse/issues/13506))
- Fix a long-standing bug where previously rejected events could end up in room state because they pass auth checks given the current state of the room. ([\#13723](https://github.com/matrix-org/synapse/issues/13723))
- Fix a long-standing bug where Synapse fails to start if a signing key file contains an empty line. ([\#13738](https://github.com/matrix-org/synapse/issues/13738))
- Fix a long-standing bug where Synapse would fail to handle malformed user IDs or room aliases gracefully in certain cases. ([\#13746](https://github.com/matrix-org/synapse/issues/13746))
- Fix a long-standing bug where device lists would remain cached when remote users left and rejoined the last room shared with the local homeserver. ([\#13749](https://github.com/matrix-org/synapse/issues/13749), [\#13826](https://github.com/matrix-org/synapse/issues/13826))
- Fix a long-standing bug that could cause stale caches in some rare cases on the first startup of Synapse with replication. ([\#13766](https://github.com/matrix-org/synapse/issues/13766))
- Fix a long-standing spec compliance bug where Synapse would accept a trailing slash on the end of `/get_missing_events` federation requests. ([\#13789](https://github.com/matrix-org/synapse/issues/13789))
- Delete associated data from `event_failed_pull_attempts`, `insertion_events`, `insertion_event_extremities` when purging the room. ([\#13825](https://github.com/matrix-org/synapse/issues/13825))
Improved Documentation
----------------------
- Note that `libpq` is required on ARM-based Macs. ([\#13480](https://github.com/matrix-org/synapse/issues/13480))
- Fix a mistake in the config manual introduced in Synapse 1.22.0: the `event_cache_size` _is_ scaled by `caches.global_factor`. ([\#13726](https://github.com/matrix-org/synapse/issues/13726))
- Fix a typo in the documentation for the login ratelimiting configuration. ([\#13727](https://github.com/matrix-org/synapse/issues/13727))
- Define Synapse's compatibility policy for SQLite versions. ([\#13728](https://github.com/matrix-org/synapse/issues/13728))
- Add docs for the common fix of deleting the `matrix_synapse.egg-info/` directory for fixing Python dependency problems. ([\#13785](https://github.com/matrix-org/synapse/issues/13785))
- Update request log format documentation to mention the format used when the authenticated user is controlling another user. ([\#13794](https://github.com/matrix-org/synapse/issues/13794))
Deprecations and Removals
-------------------------
- Synapse will now refuse to start if configured to use SQLite < 3.27. ([\#13760](https://github.com/matrix-org/synapse/issues/13760))
- Don't include redundant `prev_state` in new events. Contributed by Denis Kariakin (@dakariakin). ([\#13791](https://github.com/matrix-org/synapse/issues/13791))
Internal Changes
----------------
- Add a stub Rust crate. ([\#12595](https://github.com/matrix-org/synapse/issues/12595), [\#13734](https://github.com/matrix-org/synapse/issues/13734), [\#13735](https://github.com/matrix-org/synapse/issues/13735), [\#13743](https://github.com/matrix-org/synapse/issues/13743), [\#13763](https://github.com/matrix-org/synapse/issues/13763), [\#13769](https://github.com/matrix-org/synapse/issues/13769), [\#13778](https://github.com/matrix-org/synapse/issues/13778))
- Bump the minimum dependency of `matrix_common` to 1.3.0 to make use of the `MXCUri` class. Use `MXCUri` to simplify media retention test code. ([\#13162](https://github.com/matrix-org/synapse/issues/13162))
- Add and populate the `event_stream_ordering` column on the `receipts` table for future optimisation of push action processing. Contributed by Nick @ Beeper (@fizzadar). ([\#13703](https://github.com/matrix-org/synapse/issues/13703))
- Rename the `EventFormatVersions` enum values so that they line up with room version numbers. ([\#13706](https://github.com/matrix-org/synapse/issues/13706))
- Update trial old deps CI to use Poetry 1.2.0. ([\#13707](https://github.com/matrix-org/synapse/issues/13707), [\#13725](https://github.com/matrix-org/synapse/issues/13725))
- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13714](https://github.com/matrix-org/synapse/issues/13714), [\#13717](https://github.com/matrix-org/synapse/issues/13717), [\#13718](https://github.com/matrix-org/synapse/issues/13718))
- Fix typechecking with latest types-jsonschema. ([\#13724](https://github.com/matrix-org/synapse/issues/13724))
- Strip number suffix from instance name to consolidate services that traces are spread over. ([\#13729](https://github.com/matrix-org/synapse/issues/13729))
- Instrument `get_metadata_for_events` for understandable traces in Jaeger. ([\#13730](https://github.com/matrix-org/synapse/issues/13730))
- Remove old queries to join room memberships to current state events. Contributed by Nick @ Beeper (@fizzadar). ([\#13745](https://github.com/matrix-org/synapse/issues/13745))
- Avoid raising an error due to malformed user IDs in `get_current_hosts_in_room`. Malformed user IDs cannot currently join a room, so this error would not be hit. ([\#13748](https://github.com/matrix-org/synapse/issues/13748))
- Update the docstrings for `get_users_in_room` and `get_current_hosts_in_room` to explain the impact of partial state. ([\#13750](https://github.com/matrix-org/synapse/issues/13750))
- Use an additional database query when persisting receipts. ([\#13752](https://github.com/matrix-org/synapse/issues/13752))
- Preparatory work for storing thread IDs for notifications and receipts. ([\#13753](https://github.com/matrix-org/synapse/issues/13753))
- Re-type hint some collections as read-only. ([\#13754](https://github.com/matrix-org/synapse/issues/13754))
- Remove unused Prometheus recording rules from `synapse-v2.rules` and add comments describing where the rest are used. ([\#13756](https://github.com/matrix-org/synapse/issues/13756))
- Add a check for editable installs if the Rust library needs rebuilding. ([\#13759](https://github.com/matrix-org/synapse/issues/13759))
- Tag traces with the instance name to be able to easily jump into the right logs and filter traces by instance. ([\#13761](https://github.com/matrix-org/synapse/issues/13761))
- Concurrently fetch room push actions when calculating badge counts. Contributed by Nick @ Beeper (@fizzadar). ([\#13765](https://github.com/matrix-org/synapse/issues/13765))
- Update the script which makes full schema dumps. ([\#13770](https://github.com/matrix-org/synapse/issues/13770))
- Deduplicate `is_server_notices_room`. ([\#13780](https://github.com/matrix-org/synapse/issues/13780))
- Simplify the dependency DAG in the tests workflow. ([\#13784](https://github.com/matrix-org/synapse/issues/13784))
- Remove an old, incorrect migration file. ([\#13788](https://github.com/matrix-org/synapse/issues/13788))
- Remove unused method in `synapse.api.auth.Auth`. ([\#13795](https://github.com/matrix-org/synapse/issues/13795))
- Fix a memory leak when running the unit tests. ([\#13798](https://github.com/matrix-org/synapse/issues/13798))
- Use partial indices on SQLite. ([\#13802](https://github.com/matrix-org/synapse/issues/13802))
- Check that portdb generates the same postgres schema as that in the source tree. ([\#13808](https://github.com/matrix-org/synapse/issues/13808))
- Fix Docker build when Rust .so has been built locally first. ([\#13811](https://github.com/matrix-org/synapse/issues/13811))
- Complement: Initialise the Postgres database directly inside the target image instead of the base Postgres image to fix building using Buildah. ([\#13819](https://github.com/matrix-org/synapse/issues/13819))
- Support providing an index predicate clause when doing upserts. ([\#13822](https://github.com/matrix-org/synapse/issues/13822))
- Minor speedups to linting in CI. ([\#13827](https://github.com/matrix-org/synapse/issues/13827))
Synapse 1.67.0 (2022-09-13)
===========================
This release removes using the deprecated direct TCP replication configuration
for workers. Server admins should use Redis instead. See the [upgrade
notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).
The minimum version of `poetry` supported for managing source checkouts is now
1.2.0.
**Notice:** from the next major release (1.68.0) installing Synapse from a source
checkout will require a recent Rust compiler. Those using packages or
`pip install matrix-synapse` will not be affected. See the [upgrade
notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).
**Notice:** from the next major release (1.68.0), running Synapse with a SQLite
database will require SQLite version 3.27.0 or higher. (The [current minimum
version is SQLite 3.22.0](https://github.com/matrix-org/synapse/blob/release-v1.67/synapse/storage/engines/sqlite.py#L69-L78).)
See [#12983](https://github.com/matrix-org/synapse/issues/12983) and the [upgrade notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670) for more details.
No significant changes since 1.67.0rc1.
Synapse 1.67.0rc1 (2022-09-06)
==============================
Features
--------
- Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option. ([\#13614](https://github.com/matrix-org/synapse/issues/13614))
- Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically. ([\#13615](https://github.com/matrix-org/synapse/issues/13615))
- Improve performance of sending messages in rooms with thousands of local users. ([\#13634](https://github.com/matrix-org/synapse/issues/13634))
Bugfixes
--------
- Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a Sqlite database. ([\#13509](https://github.com/matrix-org/synapse/issues/13509))
- Fix bug that user cannot `/forget` rooms after the last member has left the room. ([\#13546](https://github.com/matrix-org/synapse/issues/13546))
- Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room. ([\#13583](https://github.com/matrix-org/synapse/issues/13583))
- Fix loading the current stream position behind the actual position. ([\#13585](https://github.com/matrix-org/synapse/issues/13585))
- Fix a longstanding bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL. ([\#13616](https://github.com/matrix-org/synapse/issues/13616))
- Fix the running of [MSC1763](https://github.com/matrix-org/matrix-spec-proposals/pull/1763) retention purge_jobs in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper. ([\#13632](https://github.com/matrix-org/synapse/issues/13632))
- Fix a long-standing bug that downloaded media for URL previews was not deleted while database background updates were running. ([\#13657](https://github.com/matrix-org/synapse/issues/13657))
- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp. ([\#13658](https://github.com/matrix-org/synapse/issues/13658))
- Fix bug where we wedge media plugins if clients disconnect early. Introduced in v1.22.0. ([\#13660](https://github.com/matrix-org/synapse/issues/13660))
- Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`. ([\#13683](https://github.com/matrix-org/synapse/issues/13683))
- Fix a bug introduced in Synapse 1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled. ([\#13694](https://github.com/matrix-org/synapse/issues/13694))
Updates to the Docker image
---------------------------
- Update docker image to use a stable version of poetry. ([\#13688](https://github.com/matrix-org/synapse/issues/13688))
Improved Documentation
----------------------
- Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse. ([\#13602](https://github.com/matrix-org/synapse/issues/13602))
- Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used. ([\#13617](https://github.com/matrix-org/synapse/issues/13617))
- Improve documentation around user registration. ([\#13640](https://github.com/matrix-org/synapse/issues/13640))
- Remove documentation of legacy `frontend_proxy` worker app. ([\#13645](https://github.com/matrix-org/synapse/issues/13645))
- Clarify documentation that HTTP replication traffic can be protected with a shared secret. ([\#13656](https://github.com/matrix-org/synapse/issues/13656))
- Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers. ([\#13665](https://github.com/matrix-org/synapse/issues/13665))
- Update docs to make enabling metrics more clear. ([\#13678](https://github.com/matrix-org/synapse/issues/13678))
- Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas. ([\#13701](https://github.com/matrix-org/synapse/issues/13701))
Deprecations and Removals
-------------------------
- Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13241](https://github.com/matrix-org/synapse/issues/13241))
- Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13569](https://github.com/matrix-org/synapse/issues/13569))
- Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse 1.18.0. Workers now require using Redis. ([\#13647](https://github.com/matrix-org/synapse/issues/13647))
- Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13653](https://github.com/matrix-org/synapse/issues/13653), [\#13692](https://github.com/matrix-org/synapse/issues/13692))
Internal Changes
----------------
- Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process. ([\#13483](https://github.com/matrix-org/synapse/issues/13483))
- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13540](https://github.com/matrix-org/synapse/issues/13540))
- Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar). ([\#13573](https://github.com/matrix-org/synapse/issues/13573), [\#13600](https://github.com/matrix-org/synapse/issues/13600))
- Optimize how Synapse calculates domains to fetch from during backfill. ([\#13575](https://github.com/matrix-org/synapse/issues/13575))
- Comment about a better future where we can get the state diff between two events. ([\#13586](https://github.com/matrix-org/synapse/issues/13586))
- Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger. ([\#13588](https://github.com/matrix-org/synapse/issues/13588))
- Improve performance of `@cachedList`. ([\#13591](https://github.com/matrix-org/synapse/issues/13591))
- Minor speed up of fetching large numbers of push rules. ([\#13592](https://github.com/matrix-org/synapse/issues/13592))
- Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13597](https://github.com/matrix-org/synapse/issues/13597))
- Rename `event_map` to `unpersisted_events` when computing the auth differences. ([\#13603](https://github.com/matrix-org/synapse/issues/13603))
- Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function. ([\#13605](https://github.com/matrix-org/synapse/issues/13605))
- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request. ([\#13606](https://github.com/matrix-org/synapse/issues/13606))
- Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function. ([\#13608](https://github.com/matrix-org/synapse/issues/13608))
- Drop unused column `application_services_state.last_txn`. ([\#13627](https://github.com/matrix-org/synapse/issues/13627))
- Improve readability of Complement CI logs by printing failure results last. ([\#13639](https://github.com/matrix-org/synapse/issues/13639))
- Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods. ([\#13662](https://github.com/matrix-org/synapse/issues/13662))
- Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats. ([\#13671](https://github.com/matrix-org/synapse/issues/13671))
- Add some logging to help track down #13444. ([\#13679](https://github.com/matrix-org/synapse/issues/13679))
- Update poetry lock file for v1.2.0. ([\#13689](https://github.com/matrix-org/synapse/issues/13689))
- Add cache to `is_partial_state_room`. ([\#13693](https://github.com/matrix-org/synapse/issues/13693))
- Update the Grafana dashboard that is included with Synapse in the `contrib` directory. ([\#13697](https://github.com/matrix-org/synapse/issues/13697))
- Only run trial CI on all python versions on non-PRs. ([\#13698](https://github.com/matrix-org/synapse/issues/13698))
- Fix typechecking with latest types-jsonschema. ([\#13712](https://github.com/matrix-org/synapse/issues/13712))
- Reduce number of CI checks we run for PRs. ([\#13713](https://github.com/matrix-org/synapse/issues/13713))
Synapse 1.66.0 (2022-08-31)
===========================
No significant changes since 1.66.0rc2.
This release removes the ability for homeservers to delegate email ownership
verification and password reset confirmation to identity servers. This removal
was originally planned for Synapse 1.64, but was later deferred until now. See
the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
Deployments with multiple workers should note that the direct TCP replication
configuration was deprecated in Synapse 1.18.0 and will be removed in Synapse
v1.67.0. In particular, the TCP `replication` [listener](https://matrix-org.github.io/synapse/v1.66/usage/configuration/config_documentation.html#listeners)
type (not to be confused with the `replication` resource on the `http` listener
type) and the `worker_replication_port` config option will be removed.
To migrate to Redis, add the [`redis` config](https://matrix-org.github.io/synapse/v1.66/workers.html#shared-configuration),
then remove the TCP `replication` listener from config of the master and
`worker_replication_port` from worker config. Note that a HTTP listener with a
`replication` resource is still required. See the
[worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html)
for more details.
Synapse 1.66.0rc2 (2022-08-30)
==============================
Bugfixes
--------
- Fix a bug introduced in Synapse 1.66.0rc1 where the new rate limit metrics were misreported (`synapse_rate_limit_sleep_affected_hosts`, `synapse_rate_limit_reject_affected_hosts`). ([\#13649](https://github.com/matrix-org/synapse/issues/13649))
Synapse 1.66.0rc1 (2022-08-23)
==============================
Features
--------
- Improve validation of request bodies for the following client-server API endpoints: [`/account/password`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpassword), [`/account/password/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountpasswordemailrequesttoken), [`/account/deactivate`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3accountdeactivate) and [`/account/3pid/email/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidemailrequesttoken). ([\#13188](https://github.com/matrix-org/synapse/issues/13188), [\#13563](https://github.com/matrix-org/synapse/issues/13563))
- Add forgotten status to [Room Details Admin API](https://matrix-org.github.io/synapse/latest/admin_api/rooms.html#room-details-api). ([\#13503](https://github.com/matrix-org/synapse/issues/13503))
- Add an experimental implementation for [MSC3852 (Expose user agents on `Device`)](https://github.com/matrix-org/matrix-spec-proposals/pull/3852). ([\#13549](https://github.com/matrix-org/synapse/issues/13549))
- Add `org.matrix.msc2716v4` experimental room version with updated content fields. Part of [MSC2716 (Importing history)](https://github.com/matrix-org/matrix-spec-proposals/pull/2716). ([\#13551](https://github.com/matrix-org/synapse/issues/13551))
- Add support for compression to federation responses. ([\#13537](https://github.com/matrix-org/synapse/issues/13537))
- Improve performance of sending messages in rooms with thousands of local users. ([\#13522](https://github.com/matrix-org/synapse/issues/13522), [\#13547](https://github.com/matrix-org/synapse/issues/13547))
Bugfixes
--------
- Faster room joins: make `/joined_members` block whilst the room has partial state. ([\#13514](https://github.com/matrix-org/synapse/issues/13514))
- Fix a bug introduced in Synapse 1.21.0 where the [`/event_reports` Admin API](https://matrix-org.github.io/synapse/develop/admin_api/event_reports.html) could return a total count which was larger than the number of results you can actually query for. ([\#13525](https://github.com/matrix-org/synapse/issues/13525))
- Fix a bug introduced in Synapse 1.52.0 where sending server notices fails if `max_avatar_size` or `allowed_avatar_mimetypes` is set and not `system_mxid_avatar_url`. ([\#13566](https://github.com/matrix-org/synapse/issues/13566))
- Fix a bug where the `opentracing.force_tracing_for_users` config option would not apply to [`/sendToDevice`](https://spec.matrix.org/v1.3/client-server-api/#put_matrixclientv3sendtodeviceeventtypetxnid) and [`/keys/upload`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3keysupload) requests. ([\#13574](https://github.com/matrix-org/synapse/issues/13574))
Improved Documentation
----------------------
- Add `openssl` example for generating registration HMAC digest. ([\#13472](https://github.com/matrix-org/synapse/issues/13472))
- Tidy up Synapse's README. ([\#13491](https://github.com/matrix-org/synapse/issues/13491))
- Document that event purging related to the `redaction_retention_period` config option is executed only every 5 minutes. ([\#13492](https://github.com/matrix-org/synapse/issues/13492))
- Add a warning to retention documentation regarding the possibility of database corruption. ([\#13497](https://github.com/matrix-org/synapse/issues/13497))
- Document that the `DOCKER_BUILDKIT=1` flag is needed to build the docker image. ([\#13515](https://github.com/matrix-org/synapse/issues/13515))
- Add missing links in `user_consent` section of configuration manual. ([\#13536](https://github.com/matrix-org/synapse/issues/13536))
- Fix the doc and some warnings that were referring to the nonexistent `custom_templates_directory` setting (instead of `custom_template_directory`). ([\#13538](https://github.com/matrix-org/synapse/issues/13538))
Deprecations and Removals
-------------------------
- Remove the ability for homeservers to delegate email ownership verification
and password reset confirmation to identity servers. See [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.
Internal Changes
----------------
### Faster room joins
- Update the rejected state of events during de-partial-stating. ([\#13459](https://github.com/matrix-org/synapse/issues/13459))
- Avoid blocking lazy-loading `/sync`s during partial joins due to remote memberships. Pull remote memberships from auth events instead of the room state. ([\#13477](https://github.com/matrix-org/synapse/issues/13477))
- Refuse to start when faster joins is enabled on a deployment with workers, since worker configurations are not currently supported. ([\#13531](https://github.com/matrix-org/synapse/issues/13531))
### Metrics and tracing
- Allow use of both `@trace` and `@tag_args` stacked on the same function. ([\#13453](https://github.com/matrix-org/synapse/issues/13453))
- Instrument the federation/backfill part of `/messages` for understandable traces in Jaeger. ([\#13489](https://github.com/matrix-org/synapse/issues/13489))
- Instrument `FederationStateIdsServlet` (`/state_ids`) for understandable traces in Jaeger. ([\#13499](https://github.com/matrix-org/synapse/issues/13499), [\#13554](https://github.com/matrix-org/synapse/issues/13554))
- Track HTTP response times over 10 seconds from `/messages` (`synapse_room_message_list_rest_servlet_response_time_seconds`). ([\#13533](https://github.com/matrix-org/synapse/issues/13533))
- Add metrics to track how the rate limiter is affecting requests (sleep/reject). ([\#13534](https://github.com/matrix-org/synapse/issues/13534), [\#13541](https://github.com/matrix-org/synapse/issues/13541))
- Add metrics to time how long it takes us to do backfill processing (`synapse_federation_backfill_processing_before_time_seconds`, `synapse_federation_backfill_processing_after_time_seconds`). ([\#13535](https://github.com/matrix-org/synapse/issues/13535), [\#13584](https://github.com/matrix-org/synapse/issues/13584))
- Add metrics to track rate limiter queue timing (`synapse_rate_limit_queue_wait_time_seconds`). ([\#13544](https://github.com/matrix-org/synapse/issues/13544))
- Update metrics to track `/messages` response time by room size. ([\#13545](https://github.com/matrix-org/synapse/issues/13545))
### Everything else
- Refactor methods in `synapse.api.auth.Auth` to use `Requester` objects everywhere instead of user IDs. ([\#13024](https://github.com/matrix-org/synapse/issues/13024))
- Clean-up tests for notifications. ([\#13471](https://github.com/matrix-org/synapse/issues/13471))
- Add some miscellaneous comments to document sync, especially around `compute_state_delta`. ([\#13474](https://github.com/matrix-org/synapse/issues/13474))
- Use literals in place of `HTTPStatus` constants in tests. ([\#13479](https://github.com/matrix-org/synapse/issues/13479), [\#13488](https://github.com/matrix-org/synapse/issues/13488))
- Add comments about how event push actions are rotated. ([\#13485](https://github.com/matrix-org/synapse/issues/13485))
- Modify HTML template content to better support mobile devices' screen sizes. ([\#13493](https://github.com/matrix-org/synapse/issues/13493))
- Add a linter script which will reject non-strict types in Pydantic models. ([\#13502](https://github.com/matrix-org/synapse/issues/13502))
- Reduce the number of tests using legacy TCP replication. ([\#13543](https://github.com/matrix-org/synapse/issues/13543))
- Allow specifying additional request fields when using the `HomeServerTestCase.login` helper method. ([\#13549](https://github.com/matrix-org/synapse/issues/13549))
- Make `HomeServerTestCase` load any configured homeserver modules automatically. ([\#13558](https://github.com/matrix-org/synapse/issues/13558))
Synapse 1.65.0 (2022-08-16)
===========================
No significant changes since 1.65.0rc2.
Synapse 1.65.0rc2 (2022-08-11)
==============================
Internal Changes
----------------
- Revert 'Remove the unspecced `room_id` field in the `/hierarchy` response. ([\#13365](https://github.com/matrix-org/synapse/issues/13365))' to give more time for clients to update. ([\#13501](https://github.com/matrix-org/synapse/issues/13501))
Synapse 1.65.0rc1 (2022-08-09)
==============================
Features
--------
- Add support for stable prefixes for [MSC2285 (private read receipts)](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13273](https://github.com/matrix-org/synapse/issues/13273))
- Add new unstable error codes `ORG.MATRIX.MSC3848.ALREADY_JOINED`, `ORG.MATRIX.MSC3848.NOT_JOINED`, and `ORG.MATRIX.MSC3848.INSUFFICIENT_POWER` described in [MSC3848](https://github.com/matrix-org/matrix-spec-proposals/pull/3848). ([\#13343](https://github.com/matrix-org/synapse/issues/13343))
- Use stable prefixes for [MSC3827](https://github.com/matrix-org/matrix-spec-proposals/pull/3827). ([\#13370](https://github.com/matrix-org/synapse/issues/13370))
- Add a new module API method to translate a room alias into a room ID. ([\#13428](https://github.com/matrix-org/synapse/issues/13428))
- Add a new module API method to create a room. ([\#13429](https://github.com/matrix-org/synapse/issues/13429))
- Add remote join capability to the module API's `update_room_membership` method (in a backwards compatible manner). ([\#13441](https://github.com/matrix-org/synapse/issues/13441))
Bugfixes
--------
- Update the version of the LDAP3 auth provider module included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on packages.matrix.org to 0.2.2. This version fixes a regression in the module. ([\#13470](https://github.com/matrix-org/synapse/issues/13470))
- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365))
- Fix a bug introduced in Synapse 0.24.0 that would respond with the wrong error status code to `/joined_members` requests when the requester is not a current member of the room. Contributed by @andrewdoh. ([\#13374](https://github.com/matrix-org/synapse/issues/13374))
- Fix bug in handling of typing events for appservices. Contributed by Nick @ Beeper (@fizzadar). ([\#13392](https://github.com/matrix-org/synapse/issues/13392))
- Fix a bug introduced in Synapse 1.57.0 where rooms listed in `exclude_rooms_from_sync` in the configuration file would not be properly excluded from incremental syncs. ([\#13408](https://github.com/matrix-org/synapse/issues/13408))
- Fix a bug in the experimental faster-room-joins support which could cause it to get stuck in an infinite loop. ([\#13353](https://github.com/matrix-org/synapse/issues/13353))
- Faster room joins: fix a bug which caused rejected events to become un-rejected during state syncing. ([\#13413](https://github.com/matrix-org/synapse/issues/13413))
- Faster room joins: fix error when running out of servers to sync partial state with, so that Synapse raises the intended error instead. ([\#13432](https://github.com/matrix-org/synapse/issues/13432))
Updates to the Docker image
---------------------------
- Make Docker images build on armv7 by installing cryptography dependencies in the 'requirements' stage. Contributed by Jasper Spaans. ([\#13372](https://github.com/matrix-org/synapse/issues/13372))
Improved Documentation
----------------------
- Update the 'registration tokens' page to acknowledge that the relevant MSC was merged into version 1.2 of the Matrix specification. Contributed by @moan0s. ([\#11897](https://github.com/matrix-org/synapse/issues/11897))
- Document which HTTP resources support gzip compression. ([\#13221](https://github.com/matrix-org/synapse/issues/13221))
- Add steps describing how to elevate an existing user to administrator by manipulating the database. ([\#13230](https://github.com/matrix-org/synapse/issues/13230))
- Fix wrong headline for `url_preview_accept_language` in documentation. ([\#13437](https://github.com/matrix-org/synapse/issues/13437))
- Remove redundant 'Contents' section from the Configuration Manual. Contributed by @dklimpel. ([\#13438](https://github.com/matrix-org/synapse/issues/13438))
- Update documentation for config setting `macaroon_secret_key`. ([\#13443](https://github.com/matrix-org/synapse/issues/13443))
- Update outdated information on `sso_mapping_providers` documentation. ([\#13449](https://github.com/matrix-org/synapse/issues/13449))
- Fix example code in module documentation of `password_auth_provider_callbacks`. ([\#13450](https://github.com/matrix-org/synapse/issues/13450))
- Make the configuration for the cache clearer. ([\#13481](https://github.com/matrix-org/synapse/issues/13481))
Internal Changes
----------------
- Extend the release script to automatically push a new SyTest branch, rather than having that be a manual process. ([\#12978](https://github.com/matrix-org/synapse/issues/12978))
- Make minor clarifications to the error messages given when we fail to join a room via any server. ([\#13160](https://github.com/matrix-org/synapse/issues/13160))
- Enable Complement CI tests in the 'latest deps' test run. ([\#13213](https://github.com/matrix-org/synapse/issues/13213))
- Fix long-standing (though never hit) bugged logic in `get_pdu` which would ask every remote destination even after it had found an event. ([\#13346](https://github.com/matrix-org/synapse/issues/13346))
- Faster room joins: avoid blocking when pulling events with partially missing prev events. ([\#13355](https://github.com/matrix-org/synapse/issues/13355))
- Instrument `/messages` for understandable traces in Jaeger. ([\#13368](https://github.com/matrix-org/synapse/issues/13368))
- Remove an unused argument to `get_relations_for_event`. ([\#13383](https://github.com/matrix-org/synapse/issues/13383))
- Add a `merge-back` command to the release script, which automates merging the correct branches after a release. ([\#13393](https://github.com/matrix-org/synapse/issues/13393))
- Add missing type hints to tests. ([\#13397](https://github.com/matrix-org/synapse/issues/13397))
- Faster Room Joins: don't leave a stuck room partial state flag if the join fails. ([\#13403](https://github.com/matrix-org/synapse/issues/13403))
- Refactor `_resolve_state_at_missing_prevs` to compute an `EventContext` instead. ([\#13404](https://github.com/matrix-org/synapse/issues/13404), [\#13431](https://github.com/matrix-org/synapse/issues/13431))
- Faster Room Joins: prevent Synapse from answering federated join requests for a room which it has not fully joined yet. ([\#13416](https://github.com/matrix-org/synapse/issues/13416))
- Re-enable running Complement tests against Synapse with workers. ([\#13420](https://github.com/matrix-org/synapse/issues/13420))
- Prevent unnecessary lookups to any external `get_event` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13435](https://github.com/matrix-org/synapse/issues/13435))
- Add some tracing to give more insight into local room joins. ([\#13439](https://github.com/matrix-org/synapse/issues/13439))
- Rename class `RateLimitConfig` to `RatelimitSettings` and `FederationRateLimitConfig` to `FederationRatelimitSettings`. ([\#13442](https://github.com/matrix-org/synapse/issues/13442))
- Add some comments about how event push actions are stored. ([\#13445](https://github.com/matrix-org/synapse/issues/13445), [\#13455](https://github.com/matrix-org/synapse/issues/13455))
- Improve rebuild speed for the "synapse-workers" docker image. ([\#13447](https://github.com/matrix-org/synapse/issues/13447))
- Fix `@tag_args` being off-by-one with the arguments when tagging a span (tracing). ([\#13452](https://github.com/matrix-org/synapse/issues/13452))
- Update type of `EventContext.rejected`. ([\#13460](https://github.com/matrix-org/synapse/issues/13460))
- Use literals in place of `HTTPStatus` constants in tests. ([\#13463](https://github.com/matrix-org/synapse/issues/13463), [\#13469](https://github.com/matrix-org/synapse/issues/13469))
- Correct a misnamed argument in state res v2 internals. ([\#13467](https://github.com/matrix-org/synapse/issues/13467))
Synapse 1.64.0 (2022-08-02)
===========================
@@ -426,7 +7,7 @@ No significant changes since 1.64.0rc2.
Deprecation Warning
-------------------
Synapse 1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
Synapse v1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf.
[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
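As a rough sketch of such an SMTP configuration (hostnames and credentials are placeholders; the linked documentation lists the full set of options):

```yaml
# homeserver.yaml: send verification and password-reset e-mails directly
email:
  smtp_host: mail.example.com          # placeholder
  smtp_port: 587
  smtp_user: "synapse@example.com"     # placeholder
  smtp_pass: "secret"                  # placeholder
  require_transport_security: true
  notif_from: "Your homeserver <noreply@example.com>"
```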
@@ -435,7 +16,7 @@ If you require your homeserver to verify e-mail addresses or to support password
Synapse 1.64.0rc2 (2022-07-29)
==============================
This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse 1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406))
This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse v1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406))
Synapse 1.64.0rc1 (2022-07-26)
@@ -630,20 +211,6 @@ No significant changes since 1.62.0rc3.
Authors of spam-checker plugins should consult the [upgrade notes](https://github.com/matrix-org/synapse/blob/release-v1.62/docs/upgrade.md#upgrading-to-v1620) to learn about the enriched signatures for spam checker callbacks, which are supported with this release of Synapse.
## Security advisory
The following issue is fixed in 1.62.0.
* [GHSA-jhjh-776m-4765](https://github.com/matrix-org/synapse/security/advisories/GHSA-jhjh-776m-4765) / [CVE-2022-31152](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-31152)
Synapse instances prior to 1.62.0 did not implement the Matrix [event authorization rules](https://spec.matrix.org/v1.3/rooms/v10/#authorization-rules) correctly. An attacker could craft events which would be accepted by Synapse but not a spec-conformant server, potentially causing divergence in the room state between servers.
Homeservers with federation disabled via the [`federation_domain_whitelist`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#federation_domain_whitelist) config option are unaffected.
Administrators of homeservers with federation enabled are advised to upgrade to v1.62.0 or higher.
Fixed by [#13087](https://github.com/matrix-org/synapse/pull/13087) and [#13088](https://github.com/matrix-org/synapse/pull/13088).
Synapse 1.62.0rc3 (2022-07-04)
==============================
@@ -684,7 +251,7 @@ Bugfixes
- Fix a bug introduced in Synapse 1.58 where Synapse would not report full version information when installed from a git checkout. This is a best-effort affair and not guaranteed to be stable. ([\#12973](https://github.com/matrix-org/synapse/issues/12973))
- Fix a bug introduced in Synapse 1.60 where Synapse would fail to start if the `sqlite3` module was not available. ([\#12979](https://github.com/matrix-org/synapse/issues/12979))
- Fix a bug where non-standard information was required when requesting the `/hierarchy` API over federation. Introduced
in Synapse 1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991))
in Synapse v1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991))
- Fix a long-standing bug which meant that rate limiting was not restrictive enough in some cases. ([\#13018](https://github.com/matrix-org/synapse/issues/13018))
- Fix a bug introduced in Synapse 1.58 where profile requests for a malformed user ID would cause an internal error. Synapse now returns 400 Bad Request in this situation. ([\#13041](https://github.com/matrix-org/synapse/issues/13041))
- Fix some inconsistencies in the event authentication code. ([\#13087](https://github.com/matrix-org/synapse/issues/13087), [\#13088](https://github.com/matrix-org/synapse/issues/13088))
@@ -1277,7 +844,7 @@ If you have already upgraded to Synapse 1.57.0 without problem, then you have no
Updates to the Docker image
---------------------------
- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse 1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512))
- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse v1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512))
Synapse 1.57.0 (2022-04-19)
@@ -1529,10 +1096,10 @@ Features
Bugfixes
--------
- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse 1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090))
- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse v1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090))
- Fix a long-standing bug when redacting events with relations. ([\#12113](https://github.com/matrix-org/synapse/issues/12113), [\#12121](https://github.com/matrix-org/synapse/issues/12121), [\#12130](https://github.com/matrix-org/synapse/issues/12130), [\#12189](https://github.com/matrix-org/synapse/issues/12189))
- Fix a bug introduced in Synapse 1.7.2 whereby background updates are never run with the default background batch size. ([\#12157](https://github.com/matrix-org/synapse/issues/12157))
- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse 1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175))
- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse v1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175))
- Fix a bug introduced in Synapse 1.54.0 that broke background updates on sqlite homeservers while search was disabled. ([\#12215](https://github.com/matrix-org/synapse/issues/12215))
- Fix a long-standing bug when a `filter` argument with `event_fields` which did not include the `unsigned` field could result in a 500 error on `/sync`. ([\#12234](https://github.com/matrix-org/synapse/issues/12234))
@@ -1917,15 +1484,15 @@ Bugfixes
- Fix a long-standing issue which could cause Synapse to incorrectly accept data in the unsigned field of events
received over federation. ([\#11530](https://github.com/matrix-org/synapse/issues/11530))
- Fix a long-standing bug where Synapse wouldn't cache a response indicating that a remote user has no devices. ([\#11587](https://github.com/matrix-org/synapse/issues/11587))
- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse 1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593))
- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse v1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593))
- Fix bundled aggregations not being included in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612), [\#11659](https://github.com/matrix-org/synapse/issues/11659), [\#11791](https://github.com/matrix-org/synapse/issues/11791))
- Fix the `/_matrix/client/v1/room/{roomId}/hierarchy` endpoint returning incorrect fields which have been present since Synapse 1.49.0. ([\#11667](https://github.com/matrix-org/synapse/issues/11667))
- Fix preview of some GIF URLs (like tenor.com). Contributed by Philippe Daouadi. ([\#11669](https://github.com/matrix-org/synapse/issues/11669))
- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse 1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695))
- Fix a bug introduced in Synapse 1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745))
- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse v1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695))
- Fix a bug introduced in Synapse v1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745))
- Make the 'List Rooms' Admin API sort stable. Contributed by Daniël Sonck. ([\#11737](https://github.com/matrix-org/synapse/issues/11737))
- Fix a long-standing bug where space hierarchy over federation would only work correctly some of the time. ([\#11775](https://github.com/matrix-org/synapse/issues/11775))
- Fix a bug introduced in Synapse 1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786))
- Fix a bug introduced in Synapse v1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786))
Improved Documentation
@@ -2005,8 +1572,8 @@ This release candidate fixes a federation-breaking regression introduced in Syna
Bugfixes
--------
- Fix a bug introduced in Synapse 1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729))
- Fix a bug introduced in Synapse 1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730))
- Fix a bug introduced in Synapse v1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729))
- Fix a bug introduced in Synapse v1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730))
Improved Documentation

324
Cargo.lock generated
View File

@@ -1,324 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "blake2"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388"
dependencies = [
"digest",
]
[[package]]
name = "block-buffer"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
"generic-array",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"typenum",
]
[[package]]
name = "digest"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
dependencies = [
"block-buffer",
"crypto-common",
"subtle",
]
[[package]]
name = "generic-array"
version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
dependencies = [
"typenum",
"version_check",
]
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "indoc"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
[[package]]
name = "libc"
version = "0.2.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
[[package]]
name = "lock_api"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "once_cell"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-sys",
]
[[package]]
name = "proc-macro2"
version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
version = "0.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0220c44442c9b239dd4357aa856ac468a4f5e1f0df19ddb89b2522952eb4c6ca"
dependencies = [
"cfg-if",
"indoc",
"libc",
"parking_lot",
"pyo3-build-config",
"pyo3-ffi",
"pyo3-macros",
"unindent",
]
[[package]]
name = "pyo3-build-config"
version = "0.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c819d397859445928609d0ec5afc2da5204e0d0f73d6bf9e153b04e83c9cdc2"
dependencies = [
"once_cell",
"target-lexicon",
]
[[package]]
name = "pyo3-ffi"
version = "0.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca882703ab55f54702d7bfe1189b41b0af10272389f04cae38fe4cd56c65f75f"
dependencies = [
"libc",
"pyo3-build-config",
]
[[package]]
name = "pyo3-macros"
version = "0.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "568749402955ad7be7bad9a09b8593851cd36e549ac90bfd44079cea500f3f21"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
"syn",
]
[[package]]
name = "pyo3-macros-backend"
version = "0.16.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "611f64e82d98f447787e82b8e7b0ebc681e1eb78fc1252668b2c605ffb4e1eb8"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "quote"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
]
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "smallvec"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "subtle"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "syn"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "synapse"
version = "0.1.0"
dependencies = [
"blake2",
"hex",
"pyo3",
]
[[package]]
name = "target-lexicon"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"
[[package]]
name = "typenum"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "unicode-ident"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
[[package]]
name = "unindent"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "windows-sys"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
dependencies = [
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
[[package]]
name = "windows_i686_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
[[package]]
name = "windows_i686_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
[[package]]
name = "windows_x86_64_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
[[package]]
name = "windows_x86_64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"

View File

@@ -1,5 +0,0 @@
# We make the whole Synapse folder a workspace so that we can run `cargo`
# commands from the root (rather than having to cd into rust/).
[workspace]
members = ["rust"]

View File

@@ -2,70 +2,152 @@
Synapse |support| |development| |documentation| |license| |pypi| |python|
=========================================================================
Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
maintained by the Matrix.org Foundation. We began rapid development in 2014,
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
in earnest today.
Briefly, Matrix is an open standard for communications on the internet, supporting
federation, encryption and VoIP. Matrix.org has more to say about the `goals of the
Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal specification
<https://spec.matrix.org/>`_ describes the technical details.
.. contents::
Installing and configuration
============================
Introduction
============
The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
Matrix is an ambitious new ecosystem for open federated Instant Messaging and
VoIP. The basics you need to know to get up and running are:
- Everything in Matrix happens in a room. Rooms are distributed and do not
exist on any single server. Rooms can be located using convenience aliases
like ``#matrix:matrix.org`` or ``#test:localhost:8448``.
- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
you will normally refer to yourself and others using a third party identifier
(3PID): email address, phone number, etc rather than manipulating Matrix user IDs)
The overall architecture is::
client <----> homeserver <=====================> homeserver <----> client
https://somewhere.org/_matrix https://elsewhere.net/_matrix
``#matrix:matrix.org`` is the official support room for Matrix, and can be
accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
via IRC bridge at irc://irc.libera.chat/matrix.
Synapse is currently in rapid development, but as of version 0.5 we believe it
is sufficiently stable to be run as an internet-facing service for real usage!
About Matrix
============
Matrix specifies a set of pragmatic RESTful HTTP JSON APIs as an open standard,
which handle:
- Creating and managing fully distributed chat rooms with no
single points of control or failure
- Eventually-consistent cryptographically secure synchronisation of room
state across a global open network of federated servers and services
- Sending and receiving extensible messages in a room with (optional)
end-to-end encryption
- Inviting, joining, leaving, kicking, banning room members
- Managing user accounts (registration, login, logout)
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
Facebook accounts to authenticate, identify and discover users on Matrix.
- Placing 1:1 VoIP and Video calls
These APIs are intended to be implemented on a wide range of servers, services
and clients, letting developers build messaging and VoIP functionality on top
of the entirely open Matrix ecosystem rather than using closed or proprietary
solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.
In Matrix, every user runs one or more Matrix clients, which connect through to
a Matrix homeserver. The homeserver stores all their personal chat history and
user account information - much as a mail client connects through to an
IMAP/SMTP server. Just like email, you can either run your own Matrix
homeserver and control and own your own communications and history or use one
hosted by someone else (e.g. matrix.org) - there is no single point of control
or mandatory service provider in Matrix, unlike WhatsApp, Facebook, Hangouts,
etc.
We'd like to invite you to join #matrix:matrix.org (via
https://matrix.org/docs/projects/try-matrix-now.html), run a homeserver, take a look
at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
`APIs <https://matrix.org/docs/api>`_ and `Client SDKs
<https://matrix.org/docs/projects/try-matrix-now.html#client-sdks>`_.
Thanks for using Matrix!
Support
=======
For support installing or managing Synapse, please join |room|_ (from a matrix.org
account if necessary) and ask questions there. We do not use GitHub issues for
support requests, only for bug reports and feature requests.
Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
with its source available in |docs|_.
.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org
.. |docs| replace:: ``docs``
.. _docs: docs
Synapse Installation
====================
.. _federation:
Synapse has a variety of `config options
<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
which can be used to customise its behaviour after installation.
There are additional details on how to `configure Synapse for federation here
<https://matrix-org.github.io/synapse/latest/federate.html>`_.
.. _reverse-proxy:
Using a reverse proxy with Synapse
----------------------------------
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
For information on configuring one, see `the reverse proxy docs
<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
Upgrading an existing Synapse
-----------------------------
The instructions for upgrading Synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of Synapse.
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
* For details on how to install synapse, see
`Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
Platform dependencies
---------------------
Connecting to Synapse from a client
===================================
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
for more details.
The easiest way to try out your new Synapse installation is by connecting to it
from a web client.
Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
If all goes well you should at least be able to log in, create a room, and
start sending messages.
.. _`client-user-reg`:
Registering a new user from a client
------------------------------------
By default, registration of new users via Matrix clients is disabled. To enable
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
Once ``enable_registration`` is set to ``true``, it is possible to register a
user via a Matrix client.
Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::
@localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.
Security note
-------------
=============
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
repository endpoints`_.
@@ -105,76 +187,30 @@ Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.
Testing a new installation
==========================
Upgrading an existing Synapse
=============================
The easiest way to try out your new Synapse installation is by connecting to it
from a web client.
The instructions for upgrading synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of synapse.
Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
.. _reverse-proxy:
If all goes well you should at least be able to log in, create a room, and
start sending messages.
Using a reverse proxy with Synapse
==================================
.. _`client-user-reg`:
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
Registering a new user from a client
------------------------------------
By default, registration of new users via Matrix clients is disabled. To enable
it:
1. In the
`registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
set ``enable_registration: true`` in ``homeserver.yaml``.
2. Then **either**:
a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.
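A minimal sketch of the corresponding ``homeserver.yaml`` settings for the two
options above (the reCAPTCHA keys are placeholders; pick one of the two options):

.. code-block:: yaml

    enable_registration: true
    # option (a): require a CAPTCHA for new registrations
    enable_registration_captcha: true
    recaptcha_public_key: "YOUR_SITE_KEY"       # placeholder
    recaptcha_private_key: "YOUR_SECRET_KEY"    # placeholder
    # option (b): skip verification entirely (not recommended on the
    # public internet)
    # enable_registration_without_verification: true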
Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::
@localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.
Troubleshooting and support
===========================
The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
includes tips on dealing with some common problems. For more details, see
`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.
For additional support installing or managing Synapse, please ask in the community
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
issues for support requests, only for bug reports and feature requests.
.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org
.. |docs| replace:: ``docs``
.. _docs: docs
For information on configuring one, see `<docs/reverse_proxy.md>`_.
Identity Servers
================
@@ -206,15 +242,34 @@ an email address with your account, or send an invite to another user via their
email address.
Development
===========
Password reset
==============
Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
or by directly editing the database as shown below.
First calculate the hash of the new password::
$ ~/synapse/env/bin/hash_password
Password:
Confirm password:
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
Then update the ``users`` table in the database::
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
WHERE name='@test:test.com';
Synapse Development
===================
We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
information for synapse developers as well as synapse administrators.
information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
@@ -225,6 +280,187 @@ Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
Quick start
-----------
Before setting up a development environment for synapse, make sure you have the
system dependencies (such as the python header files) installed - see
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
To check out a synapse for development, clone the git repo into a working
directory of your choice::
git clone https://github.com/matrix-org/synapse.git
cd synapse
Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
pip install --user pipx
pipx install poetry
as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::
poetry install --extras "all test"
This will run a process of downloading and installing all the needed
dependencies into a virtual env.
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
poetry run ./demo/start.sh
(to stop, you can use ``poetry run ./demo/stop.sh``)
See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.
If you just want to start a single instance of the app and run it directly::
# Create the homeserver.yaml config once
poetry run synapse_homeserver \
--server-name my.domain.name \
--config-path homeserver.yaml \
--generate-config \
--report-stats=[yes|no]
# Start the app
poetry run synapse_homeserver --config-path homeserver.yaml
Running the unit tests
----------------------
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::
poetry run trial tests
This should end with a 'PASSED' result (note that exact numbers will
differ)::
Ran 1337 tests in 716.064s
PASSED (skips=15, successes=1322)
For more tips on running the unit tests, like running a specific test or
to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.
Running the Integration Tests
-----------------------------
Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
a Matrix homeserver integration testing suite, which uses HTTP requests to
access the API as a Matrix client would. It is able to run Synapse directly from
the source tree, so installation of the server is not required.
Testing with SyTest is recommended for verifying that changes related to the
Client-Server API are functioning correctly. See the `SyTest installation
instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
Platform dependencies
=====================
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`<docs/deprecation_policy.md>`_ document for more details.
Troubleshooting
===============
Need help? Join our community support room on Matrix:
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
Running out of File Handles
---------------------------
If synapse runs out of file handles, it typically fails badly - live-locking
at 100% CPU, and/or failing to accept new TCP connections (blocking the
connecting client). Matrix currently can legitimately use a lot of file handles,
thanks to busy rooms like #matrix:matrix.org containing hundreds of participating
servers. The first time a server talks in a room it will try to connect
simultaneously to all participating servers, which could exhaust the available
file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
to respond. (We need to improve the routing algorithm used to be better than
full mesh, but as of March 2019 this hasn't happened yet).
If you hit this failure mode, we recommend increasing the maximum number of
open file handles to be at least 4096 (assuming a default of 1024 or 256).
This is typically done by editing ``/etc/security/limits.conf``
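For example, assuming Synapse runs as the ``matrix-synapse`` user (the user name
and values below are illustrative, not prescriptive), lines such as the following
raise the limit to 4096::

    matrix-synapse  soft  nofile  4096
    matrix-synapse  hard  nofile  4096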
Separately, Synapse may leak file handles if inbound HTTP requests get stuck
during processing - e.g. blocked behind a lock or talking to a remote server etc.
This is best diagnosed by matching up the 'Received request' and 'Processed request'
log lines and looking for any 'Processed request' lines which take more than
a few seconds to execute. Please let us know at #synapse:matrix.org if
you see this failure mode so we can help debug it.
Help!! Synapse is slow and eats all my RAM/CPU!
-----------------------------------------------
First, ensure you are running the latest version of Synapse, using Python 3
with a PostgreSQL database.
Synapse's architecture is quite RAM hungry currently - we deliberately
cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in the future, but for now the easiest
way to reduce the RAM usage (at the risk of slowing things down)
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
variable. The default is 0.5, which can be decreased to reduce RAM usage
in memory-constrained environments, or increased if performance starts to
degrade.
However, degraded performance due to a low cache factor, common on
machines with slow disks, often leads to explosions in memory use due to
backlogged requests. In this case, reducing the cache factor will make
things worse. Instead, try increasing it drastically. 2.0 is a good
starting value.
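For example (a sketch; the value and file are illustrative), the variable can be
set in ``/etc/default/matrix-synapse`` on Debian, alongside the ``LD_PRELOAD``
line described below::

    SYNAPSE_CACHE_FACTOR=2.0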
Using `libjemalloc <http://jemalloc.net/>`_ can also yield a significant
improvement in overall memory use, and especially in terms of giving back
RAM to the OS. To use it, the library must simply be put in the
LD_PRELOAD environment variable when launching Synapse. On Debian, this
can be done by installing the ``libjemalloc1`` package and adding this
line to ``/etc/default/matrix-synapse``::
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
This can make a significant difference on Python 2.7 - it's unclear how
much of an improvement it provides on Python 3.x.
If you're encountering high CPU use by the Synapse process itself, you
may be affected by a bug with presence tracking that leads to a
massive excess of outgoing federation requests (see `discussion
<https://github.com/matrix-org/synapse/issues/3971>`_). If metrics
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
the following in the Synapse config file:
.. code-block:: yaml
presence:
enabled: false
People can't accept room invitations from me
--------------------------------------------
The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
This is normally caused by a misconfiguration in your reverse-proxy. See
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
:alt: (get support on #synapse:matrix.org)
:target: https://matrix.to/#/#synapse:matrix.org


@@ -1,20 +0,0 @@
# A build script for poetry that adds the rust extension.
import os
from typing import Any, Dict
from setuptools_rust import Binding, RustExtension
def build(setup_kwargs: Dict[str, Any]) -> None:
original_project_dir = os.path.dirname(os.path.realpath(__file__))
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
extension = RustExtension(
target="synapse.synapse_rust",
path=cargo_toml_path,
binding=Binding.PyO3,
py_limited_api=True,
)
setup_kwargs.setdefault("rust_extensions", []).append(extension)
setup_kwargs["zip_safe"] = False

File diff suppressed because it is too large


@@ -0,0 +1,21 @@
synapse_federation_transaction_queue_pendingEdus:total = sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)
synapse_federation_transaction_queue_pendingPdus:total = sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)
synapse_http_server_request_count:method{servlet=""} = sum(synapse_http_server_request_count) by (method)
synapse_http_server_request_count:servlet{method=""} = sum(synapse_http_server_request_count) by (servlet)
synapse_http_server_request_count:total{servlet=""} = sum(synapse_http_server_request_count:by_method) by (servlet)
synapse_cache:hit_ratio_5m = rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])
synapse_cache:hit_ratio_30s = rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])
synapse_federation_client_sent{type="EDU"} = synapse_federation_client_sent_edus + 0
synapse_federation_client_sent{type="PDU"} = synapse_federation_client_sent_pdu_destinations:count + 0
synapse_federation_client_sent{type="Query"} = sum(synapse_federation_client_sent_queries) by (job)
synapse_federation_server_received{type="EDU"} = synapse_federation_server_received_edus + 0
synapse_federation_server_received{type="PDU"} = synapse_federation_server_received_pdus + 0
synapse_federation_server_received{type="Query"} = sum(synapse_federation_server_received_queries) by (job)
synapse_federation_transaction_queue_pending{type="EDU"} = synapse_federation_transaction_queue_pending_edus + 0
synapse_federation_transaction_queue_pending{type="PDU"} = synapse_federation_transaction_queue_pending_pdus + 0


@@ -1,35 +1,55 @@
groups:
- name: synapse
rules:
# These 3 rules are used in the included Prometheus console
- record: "synapse_federation_transaction_queue_pendingEdus:total"
expr: "sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)"
- record: "synapse_federation_transaction_queue_pendingPdus:total"
expr: "sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)"
- record: 'synapse_http_server_request_count:method'
labels:
servlet: ""
expr: "sum(synapse_http_server_request_count) by (method)"
- record: 'synapse_http_server_request_count:servlet'
labels:
method: ""
expr: 'sum(synapse_http_server_request_count) by (servlet)'
- record: 'synapse_http_server_request_count:total'
labels:
servlet: ""
expr: 'sum(synapse_http_server_request_count:by_method) by (servlet)'
- record: 'synapse_cache:hit_ratio_5m'
expr: 'rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])'
- record: 'synapse_cache:hit_ratio_30s'
expr: 'rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])'
- record: 'synapse_federation_client_sent'
labels:
type: "EDU"
expr: 'synapse_federation_client_sent_edus_total + 0'
expr: 'synapse_federation_client_sent_edus + 0'
- record: 'synapse_federation_client_sent'
labels:
type: "PDU"
expr: 'synapse_federation_client_sent_pdu_destinations_count_total + 0'
expr: 'synapse_federation_client_sent_pdu_destinations:count + 0'
- record: 'synapse_federation_client_sent'
labels:
type: "Query"
expr: 'sum(synapse_federation_client_sent_queries) by (job)'
# These 3 rules are used in the included Prometheus console
- record: 'synapse_federation_server_received'
labels:
type: "EDU"
expr: 'synapse_federation_server_received_edus_total + 0'
expr: 'synapse_federation_server_received_edus + 0'
- record: 'synapse_federation_server_received'
labels:
type: "PDU"
expr: 'synapse_federation_server_received_pdus_total + 0'
expr: 'synapse_federation_server_received_pdus + 0'
- record: 'synapse_federation_server_received'
labels:
type: "Query"
expr: 'sum(synapse_federation_server_received_queries) by (job)'
# These 2 rules are used in the included Prometheus console
- record: 'synapse_federation_transaction_queue_pending'
labels:
type: "EDU"
@@ -39,25 +59,20 @@ groups:
type: "PDU"
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
# These 3 rules are used in the included Grafana dashboard
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
labels:
type: remote
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
labels:
type: local
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
labels:
type: bridges
# This rule is used in the included Grafana dashboard
- record: synapse_storage_events_persisted_by_event_type
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
# This rule is used in the included Grafana dashboard
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
- record: synapse_storage_events_persisted_by_origin
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
expr: sum without(type) (synapse_storage_events_persisted_events_sep)


@@ -36,7 +36,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0
pip install poetry==1.2.0b1
poetry export \
--extras all \
--extras test \
@@ -61,7 +61,7 @@ dh_virtualenv \
--extras="all,systemd,test" \
--requirements="exported_requirements.txt"
PACKAGE_BUILD_DIR="$(pwd)/debian/matrix-synapse-py3"
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
@@ -78,14 +78,9 @@ case "$DEB_BUILD_OPTIONS" in
cp -r tests "$tmpdir"
# To avoid pulling in the unbuilt Synapse in the local directory
pushd /
PYTHONPATH="$tmpdir" \
"${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
popd
;;
esac

83
debian/changelog vendored

@@ -1,86 +1,3 @@
matrix-synapse-py3 (1.68.0) stable; urgency=medium
* New Synapse release 1.68.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Sep 2022 12:02:09 +0100
matrix-synapse-py3 (1.68.0~rc2) stable; urgency=medium
* New Synapse release 1.68.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 23 Sep 2022 09:40:10 +0100
matrix-synapse-py3 (1.68.0~rc1) stable; urgency=medium
* New Synapse release 1.68.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Sep 2022 11:18:20 +0100
matrix-synapse-py3 (1.67.0) stable; urgency=medium
* New Synapse release 1.67.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Sep 2022 09:19:56 +0100
matrix-synapse-py3 (1.67.0~rc1) stable; urgency=medium
[ Erik Johnston ]
* Use stable poetry 1.2.0 version, rather than a prerelease.
[ Synapse Packaging team ]
* New Synapse release 1.67.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Sep 2022 09:01:06 +0100
matrix-synapse-py3 (1.66.0) stable; urgency=medium
* New Synapse release 1.66.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 31 Aug 2022 11:20:17 +0100
matrix-synapse-py3 (1.66.0~rc2+nmu1) UNRELEASED; urgency=medium
[ Jörg Behrmann ]
* Update debhelper to compatibility level 12.
* Drop the preinst script stopping synapse.
* Allocate a group for the system user.
* Change dpkg-statoverride to --force-statoverride-add.
[ Erik Johnston ]
* Disable `dh_auto_configure` as it broke during Rust build.
-- Jörg Behrmann <behrmann@physik.fu-berlin.de> Tue, 23 Aug 2022 17:17:00 +0100
matrix-synapse-py3 (1.66.0~rc2) stable; urgency=medium
* New Synapse release 1.66.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Aug 2022 12:25:19 +0100
matrix-synapse-py3 (1.66.0~rc1) stable; urgency=medium
* New Synapse release 1.66.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Aug 2022 09:48:55 +0100
matrix-synapse-py3 (1.65.0) stable; urgency=medium
* New Synapse release 1.65.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Aug 2022 16:51:26 +0100
matrix-synapse-py3 (1.65.0~rc2) stable; urgency=medium
* New Synapse release 1.65.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 11 Aug 2022 11:38:18 +0100
matrix-synapse-py3 (1.65.0~rc1) stable; urgency=medium
* New Synapse release 1.65.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 09 Aug 2022 11:39:29 +0100
matrix-synapse-py3 (1.64.0) stable; urgency=medium
* New Synapse release 1.64.0.

1
debian/compat vendored Normal file

@@ -0,0 +1 @@
10

2
debian/control vendored

@@ -4,7 +4,7 @@ Priority: extra
Maintainer: Synapse Packaging team <packages@matrix.org>
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
Build-Depends:
debhelper-compat (= 12),
debhelper (>= 10),
dh-virtualenv (>= 1.1),
libsystemd-dev,
libpq-dev,


@@ -40,12 +40,12 @@ EOF
/opt/venvs/matrix-synapse/lib/manage_debconf.pl update
if ! getent passwd $USER >/dev/null; then
adduser --quiet --system --group --no-create-home --home /var/lib/matrix-synapse $USER
adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER
fi
for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do
if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then
dpkg-statoverride --force-statoverride-add --quiet --update --add $USER "$(id -gn $USER)" 0755 $DIR
dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR
fi
done

31
debian/matrix-synapse-py3.preinst vendored Normal file

@@ -0,0 +1,31 @@
#!/bin/sh -e
# Attempt to undo some of the braindamage caused by
# https://github.com/matrix-org/package-synapse-debian/issues/18.
#
# Due to reasons [1], the old python2 matrix-synapse package will not stop the
# service when the package is uninstalled. Our maintainer scripts will do the
# right thing in terms of ensuring the service is enabled and unmasked, but
# then do a `systemctl start matrix-synapse`, which of course does nothing -
# leaving the old (py2) service running.
#
# There should normally be no reason for the service to be running during our
# preinst, so we assume that if it *is* running, it's due to that situation,
# and stop it.
#
# [1] dh_systemd_start doesn't do anything because it sees that there is an
# init.d script with the same name, so leaves it to dh_installinit.
#
# dh_installinit doesn't do anything because somebody gave it a --no-start
# for unknown reasons.
if [ -x /bin/systemctl ]; then
if /bin/systemctl --quiet is-active -- matrix-synapse; then
echo >&2 "stopping existing matrix-synapse service"
/bin/systemctl stop matrix-synapse || true
fi
fi
#DEBHELPER#
exit 0

2
debian/matrix-synapse.default vendored Normal file

@@ -0,0 +1,2 @@
# Specify environment variables used when running Synapse
# SYNAPSE_CACHE_FACTOR=0.5 (default)


@@ -5,6 +5,7 @@ Description=Synapse Matrix homeserver
Type=notify
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=-/etc/default/matrix-synapse
ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
ExecReload=/bin/kill -HUP $MAINPID
@@ -12,10 +13,5 @@ Restart=always
RestartSec=3
SyslogIdentifier=matrix-synapse
# The environment file is not shipped by default anymore and the below directive
# is for backwards compatibility only. Please use your homeserver.yaml if
# possible.
EnvironmentFile=-/etc/default/matrix-synapse
[Install]
WantedBy=multi-user.target

14
debian/rules vendored

@@ -6,19 +6,15 @@
# assume we only have one package
PACKAGE_NAME:=`dh_listpackages`
override_dh_installsystemd:
dh_installsystemd --name=matrix-synapse
override_dh_systemd_enable:
dh_systemd_enable --name=matrix-synapse
override_dh_installinit:
dh_installinit --name=matrix-synapse
# we don't really want to strip the symbols from our object files.
override_dh_strip:
override_dh_auto_configure:
# many libraries pulled from PyPI have allocatable sections after
# non-allocatable ones on which dwz errors out. For those without the issue the
# gains are only marginal
override_dh_dwz:
# dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
# (executables and shared libs) in the package, and looks for the shared
# libraries that they depend on. It then adds a dependency on the package that


@@ -31,9 +31,7 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -42,14 +40,22 @@ FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential cargo git libffi-dev libssl-dev \
apt-get update -qq && apt-get install -yqq git \
&& rm -rf /var/lib/apt/lists/*
# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
# We use a specific commit from poetry's master branch instead of our usual 1.1.14,
# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
# https://github.com/python-poetry/poetry/pull/5156 and
# https://github.com/python-poetry/poetry/issues/5141 ;
# without it, we generate a requirements.txt with incorrect environment markers,
# which causes necessary packages to be omitted when we `pip install`.
#
# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --user "poetry==1.2.0"
pip install --user "poetry-core==1.1.0a7" "git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5"
WORKDIR /synapse
@@ -62,23 +68,12 @@ COPY pyproject.toml poetry.lock /synapse/
# reason, such as when a git repository is used directly as a dependency.
ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
# If specified, we won't use the Poetry lockfile.
# Instead, we'll just install what a regular `pip install` would from PyPI.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
# Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed.
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
else \
touch /synapse/requirements.txt; \
fi
RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}
###
### Stage 1: builder
###
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
# install the OS build deps
RUN \
@@ -94,20 +89,11 @@ RUN \
libxml++2.6-dev \
libxslt1-dev \
openssl \
rustc \
zlib1g-dev \
git \
curl \
&& rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be
# used while you develop on the source
@@ -119,27 +105,17 @@ RUN --mount=type=cache,target=/root/.cache/pip \
# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml README.rst build_rust.py /synapse/
# Repeat of earlier build argument declaration, as this is a new build stage.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
COPY pyproject.toml README.rst /synapse/
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
else \
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
fi
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
###
### Stage 2: runtime
###
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
FROM docker.io/python:${PYTHON_VERSION}-slim
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'


@@ -72,7 +72,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
build-essential \
curl \
debhelper \
devscripts \
libsystemd-dev \
@@ -86,15 +85,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
libpq-dev \
xmlsec1
# Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable
COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /
# install dhvirtualenv. Update the apt cache again first, in case we got a


@@ -1,62 +1,39 @@
# syntax=docker/dockerfile:1
# Inherit from the official Synapse docker image
ARG SYNAPSE_VERSION=latest
# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.
FROM debian:bullseye-slim AS deps_base
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
redis-server nginx-light
# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM redis:6-bullseye AS redis_base
# now build the final image, based on the the regular Synapse docker image
FROM matrixdotorg/synapse:$SYNAPSE_VERSION
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install supervisor~=4.2
RUN mkdir -p /etc/supervisor/conf.d
# Install deps
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
redis-server nginx-light
# Copy over redis and nginx
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install supervisor~=4.2
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/log/nginx /var/lib/nginx
# Disable the default nginx sites
RUN rm /etc/nginx/sites-enabled/default
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/
# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/
# Expose nginx listener port
EXPOSE 8080/tcp
# Expose nginx listener port
EXPOSE 8080/tcp
# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]
# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD /bin/sh /healthcheck.sh
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD /bin/sh /healthcheck.sh


@@ -191,7 +191,7 @@ If you need to build the image from a Synapse checkout, use the following `docke
build` command from the repo's root:
```
DOCKER_BUILDKIT=1 docker build -t matrixdotorg/synapse -f docker/Dockerfile .
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
```
You can choose to build a different docker image by changing the value of the `-f` flag to


@@ -17,17 +17,7 @@ ARG SYNAPSE_VERSION=latest
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
# now build the final image, based on the Synapse image.
FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# copy the postgres installation over from the image we built above
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
FROM postgres:13-bullseye AS postgres_base
# initialise the database cluster in /var/lib/postgresql
RUN gosu postgres initdb --locale=C --encoding=UTF-8 --auth-host password
@@ -35,6 +25,18 @@ FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
RUN echo "ALTER USER postgres PASSWORD 'somesecret'" | gosu postgres postgres --single
RUN echo "CREATE DATABASE synapse" | gosu postgres postgres --single
# now build the final image, based on the Synapse image.
FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# copy the postgres installation over from the image we built above
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /var/lib/postgresql /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
# Extend the shared homeserver config to disable rate-limiting,
# set Complement's static shared secret, enable registration, amongst other
# tweaks to get Synapse ready for testing.


@@ -19,7 +19,7 @@ username=www-data
autorestart=true
[program:redis]
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
command=/usr/local/bin/prefix-log /usr/bin/redis-server /etc/redis/redis.conf --daemonize no
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0


@@ -69,7 +69,6 @@
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
- [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
- [Monthly Active Users](usage/administration/monthly_active_users.md)
- [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md)
- [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md)
- [Database Maintenance Tools](usage/administration/database_maintenance_tools.md)


@@ -5,9 +5,9 @@ non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts.
To authenticate yourself to the server, you will need both the shared secret
([`registration_shared_secret`](../configuration/config_documentation.md#registration_shared_secret)
in the homeserver configuration), and a one-time nonce. If the registration
shared secret is not configured, this API is not enabled.
(`registration_shared_secret` in the homeserver configuration), and a
one-time nonce. If the registration shared secret is not configured, this API
is not enabled.
To fetch the nonce, you need to request one from the API:
@@ -46,24 +46,7 @@ As an example:
The MAC is the hex digest output of the HMAC-SHA1 algorithm, with the key being
the shared secret and the content being the nonce, user, password, either the
string "admin" or "notadmin", and optionally the user_type
each separated by NULs.
Here is an easy way to generate the HMAC digest if you have Bash and OpenSSL:
```bash
# Update these values and then paste this code block into a bash terminal
nonce='thisisanonce'
username='pepper_roni'
password='pizza'
admin='admin'
secret='shared_secret'
printf '%s\0%s\0%s\0%s' "$nonce" "$username" "$password" "$admin" |
openssl sha1 -hmac "$secret" |
awk '{print $2}'
```
For an example of generation in Python:
each separated by NULs. For an example of generation in Python:
```python
import hmac, hashlib
@@ -87,4 +70,4 @@ def generate_mac(nonce, user, password, admin=False, user_type=None):
mac.update(user_type.encode('utf8'))
return mac.hexdigest()
```
```


@@ -302,8 +302,6 @@ The following fields are possible in the JSON response body:
* `state_events` - Total number of state_events of a room. Complexity of the room.
* `room_type` - The type of the room taken from the room's creation event; for example "m.space" if the room is a space.
If the room does not define a type, the value will be `null`.
* `forgotten` - Whether all local users have
[forgotten](https://spec.matrix.org/latest/client-server-api/#leaving-rooms) the room.
The API is:
@@ -332,13 +330,10 @@ A response body like the following is returned:
"guest_access": null,
"history_visibility": "shared",
"state_events": 93534,
"room_type": "m.space",
"forgotten": false
"room_type": "m.space"
}
```
_Changed in Synapse 1.66:_ Added the `forgotten` key to the response body.
# Room Members API
The Room Members admin API allows server admins to get a list of all members of a room.
@@ -393,151 +388,6 @@ A response body like the following is returned:
}
```
# Room Messages API
The Room Messages admin API allows server admins to get all messages
sent to a room in a given timeframe. There are various parameters available
that allow for filtering and ordering the returned list. This API supports pagination.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).
This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).
The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/messages
```
**Parameters**
The following path parameters are required:
* `room_id` - The ID of the room you wish you fetch messages from.
The following query parameters are available:
* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
* `to` - The token to spot returning events at.
* `limit` - The maximum number of events to return. Defaults to `10`.
* `filter` - A JSON RoomEventFilter to filter returned events with.
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
this value to `b` will reverse the above sort order. Defaults to `f`.
**Response**
The following fields are possible in the JSON response body:
* `chunk` - A list of room events. The order depends on the dir parameter.
Note that an empty chunk does not necessarily imply that no more events are available. Clients should continue to paginate until no end property is returned.
* `end` - A token corresponding to the end of chunk. This token can be passed back to this endpoint to request further events.
If no further events are available, this property is omitted from the response.
* `start` - A token corresponding to the start of chunk.
* `state` - A list of state events relevant to showing the chunk.
**Example**
For more details on each chunk, read [the Matrix specification](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).
```json
{
"chunk": [
{
"content": {
"body": "This is an example text message",
"format": "org.matrix.custom.html",
"formatted_body": "<b>This is an example text message</b>",
"msgtype": "m.text"
},
"event_id": "$143273582443PhrSn:example.org",
"origin_server_ts": 1432735824653,
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"type": "m.room.message",
"unsigned": {
"age": 1234
}
},
{
"content": {
"name": "The room name"
},
"event_id": "$143273582443PhrSn:example.org",
"origin_server_ts": 1432735824653,
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"state_key": "",
"type": "m.room.name",
"unsigned": {
"age": 1234
}
},
{
"content": {
"body": "Gangnam Style",
"info": {
"duration": 2140786,
"h": 320,
"mimetype": "video/mp4",
"size": 1563685,
"thumbnail_info": {
"h": 300,
"mimetype": "image/jpeg",
"size": 46144,
"w": 300
},
"thumbnail_url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe",
"w": 480
},
"msgtype": "m.video",
"url": "mxc://example.org/a526eYUSFFxlgbQYZmo442"
},
"event_id": "$143273582443PhrSn:example.org",
"origin_server_ts": 1432735824653,
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"type": "m.room.message",
"unsigned": {
"age": 1234
}
}
],
"end": "t47409-4357353_219380_26003_2265",
"start": "t47429-4392820_219380_26003_2265"
}
```
# Room Timestamp to Event API
The Room Timestamp to Event API endpoint fetches the `event_id` of the closest event to the given
timestamp (`ts` query parameter) in the given direction (`dir` query parameter).
Useful for cases like jump to date so you can start paginating messages from
a given date in the archive.
The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/timestamp_to_event
```
**Parameters**
The following path parameters are required:
* `room_id` - The ID of the room you wish to check.
The following query parameters are available:
* `ts` - a timestamp in milliseconds where we will find the closest event in
the given direction.
* `dir` - can be `f` or `b` to indicate forwards and backwards in time from the
given timestamp. Defaults to `f`.
**Response**
* `event_id` - converted from timestamp
# Block Room API
The Block Room admin API allows server admins to block and unblock rooms,
and query to see if a given room is blocked.


@@ -42,7 +42,6 @@ It returns a JSON body like the following:
"appservice_id": null,
"consent_server_notice_sent": null,
"consent_version": null,
"consent_ts": null,
"external_ids": [
{
"auth_provider": "<provider1>",
@@ -365,7 +364,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove the user's creation (registration) timestamp
- [Remove rate limit overrides](#override-ratelimiting-for-users)
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)
## Reset password
@@ -755,7 +753,6 @@ A response body like the following is returned:
"device_id": "QBUAZIFURK",
"display_name": "android",
"last_seen_ip": "1.2.3.4",
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
"last_seen_ts": 1474491775024,
"user_id": "<user_id>"
},
@@ -763,7 +760,6 @@ A response body like the following is returned:
"device_id": "AUIECTSRND",
"display_name": "ios",
"last_seen_ip": "1.2.3.5",
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
"last_seen_ts": 1474491775025,
"user_id": "<user_id>"
}
@@ -790,8 +786,6 @@ The following fields are returned in the JSON response body:
Absent if no name has been set.
- `last_seen_ip` - The IP address where this device was last seen.
(May be a few minutes out of date, for efficiency reasons).
- `last_seen_user_agent` - The user agent of the device when it was last seen.
(May be a few minutes out of date, for efficiency reasons).
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
devices was last seen. (May be a few minutes out of date, for efficiency reasons).
- `user_id` - Owner of device.
@@ -843,7 +837,6 @@ A response body like the following is returned:
"device_id": "<device_id>",
"display_name": "android",
"last_seen_ip": "1.2.3.4",
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
"last_seen_ts": 1474491775024,
"user_id": "<user_id>"
}
@@ -865,8 +858,6 @@ The following fields are returned in the JSON response body:
Absent if no name has been set.
- `last_seen_ip` - The IP address where this device was last seen.
(May be a few minutes out of date, for efficiency reasons).
- `last_seen_user_agent` - The user agent of the device when it was last seen.
(May be a few minutes out of date, for efficiency reasons).
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
devices was last seen. (May be a few minutes out of date, for efficiency reasons).
- `user_id` - Owner of device.
@@ -1155,41 +1146,3 @@ GET /_synapse/admin/v1/username_available?username=$localpart
The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
### Find a user based on their ID in an auth provider
The API is:
```
GET /_synapse/admin/v1/auth_providers/$provider/users/$external_id
```
When a user matched the given ID for the given provider, an HTTP code `200` with a response body like the following is returned:
```json
{
"user_id": "@hello:example.org"
}
```
**Parameters**
The following parameters should be set in the URL:
- `provider` - The ID of the authentication provider, as advertised by the [`GET /_matrix/client/v3/login`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login) API in the `m.login.sso` authentication method.
- `external_id` - The user ID from the authentication provider. Usually corresponds to the `sub` claim for OIDC providers, or to the `uid` attestation for SAML2 providers.
The `external_id` may have characters that are not URL-safe (typically `/`, `:` or `@`), so it is advised to URL-encode those parameters.
**Errors**
Returns a `404` HTTP status code if no user was found, with a response body like this:
```json
{
"errcode":"M_NOT_FOUND",
"error":"User not found"
}
```
_Added in Synapse 1.68.0._


@@ -34,45 +34,13 @@ the process of indexing it).
## Chain Cover Index
Synapse computes auth chain differences by pre-computing a "chain cover" index
for the auth chain in a room, allowing us to efficiently make reachability queries
like "is event `A` in the auth chain of event `B`?". We could do this with an index
that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
would be prohibitively large, scaling poorly as the room accumulates more state
events.
for the auth chain in a room, allowing efficient reachability queries like "is
event A in the auth chain of event B". This is done by assigning every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
is in the auth chain of `B`) if and only if either:
Instead, we break down the graph into *chains*. A chain is a subset of a DAG
with the following property: for any pair of events `E` and `F` in the chain,
the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
is given a *sequence number* local to that chain. The oldest event `E` in the
chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
sequence number 3; and so on.
Synapse ensures that each persisted event belongs to exactly one chain, and
tracks how the chains are connected to one another. This allows us to
efficiently answer reachability queries. Doing so uses less storage than
tracking reachability on an event-by-event basis, particularly when we have
fewer and longer chains. See
> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.
for the original idea or
> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
> pp. 893902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)
for a more modern take.
In practical terms, the chain cover assigns every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
(i.e. `A` is in the auth chain of `B`) if and only if either:
1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
sequence number; or
2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
`L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
@@ -81,9 +49,8 @@ There are actually two potential implementations, one where we store links from
each chain to every other reachable chain (the transitive closure of the links
graph), and one where we remove redundant links (the transitive reduction of the
links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
would not be stored. Synapse uses the former implementation so that it doesn't
need to recurse to test reachability between chains. This trades-off extra storage
in order to save CPU cycles and DB queries.
would not be stored. Synapse uses the former implementations so that it doesn't
need to recurse to test reachability between chains.
### Example


@@ -1,9 +1,9 @@
Deprecation Policy for Platform Dependencies
============================================
Synapse has a number of platform dependencies, including Python, Rust,
PostgreSQL and SQLite. This document outlines the policy towards which versions
we support, and when we drop support for versions in the future.
Synapse has a number of platform dependencies, including Python and PostgreSQL.
This document outlines the policy towards which versions we support, and when we
drop support for versions in the future.
Policy
@@ -17,14 +17,6 @@ Details on the upstream support life cycles for Python and PostgreSQL are
documented at [https://endoflife.date/python](https://endoflife.date/python) and
[https://endoflife.date/postgresql](https://endoflife.date/postgresql).
A Rust compiler is required to build Synapse from source. For any given release
the minimum required version may be bumped up to a recent Rust version, and so
people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).
The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/buster/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
Context
-------
@@ -39,15 +31,3 @@ long process.
By following the upstream support life cycles Synapse can ensure that its
dependencies continue to get security patches, while not requiring system admins
to constantly update their platform dependencies to the latest versions.
For Rust, the situation is a bit different given that a) the Rust foundation
does not generally support older Rust versions, and b) the library ecosystem
generally bump their minimum support Rust versions frequently. In general, the
Synapse team will try to avoid updating the dependency on Rust to the absolute
latest version, but introducing a formal policy is hard given the constraints of
the ecosystem.
On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.


@@ -28,9 +28,6 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
# 3. Get the source.
@@ -65,8 +62,6 @@ pipx install poetry
but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
for other installation methods.
Synapse requires Poetry version 1.2.0 or later.
Next, open a terminal and install dependencies as follows:
```sh
@@ -117,11 +112,6 @@ Some documentation also exists in [Synapse's GitHub
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.
When changes are made to any Rust code then you must call either `poetry install`
or `maturin develop` (if installed) to rebuild the Rust code. Using [`maturin`](https://github.com/PyO3/maturin)
is quicker than `poetry install`, so is recommended when making frequent
changes to the Rust code.
# 8. Test, test, test!
<a name="test-test-test"></a>
@@ -203,7 +193,7 @@ The database file can then be inspected with:
sqlite3 _trial_temp/test.db
```
Note that the database file is cleared at the beginning of each test run. Thus it
Note that the database file is cleared at the beginning of each test run. Thus it
will always only contain the data generated by the *last run test*. Though generally
when debugging, one is only running a single test anyway.


@@ -191,27 +191,3 @@ There are three separate aspects to this:
flavour will be accepted by SQLite 3.22, but will give a column whose
default value is the **string** `"FALSE"` - which, when cast back to a boolean
in Python, evaluates to `True`.
## `event_id` global uniqueness
In room versions `1` and `2` it's possible to end up with two events with the
same `event_id` (in the same or different rooms). After room version `3`, that
can only happen with a hash collision, which we basically hope will never
happen.
There are several places in Synapse and even Matrix APIs like [`GET
/_matrix/federation/v1/event/{eventId}`](https://spec.matrix.org/v1.1/server-server-api/#get_matrixfederationv1eventeventid)
where we assume that event IDs are globally unique.
But hash collisions are still possible, and by treating event IDs as room
scoped, we can reduce the possibility of a hash collision. When scoping
`event_id` in the database schema, it should be also accompanied by `room_id`
(`PRIMARY KEY (room_id, event_id)`) and lookups should be done through the pair
`(room_id, event_id)`.
There has been a lot of debate on this in places like
https://github.com/matrix-org/matrix-spec-proposals/issues/2779 and
[MSC2848](https://github.com/matrix-org/matrix-spec-proposals/pull/2848) which
has no resolution yet (as of 2022-09-01).


@@ -126,23 +126,6 @@ context of poetry's venv, without having to run `poetry shell` beforehand.
poetry install --extras all --remove-untracked
```
## ...delete everything and start over from scratch?
```shell
# Stop the current virtualenv if active
$ deactivate
# Remove all of the files from the current environment.
# Don't worry, even though it says "all", this will only
# remove the Poetry virtualenvs for the current project.
$ poetry env remove --all
# Reactivate Poetry shell to create the virtualenv again
$ poetry shell
# Install everything again
$ poetry install --extras all
```
## ...run a command in the `poetry` virtualenv?
Use `poetry run cmd args` when you need the python virtualenv context.
@@ -260,11 +243,14 @@ doesn't require poetry. (It's what we use in CI too). However, you could try
## Check the version of poetry with `poetry --version`.
The minimum version of poetry supported by Synapse is 1.2.
At the time of writing, the 1.2 series is beta only. We have seen some examples
where the lockfiles generated by 1.2 prereleasese aren't interpreted correctly
by poetry 1.1.x. For now, use poetry 1.1.14, which includes a critical
[change](https://github.com/python-poetry/poetry/pull/5973) needed to remain
[compatible with PyPI](https://github.com/pypi/warehouse/pull/11775).
It can also be useful to check the version of `poetry-core` in use. If you've
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep
poetry-core`.
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep poetry-core`.
## Clear caches: `poetry cache clear --all pypi`.
@@ -273,16 +259,6 @@ from PyPI. (This is what makes poetry seem slow when doing the first
`poetry install`.) Try `poetry cache list` and `poetry cache clear --all
<name of cache>` to see if that fixes things.
## Remove outdated egg-info
Delete the `matrix_synapse.egg-info/` directory from the root of your Synapse
install.
This stores some cached information about dependencies and often conflicts with
letting Poetry do the right thing.
## Try `--verbose` or `--dry-run` arguments.
Sometimes useful to see what poetry's internal logic is.


@@ -8,8 +8,7 @@ and allow server and room admins to configure how long messages should
be kept in a homeserver's database before being purged from it.
**Please note that, as this feature isn't part of the Matrix
specification yet, this implementation is to be considered as
experimental. There are known bugs which may cause database corruption.
Proceed with caution.**
experimental.**
A message retention policy is mainly defined by its `max_lifetime`
parameter, which defines how long a message can be kept around after


@@ -7,13 +7,7 @@
1. Enable Synapse metrics:
In `homeserver.yaml`, make sure `enable_metrics` is
set to `True`.
1. Enable the `/_synapse/metrics` Synapse endpoint that Prometheus uses to
collect data:
There are two methods of enabling the metrics endpoint in Synapse.
There are two methods of enabling metrics in Synapse.
The first serves the metrics as a part of the usual web server and
can be enabled by adding the \"metrics\" resource to the existing
@@ -47,6 +41,9 @@
- '0.0.0.0'
```
For both options, you will need to ensure that `enable_metrics` is
set to `True`.
1. Restart Synapse.
1. Add a Prometheus target for Synapse.


@@ -263,7 +263,7 @@ class MyAuthProvider:
return None
if self.credentials.get(username) == login_dict.get("my_field"):
return (self.api.get_qualified_user_id(username), None)
return self.api.get_qualified_user_id(username)
async def check_pass(
self,
@@ -280,5 +280,5 @@ class MyAuthProvider:
return None
if self.credentials.get(username) == login_dict.get("password"):
return (self.api.get_qualified_user_id(username), None)
return self.api.get_qualified_user_id(username)
```


@@ -174,9 +174,7 @@ oidc_providers:
1. Create a regular web application for Synapse
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
3. Add a rule with any name to add the `preferred_username` claim.
(See https://auth0.com/docs/customize/rules/create-rules for more information on how to create rules.)
3. Add a rule to add the `preferred_username` claim.
<details>
<summary>Code sample</summary>


@@ -45,10 +45,6 @@ listens to traffic on localhost. (Do not change `bind_addresses` to `127.0.0.1`
when using a containerized Synapse, as that will prevent it from responding
to proxied traffic.)
Optionally, you can also set
[`request_id_header`](../usage/configuration/config_documentation.md#listeners)
so that the server extracts and re-uses the same request ID format that the
reverse proxy is using.
## Reverse-proxy configuration examples


@@ -196,10 +196,6 @@ System requirements:
- Python 3.7 or later, up to Python 3.10.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
If building on an uncommon architecture for which pre-built wheels are
unavailable, you will need to have a recent Rust compiler installed. The easiest
way of installing the latest version is to use [rustup](https://rustup.rs/).
To install the Synapse homeserver run:
```sh
@@ -303,10 +299,9 @@ You may need to install the latest Xcode developer tools:
xcode-select --install
```
On ARM-based Macs you may need to install libjpeg and libpq.
You can use Homebrew (https://brew.sh):
On ARM-based Macs you may need to explicitly install libjpeg which is a pillow dependency. You can use Homebrew (https://brew.sh):
```sh
brew install jpeg libpq
brew install jpeg
```
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
@@ -511,13 +506,9 @@ email will be disabled.
### Registering a user
One way to create a new user is to do so from a client like
[Element](https://element.io/). This requires registration to be enabled via
the
[`enable_registration`](../usage/configuration/config_documentation.md#enable_registration)
setting.
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
Alternatively, you can create new users from the command line. This can be done as follows:
Alternatively, you can do so from the command line. This can be done as follows:
1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
installed via a prebuilt package, `register_new_matrix_user` should already be
@@ -529,7 +520,7 @@ Alternatively, you can create new users from the command line. This can be done
```
2. Run the following command:
```sh
register_new_matrix_user -c homeserver.yaml
register_new_matrix_user -c homeserver.yaml http://localhost:8008
```
This will prompt you to add details for the new user, and will then connect to
@@ -542,13 +533,12 @@ Make admin [no]:
Success!
```
This process uses a setting
[`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret),
which is shared between Synapse itself and the `register_new_matrix_user`
script. It doesn't matter what it is (a random value is generated by
`--generate-config`), but it should be kept secret, as anyone with knowledge of
it can register users, including admin accounts, on your server even if
`enable_registration` is `false`.
This process uses a setting `registration_shared_secret` in
`homeserver.yaml`, which is shared between Synapse itself and the
`register_new_matrix_user` script. It doesn't matter what it is (a random
value is generated by `--generate-config`), but it should be kept secret, as
anyone with knowledge of it can register users, including admin accounts,
on your server even if `enable_registration` is `false`.
### Setting up a TURN server


@@ -22,7 +22,7 @@ choose their own username.
In the first case - where users are automatically allocated a Matrix ID - it is
the responsibility of the mapping provider to normalise the SSO attributes and
map them to a valid Matrix ID. The [specification for Matrix
IDs](https://spec.matrix.org/latest/appendices/#user-identifiers) has some
IDs](https://matrix.org/docs/spec/appendices#user-identifiers) has some
information about what is considered valid.
If the mapping provider does not assign a Matrix ID, then Synapse will
@@ -37,10 +37,9 @@ as Synapse). The Synapse config is then modified to point to the mapping provide
## OpenID Mapping Providers
The OpenID mapping provider can be customized by editing the
[`oidc_providers.user_mapping_provider.module`](usage/configuration/config_documentation.md#oidc_providers)
config option.
`oidc_config.user_mapping_provider.module` config option.
`oidc_providers.user_mapping_provider.config` allows you to provide custom
`oidc_config.user_mapping_provider.config` allows you to provide custom
configuration options to the module. Check with the module's documentation for
what options it provides (if any). The options listed by default are for the
user mapping provider built in to Synapse. If using a custom module, you should
@@ -59,7 +58,7 @@ A custom mapping provider must specify the following methods:
- This method should have the `@staticmethod` decoration.
- Arguments:
- `config` - A `dict` representing the parsed content of the
`oidc_providers.user_mapping_provider.config` homeserver config option.
`oidc_config.user_mapping_provider.config` homeserver config option.
Runs on homeserver startup. Providers should extract and validate
any option values they need here.
- Whatever is returned will be passed back to the user mapping provider module's
@@ -103,7 +102,7 @@ A custom mapping provider must specify the following methods:
will be returned as part of the response during a successful login.
Note that care should be taken to not overwrite any of the parameters
usually returned as part of the [login response](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login).
usually returned as part of the [login response](https://matrix.org/docs/spec/client_server/latest#post-matrix-client-r0-login).
### Default OpenID Mapping Provider
@@ -114,8 +113,7 @@ specified in the config. It is located at
## SAML Mapping Providers
The SAML mapping provider can be customized by editing the
[`saml2_config.user_mapping_provider.module`](docs/usage/configuration/config_documentation.md#saml2_config)
config option.
`saml2_config.user_mapping_provider.module` config option.
`saml2_config.user_mapping_provider.config` allows you to provide custom
configuration options to the module. Check with the module's documentation for

View File

@@ -5,8 +5,6 @@ worker_name: generic_worker1
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093
worker_main_http_uri: http://localhost:8008/
worker_listeners:
- type: http
port: 8083

View File

@@ -9,7 +9,7 @@ in, allowing them to specify custom templates:
```yaml
templates:
custom_template_directory: /path/to/custom/templates/
custom_templates_directory: /path/to/custom/templates/
```
If this setting is not set, or the files named below are not found within the directory,

View File

@@ -15,8 +15,9 @@ this document.
The website <https://endoflife.date> also offers convenient
summaries.
- If Synapse was installed using [prebuilt packages](setup/installation.md#prebuilt-packages),
you will need to follow the normal process for upgrading those packages.
- If Synapse was installed using [prebuilt
packages](setup/installation.md#prebuilt-packages), you will need to follow the
normal process for upgrading those packages.
- If Synapse was installed using pip then upgrade to the latest
version by running:
@@ -88,103 +89,6 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```
# Upgrading to v1.68.0
Two changes announced in the upgrade notes for v1.67.0 have now landed in v1.68.0.
## SQLite version requirement
Synapse now requires a SQLite version of 3.27.0 or higher if SQLite is configured as
Synapse's database.
Installations using
- Docker images [from `matrixdotorg`](https://hub.docker.com/r/matrixdotorg/synapse),
- Debian packages [from Matrix.org](https://packages.matrix.org/), or
- a PostgreSQL database
are not affected.
## Rust requirement when building from source.
Building from a source checkout of Synapse now requires a recent Rust compiler
(currently Rust 1.58.1, but see also the
[Platform Dependency Policy](https://matrix-org.github.io/synapse/latest/deprecation_policy.html)).
Installations using
- Docker images [from `matrixdotorg`](https://hub.docker.com/r/matrixdotorg/synapse),
- Debian packages [from Matrix.org](https://packages.matrix.org/), or
- PyPI wheels via `pip install matrix-synapse` (on supported platforms and architectures)
will not be affected.
# Upgrading to v1.67.0
## Direct TCP replication is no longer supported: migrate to Redis
Redis support was added in v1.13.0 with it becoming the recommended method in
v1.18.0. It replaced the old direct TCP connections (which was deprecated as of
v1.18.0) to the main process. With Redis, rather than all the workers connecting
to the main process, all the workers and the main process connect to Redis,
which relays replication commands between processes. This can give a significant
CPU saving on the main process and is a prerequisite for upcoming
performance improvements.
To migrate to Redis add the [`redis` config](./workers.md#shared-configuration),
and remove the TCP `replication` listener from config of the master and
`worker_replication_port` from worker config. Note that a HTTP listener with a
`replication` resource is still required.
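As a rough sketch (ports and layout are illustrative, not prescriptive), the shared configuration gains a `redis` block, while the main process keeps an HTTP listener exposing the `replication` resource:

```yaml
# Shared configuration: enable Redis-based replication.
redis:
  enabled: true

# Main process: an HTTP listener with a `replication` resource is still required.
listeners:
  - port: 9093
    type: http
    resources:
      - names: [replication]
```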
## Minimum version of Poetry is now v1.2.0
The minimum supported version of poetry is now 1.2. This should only affect
those installing from a source checkout.
## Rust requirement in the next release
From the next major release (v1.68.0) installing Synapse from a source checkout
will require a recent Rust compiler. Those using packages or
`pip install matrix-synapse` will not be affected.
The simplest way of installing Rust is via [rustup.rs](https://rustup.rs/)
## SQLite version requirement in the next release
From the next major release (v1.68.0) Synapse will require SQLite 3.27.0 or
higher. Synapse v1.67.0 will be the last major release supporting SQLite
versions 3.22 to 3.26.
Those using Docker images or Debian packages from Matrix.org will not be
affected. If you have installed from source, you should check the version of
SQLite used by Python with:
```shell
python -c "import sqlite3; print(sqlite3.sqlite_version)"
```
If this is too old, refer to your distribution for advice on upgrading.
# Upgrading to v1.66.0
## Delegation of email validation no longer supported
As of this version, Synapse no longer allows the tasks of verifying email address
ownership, and password reset confirmation, to be delegated to an identity server.
This removal was previously planned for Synapse 1.64.0, but was
[delayed](https://github.com/matrix-org/synapse/issues/13421) until now to give
homeserver administrators more notice of the change.
To continue to allow users to add email addresses to their homeserver accounts,
and perform password resets, make sure that Synapse is configured with a working
email server in the [`email` configuration
section](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
(including, at a minimum, a `notif_from` setting.)
Specifying an `email` setting under `account_threepid_delegates` will now cause
an error at startup.
# Upgrading to v1.64.0
## Deprecation of the ability to delegate e-mail verification to identity servers
@@ -1277,7 +1181,7 @@ updated.
When setting up worker processes, we now recommend the use of a Redis
server for replication. **The old direct TCP connection method is
deprecated and will be removed in a future release.** See
the [worker documentation](https://matrix-org.github.io/synapse/v1.66/workers.html) for more details.
[workers](workers.md) for more details.
# Upgrading to v1.14.0

View File

@@ -5,9 +5,8 @@
Many of the API calls in the admin api will require an `access_token` for a
server admin. (Note that a server admin is distinct from a room admin.)
An existing user can be marked as a server admin by updating the database directly.
A user can be marked as a server admin by updating the database directly, e.g.:
Check your [database settings](config_documentation.md#database) in the configuration file, connect to the correct database using either `psql [database name]` (if using PostgreSQL) or `sqlite3 path/to/your/database.db` (if using SQLite) and elevate the user `@foo:bar.com` to administrator.
```sql
UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
```

View File

@@ -2,11 +2,11 @@
This API allows you to manage tokens which can be used to authenticate
registration requests, as proposed in
[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md)
and stabilised in version 1.2 of the Matrix specification.
[MSC3231](https://github.com/matrix-org/matrix-doc/blob/main/proposals/3231-token-authenticated-registration.md).
To use it, you will need to enable the `registration_requires_token` config
option, and authenticate by providing an `access_token` for a server admin:
see [Admin API](../admin_api).
see [Admin API](../../usage/administration/admin_api).
Note that this API is still experimental; not all clients may support it yet.
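For reference, a hedged sketch of the corresponding homeserver setting (see the configuration manual for the authoritative description):

```yaml
# Require a registration token when registering new accounts.
registration_requires_token: true
```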
## Registration token objects

View File

@@ -2,9 +2,9 @@
How do I become a server admin?
---
If your server already has an admin account you should use the [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not) to promote other accounts to become admins.
If your server already has an admin account you should use the user admin API to promote other accounts to become admins. See [User Admin API](../../admin_api/user_admin_api.md#Change-whether-a-user-is-a-server-administrator-or-not)
If you don't have any admin accounts yet you won't be able to use the admin API, so you'll have to edit the database manually. Manually editing the database is generally not recommended so once you have an admin account: use the admin APIs to make further changes.
If you don't have any admin accounts yet you won't be able to use the admin API so you'll have to edit the database manually. Manually editing the database is generally not recommended so once you have an admin account, use the admin APIs to make further changes.
```sql
UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
@@ -32,11 +32,9 @@ What users are registered on my server?
SELECT NAME from users;
```
Manually resetting passwords
Manually resetting passwords:
---
Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the [admin API](../../admin_api/user_admin_api.md#reset-password).
See https://github.com/matrix-org/synapse/blob/master/README.rst#password-reset
I have a problem with my server. Can I just delete my database and start again?
---
@@ -103,83 +101,3 @@ LIMIT 10;
You can also use the [List Room API](../../admin_api/rooms.md#list-room-api)
and `order_by` `state_events`.
People can't accept room invitations from me
---
The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs:
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
This is normally caused by a misconfiguration in your reverse-proxy. See [the reverse proxy docs](docs/reverse_proxy.md) and double-check that your settings are correct.
Help!! Synapse is slow and eats all my RAM/CPU!
-----------------------------------------------
First, ensure you are running the latest version of Synapse, using Python 3
with a [PostgreSQL database](../../postgres.md).
Synapse's architecture is quite RAM hungry currently - we deliberately
cache a lot of recent room data and metadata in RAM in order to speed up
common requests. We'll improve this in the future, but for now the easiest
way to either reduce the RAM usage (at the risk of slowing things down)
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
variable. The default is 0.5, which can be decreased to reduce RAM usage
in memory constrained environments, or increased if performance starts to
degrade.
However, degraded performance due to a low cache factor, common on
machines with slow disks, often leads to explosions in memory use due to
backlogged requests. In this case, reducing the cache factor will make
things worse. Instead, try increasing it drastically. 2.0 is a good
starting value.
Using [libjemalloc](https://jemalloc.net) can also yield a significant
improvement in overall memory use, and especially in terms of giving back
RAM to the OS. To use it, the library must simply be put in the
LD_PRELOAD environment variable when launching Synapse. On Debian, this
can be done by installing the `libjemalloc1` package and adding this
line to `/etc/default/matrix-synapse`:
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.1
This made a significant difference on Python 2.7 - it's unclear how
much of an improvement it provides on Python 3.x.
If you're encountering high CPU use by the Synapse process itself, you
may be affected by a bug with presence tracking that leads to a
massive excess of outgoing federation requests (see [discussion](https://github.com/matrix-org/synapse/issues/3971)). If metrics
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by disabling presence
in the Synapse config file: [see here](../configuration/config_documentation.md#presence).
Running out of File Handles
---------------------------
If Synapse runs out of file handles, it typically fails badly - live-locking
at 100% CPU, and/or failing to accept new TCP connections (blocking the
connecting client). Matrix currently can legitimately use a lot of file handles,
thanks to busy rooms like `#matrix:matrix.org` containing hundreds of participating
servers. The first time a server talks in a room it will try to connect
simultaneously to all participating servers, which could exhaust the available
file descriptors between DNS queries & HTTPS sockets, especially if DNS is slow
to respond. (We need to improve the routing algorithm so that it is better than
full mesh, but as of March 2019 this hasn't happened yet).
If you hit this failure mode, we recommend increasing the maximum number of
open file handles to be at least 4096 (assuming a default of 1024 or 256).
This is typically done by editing ``/etc/security/limits.conf``
Separately, Synapse may leak file handles if inbound HTTP requests get stuck
during processing - e.g. blocked behind a lock or talking to a remote server etc.
This is best diagnosed by matching up the 'Received request' and 'Processed request'
log lines and looking for any 'Processed request' lines which take more than
a few seconds to execute. Please let us know at [`#synapse:matrix.org`](https://matrix.to/#/#synapse-dev:matrix.org) if
you see this failure mode so we can help debug it.

View File

@@ -1,84 +0,0 @@
# Monthly Active Users
Synapse can be configured to record the number of monthly active users (also referred to as MAU) on a given homeserver.
For clarity's sake, MAU only tracks local users.
Please note that the metrics recorded by the [Homeserver Usage Stats](../../usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
are calculated differently. The `monthly_active_users` from the usage stats does not take into account any
of the rules below, and counts any users who have made a request to the homeserver in the last 30 days.
See the [configuration manual](../../usage/configuration/config_documentation.md#limit_usage_by_mau) for details on how to configure MAU.
## Calculating active users
Individual user activity is measured in active days. If a user performs an action, the exact time of that action is then recorded. When
calculating the MAU figure, any users with a recorded action in the last 30 days are considered part of the cohort. Days are measured
as a rolling window from the current system time to 30 days ago.
So for example, if Synapse were to calculate the active users on the 15th July at 13:25, it would include any activity from 15th June 13:25 onwards.
A user is **never** considered active if they are either:
- Part of the trial day cohort (described below)
- Owned by an application service.
- Note: This **only** covers users that are part of an application service `namespaces.users` registration. The namespace
must also be marked as `exclusive`.
Otherwise, any request to Synapse will mark the user as active. Please note that registration will not mark a user as active *unless*
they register with a 3pid that is included in the config field `mau_limits_reserved_threepids`.
The Prometheus metric for MAU is refreshed every 5 minutes.
Once an hour, Synapse checks to see if any users are inactive (i.e. their most recent activity timestamp is more than 30 days old). These users
are removed from the active users cohort. If they then become active, they are immediately restored to the cohort.
It is important to note that **deactivated** users are not immediately removed from the pool of active users, but as these users won't
perform actions they will eventually be removed from the cohort.
### Trial days
If the config option `mau_trial_days` is set, a user must have been active this many days **after** registration to be active. A user is in the
trial period if their registration timestamp (also known as the `creation_ts`) is less than `mau_trial_days` old.
As an example, if `mau_trial_days` is set to `3` and a user is active **after** 3 days (72 hours from registration time) then they will be counted as active.
The `mau_appservice_trial_days` config further extends this rule by applying different durations depending on the `appservice_id` of the user.
Users registered by an application service will be recorded with an `appservice_id` matching the `id` key in the registration file for that service.
## Limiting usage of the homeserver when the maximum MAU is reached
If both config options `limit_usage_by_mau` and `max_mau_value` are set, and the current MAU value exceeds the maximum value, the
homeserver will begin to block some actions.
Individual users matching **any** of the below criteria never have their actions blocked:
- Considered part of the cohort of MAU users.
- Considered part of the trial period.
- Registered as a `support` user.
- Application service users if `track_appservice_user_ips` is NOT set.
Please note that server admins are **not** exempt from blocking.
The following actions are blocked when the MAU limit is exceeded:
- Logging in
- Sending events
- Creating rooms
- Syncing
Registration is also blocked for all new signups *unless* the user is registering with a threepid included in the `mau_limits_reserved_threepids`
config value.
When a request is blocked, the response will have the `errcode` `M_RESOURCE_LIMIT_EXCEEDED`.
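A hedged sketch of how these options might fit together in `homeserver.yaml` (values are illustrative only; the reserved 3PID shown is a made-up example):

```yaml
# Track MAU and block the actions above once the cap is reached.
limit_usage_by_mau: true
max_mau_value: 50

# Users only count as active once they have been active 3 days after registration.
mau_trial_days: 3

# Reserved 3PIDs may still register even when the cap is reached.
mau_limits_reserved_threepids:
  - medium: email
    address: reserved-user@example.com
```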
## Metrics
Synapse records several different prometheus metrics for MAU.
`synapse_admin_mau:current` records the current MAU figure for native (non-application-service) users.
`synapse_admin_mau:max` records the maximum MAU as dictated by the `max_mau_value` config value.
`synapse_admin_mau_current_mau_by_service` records the current MAU including application service users. The label `app_service` can be used
to filter by a specific service ID. This *also* includes non-application-service users under `app_service=native` .
`synapse_admin_mau:registered_reserved_users` records the number of users specified in `mau_limits_reserved_threepids` which have
registered accounts on the homeserver.

View File

@@ -12,14 +12,14 @@ See the following for how to decode the dense data available from the default lo
| Part | Explanation |
| ----- | ------------ |
| AAAA | Timestamp request was logged (not received) |
| AAAA | Timestamp request was logged (not recieved) |
| BBBB | Logger name (`synapse.access.(http\|https).<tag>`, where 'tag' is defined in the `listeners` config section, normally the port) |
| CCCC | Line number in code |
| DDDD | Log Level |
| EEEE | Request Identifier (This identifier is shared by related log lines)|
| FFFF | Source IP (Or X-Forwarded-For if enabled) |
| GGGG | Server Port |
| HHHH | Federated Server or Local User making request (blank if unauthenticated or not supplied).<br/>If this is of the form `@aaa:example.com|@bbb:example.com`, then that means that `@aaa:example.com` is authenticated but they are controlling `@bbb:example.com`, e.g. if `aaa` is controlling `bbb` [via the admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#login-as-a-user). |
| HHHH | Federated Server or Local User making request (blank if unauthenticated or not supplied) |
| IIII | Total Time to process the request |
| JJJJ | Time to send response over network once generated (this may be negative if the socket is closed before the response is generated)|
| KKKK | Userland CPU time |

View File

@@ -72,6 +72,49 @@ apply if you want your config file to be read properly. A few helpful things to
In addition, each setting has an example of its usage, with the proper indentation
shown.
## Contents
[Modules](#modules)
[Server](#server)
[Homeserver Blocking](#homeserver-blocking)
[TLS](#tls)
[Federation](#federation)
[Caching](#caching)
[Database](#database)
[Logging](#logging)
[Ratelimiting](#ratelimiting)
[Media Store](#media-store)
[Captcha](#captcha)
[TURN](#turn)
[Registration](#registration)
[API Configuration](#api-configuration)
[Signing Keys](#signing-keys)
[Single Sign On Integration](#single-sign-on-integration)
[Push](#push)
[Rooms](#rooms)
[Opentracing](#opentracing)
[Workers](#workers)
[Background Updates](#background-updates)
## Modules
Server admins can expand Synapse's functionality with external modules.
@@ -431,27 +474,19 @@ Sub-options for each listener include:
* `metrics`: (see the docs [here](../../metrics-howto.md)),
* `replication`: (see the docs [here](../../workers.md)).
* `tls`: set to true to enable TLS for this listener. Will use the TLS key/cert specified in tls_private_key_path / tls_certificate_path.
* `x_forwarded`: Only valid for an 'http' listener. Set to true to use the X-Forwarded-For header as the client IP. Useful when Synapse is
behind a [reverse-proxy](../../reverse_proxy.md).
* `request_id_header`: The header extracted from each incoming request that is
used as the basis for the request ID. The request ID is used in
[logs](../administration/request_log.md#request-log-format) and tracing to
correlate and match up requests. When unset, Synapse will automatically
generate sequential request IDs. This option is useful when Synapse is behind
a [reverse-proxy](../../reverse_proxy.md).
_Added in Synapse 1.68.0._
behind a reverse-proxy.
* `resources`: Only valid for an 'http' listener. A list of resources to host
on this port. Sub-options for each resource are:
* `names`: a list of names of HTTP resources. See below for a list of valid resource names.
* `compress`: set to true to enable gzip compression on HTTP bodies for this resource. This is currently only supported with the
`client`, `consent`, `metrics` and `federation` resources.
* `compress`: set to true to enable HTTP compression for this resource.
* `additional_resources`: Only valid for an 'http' listener. A map of
additional endpoints which should be loaded via dynamic modules.
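Putting the sub-options above together, a minimal sketch of a single HTTP listener (the port and resource names are illustrative):

```yaml
listeners:
  - port: 8008
    type: http
    tls: false
    x_forwarded: true   # trust X-Forwarded-For set by the reverse proxy
    resources:
      - names: [client, federation]
        compress: false
```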
@@ -602,8 +637,6 @@ server owner wants to limit to the number of monthly active users. When enabled
reached the server returns a `ResourceLimitError` with error type `Codes.RESOURCE_LIMIT_EXCEEDED`.
Defaults to false. If this is enabled, a value for `max_mau_value` must also be set.
See [Monthly Active Users](../administration/monthly_active_users.md) for details on how to configure MAU.
Example configuration:
```yaml
limit_usage_by_mau: true
@@ -768,10 +801,6 @@ allowed_avatar_mimetypes: ["image/png", "image/jpeg", "image/gif"]
How long to keep redacted events in unredacted form in the database. After
this period redacted events get replaced with their redacted form in the DB.
Synapse will check whether the retention period has concluded for redacted
events every 5 minutes. Thus, even if this option is set to `0`, Synapse may
still take up to 5 minutes to purge redacted events from the database.
Defaults to `7d`. Set to `null` to disable.
Example configuration:
@@ -858,11 +887,7 @@ which are older than the room's maximum retention period. Synapse will also
filter events received over federation so that events that should have been
purged are ignored and not stored again.
The message retention policies feature is disabled by default. Please be advised
that enabling this feature carries some risk. There are known bugs with the implementation
which can cause database corruption. Setting retention to delete older history
is less risky than deleting newer history but in general caution is advised when enabling this
experimental feature. You can read more about this feature [here](../../message_retention_policies.md).
The message retention policies feature is disabled by default.
This setting has the following sub-options:
* `default_policy`: Default retention policy. If set, Synapse will apply it to rooms that lack the
@@ -1073,28 +1098,26 @@ allow_device_name_lookup_over_federation: true
---
## Caching ##
Options related to caching.
Options related to caching
---
### `event_cache_size`
The number of events to cache in memory. Defaults to 10K. Like other caches,
this is affected by `caches.global_factor` (see below).
Note that this option is not part of the `caches` section.
The number of events to cache in memory. Not affected by
`caches.global_factor`. Defaults to 10K.
Example configuration:
```yaml
event_cache_size: 15K
```
---
### `caches` and associated values
### `cache` and associated values
A cache 'factor' is a multiplier that can be applied to each of
Synapse's caches in order to increase or decrease the maximum
number of entries that can be stored.
`caches` can be configured through the following sub-options:
Caching can be configured through the following sub-options:
* `global_factor`: Controls the global cache factor, which is the default cache factor
for all caches if a specific factor for that cache is not otherwise
@@ -1156,7 +1179,6 @@ number of entries that can be stored.
Example configuration:
```yaml
event_cache_size: 15K
caches:
global_factor: 1.0
per_cache_factors:
@@ -1402,7 +1424,7 @@ This option specifies several limits for login:
client is attempting to log into. Defaults to `per_second: 0.17`,
`burst_count: 3`.
* `failed_attempts` ratelimits login requests based on the account the
* `failted_attempts` ratelimits login requests based on the account the
client is attempting to log into, based on the amount of failed login
attempts for this account. Defaults to `per_second: 0.17`, `burst_count: 3`.
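As an illustration of the structure only (assuming the option is named `rc_login`, as in recent Synapse releases; the option name itself is not shown in this excerpt), the defaults described above would be written as:

```yaml
rc_login:
  account:
    per_second: 0.17
    burst_count: 3
  failed_attempts:
    per_second: 0.17
    burst_count: 3
```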
@@ -1836,7 +1858,7 @@ Example configuration:
max_spider_size: 8M
```
---
### `url_preview_accept_language`
### `url_preview_language`
A list of values for the Accept-Language HTTP header used when
downloading webpages during URL preview generation. This allows
@@ -1884,8 +1906,8 @@ See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
---
### `recaptcha_public_key`
This homeserver's ReCAPTCHA public key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is enabled.
This homeserver's ReCAPTCHA public key. Must be specified if `enable_registration_captcha` is
enabled.
Example configuration:
```yaml
@@ -1894,8 +1916,7 @@ recaptcha_public_key: "YOUR_PUBLIC_KEY"
---
### `recaptcha_private_key`
This homeserver's ReCAPTCHA private key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is
This homeserver's ReCAPTCHA private key. Must be specified if `enable_registration_captcha` is
enabled.
Example configuration:
@@ -1905,11 +1926,9 @@ recaptcha_private_key: "YOUR_PRIVATE_KEY"
---
### `enable_registration_captcha`
Set to `true` to require users to complete a CAPTCHA test when registering an account.
Requires a valid ReCaptcha public/private key.
Defaults to `false`.
Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
Set to true to enable ReCaptcha checks when registering, preventing signup
unless a captcha is answered. Requires a valid ReCaptcha public/private key.
Defaults to false.
Example configuration:
```yaml
@@ -1985,21 +2004,9 @@ Registration can be rate-limited using the parameters in the [Ratelimiting](#rat
---
### `enable_registration`
Enable registration for new users. Defaults to `false`.
It is highly recommended that if you enable registration, you set one or more
or the following options, to avoid abuse of your server by "bots":
* [`enable_registration_captcha`](#enable_registration_captcha)
* [`registrations_require_3pid`](#registrations_require_3pid)
* [`registration_requires_token`](#registration_requires_token)
(In order to enable registration without any verification, you must also set
[`enable_registration_without_verification`](#enable_registration_without_verification).)
Note that even if this setting is disabled, new accounts can still be created
via the admin API if
[`registration_shared_secret`](#registration_shared_secret) is set.
Enable registration for new users. Defaults to false. It is highly recommended that if you enable registration,
you use either captcha, email, or token-based verification to verify that new users are not bots. In order to enable registration
without any verification, you must also set `enable_registration_without_verification` to true.
Example configuration:
```yaml
@@ -2007,21 +2014,88 @@ enable_registration: true
```
---
### `enable_registration_without_verification`
Enable registration without email or captcha verification. Note: this option is *not* recommended,
as registration without verification is a known vector for spam and abuse. Defaults to `false`. Has no effect
unless [`enable_registration`](#enable_registration) is also enabled.
as registration without verification is a known vector for spam and abuse. Defaults to false. Has no effect
unless `enable_registration` is also enabled.
Example configuration:
```yaml
enable_registration_without_verification: true
```
---
### `session_lifetime`
Time that a user's session remains valid for, after they log in.
Note that this is not currently compatible with guest logins.
Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
logged in.
By default, this is infinite.
Example configuration:
```yaml
session_lifetime: 24h
```
----
### `refreshable_access_token_lifetime`
Time that an access token remains valid for, if the session is using refresh tokens.
For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).
Note that this only applies to clients which advertise support for refresh tokens.
Note also that this is calculated at login time and refresh time: changes are not applied to
existing sessions until they are refreshed.
By default, this is 5 minutes.
Example configuration:
```yaml
refreshable_access_token_lifetime: 10m
```
---
### `refresh_token_lifetime`
Time that a refresh token remains valid for (provided that it is not
exchanged for another one first).
This option can be used to automatically log-out inactive sessions.
Please see the manual for more information.
Note also that this is calculated at login time and refresh time:
changes are not applied to existing sessions until they are refreshed.
By default, this is infinite.
Example configuration:
```yaml
refresh_token_lifetime: 24h
```
---
### `nonrefreshable_access_token_lifetime`
Time that an access token remains valid for, if the session is NOT
using refresh tokens.
Please note that not all clients support refresh tokens, so setting
this to a short value may be inconvenient for some users who will
then be logged out frequently.
Note also that this is calculated at login time: changes are not applied
retrospectively to existing sessions for users that have already logged in.
By default, this is infinite.
Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```
---
### `registrations_require_3pid`
If this is set, users must provide all of the specified types of 3PID when registering an account.
Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
If this is set, the user must provide all of the specified types of 3PID when registering.
Example configuration:
```yaml
@@ -2069,11 +2143,9 @@ enable_3pid_lookup: false
Require users to submit a token during registration.
Tokens can be managed using the admin [API](../administration/admin_api/registration_tokens.md).
Note that `enable_registration` must be set to true.
Disabling this option will not delete any tokens previously generated.
Defaults to `false`. Set to `true` to enable.
Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
Defaults to false. Set to true to enable.
Example configuration:
```yaml
@@ -2082,39 +2154,13 @@ registration_requires_token: true
---
### `registration_shared_secret`
If set, allows registration of standard or admin accounts by anyone who has the
shared secret, even if [`enable_registration`](#enable_registration) is not
set.
This is primarily intended for use with the `register_new_matrix_user` script
(see [Registering a user](../../setup/installation.md#registering-a-user));
however, the interface is [documented](../admin_api/register_api.html).
See also [`registration_shared_secret_path`](#registration_shared_secret_path).
If set, allows registration of standard or admin accounts by anyone who
has the shared secret, even if registration is otherwise disabled.
Example configuration:
```yaml
registration_shared_secret: <PRIVATE STRING>
```
---
### `registration_shared_secret_path`
An alternative to [`registration_shared_secret`](#registration_shared_secret):
allows the shared secret to be specified in an external file.
The file should be a plain text file, containing only the shared secret.
If this file does not exist, Synapse will create a new registration
shared secret on startup and store it in this file.
Example configuration:
```yaml
registration_shared_secret_path: /path/to/secrets/file
```
_Added in Synapse 1.67.0._
---
### `bcrypt_rounds`
@@ -2169,10 +2215,7 @@ their account.
by the Matrix Identity Service API
[specification](https://matrix.org/docs/spec/identity_service/latest).)
*Deprecated in Synapse 1.64.0*: The `email` option is deprecated.
*Removed in Synapse 1.66.0*: The `email` option has been removed.
If present, Synapse will report a configuration error on startup.
*Updated in Synapse 1.64.0*: The `email` option is deprecated.
Example configuration:
```yaml
@@ -2345,79 +2388,6 @@ Example configuration:
```yaml
inhibit_user_in_use_error: true
```
---
## User session management
---
### `session_lifetime`
Time that a user's session remains valid for, after they log in.
Note that this is not currently compatible with guest logins.
Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
logged in.
By default, this is infinite.
Example configuration:
```yaml
session_lifetime: 24h
```
----
### `refreshable_access_token_lifetime`
Time that an access token remains valid for, if the session is using refresh tokens.
For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).
Note that this only applies to clients which advertise support for refresh tokens.
Note also that this is calculated at login time and refresh time: changes are not applied to
existing sessions until they are refreshed.
By default, this is 5 minutes.
Example configuration:
```yaml
refreshable_access_token_lifetime: 10m
```
---
### `refresh_token_lifetime`
Time that a refresh token remains valid for (provided that it is not
exchanged for another one first).
This option can be used to automatically log-out inactive sessions.
Please see the manual for more information.
Note also that this is calculated at login time and refresh time:
changes are not applied to existing sessions until they are refreshed.
By default, this is infinite.
Example configuration:
```yaml
refresh_token_lifetime: 24h
```
---
### `nonrefreshable_access_token_lifetime`
Time that an access token remains valid for, if the session is NOT
using refresh tokens.
Please note that not all clients support refresh tokens, so setting
this to a short value may be inconvenient for some users who will
then be logged out frequently.
Note also that this is calculated at login time: changes are not applied
retrospectively to existing sessions for users that have already logged in.
By default, this is infinite.
Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```
---
## Metrics ###
Config options related to metrics.
@@ -2495,7 +2465,7 @@ report_stats_endpoint: https://example.com/report-usage-stats/push
Config settings related to the client/server API
---
### `room_prejoin_state`
### `room_prejoin_state:`
Controls for the state that is shared with users who receive an invite
to a room. By default, the following state event types are shared with users who
@@ -2567,13 +2537,9 @@ track_appservice_user_ips: true
---
### `macaroon_secret_key`
A secret which is used to sign
- access token for guest users,
- short-term login token used during SSO logins (OIDC or SAML2) and
- token used for unsubscribing from email notifications.
If none is specified, the `registration_shared_secret` is used, if one is given;
otherwise, a secret key is derived from the signing key.
A secret which is used to sign access tokens. If none is specified,
the `registration_shared_secret` is used, if one is given; otherwise,
a secret key is derived from the signing key.
Example configuration:
```yaml
@@ -2597,10 +2563,7 @@ Config options relating to signing keys
---
### `signing_key_path`
Path to the signing key to sign events and federation requests with.
*New in Synapse 1.67*: If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.
Path to the signing key to sign messages with.
Example configuration:
```yaml
@@ -2635,7 +2598,7 @@ Example configuration:
key_refresh_interval: 2d
```
---
### `trusted_key_servers`
### `trusted_key_servers:`
The trusted servers to download signing keys from.
@@ -2705,10 +2668,13 @@ key_server_signing_keys_path: "key_server_signing_keys.key"
The following settings can be used to make Synapse use a single sign-on
provider for authentication, instead of its internal password database.
You will probably also want to set the following options to `false` to
You will probably also want to set the following options to false to
disable the regular login/registration flows:
* [`enable_registration`](#enable_registration)
* [`password_config.enabled`](#password_config)
* `enable_registration`
* `password_config.enabled`
You will also want to investigate the settings under the "sso" configuration
section below.
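A short sketch of those two settings together (both option names appear above):

```yaml
# Disable the regular login/registration flows when a single sign-on
# provider is handling authentication instead.
enable_registration: false
password_config:
  enabled: false
```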
---
### `saml2_config`
@@ -3415,7 +3381,7 @@ user_directory:
For detailed instructions on user consent configuration, see [here](../../consent_tracking.md).
Parts of this section are required if enabling the `consent` resource under
[`listeners`](#listeners), in particular `template_dir` and `version`.
`listeners`, in particular `template_dir` and `version`. # TODO: link `listeners`
* `template_dir`: gives the location of the templates for the HTML forms.
This directory should contain one subdirectory per language (eg, `en`, `fr`),
@@ -3427,7 +3393,7 @@ Parts of this section are required if enabling the `consent` resource under
parameter.
* `server_notice_content`: if enabled, will send a user a "Server Notice"
asking them to consent to the privacy policy. The [`server_notices` section](#server_notices)
asking them to consent to the privacy policy. The `server_notices` section ##TODO: link
must also be configured for this to work. Notices will *not* be sent to
guest users unless `send_server_notice_to_guests` is set to true.

View File

@@ -1 +1 @@
window.SYNAPSE_VERSION = 'v1.68';
window.SYNAPSE_VERSION = 'v1.64';

View File

@@ -32,8 +32,13 @@ stream between all configured Synapse processes. Additionally, processes may
make HTTP requests to each other, primarily for operations which need to wait
for a reply ─ such as sending an event.
All the workers and the main process connect to Redis, which relays replication
commands between processes.
Redis support was added in v1.13.0 with it becoming the recommended method in
v1.18.0. It replaced the old direct TCP connections (which is deprecated as of
v1.18.0) to the main process. With Redis, rather than all the workers connecting
to the main process, all the workers and the main process connect to Redis,
which relays replication commands between processes. This can give a significant
cpu saving on the main process and will be a prerequisite for upcoming
performance improvements.
If Redis support is enabled Synapse will use it as a shared cache, as well as a
pub/sub mechanism.
@@ -112,26 +117,23 @@ redis:
enabled: true
```
See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option.
See the sample config for the full documentation of each option.
Under **no circumstances** should the replication listener be exposed to the
public internet; replication traffic is:
* always unencrypted
* unauthenticated, unless `worker_replication_secret` is configured
public internet; it has no authentication and is unencrypted.
### Worker configuration
In the config file for each worker, you must specify:
* The type of worker (`worker_app`). The currently available worker applications are listed below.
* A unique name for the worker (`worker_name`).
* The HTTP replication endpoint that it should talk to on the main synapse process
(`worker_replication_host` and `worker_replication_http_port`)
* If handling HTTP requests, a `worker_listeners` option with an `http`
listener, in the same way as the `listeners` option in the shared config.
* If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
the main process (`worker_main_http_uri`).
In the config file for each worker, you must specify the type of worker
application (`worker_app`), and you should specify a unique name for the worker
(`worker_name`). The currently available worker applications are listed below.
You must also specify the HTTP replication endpoint that it should talk to on
the main synapse process. `worker_replication_host` should specify the host of
the main synapse and `worker_replication_http_port` should point to the HTTP
replication port. If the worker will handle HTTP requests then the
`worker_listeners` option should be set with a `http` listener, in the same way
as the `listeners` option in the shared config.
For example:
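A hedged sketch of such a worker configuration, based on the options listed above (names and ports are illustrative):

```yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# Where to reach the main process for HTTP replication.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client]
```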
@@ -215,12 +217,10 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/search$
# Encryption requests
# Note that ^/_matrix/client/(r0|v3|unstable)/keys/upload/ requires `worker_main_http_uri`
^/_matrix/client/(r0|v3|unstable)/keys/query$
^/_matrix/client/(r0|v3|unstable)/keys/changes$
^/_matrix/client/(r0|v3|unstable)/keys/claim$
^/_matrix/client/(r0|v3|unstable)/room_keys/
^/_matrix/client/(r0|v3|unstable)/keys/upload/
# Registration/login requests
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -325,6 +325,7 @@ effects of bursts of events from that bridge on events sent by normal users.
Additionally, the writing of specific streams (such as events) can be moved off
of the main process to a particular worker.
(This is only supported with Redis-based replication.)
To enable this, the worker must have a HTTP replication listener configured,
have a `worker_name` and be listed in the `instance_map` config. The same worker
@@ -580,23 +581,52 @@ handle it, and are online.
If `update_user_directory` is set to `false`, and this worker is not running,
the above endpoint may give outdated results.
### `synapse.app.frontend_proxy`
Proxies some frequently-requested client endpoints to add caching and remove
load from the main synapse. It can handle REST endpoints matching the following
regular expressions:
^/_matrix/client/(r0|v3|unstable)/keys/upload
If `use_presence` is False in the homeserver config, it can also handle REST
endpoints matching the following regular expressions:
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/[^/]+/status
This "stub" presence handler will pass through `GET` request but make the
`PUT` effectively a no-op.
It will proxy any requests it cannot handle to the main synapse instance. It
must therefore be configured with the location of the main instance, via
the `worker_main_http_uri` setting in the `frontend_proxy` worker configuration
file. For example:
```yaml
worker_main_http_uri: http://127.0.0.1:8008
```
### Historical apps
The following used to be separate worker application types, but are now
equivalent to `synapse.app.generic_worker`:
* `synapse.app.client_reader`
* `synapse.app.event_creator`
* `synapse.app.federation_reader`
* `synapse.app.frontend_proxy`
* `synapse.app.synchrotron`
*Note:* Historically there used to be more apps; however, they have been
amalgamated into a single `synapse.app.generic_worker` app. The remaining apps
are ones that do specific processing unrelated to requests, e.g. the `pusher`
that handles sending out push notifications for new events. The intention is for
all these to be folded into the `generic_worker` app and to use config to define
which processes handle the various processing such as push notifications.
## Migration from old config
A main change that has occurred is the merging of worker apps into
`synapse.app.generic_worker`. This change is backwards compatible and so no
changes to the config are required.
There are two main independent changes that have been made: introducing Redis
support and merging apps into `synapse.app.generic_worker`. Both these changes
are backwards compatible and so no changes to the config are required, however
server admins are encouraged to plan to migrate to Redis as the old style direct
TCP replication config is deprecated.
To migrate to Redis add the `redis` config as above, and optionally remove the
TCP `replication` listener from master and `worker_replication_port` from worker
config.
To migrate apps to use `synapse.app.generic_worker` simply update the
`worker_app` option in the worker configs, and where workers are started (e.g.

View File

@@ -1,6 +1,6 @@
[mypy]
namespace_packages = True
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
plugins = mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal
check_untyped_defs = True
show_error_codes = True
@@ -16,8 +16,7 @@ files =
docker/,
scripts-dev/,
synapse/,
tests/,
build_rust.py
tests/
# Note: Better exclusion syntax coming in mypy > 0.910
# https://github.com/python/mypy/pull/11329
@@ -182,6 +181,3 @@ ignore_missing_imports = True
[mypy-incremental.*]
ignore_missing_imports = True
[mypy-setuptools_rust.*]
ignore_missing_imports = True

243
poetry.lock generated
View File

@@ -7,10 +7,10 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.extras]
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "authlib"
@@ -39,7 +39,7 @@ attrs = ">=19.2.0"
six = "*"
[package.extras]
visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"]
visualize = ["graphviz (>0.5.1)", "Twisted (>=16.1.1)"]
[[package]]
name = "bcrypt"
@@ -199,12 +199,12 @@ python-versions = ">=3.6"
cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx_rtd_theme"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"]
docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
sdist = ["setuptools_rust (>=0.11.4)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"]
[[package]]
name = "defusedxml"
@@ -226,7 +226,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"]
dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"]
[[package]]
name = "docutils"
@@ -245,7 +245,7 @@ optional = true
python-versions = ">=3.7"
[package.extras]
dev = ["Sphinx", "coverage", "flake8", "lxml", "memory-profiler", "mypy (==0.910)", "tox", "xmlschema (>=1.8.0)"]
dev = ["tox", "coverage", "lxml", "xmlschema (>=1.8.0)", "sphinx", "memory-profiler", "flake8", "mypy (==0.910)"]
[[package]]
name = "flake8"
@@ -274,7 +274,7 @@ attrs = ">=19.2.0"
flake8 = ">=3.0.0"
[package.extras]
dev = ["black", "coverage", "hypothesis", "hypothesmith"]
dev = ["coverage", "black", "hypothesis", "hypothesmith"]
[[package]]
name = "flake8-comprehensions"
@@ -367,8 +367,8 @@ typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
zipp = ">=0.5"
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]
[[package]]
name = "importlib-resources"
@@ -382,8 +382,8 @@ python-versions = ">=3.6"
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"]
[[package]]
name = "incremental"
@@ -405,9 +405,9 @@ optional = false
python-versions = ">=3.6,<4.0"
[package.extras]
colors = ["colorama (>=0.4.3,<0.5.0)"]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pip-api", "pipreqs"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
[[package]]
name = "jaeger-client"
@@ -424,7 +424,7 @@ thrift = "*"
tornado = ">=4.3"
[package.extras]
tests = ["codecov", "coverage", "flake8", "flake8-quotes", "flake8-typing-imports", "mock", "mypy", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "pycurl", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "pytest-localserver", "pytest-timeout", "pytest-tornado", "tchannel (==2.1.0)"]
tests = ["mock", "pycurl", "pytest", "pytest-cov", "coverage", "pytest-timeout", "pytest-tornado", "pytest-benchmark", "pytest-localserver", "flake8", "flake8-quotes", "flake8-typing-imports", "codecov", "tchannel (==2.1.0)", "opentracing_instrumentation (>=3,<4)", "prometheus_client (==0.11.0)", "mypy"]
[[package]]
name = "jeepney"
@@ -435,8 +435,8 @@ optional = false
python-versions = ">=3.6"
[package.extras]
test = ["async-timeout", "pytest", "pytest-asyncio", "pytest-trio", "testpath", "trio"]
trio = ["async_generator", "trio"]
test = ["pytest", "pytest-trio", "pytest-asyncio", "testpath", "trio", "async-timeout"]
trio = ["trio", "async-generator"]
[[package]]
name = "jinja2"
@@ -486,8 +486,8 @@ pywin32-ctypes = {version = "<0.1.0 || >0.1.0,<0.1.1 || >0.1.1", markers = "sys_
SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""}
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"]
[[package]]
name = "ldap3"
@@ -511,7 +511,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
htmlsoup = ["beautifulsoup4"]
source = ["Cython (>=0.29.7)"]
[[package]]
@@ -524,23 +524,23 @@ python-versions = ">=3.7"
[[package]]
name = "matrix-common"
version = "1.3.0"
version = "1.2.1"
description = "Common utilities for Synapse, Sydent and Sygnal"
category = "main"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.6"
[package.dependencies]
attrs = "*"
importlib-metadata = {version = ">=1.4", markers = "python_version < \"3.8\""}
[package.extras]
dev = ["aiounittest", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "mypy (==0.910)", "tox", "twine (==4.0.1)", "twisted"]
test = ["aiounittest", "tox", "twisted"]
dev = ["tox", "twisted", "aiounittest", "mypy (==0.910)", "black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "build (==0.8.0)", "twine (==4.0.1)"]
test = ["tox", "twisted", "aiounittest"]
[[package]]
name = "matrix-synapse-ldap3"
version = "0.2.2"
version = "0.2.1"
description = "An LDAP3 auth provider for Synapse"
category = "main"
optional = true
@@ -552,7 +552,7 @@ service-identity = "*"
Twisted = ">=15.1.0"
[package.extras]
dev = ["black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)", "ldaptor", "matrix-synapse", "mypy (==0.910)", "tox", "types-setuptools"]
dev = ["matrix-synapse", "tox", "ldaptor", "mypy (==0.910)", "types-setuptools", "black (==22.3.0)", "flake8 (==4.0.1)", "isort (==5.9.3)"]
[[package]]
name = "mccabe"
@@ -611,7 +611,7 @@ mypy = "0.950"
"zope.schema" = "*"
[package.extras]
test = ["lxml", "pytest (>=4.6)", "pytest-cov"]
test = ["pytest (>=4.6)", "pytest-cov", "lxml"]
[[package]]
name = "netaddr"
@@ -630,7 +630,7 @@ optional = true
python-versions = "*"
[package.extras]
tests = ["Sphinx", "doubles", "flake8", "flake8-quotes", "gevent", "mock", "pytest", "pytest-cov", "pytest-mock", "six (>=1.10.0,<2.0)", "sphinx_rtd_theme", "tornado"]
tests = ["doubles", "flake8", "flake8-quotes", "mock", "pytest", "pytest-cov", "pytest-mock", "sphinx", "sphinx-rtd-theme", "six (>=1.10.0,<2.0)", "gevent", "tornado"]
[[package]]
name = "packaging"
@@ -778,21 +778,6 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "pydantic"
version = "1.9.1"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.6.1"
[package.dependencies]
typing-extensions = ">=3.7.4.3"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pyflakes"
version = "2.4.0"
@@ -836,9 +821,9 @@ python-versions = ">=3.6"
[package.extras]
crypto = ["cryptography (>=3.3.1)"]
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
[[package]]
name = "pymacaroons"
@@ -872,8 +857,8 @@ python-versions = ">=3.6"
cffi = ">=1.4.1"
[package.extras]
docs = ["sphinx (>=1.6.5)", "sphinx_rtd_theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
[[package]]
name = "pyopenssl"
@@ -925,12 +910,11 @@ pyOpenSSL = "*"
python-dateutil = "*"
pytz = "*"
requests = ">=1.0.0"
setuptools = "*"
six = "*"
xmlschema = ">=1.2.1"
[package.extras]
s2repoze = ["paste", "repoze.who", "zope.interface"]
s2repoze = ["paste", "zope.interface", "repoze.who"]
[[package]]
name = "python-dateutil"
@@ -1035,18 +1019,6 @@ python-versions = ">=3.6"
cryptography = ">=2.0"
jeepney = ">=0.6"
[[package]]
name = "semantic-version"
version = "2.10.0"
description = "A library implementing the 'SemVer' scheme."
category = "main"
optional = false
python-versions = ">=2.7"
[package.extras]
dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"]
doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
name = "sentry-sdk"
version = "1.5.11"
@@ -1067,11 +1039,11 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
flask = ["flask (>=0.11)", "blinker (>=1.1)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["asttokens", "executing", "pure-eval"]
pure_eval = ["pure-eval", "executing", "asttokens"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
quart = ["quart (>=0.16.1)", "blinker (>=1.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
@@ -1093,37 +1065,11 @@ pyasn1-modules = "*"
six = "*"
[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "furo", "idna", "pyOpenSSL", "pytest", "sphinx"]
docs = ["furo", "sphinx"]
dev = ["coverage[toml] (>=5.0.2)", "pytest", "sphinx", "furo", "idna", "pyopenssl"]
docs = ["sphinx", "furo"]
idna = ["idna"]
tests = ["coverage[toml] (>=5.0.2)", "pytest"]
[[package]]
name = "setuptools"
version = "65.3.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "setuptools-rust"
version = "1.5.1"
description = "Setuptools Rust extension plugin"
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
semantic-version = ">=2.8.2,<3"
setuptools = ">=62.4"
typing-extensions = ">=3.7.4.3"
[[package]]
name = "signedjson"
version = "1.1.4"
@@ -1238,7 +1184,6 @@ click = "*"
click-default-group = "*"
incremental = "*"
jinja2 = "*"
setuptools = "*"
tomli = {version = "*", markers = "python_version >= \"3.6\""}
[package.extras]
@@ -1276,7 +1221,7 @@ requests = ">=2.1.0"
Twisted = {version = ">=18.7.0", extras = ["tls"]}
[package.extras]
dev = ["httpbin (==0.5.0)", "pep8", "pyflakes"]
dev = ["pep8", "pyflakes", "httpbin (==0.5.0)"]
docs = ["sphinx (>=1.4.8)"]
[[package]]
@@ -1321,20 +1266,20 @@ typing-extensions = ">=3.6.5"
"zope.interface" = ">=4.4.2"
[package.extras]
all_non_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
conch_nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"]
all_non_platform = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
conch = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)"]
conch_nacl = ["pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pynacl"]
contextvars = ["contextvars (>=2.4,<3)"]
dev = ["coverage (>=6b1,<7)", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"]
dev_release = ["pydoctor (>=21.9.0,<21.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"]
dev = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "python-subunit (>=1.4,<2.0)", "pydoctor (>=21.9.0,<21.10.0)"]
dev_release = ["towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pydoctor (>=21.9.0,<21.10.0)"]
http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"]
macos_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"]
osx_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
macos_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
mypy = ["mypy (==0.930)", "mypy-zope (==0.3.4)", "types-setuptools", "types-pyopenssl", "towncrier (>=19.2,<20.0)", "sphinx-rtd-theme (>=0.5,<1.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "pyflakes (>=2.2,<3.0)", "twistedchecker (>=0.7,<1.0)", "coverage (>=6b1,<7)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pynacl", "pywin32 (!=226)", "python-subunit (>=1.4,<2.0)", "contextvars (>=2.4,<3)", "pydoctor (>=21.9.0,<21.10.0)"]
osx_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"]
tls = ["idna (>=2.4)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)"]
windows_platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"]
test = ["cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)"]
tls = ["pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)"]
windows_platform = ["pywin32 (!=226)", "cython-test-exception-raiser (>=1.0.2,<2)", "PyHamcrest (>=1.9.0)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)", "idna (>=2.4)", "pyasn1", "cryptography (>=2.6)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "pyserial (>=3.0)", "h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)", "contextvars (>=2.4,<3)"]
[[package]]
name = "twisted-iocpsupport"
@@ -1512,7 +1457,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
@@ -1544,8 +1489,8 @@ elementpath = ">=2.5.0,<3.0.0"
[package.extras]
codegen = ["elementpath (>=2.5.0,<3.0.0)", "jinja2"]
dev = ["Sphinx", "coverage", "elementpath (>=2.5.0,<3.0.0)", "flake8", "jinja2", "lxml", "lxml-stubs", "memory-profiler", "mypy", "sphinx-rtd-theme", "tox"]
docs = ["Sphinx", "elementpath (>=2.5.0,<3.0.0)", "jinja2", "sphinx-rtd-theme"]
dev = ["tox", "coverage", "lxml", "elementpath (>=2.5.0,<3.0.0)", "memory-profiler", "sphinx", "sphinx-rtd-theme", "jinja2", "flake8", "mypy", "lxml-stubs"]
docs = ["elementpath (>=2.5.0,<3.0.0)", "sphinx", "sphinx-rtd-theme", "jinja2"]
[[package]]
name = "zipp"
@@ -1556,8 +1501,8 @@ optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]
[[package]]
name = "zope.event"
@@ -1567,11 +1512,8 @@ category = "dev"
optional = false
python-versions = "*"
[package.dependencies]
setuptools = "*"
[package.extras]
docs = ["Sphinx"]
docs = ["sphinx"]
test = ["zope.testrunner"]
[[package]]
@@ -1582,11 +1524,8 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
setuptools = "*"
[package.extras]
docs = ["Sphinx", "repoze.sphinx.autointerface"]
docs = ["sphinx", "repoze.sphinx.autointerface"]
test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
@@ -1599,12 +1538,11 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
[package.dependencies]
setuptools = "*"
"zope.event" = "*"
"zope.interface" = ">=5.0.0"
[package.extras]
docs = ["Sphinx", "repoze.sphinx.autointerface"]
docs = ["sphinx", "repoze.sphinx.autointerface"]
test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
[extras]
@@ -1625,7 +1563,7 @@ url_preview = ["lxml"]
[metadata]
lock-version = "1.1"
python-versions = "^3.7.1"
content-hash = "1b14fc274d9e2a495a7f864150f3ffcf4d9f585e09a67e53301ae4ef3c2f3e48"
content-hash = "c24bbcee7e86dbbe7cdbf49f91a25b310bf21095452641e7440129f59b077f78"
[metadata.files]
attrs = [
@@ -2113,12 +2051,12 @@ markupsafe = [
{file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"},
]
matrix-common = [
{file = "matrix_common-1.3.0-py3-none-any.whl", hash = "sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20"},
{file = "matrix_common-1.3.0.tar.gz", hash = "sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1"},
{file = "matrix_common-1.2.1-py3-none-any.whl", hash = "sha256:946709c405944a0d4b1d73207b77eb064b6dbfc5d70a69471320b06d8ce98b20"},
{file = "matrix_common-1.2.1.tar.gz", hash = "sha256:a99dcf02a6bd95b24a5a61b354888a2ac92bf2b4b839c727b8dd9da2cdfa3853"},
]
matrix-synapse-ldap3 = [
{file = "matrix-synapse-ldap3-0.2.2.tar.gz", hash = "sha256:b388d95693486eef69adaefd0fd9e84463d52fe17b0214a00efcaa669b73cb74"},
{file = "matrix_synapse_ldap3-0.2.2-py3-none-any.whl", hash = "sha256:66ee4c85d7952c6c27fd04c09cdfdf4847b8e8b7d6a7ada6ba1100013bda060f"},
{file = "matrix-synapse-ldap3-0.2.1.tar.gz", hash = "sha256:bfb4390f4a262ffb0d6f057ff3aeb1e46d4e52ff420a064d795fb4f555f00285"},
{file = "matrix_synapse_ldap3-0.2.1-py3-none-any.whl", hash = "sha256:1b3310a60f1d06466f35905a269b6df95747fd1305f2b7fe638f373963b2aa2c"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
@@ -2322,43 +2260,6 @@ pycparser = [
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
]
pydantic = [
{file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"},
{file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"},
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"},
{file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"},
{file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"},
{file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"},
{file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"},
{file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"},
{file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"},
{file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"},
{file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"},
{file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"},
{file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"},
{file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"},
{file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"},
{file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"},
{file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"},
{file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"},
{file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"},
{file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"},
{file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"},
{file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"},
{file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"},
]
pyflakes = [
{file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
{file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
@@ -2497,10 +2398,6 @@ secretstorage = [
{file = "SecretStorage-3.3.1-py3-none-any.whl", hash = "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f"},
{file = "SecretStorage-3.3.1.tar.gz", hash = "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"},
]
semantic-version = [
{file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"},
{file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"},
]
sentry-sdk = [
{file = "sentry-sdk-1.5.11.tar.gz", hash = "sha256:6c01d9d0b65935fd275adc120194737d1df317dce811e642cbf0394d0d37a007"},
{file = "sentry_sdk-1.5.11-py2.py3-none-any.whl", hash = "sha256:c17179183cac614e900cbd048dab03f49a48e2820182ec686c25e7ce46f8548f"},
@@ -2509,14 +2406,6 @@ service-identity = [
{file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"},
{file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"},
]
setuptools = [
{file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"},
{file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"},
]
setuptools-rust = [
{file = "setuptools-rust-1.5.1.tar.gz", hash = "sha256:0e05e456645d59429cb1021370aede73c0760e9360bbfdaaefb5bced530eb9d7"},
{file = "setuptools_rust-1.5.1-py3-none-any.whl", hash = "sha256:306b236ff3aa5229180e58292610d0c2c51bb488191122d2fc559ae4caeb7d5e"},
]
signedjson = [
{file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"},
{file = "signedjson-1.1.4.tar.gz", hash = "sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492"},

View File

@@ -52,12 +52,9 @@ include_trailing_comma = true
combine_as_imports = true
skip_gitignore = true
[tool.maturin]
manifest-path = "rust/Cargo.toml"
[tool.poetry]
name = "matrix-synapse"
version = "1.68.0"
version = "1.64.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -85,17 +82,7 @@ include = [
{ path = "sytest-blacklist", format = "sdist" },
{ path = "tests", format = "sdist" },
{ path = "UPGRADE.rst", format = "sdist" },
{ path = "Cargo.toml", format = "sdist" },
{ path = "Cargo.lock", format = "sdist" },
{ path = "rust/Cargo.toml", format = "sdist" },
{ path = "rust/build.rs", format = "sdist" },
{ path = "rust/src/**", format = "sdist" },
]
exclude = [
{ path = "synapse/*.so", format = "sdist"}
]
build = "build_rust.py"
[tool.poetry.scripts]
synapse_homeserver = "synapse.app.homeserver:main"
@@ -139,7 +126,7 @@ pyOpenSSL = ">=16.0.0"
PyYAML = ">=3.11"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
bcrypt = ">=3.1.0"
Pillow = ">=5.4.0"
sortedcontainers = ">=1.4.4"
pymacaroons = ">=0.13.0"
@@ -165,24 +152,12 @@ typing-extensions = ">=3.10.0.1"
cryptography = ">=3.4.7"
# ijson 3.1.4 fixes a bug with "." in property names
ijson = ">=3.1.4"
matrix-common = "^1.3.0"
matrix-common = "^1.2.1"
# We need packaging.requirements.Requirement, added in 16.1.
packaging = ">=16.1"
# At the time of writing, we only use functions from the version `importlib.metadata`
# which shipped in Python 3.8. This corresponds to version 1.4 of the backport.
importlib_metadata = { version = ">=1.4", python = "<3.8" }
# This is the most recent version of Pydantic available on common distros.
pydantic = ">=1.7.4"
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"
# Optional Dependencies
@@ -307,21 +282,5 @@ twine = "*"
towncrier = ">=18.6.0rc1"
[build-system]
requires = ["poetry-core>=1.0.0", "setuptools_rust>=1.3"]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
skip = "cp36* *-musllinux_i686"
# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
# For some reason if we don't manually clean the build directory we
# can end up polluting the next build with a .so that is for the wrong
# Python version.
before-build = "rm -rf {project}/build"
build-frontend = "build"
test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print(sum_as_string(1, 2))'"

View File

@@ -1,25 +0,0 @@
[package]
# We name the package `synapse` so that things like logging have the right
# logging target.
name = "synapse"
# Dummy version. See pyproject.toml for Synapse's version number.
version = "0.1.0"
edition = "2021"
rust-version = "1.58.1"
[lib]
name = "synapse"
crate-type = ["cdylib"]
[package.metadata.maturin]
# This is where we tell maturin where to place the built library.
name = "synapse.synapse_rust"
[dependencies]
pyo3 = { version = "0.16.5", features = ["extension-module", "macros", "abi3", "abi3-py37"] }
[build-dependencies]
blake2 = "0.10.4"
hex = "0.4.3"

View File

@@ -1,45 +0,0 @@
//! This build script calculates the hash of all files in the `src/`
//! directory and adds it as an environment variable during build time.
//!
//! This is used so that the python code can detect when the built native module
//! does not match the source in-tree, helping to detect the case where the
//! source has been updated but the library hasn't been rebuilt.
use std::path::PathBuf;
use blake2::{Blake2b512, Digest};
fn main() -> Result<(), std::io::Error> {
let mut dirs = vec![PathBuf::from("src")];
let mut paths = Vec::new();
while let Some(path) = dirs.pop() {
let mut entries = std::fs::read_dir(path)?
.map(|res| res.map(|e| e.path()))
.collect::<Result<Vec<_>, std::io::Error>>()?;
entries.sort();
for entry in entries {
if entry.is_dir() {
dirs.push(entry)
} else {
paths.push(entry.to_str().expect("valid rust paths").to_string());
}
}
}
paths.sort();
let mut hasher = Blake2b512::new();
for path in paths {
let bytes = std::fs::read(path)?;
hasher.update(bytes);
}
let hex_digest = hex::encode(hasher.finalize());
println!("cargo:rustc-env=SYNAPSE_RUST_DIGEST={hex_digest}");
Ok(())
}
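For context, the digest that this build script embeds is consumed on the Python side by check_rust_lib_up_to_date() (imported in synapse/__init__.py later in this diff). A rough, hypothetical sketch of that comparison follows; the real synapse.util.rust helper may differ in details such as path handling, and the helper name rust_sources_digest is invented here for illustration.

import os
from hashlib import blake2b

from synapse.synapse_rust import get_rust_file_digest  # digest baked in by this build script

def rust_sources_digest(src_dir: str = "rust/src") -> str:
    # Hash every file under rust/src in sorted path order, mirroring build.rs.
    paths = []
    for root, _dirs, files in os.walk(src_dir):
        paths.extend(os.path.join(root, name) for name in files)
    hasher = blake2b(digest_size=64)  # Blake2b512, as in the build script
    for path in sorted(paths):
        with open(path, "rb") as f:
            hasher.update(f.read())
    return hasher.hexdigest()

def check_rust_lib_up_to_date_sketch() -> None:
    # If the embedded digest no longer matches the source tree, the built .so is stale.
    if get_rust_file_digest() != rust_sources_digest():
        raise RuntimeError("synapse_rust is out of date; re-run `poetry install`")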

View File

@@ -1,24 +0,0 @@
use pyo3::prelude::*;
/// Returns the hash of all the rust source files at the time it was compiled.
///
/// Used by python to detect if the rust library is outdated.
#[pyfunction]
fn get_rust_file_digest() -> &'static str {
env!("SYNAPSE_RUST_DIGEST")
}
/// Formats the sum of two numbers as string.
#[pyfunction]
#[pyo3(text_signature = "(a, b, /)")]
fn sum_as_string(a: usize, b: usize) -> PyResult<String> {
Ok((a + b).to_string())
}
/// The entry point for defining the Python module.
#[pymodule]
fn synapse_rust(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(sum_as_string, m)?)?;
m.add_function(wrap_pyfunction!(get_rust_file_digest, m)?)?;
Ok(())
}
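Once compiled, maturin exposes this module as synapse.synapse_rust (per the [package.metadata.maturin] name above), which is exactly what the cibuildwheel test-command earlier in this diff imports. A minimal usage sketch:

from synapse.synapse_rust import get_rust_file_digest, sum_as_string

print(sum_as_string(1, 2))     # prints "3"
print(get_rust_file_digest())  # hex digest of rust/src captured at build time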

View File

@@ -1,425 +0,0 @@
#! /usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.
Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See
https://github.com/pydantic/pydantic/issues/1098
https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode
Until then, this script is a best effort to stop us from introducing type coercion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import Any, Callable, Dict, Generator, List, Set, Type, TypeVar
from parameterized import parameterized
from pydantic import BaseModel as PydanticBaseModel, conbytes, confloat, conint, constr
from pydantic.typing import get_args
from typing_extensions import ParamSpec
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
constr,
conbytes,
conint,
confloat,
]
TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
str,
bytes,
int,
float,
bool,
]
P = ParamSpec("P")
R = TypeVar("R")
class ModelCheckerException(Exception):
"""Dummy exception. Allows us to detect unwanted types during a module import."""
class MissingStrictInConstrainedTypeException(ModelCheckerException):
factory_name: str
def __init__(self, factory_name: str):
self.factory_name = factory_name
class FieldHasUnwantedTypeException(ModelCheckerException):
message: str
def __init__(self, message: str):
self.message = message
def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
"""We patch `constr` and friends with wrappers that enforce strict=True."""
@functools.wraps(factory)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
# type-ignore: should be redundant once we can use https://github.com/python/mypy/pull/12668
if "strict" not in kwargs: # type: ignore[attr-defined]
raise MissingStrictInConstrainedTypeException(factory.__name__)
if not kwargs["strict"]: # type: ignore[index]
raise MissingStrictInConstrainedTypeException(factory.__name__)
return factory(*args, **kwargs)
return wrapper
def field_type_unwanted(type_: Any) -> bool:
"""Very rough attempt to detect if a type is unwanted as a Pydantic annotation.
At present, we exclude types which will coerce, or any generic type involving types
which will coerce."""
logger.debug("Is %s unwanted?")
if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
logger.debug("yes")
return True
logger.debug("Maybe. Subargs are %s", get_args(type_))
rv = any(field_type_unwanted(t) for t in get_args(type_))
logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
return rv
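As an aside, the recursion above relies on get_args() peeling one layer of a generic at a time; a quick illustrative check (not part of the script):

from typing import Dict, List
from pydantic.typing import get_args

get_args(Dict[str, int])    # (str, int) -> both coercible, so unwanted
get_args(List[List[str]])   # (List[str],) -> recurse again, down to str
get_args(int)               # () -> recursion stops at bare types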
class PatchedBaseModel(PydanticBaseModel):
"""A patched version of BaseModel that inspects fields after models are defined.
We complain loudly if we see an unwanted type.
Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
"""
@classmethod
def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
for field in cls.__fields__.values():
# Note that field.type_ and field.outer_type are computed based on the
# annotation type, see pydantic.fields.ModelField._type_analysis
if field_type_unwanted(field.outer_type_):
# TODO: this only reports the first bad field. Can we find all bad ones
# and report them all?
raise FieldHasUnwantedTypeException(
f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
f"with unwanted type `{field.outer_type_}`"
)
@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
"""Patch pydantic with our snooping versions of BaseModel and the con* functions.
If the snooping functions see something they don't like, they'll raise a
ModelCheckerException instance.
"""
with contextlib.ExitStack() as patches:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
patch_basemodel1 = unittest.mock.patch(
"pydantic.BaseModel", new=PatchedBaseModel
)
patch_basemodel2 = unittest.mock.patch(
"pydantic.main.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel1)
patches.enter_context(patch_basemodel2)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
patch1 = unittest.mock.patch(f"pydantic.{factory.__name__}", new=wrapper)
patch2 = unittest.mock.patch(
f"pydantic.types.{factory.__name__}", new=wrapper
)
patches.enter_context(patch1)
patches.enter_context(patch2)
yield
def format_model_checker_exception(e: ModelCheckerException) -> str:
"""Work out which line of code caused e. Format the line in a human-friendly way."""
# TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
# patches of constr() etc, and instead inspect fields to look for ConstrainedStr
# with strict=False? There is some difficulty with the inheritance hierarchy
# because StrictStr < ConstrainedStr < str.
if isinstance(e, FieldHasUnwantedTypeException):
return e.message
elif isinstance(e, MissingStrictInConstrainedTypeException):
frame_summary = traceback.extract_tb(e.__traceback__)[-2]
return (
f"Missing `strict=True` from {e.factory_name}() call \n"
+ traceback.format_list([frame_summary])[0].lstrip()
)
else:
raise ValueError(f"Unknown exception {e}") from e
def lint() -> int:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions.
Print any problems, then return a status code suitable for sys.exit."""
failures = do_lint()
if failures:
print(f"Found {len(failures)} problem(s)")
for failure in sorted(failures):
print(failure)
return os.EX_DATAERR if failures else os.EX_OK
def do_lint() -> Set[str]:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
failures = set()
with monkeypatch_pydantic():
logger.debug("Importing synapse")
try:
# TODO: make "synapse" an argument so we can target this script at
# a subpackage
module = importlib.import_module("synapse")
except ModelCheckerException as e:
logger.warning("Bad annotation found when importing synapse")
failures.add(format_model_checker_exception(e))
return failures
try:
logger.debug("Fetching subpackages")
module_infos = list(
pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
)
except ModelCheckerException as e:
logger.warning("Bad annotation found when looking for modules to import")
failures.add(format_model_checker_exception(e))
return failures
for module_info in module_infos:
logger.debug("Importing %s", module_info.name)
try:
importlib.import_module(module_info.name)
except ModelCheckerException as e:
logger.warning(
f"Bad annotation found when importing {module_info.name}"
)
failures.add(format_model_checker_exception(e))
return failures
def run_test_snippet(source: str) -> None:
"""Exec a snippet of source code in an isolated environment."""
# To emulate `source` being called at the top level of the module,
# the globals and locals we provide apparently have to be the same mapping.
#
# > Remember that at the module level, globals and locals are the same dictionary.
# > If exec gets two separate objects as globals and locals, the code will be
# > executed as if it were embedded in a class definition.
globals_: Dict[str, object]
locals_: Dict[str, object]
globals_ = locals_ = {}
exec(textwrap.dedent(source), globals_, locals_)
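The quoted caveat matters in practice: with two distinct dicts, names assigned at the "module level" of the snippet are not visible inside a class body, because class bodies resolve names against the snippet's globals. A tiny illustration of this CPython exec behaviour (not part of the script):

src = "x = 1\nclass C:\n    y = x\n"
exec(src, {}, {})          # NameError: 'x' landed in locals, the class body reads globals
shared: dict = {}
exec(src, shared, shared)  # works, just like real module-level code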
class TestConstrainedTypesPatch(unittest.TestCase):
def test_expression_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
constr()
"""
)
def test_called_as_module_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
import pydantic
pydantic.constr()
"""
)
def test_wildcard_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import *
constr()
"""
)
def test_alternative_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic.types import constr
constr()
"""
)
def test_alternative_import_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
import pydantic.types
pydantic.types.constr()
"""
)
def test_kwarg_but_no_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
constr(min_length=10)
"""
)
def test_kwarg_strict_False_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
constr(strict=False)
"""
)
def test_kwarg_strict_True_doesnt_raise(self) -> None:
with monkeypatch_pydantic():
run_test_snippet(
"""
from pydantic import constr
constr(strict=True)
"""
)
def test_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import constr
x: constr()
"""
)
def test_field_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic import BaseModel, conint
class C:
x: conint()
"""
)
class TestFieldTypeInspection(unittest.TestCase):
@parameterized.expand(
[
("str",),
("bytes"),
("int",),
("float",),
("bool"),
("Optional[str]",),
("Union[None, str]",),
("List[str]",),
("List[List[str]]",),
("Dict[StrictStr, str]",),
("Dict[str, StrictStr]",),
("TypedDict('D', x=int)",),
]
)
def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
f"""
from typing import *
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
@parameterized.expand(
[
("StrictStr",),
("StrictBytes"),
("StrictInt",),
("StrictFloat",),
("StrictBool"),
("constr(strict=True, min_length=10)",),
("Optional[StrictStr]",),
("Union[None, StrictStr]",),
("List[StrictStr]",),
("List[List[StrictStr]]",),
("Dict[StrictStr, StrictStr]",),
("TypedDict('D', x=StrictInt)",),
]
)
def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
with monkeypatch_pydantic():
run_test_snippet(
f"""
from typing import *
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
def test_field_holding_str_raises_with_alternative_import(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
from pydantic.main import BaseModel
class C(BaseModel):
f: str
"""
)
parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")
if __name__ == "__main__":
args = parser.parse_args(sys.argv[1:])
logging.basicConfig(
format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
level=logging.DEBUG if args.verbose else logging.INFO,
)
# suppress logs we don't care about
logging.getLogger("xmlschema").setLevel(logging.WARNING)
if args.mode == "lint":
sys.exit(lint())
elif args.mode == "test":
unittest.main(argv=sys.argv[:1])

View File

@@ -101,7 +101,6 @@ if [ -z "$skip_docker_build" ]; then
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
docker build -t matrixdotorg/synapse \
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
-f "docker/Dockerfile" .
echo_if_github "::endgroup::"

View File

@@ -106,5 +106,4 @@ isort "${files[@]}"
python3 -m black "${files[@]}"
./scripts-dev/config-lint.sh
flake8 "${files[@]}"
./scripts-dev/check_pydantic_models.py lint
mypy

View File

@@ -9,10 +9,8 @@
export PGHOST="localhost"
POSTGRES_DB_NAME="synapse_full_schema.$$"
SQLITE_SCHEMA_FILE="schema.sql.sqlite"
SQLITE_ROWS_FILE="rows.sql.sqlite"
POSTGRES_SCHEMA_FILE="full.sql.postgres"
POSTGRES_ROWS_FILE="rows.sql.postgres"
SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"
REQUIRED_DEPS=("matrix-synapse" "psycopg2")
@@ -24,7 +22,7 @@ usage() {
echo " Username to connect to local postgres instance. The password will be requested"
echo " during script execution."
echo "-c"
echo " CI mode. Prints every command that the script runs."
echo " CI mode. Enables coverage tracking and prints every command that the script runs."
echo "-o <path>"
echo " Directory to output full schema files to."
echo "-h"
@@ -39,6 +37,11 @@ while getopts "p:co:h" opt; do
c)
# Print all commands that are being executed
set -x
# Modify required dependencies for coverage
REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")
COVERAGE=1
;;
o)
command -v realpath > /dev/null || (echo "The -o flag requires the 'realpath' binary to be installed" && exit 1)
@@ -99,7 +102,6 @@ SQLITE_DB=$TMPDIR/homeserver.db
POSTGRES_CONFIG=$TMPDIR/postgres.conf
# Ensure these files are deleted on script exit
# TODO: the trap should also drop the temp postgres DB
trap 'rm -rf $TMPDIR' EXIT
cat > "$SQLITE_CONFIG" <<EOF
@@ -145,34 +147,48 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
synapse/_scripts/update_synapse_database.py --database-config --run-background-updates "$SQLITE_CONFIG"
# Create the PostgreSQL database.
echo "Creating postgres database..."
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"
echo "Running db background jobs..."
synapse/_scripts/update_synapse_database.py --database-config "$POSTGRES_CONFIG" --run-background-updates
echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
if [ -z "$COVERAGE" ]; then
# No coverage needed
synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
else
# Coverage desired
coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
fi
# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
# Also delete any shadow tables from fts4
# This needs to be done after synapse_port_db is run
echo "Dropping unwanted db tables..."
SQL="
DROP TABLE schema_version;
DROP TABLE applied_schema_deltas;
DROP TABLE applied_module_schemas;
DROP TABLE event_search_content;
DROP TABLE event_search_segments;
DROP TABLE event_search_segdir;
DROP TABLE event_search_docsize;
DROP TABLE event_search_stat;
DROP TABLE user_directory_search_content;
DROP TABLE user_directory_search_segments;
DROP TABLE user_directory_search_segdir;
DROP TABLE user_directory_search_docsize;
DROP TABLE user_directory_search_stat;
"
sqlite3 "$SQLITE_DB" <<< "$SQL"
psql "$POSTGRES_DB_NAME" -w <<< "$SQL"
echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_SCHEMA_FILE' and '$OUTPUT_DIR/$SQLITE_ROWS_FILE'..."
sqlite3 "$SQLITE_DB" ".schema --indent" > "$OUTPUT_DIR/$SQLITE_SCHEMA_FILE"
sqlite3 "$SQLITE_DB" ".dump --data-only --nosys" > "$OUTPUT_DIR/$SQLITE_ROWS_FILE"
echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"
echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_SCHEMA_FILE' and '$OUTPUT_DIR/$POSTGRES_ROWS_FILE'..."
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_DB_NAME" | sed -e '/^$/d' -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_SCHEMA_FILE"
pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_DB_NAME" | sed -e '/^$/d' -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_ROWS_FILE"
echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
pg_dump --format=plain --no-tablespaces --no-acl --no-owner $POSTGRES_DB_NAME | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"
echo "Cleaning up temporary Postgres database..."
dropdb $POSTGRES_DB_NAME

View File

@@ -18,12 +18,10 @@
"""
import glob
import json
import os
import re
import subprocess
import sys
import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
@@ -34,7 +32,6 @@ import click
import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import Github
from packaging import version
@@ -58,12 +55,9 @@ def run_until_successful(
def cli() -> None:
"""An interactive script to walk through the parts of creating a release.
Requirements:
- The dev dependencies be installed, which can be done via:
Requires the dev dependencies be installed, which can be done via:
pip install -e .[dev]
- A checkout of the sytest repository at ../sytest
pip install -e .[dev]
Then to use:
@@ -73,20 +67,15 @@ def cli() -> None:
./scripts-dev/release.py tag
# wait for assets to build, either manually or with:
./scripts-dev/release.py wait-for-actions
# ... wait for assets to build ...
./scripts-dev/release.py publish
./scripts-dev/release.py upload
./scripts-dev/release.py merge-back
# Optional: generate some nice links for the announcement
./scripts-dev/release.py announce
Alternatively, `./scripts-dev/release.py full` will do all the above
as well as guiding you through the manual steps.
./scripts-dev/release.py announce
If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
`tag`/`publish` command, then a new draft release will be created/published.
@@ -95,21 +84,15 @@ def cli() -> None:
@cli.command()
def prepare() -> None:
_prepare()
def _prepare() -> None:
"""Do the initial stages of creating a release, including creating release
branch, updating changelog and pushing to GitHub.
"""
# Make sure we're in a git repo.
synapse_repo = get_repo_and_check_clean_checkout()
sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
repo = get_repo_and_check_clean_checkout()
click.secho("Updating Synapse and Sytest git repos...")
synapse_repo.remote().fetch()
sytest_repo.remote().fetch()
click.secho("Updating git repo...")
repo.remote().fetch()
# Get the current version and AST from root Synapse module.
current_version = get_package_version()
@@ -183,12 +166,12 @@ def _prepare() -> None:
assert not parsed_new_version.is_postrelease
release_branch_name = get_release_branch_name(parsed_new_version)
release_branch = find_ref(synapse_repo, release_branch_name)
release_branch = find_ref(repo, release_branch_name)
if release_branch:
if release_branch.is_remote():
# If the release branch only exists on the remote we check it out
# locally.
synapse_repo.git.checkout(release_branch_name)
repo.git.checkout(release_branch_name)
else:
# If a branch doesn't exist we create one. We ask which one branch it
# should be based off, defaulting to sensible values depending on the
@@ -204,34 +187,25 @@ def _prepare() -> None:
"Which branch should the release be based on?", default=default
)
for repo_name, repo in {"synapse": synapse_repo, "sytest": sytest_repo}.items():
base_branch = find_ref(repo, branch_name)
if not base_branch:
print(f"Could not find base branch {branch_name} for {repo_name}!")
click.get_current_context().abort()
base_branch = find_ref(repo, branch_name)
if not base_branch:
print(f"Could not find base branch {branch_name}!")
click.get_current_context().abort()
# Check out the base branch and ensure it's up to date
repo.head.set_reference(
base_branch, f"check out the base branch for {repo_name}"
)
repo.head.reset(index=True, working_tree=True)
if not base_branch.is_remote():
update_branch(repo)
# Check out the base branch and ensure it's up to date
repo.head.set_reference(base_branch, "check out the base branch")
repo.head.reset(index=True, working_tree=True)
if not base_branch.is_remote():
update_branch(repo)
# Create the new release branch
# Type ignore will no longer be needed after GitPython 3.1.28.
# See https://github.com/gitpython-developers/GitPython/pull/1419
repo.create_head(release_branch_name, commit=base_branch) # type: ignore[arg-type]
# Special-case SyTest: we don't actually prepare any files so we may
# as well push it now (and only when we create a release branch;
# not on subsequent RCs or full releases).
if click.confirm("Push new SyTest branch?", default=True):
sytest_repo.git.push("-u", sytest_repo.remote().name, release_branch_name)
# Create the new release branch
# Type ignore will no longer be needed after GitPython 3.1.28.
# See https://github.com/gitpython-developers/GitPython/pull/1419
repo.create_head(release_branch_name, commit=base_branch) # type: ignore[arg-type]
# Switch to the release branch and ensure it's up to date.
synapse_repo.git.checkout(release_branch_name)
update_branch(synapse_repo)
repo.git.checkout(release_branch_name)
update_branch(repo)
# Update the version specified in pyproject.toml.
subprocess.check_output(["poetry", "version", new_version])
@@ -256,15 +230,15 @@ def _prepare() -> None:
run_until_successful('dch -M -r -D stable ""', shell=True)
# Show the user the changes and ask if they want to edit the change log.
synapse_repo.git.add("-u")
repo.git.add("-u")
subprocess.run("git diff --cached", shell=True)
if click.confirm("Edit changelog?", default=False):
click.edit(filename="CHANGES.md")
# Commit the changes.
synapse_repo.git.add("-u")
synapse_repo.git.commit("-m", new_version)
repo.git.add("-u")
repo.git.commit("-m", new_version)
# We give the option to bail here in case the user wants to make sure things
# are OK before pushing.
@@ -272,31 +246,23 @@ def _prepare() -> None:
print("")
print("Run when ready to push:")
print("")
print(
f"\tgit push -u {synapse_repo.remote().name} {synapse_repo.active_branch.name}"
)
print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
print("")
sys.exit(0)
# Otherwise, push and open the changelog in the browser.
synapse_repo.git.push(
"-u", synapse_repo.remote().name, synapse_repo.active_branch.name
)
repo.git.push("-u", repo.remote().name, repo.active_branch.name)
print("Opening the changelog in your browser...")
print("Please ask others to give it a check.")
click.launch(
f"https://github.com/matrix-org/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
)
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: Optional[str]) -> None:
_tag(gh_token)
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""
# Make sure we're in a git repo.
@@ -387,10 +353,6 @@ def _tag(gh_token: Optional[str]) -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def publish(gh_token: str) -> None:
_publish(gh_token)
def _publish(gh_token: str) -> None:
"""Publish release on GitHub."""
# Make sure we're in a git repo.
@@ -427,12 +389,7 @@ def _publish(gh_token: str) -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def upload(gh_token: Optional[str]) -> None:
_upload(gh_token)
def _upload(gh_token: Optional[str]) -> None:
def upload() -> None:
"""Upload release to pypi."""
current_version = get_package_version()
@@ -445,40 +402,18 @@ def _upload(gh_token: Optional[str]) -> None:
click.echo("Tag {tag_name} (tag.commit) is not currently checked out!")
click.get_current_context().abort()
# Query all the assets corresponding to this release.
gh = Github(gh_token)
gh_repo = gh.get_repo("matrix-org/synapse")
gh_release = gh_repo.get_release(tag_name)
all_assets = set(gh_release.get_assets())
# Only accept the wheels and sdist.
# Notably: we don't care about debs.tar.xz.
asset_names_and_urls = sorted(
(asset.name, asset.browser_download_url)
for asset in all_assets
if asset.name.endswith((".whl", ".tar.gz"))
)
# Print out what we've determined.
print("Found relevant assets:")
for asset_name, _ in asset_names_and_urls:
print(f" - {asset_name}")
ignored_asset_names = sorted(
{asset.name for asset in all_assets}
- {asset_name for asset_name, _ in asset_names_and_urls}
)
print("\nIgnoring irrelevant assets:")
for asset_name in ignored_asset_names:
print(f" - {asset_name}")
pypi_asset_names = [
f"matrix_synapse-{current_version}-py3-none-any.whl",
f"matrix-synapse-{current_version}.tar.gz",
]
with TemporaryDirectory(prefix=f"synapse_upload_{tag_name}_") as tmpdir:
for name, asset_download_url in asset_names_and_urls:
for name in pypi_asset_names:
filename = path.join(tmpdir, name)
url = f"https://github.com/matrix-org/synapse/releases/download/{tag_name}/{name}"
click.echo(f"Downloading {name} into {filename}")
urllib.request.urlretrieve(asset_download_url, filename=filename)
urllib.request.urlretrieve(url, filename=filename)
if click.confirm("Upload to PyPI?", default=True):
subprocess.run("twine upload *", shell=True, cwd=tmpdir)
@@ -488,152 +423,8 @@ def _upload(gh_token: Optional[str]) -> None:
)
def _merge_into(repo: Repo, source: str, target: str) -> None:
"""
Merges branch `source` into branch `target`.
Pulls both before merging and pushes the result.
"""
# Update our branches and switch to the target branch
for branch in [source, target]:
click.echo(f"Switching to {branch} and pulling...")
repo.heads[branch].checkout()
# Pull so we're up to date
repo.remote().pull()
assert repo.active_branch.name == target
try:
# TODO This seemed easier than using GitPython directly
click.echo(f"Merging {source}...")
repo.git.merge(source)
except GitCommandError as exc:
# If a merge conflict occurs, give some context and try to
# make it easy to abort if necessary.
click.echo(exc)
if not click.confirm(
f"Likely merge conflict whilst merging ({source}{target}). "
f"Have you resolved it?"
):
repo.git.merge("--abort")
return
# Push result.
click.echo("Pushing...")
repo.remote().push()
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def wait_for_actions(gh_token: Optional[str]) -> None:
_wait_for_actions(gh_token)
def _wait_for_actions(gh_token: Optional[str]) -> None:
# Find out the version and tag name.
current_version = get_package_version()
tag_name = f"v{current_version}"
# Authentication is optional on this endpoint,
# but use a token if we have one to reduce the chance of being rate-limited.
url = f"https://api.github.com/repos/matrix-org/synapse/actions/runs?branch={tag_name}"
headers = {"Accept": "application/vnd.github+json"}
if gh_token is not None:
headers["authorization"] = f"token {gh_token}"
req = urllib.request.Request(url, headers=headers)
time.sleep(10 * 60)
while True:
time.sleep(5 * 60)
response = urllib.request.urlopen(req)
resp = json.loads(response.read())
if len(resp["workflow_runs"]) == 0:
continue
if all(
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
):
success = all(
workflow["conclusion"] == "success" for workflow in resp["workflow_runs"]
)
if success:
_notify("Workflows successful. You can now continue the release.")
else:
_notify("Workflows failed.")
click.confirm("Continue anyway?", abort=True)
break
def _notify(message: str) -> None:
# Send a bell character. Most terminals will play a sound or show a notification
# for this.
click.echo(f"\a{message}")
# Try and run notify-send, but don't raise an Exception if this fails
# (This is best-effort)
# TODO Support other platforms?
subprocess.run(
[
"notify-send",
"--app-name",
"Synapse Release Script",
"--expire-time",
"3600000",
message,
]
)
@cli.command()
def merge_back() -> None:
_merge_back()
def _merge_back() -> None:
"""Merge the release branch back into the appropriate branches.
All branches will be automatically pulled from the remote and the results
will be pushed to the remote."""
synapse_repo = get_repo_and_check_clean_checkout()
branch_name = synapse_repo.active_branch.name
if not branch_name.startswith("release-v"):
raise RuntimeError("Not on a release branch. This does not seem sensible.")
# Pull so we're up to date
synapse_repo.remote().pull()
current_version = get_package_version()
if current_version.is_prerelease:
# Release candidate
if click.confirm(f"Merge {branch_name} → develop?", default=True):
_merge_into(synapse_repo, branch_name, "develop")
else:
# Full release
sytest_repo = get_repo_and_check_clean_checkout("../sytest", "sytest")
if click.confirm(f"Merge {branch_name} → master?", default=True):
_merge_into(synapse_repo, branch_name, "master")
if click.confirm("Merge master → develop?", default=True):
_merge_into(synapse_repo, "master", "develop")
if click.confirm(f"On SyTest, merge {branch_name} → master?", default=True):
_merge_into(sytest_repo, branch_name, "master")
if click.confirm("On SyTest, merge master → develop?", default=True):
_merge_into(sytest_repo, "master", "develop")
@cli.command()
def announce() -> None:
_announce()
def _announce() -> None:
"""Generate markdown to announce the release."""
current_version = get_package_version()
@@ -663,56 +454,10 @@ Announce the release in
- #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic
- #synapse:matrix.org (Synapse Admins), bumping the version in the topic
- #synapse-dev:matrix.org
- #synapse-package-maintainers:matrix.org
Ask the designated people to do the blog and tweets."""
- #synapse-package-maintainers:matrix.org"""
)
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def full(gh_token: str) -> None:
click.echo("1. If this is a security release, read the security wiki page.")
click.echo("2. Check for any release blockers before proceeding.")
click.echo(" https://github.com/matrix-org/synapse/labels/X-Release-Blocker")
click.confirm("Ready?", abort=True)
click.echo("\n*** prepare ***")
_prepare()
click.echo("Deploy to matrix.org and ensure that it hasn't fallen over.")
click.echo("Remember to silence the alerts to prevent alert spam.")
click.confirm("Deployed?", abort=True)
click.echo("\n*** tag ***")
_tag(gh_token)
click.echo("\n*** wait for actions ***")
_wait_for_actions(gh_token)
click.echo("\n*** publish ***")
_publish(gh_token)
click.echo("\n*** upload ***")
_upload(gh_token)
click.echo("\n*** merge back ***")
_merge_back()
click.echo("\nUpdate the Debian repository")
click.confirm("Started updating Debian repository?", abort=True)
click.echo("\nWait for all release methods to be ready.")
# Docker should be ready because it was done by the workflows earlier
# PyPI should be ready because we just ran upload().
# TODO Automatically poll until the Debs have made it to packages.matrix.org
click.confirm("Debs ready?", abort=True)
click.echo("\n*** announce ***")
_announce()
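# Illustrative invocation (the script path is an assumption; GH_TOKEN / GITHUB_TOKEN is
# read by the --gh-token option above):
#
#     GH_TOKEN=<token> ./release.py full
#
# which runs prepare, tag, wait-for-actions, publish, upload, merge-back and announce in
# sequence, pausing for the manual deploy and Debian-repository steps in between.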
def get_package_version() -> version.Version:
version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
"utf-8"
@@ -724,18 +469,14 @@ def get_release_branch_name(version_number: version.Version) -> str:
return f"release-v{version_number.major}.{version_number.minor}"
def get_repo_and_check_clean_checkout(
path: str = ".", name: str = "synapse"
) -> git.Repo:
def get_repo_and_check_clean_checkout() -> git.Repo:
"""Get the project repo and check it's not got any uncommitted changes."""
try:
repo = git.Repo(path=path)
repo = git.Repo()
except git.InvalidGitRepositoryError:
raise click.ClickException(
f"{path} is not a git repository (expecting a {name} repository)."
)
raise click.ClickException("Not in Synapse repo.")
if repo.is_dirty():
raise click.ClickException(f"Uncommitted changes exist in {path}.")
raise click.ClickException("Uncommitted changes exist.")
return repo

View File

@@ -1,2 +0,0 @@
def sum_as_string(a: int, b: int) -> str: ...
def get_rust_file_digest() -> str: ...

View File

@@ -20,8 +20,6 @@ import json
import os
import sys
from synapse.util.rust import check_rust_lib_up_to_date
# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 7):
print("Synapse requires Python 3.7 or above.")
@@ -80,6 +78,3 @@ if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
from synapse.util.patch_inline_callbacks import do_patch
do_patch()
check_rust_lib_up_to_date()

View File

@@ -1,6 +1,6 @@
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector
# Copyright 2021-22 The Matrix.org Foundation C.I.C.
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,22 +20,11 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Dict, Optional
from typing import Callable, Optional
import requests
import yaml
_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
are both defined in config file.
"""
_NO_SHARED_SECRET_OPTS_ERROR = """\
No 'registration_shared_secret' or 'registration_shared_secret_path' defined in config.
"""
_DEFAULT_SERVER_URL = "http://localhost:8008"
def request_registration(
user: str,
@@ -214,104 +203,31 @@ def main() -> None:
parser.add_argument(
"server_url",
default="https://localhost:8448",
nargs="?",
help="URL to use to talk to the homeserver. By default, tries to find a "
"suitable URL from the configuration file. Otherwise, defaults to "
f"'{_DEFAULT_SERVER_URL}'.",
help="URL to use to talk to the homeserver. Defaults to "
" 'https://localhost:8448'.",
)
args = parser.parse_args()
if "config" in args and args.config:
config = yaml.safe_load(args.config)
if args.shared_secret:
secret = args.shared_secret
else:
# argparse should check that we have either config or shared secret
assert config
secret = config.get("registration_shared_secret")
secret_file = config.get("registration_shared_secret_path")
if secret_file:
if secret:
print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
sys.exit(1)
secret = _read_file(secret_file, "registration_shared_secret_path").strip()
secret = config.get("registration_shared_secret", None)
if not secret:
print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
print("No 'registration_shared_secret' defined in config.")
sys.exit(1)
if args.server_url:
server_url = args.server_url
elif config:
server_url = _find_client_listener(config)
if not server_url:
server_url = _DEFAULT_SERVER_URL
print(
"Unable to find a suitable HTTP listener in the configuration file. "
f"Trying {server_url} as a last resort.",
file=sys.stderr,
)
else:
server_url = _DEFAULT_SERVER_URL
print(
f"No server url or configuration file given. Defaulting to {server_url}.",
file=sys.stderr,
)
secret = args.shared_secret
admin = None
if args.admin or args.no_admin:
admin = args.admin
register_new_user(
args.user, args.password, server_url, secret, admin, args.user_type
args.user, args.password, args.server_url, secret, admin, args.user_type
)
def _read_file(file_path: Any, config_path: str) -> str:
"""Check the given file exists, and read it into a string
If it does not, exit with an error indicating the problem
Args:
file_path: the file to be read
config_path: where in the configuration file_path came from, so that a useful
error can be emitted if it does not exist.
Returns:
content of the file.
"""
if not isinstance(file_path, str):
print(f"{config_path} setting is not a string", file=sys.stderr)
sys.exit(1)
try:
with open(file_path) as file_stream:
return file_stream.read()
except OSError as e:
print(f"Error accessing file {file_path}: {e}", file=sys.stderr)
sys.exit(1)
def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
# try to find a listener in the config. Returns a host:port pair
for listener in config.get("listeners", []):
if listener.get("type") != "http" or listener.get("tls", False):
continue
if not any(
name == "client"
for resource in listener.get("resources", [])
for name in resource.get("names", [])
):
continue
# TODO: consider bind_addresses
return f"http://localhost:{listener['port']}"
# no suitable listeners?
return None
if __name__ == "__main__":
main()
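# For reference, a sketch of the homeserver config shape that _find_client_listener
# looks for: an unencrypted HTTP listener exposing the "client" resource. With the
# example below it would return "http://localhost:8008".
_example_config = {
    "listeners": [
        {
            "port": 8008,
            "type": "http",
            "tls": False,
            "resources": [{"names": ["client", "federation"]}],
        }
    ]
}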

View File

@@ -67,7 +67,6 @@ from synapse.storage.databases.main.media_repository import (
)
from synapse.storage.databases.main.presence import PresenceBackgroundUpdateStore
from synapse.storage.databases.main.pusher import PusherWorkerStore
from synapse.storage.databases.main.receipts import ReceiptsBackgroundUpdateStore
from synapse.storage.databases.main.registration import (
RegistrationBackgroundUpdateStore,
find_max_generated_user_id_localpart,
@@ -204,7 +203,6 @@ class Store(
PushRuleStore,
PusherWorkerStore,
PresenceBackgroundUpdateStore,
ReceiptsBackgroundUpdateStore,
):
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

View File

@@ -26,20 +26,13 @@ from synapse.api.errors import (
Codes,
InvalidClientTokenError,
MissingClientTokenError,
UnstableSpecAuthError,
)
from synapse.appservice import ApplicationService
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import (
SynapseTags,
active_span,
force_tracing,
start_active_span,
trace,
)
from synapse.types import Requester, create_requester
from synapse.util.cancellation import cancellable
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
from synapse.storage.databases.main.registration import TokenLookupResult
from synapse.types import Requester, UserID, create_requester
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -71,14 +64,14 @@ class Auth:
async def check_user_in_room(
self,
room_id: str,
requester: Requester,
user_id: str,
allow_departed_users: bool = False,
) -> Tuple[str, Optional[str]]:
"""Check if the user is in the room, or was at some point.
Args:
room_id: The room to check.
requester: The user making the request, according to the access token.
user_id: The user to check.
current_state: Optional map of the current state of the room.
If provided then that map is used to check whether they are a
@@ -95,7 +88,6 @@ class Auth:
membership event ID of the user.
"""
user_id = requester.user.to_string()
(
membership,
member_event_id,
@@ -114,13 +106,9 @@ class Auth:
forgot = await self.store.did_forget(user_id, room_id)
if not forgot:
return membership, member_event_id
raise UnstableSpecAuthError(
403,
"User %s not in room %s" % (user_id, room_id),
errcode=Codes.NOT_JOINED,
)
@cancellable
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
async def get_user_by_req(
self,
request: SynapseRequest,
@@ -162,12 +150,6 @@ class Auth:
parent_span.set_tag(
"authenticated_entity", requester.authenticated_entity
)
# We tag the Synapse instance name so that it's an easy jumping
# off point into the logs. Can also be used to filter for an
# instance that is under load.
parent_span.set_tag(
SynapseTags.INSTANCE_NAME, self.hs.get_instance_name()
)
parent_span.set_tag("user_id", requester.user.to_string())
if requester.device_id is not None:
parent_span.set_tag("device_id", requester.device_id)
@@ -175,7 +157,6 @@ class Auth:
parent_span.set_tag("appservice_id", requester.app_service.id)
return requester
@cancellable
async def _wrapped_get_user_by_req(
self,
request: SynapseRequest,
@@ -192,69 +173,96 @@ class Auth:
access_token = self.get_access_token_from_request(request)
# First check if it could be a request from an appservice
requester = await self._get_appservice_user(request)
if not requester:
# If not, it should be from a regular user
requester = await self.get_user_by_access_token(
access_token, allow_expired=allow_expired
)
# Deny the request if the user account has expired.
# This check is only done for regular users, not appservice ones.
if not allow_expired:
if await self._account_validity_handler.is_user_expired(
requester.user.to_string()
):
# Raise the error if either an account validity module has determined
# the account has expired, or the legacy account validity
# implementation is enabled and determined the account has expired
raise AuthError(
403,
"User account has expired",
errcode=Codes.EXPIRED_ACCOUNT,
)
if ip_addr and (
not requester.app_service or self._track_appservice_user_ips
):
# XXX(quenting): I'm 95% confident that we could skip setting the
# device_id to "dummy-device" for appservices, and that the only impact
# would be some rows which would not deduplicate in the 'user_ips'
# table during the transition
recorded_device_id = (
"dummy-device"
if requester.device_id is None and requester.app_service is not None
else requester.device_id
)
await self.store.insert_client_ip(
user_id=requester.authenticated_entity,
access_token=access_token,
ip=ip_addr,
user_agent=user_agent,
device_id=recorded_device_id,
)
# Track also the puppeted user client IP if enabled and the user is puppeting
if (
requester.user.to_string() != requester.authenticated_entity
and self._track_puppeted_user_ips
):
(
user_id,
device_id,
app_service,
) = await self._get_appservice_user_id_and_device_id(request)
if user_id and app_service:
if ip_addr and self._track_appservice_user_ips:
await self.store.insert_client_ip(
user_id=requester.user.to_string(),
user_id=user_id,
access_token=access_token,
ip=ip_addr,
user_agent=user_agent,
device_id=requester.device_id,
device_id="dummy-device"
if device_id is None
else device_id, # stubbed
)
if requester.is_guest and not allow_guest:
requester = create_requester(
user_id, app_service=app_service, device_id=device_id
)
request.requester = user_id
return requester
user_info = await self.get_user_by_access_token(
access_token, allow_expired=allow_expired
)
token_id = user_info.token_id
is_guest = user_info.is_guest
shadow_banned = user_info.shadow_banned
# Deny the request if the user account has expired.
if not allow_expired:
if await self._account_validity_handler.is_user_expired(
user_info.user_id
):
# Raise the error if either an account validity module has determined
# the account has expired, or the legacy account validity
# implementation is enabled and determined the account has expired
raise AuthError(
403,
"User account has expired",
errcode=Codes.EXPIRED_ACCOUNT,
)
device_id = user_info.device_id
if access_token and ip_addr:
await self.store.insert_client_ip(
user_id=user_info.token_owner,
access_token=access_token,
ip=ip_addr,
user_agent=user_agent,
device_id=device_id,
)
# Track also the puppeted user client IP if enabled and the user is puppeting
if (
user_info.user_id != user_info.token_owner
and self._track_puppeted_user_ips
):
await self.store.insert_client_ip(
user_id=user_info.user_id,
access_token=access_token,
ip=ip_addr,
user_agent=user_agent,
device_id=device_id,
)
if is_guest and not allow_guest:
raise AuthError(
403,
"Guest access not allowed",
errcode=Codes.GUEST_ACCESS_FORBIDDEN,
)
# Mark the token as used. This is used to invalidate old refresh
# tokens after some time.
if not user_info.token_used and token_id is not None:
await self.store.mark_access_token_as_used(token_id)
requester = create_requester(
user_info.user_id,
token_id,
is_guest,
shadow_banned,
device_id,
app_service=app_service,
authenticated_entity=user_info.token_owner,
)
request.requester = requester
return requester
except KeyError:
@@ -291,8 +299,9 @@ class Auth:
403, "Application service has not registered this user (%s)" % user_id
)
@cancellable
async def _get_appservice_user(self, request: Request) -> Optional[Requester]:
async def _get_appservice_user_id_and_device_id(
self, request: Request
) -> Tuple[Optional[str], Optional[str], Optional[ApplicationService]]:
"""
Given a request, reads the request parameters to determine:
- whether it's an application service that's making this request
@@ -307,13 +316,15 @@ class Auth:
Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.
Returns:
the application service `Requester` of that request
3-tuple of
(user ID?, device ID?, application service?)
Postconditions:
- The `app_service` field in the returned `Requester` is set
- The `user_id` field in the returned `Requester` is either the application
service sender or the controlled user set by the `user_id` URI parameter
- The returned application service is permitted to control the returned user ID.
- If an application service is returned, so is a user ID
- A user ID is never returned without an application service
- A device ID is never returned without a user ID or an application service
- The returned application service, if present, is permitted to control the
returned user ID.
- The returned device ID, if present, has been checked to be a valid device ID
for the returned user ID.
"""
@@ -323,12 +334,12 @@ class Auth:
self.get_access_token_from_request(request)
)
if app_service is None:
return None
return None, None, None
if app_service.ip_range_whitelist:
ip_address = IPAddress(request.getClientAddress().host)
if ip_address not in app_service.ip_range_whitelist:
return None
return None, None, None
# This will always be set by the time Twisted calls us.
assert request.args is not None
@@ -362,15 +373,13 @@ class Auth:
Codes.EXCLUSIVE,
)
return create_requester(
effective_user_id, app_service=app_service, device_id=effective_device_id
)
return effective_user_id, effective_device_id, app_service
async def get_user_by_access_token(
self,
token: str,
allow_expired: bool = False,
) -> Requester:
) -> TokenLookupResult:
"""Validate access token and get user_id from it
Args:
@@ -387,9 +396,9 @@ class Auth:
# First look in the database to see if the access token is present
# as an opaque token.
user_info = await self.store.get_user_by_access_token(token)
if user_info:
valid_until_ms = user_info.valid_until_ms
r = await self.store.get_user_by_access_token(token)
if r:
valid_until_ms = r.valid_until_ms
if (
not allow_expired
and valid_until_ms is not None
@@ -401,20 +410,7 @@ class Auth:
msg="Access token has expired", soft_logout=True
)
# Mark the token as used. This is used to invalidate old refresh
# tokens after some time.
await self.store.mark_access_token_as_used(user_info.token_id)
requester = create_requester(
user_id=user_info.user_id,
access_token_id=user_info.token_id,
is_guest=user_info.is_guest,
shadow_banned=user_info.shadow_banned,
device_id=user_info.device_id,
authenticated_entity=user_info.token_owner,
)
return requester
return r
# If the token isn't found in the database, then it could still be a
# macaroon for a guest, so we check that here.
@@ -440,12 +436,11 @@ class Auth:
"Guest access token used for regular user"
)
return create_requester(
return TokenLookupResult(
user_id=user_id,
is_guest=True,
# all guests get the same device id
device_id=GUEST_DEVICE_ID,
authenticated_entity=user_id,
)
except (
pymacaroons.exceptions.MacaroonException,
@@ -459,33 +454,41 @@ class Auth:
)
raise InvalidClientTokenError("Invalid access token passed.")
async def is_server_admin(self, requester: Requester) -> bool:
def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService:
token = self.get_access_token_from_request(request)
service = self.store.get_app_service_by_token(token)
if not service:
logger.warning("Unrecognised appservice access token.")
raise InvalidClientTokenError()
request.requester = create_requester(service.sender, app_service=service)
return service
async def is_server_admin(self, user: UserID) -> bool:
"""Check if the given user is a local server admin.
Args:
requester: The user making the request, according to the access token.
user: user to check
Returns:
True if the user is an admin
"""
return await self.store.is_server_admin(requester.user)
return await self.store.is_server_admin(user)
async def check_can_change_room_list(
self, room_id: str, requester: Requester
) -> bool:
async def check_can_change_room_list(self, room_id: str, user: UserID) -> bool:
"""Determine whether the user is allowed to edit the room's entry in the
published room list.
Args:
room_id: The room to check.
requester: The user making the request, according to the access token.
room_id
user
"""
is_admin = await self.is_server_admin(requester)
is_admin = await self.is_server_admin(user)
if is_admin:
return True
await self.check_user_in_room(room_id, requester)
user_id = user.to_string()
await self.check_user_in_room(room_id, user_id)
# We currently require the user is a "moderator" in the room. We do this
# by checking if they would (theoretically) be able to change the
@@ -504,9 +507,7 @@ class Auth:
send_level = event_auth.get_send_level(
EventTypes.CanonicalAlias, "", power_level_event
)
user_level = event_auth.get_user_power_level(
requester.user.to_string(), auth_events
)
user_level = event_auth.get_user_power_level(user_id, auth_events)
return user_level >= send_level
@@ -525,7 +526,6 @@ class Auth:
return bool(query_params) or bool(auth_headers)
@staticmethod
@cancellable
def get_access_token_from_request(request: Request) -> str:
"""Extracts the access_token from the request.
@@ -563,18 +563,17 @@ class Auth:
return query_params[0].decode("ascii")
@trace
async def check_user_in_room_or_world_readable(
self, room_id: str, requester: Requester, allow_departed_users: bool = False
self, room_id: str, user_id: str, allow_departed_users: bool = False
) -> Tuple[str, Optional[str]]:
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.
Args:
room_id: The room to check.
requester: The user making the request, according to the access token.
allow_departed_users: If True, accept users that were previously
members but have now departed.
room_id: room to check
user_id: user to check
allow_departed_users: if True, accept users that were previously
members but have now departed
Returns:
Resolves to the current membership of the user in the room and the
@@ -589,7 +588,7 @@ class Auth:
# * The user is a guest user, and has joined the room
# else it will throw.
return await self.check_user_in_room(
room_id, requester, allow_departed_users=allow_departed_users
room_id, user_id, allow_departed_users=allow_departed_users
)
except AuthError:
visibility = await self._storage_controllers.state.get_current_state_event(
@@ -601,9 +600,8 @@ class Auth:
== HistoryVisibility.WORLD_READABLE
):
return Membership.JOIN, None
raise UnstableSpecAuthError(
raise AuthError(
403,
"User %s not in room %s, and room previews are disabled"
% (requester.user, room_id),
errcode=Codes.NOT_JOINED,
% (user_id, room_id),
)

View File

@@ -216,11 +216,11 @@ class EventContentFields:
MSC2716_HISTORICAL: Final = "org.matrix.msc2716.historical"
# For "insertion" events to indicate what the next batch ID should be in
# order to connect to it
MSC2716_NEXT_BATCH_ID: Final = "next_batch_id"
MSC2716_NEXT_BATCH_ID: Final = "org.matrix.msc2716.next_batch_id"
# Used on "batch" events to indicate which insertion event it connects to
MSC2716_BATCH_ID: Final = "batch_id"
MSC2716_BATCH_ID: Final = "org.matrix.msc2716.batch_id"
# For "marker" events
MSC2716_INSERTION_EVENT_REFERENCE: Final = "insertion_event_reference"
MSC2716_MARKER_INSERTION: Final = "org.matrix.msc2716.marker.insertion"
# The authorising user for joining a restricted room.
AUTHORISING_USER: Final = "join_authorised_via_users_server"
@@ -257,7 +257,7 @@ class GuestAccess:
class ReceiptTypes:
READ: Final = "m.read"
READ_PRIVATE: Final = "m.read.private"
READ_PRIVATE: Final = "org.matrix.msc2285.read.private"
FULLY_READ: Final = "m.fully_read"
@@ -268,4 +268,4 @@ class PublicRoomsFilterFields:
"""
GENERIC_SEARCH_TERM: Final = "generic_search_term"
ROOM_TYPES: Final = "room_types"
ROOM_TYPES: Final = "org.matrix.msc3827.room_types"

View File

@@ -26,7 +26,6 @@ from twisted.web import http
from synapse.util import json_decoder
if typing.TYPE_CHECKING:
from synapse.config.homeserver import HomeServerConfig
from synapse.types import JsonDict
logger = logging.getLogger(__name__)
@@ -81,12 +80,6 @@ class Codes(str, Enum):
INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
USER_DEACTIVATED = "M_USER_DEACTIVATED"
# Part of MSC3848
# https://github.com/matrix-org/matrix-spec-proposals/pull/3848
ALREADY_JOINED = "ORG.MATRIX.MSC3848.ALREADY_JOINED"
NOT_JOINED = "ORG.MATRIX.MSC3848.NOT_JOINED"
INSUFFICIENT_POWER = "ORG.MATRIX.MSC3848.INSUFFICIENT_POWER"
# The account has been suspended on the server.
# By opposition to `USER_DEACTIVATED`, this is a reversible measure
# that can possibly be appealed and reverted.
@@ -174,7 +167,7 @@ class SynapseError(CodeMessageException):
else:
self._additional_fields = dict(additional_fields)
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, **self._additional_fields)
@@ -220,7 +213,7 @@ class ConsentNotGivenError(SynapseError):
)
self._consent_uri = consent_uri
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)
@@ -314,37 +307,6 @@ class AuthError(SynapseError):
super().__init__(code, msg, errcode, additional_fields)
class UnstableSpecAuthError(AuthError):
"""An error raised when a new error code is being proposed to replace a previous one.
This error will return a "org.matrix.unstable.errcode" property with the new error code,
with the previous error code still being defined in the "errcode" property.
This error will include `org.matrix.msc3848.unstable.errcode` in the C-S error body.
"""
def __init__(
self,
code: int,
msg: str,
errcode: str,
previous_errcode: str = Codes.FORBIDDEN,
additional_fields: Optional[dict] = None,
):
self.previous_errcode = previous_errcode
super().__init__(code, msg, errcode, additional_fields)
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
fields = {}
if config is not None and config.experimental.msc3848_enabled:
fields["org.matrix.msc3848.unstable.errcode"] = self.errcode
return cs_error(
self.msg,
self.previous_errcode,
**fields,
**self._additional_fields,
)
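# For illustration (assuming the msc3848 experimental flag is enabled): an
# UnstableSpecAuthError(403, "User not in room", errcode=Codes.NOT_JOINED) would
# serialise to roughly
#
#     {
#         "errcode": "M_FORBIDDEN",  # the previous_errcode default
#         "error": "User not in room",
#         "org.matrix.msc3848.unstable.errcode": "ORG.MATRIX.MSC3848.NOT_JOINED",
#     }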
class InvalidClientCredentialsError(SynapseError):
"""An error raised when there was a problem with the authorisation credentials
in a client request.
@@ -376,8 +338,8 @@ class InvalidClientTokenError(InvalidClientCredentialsError):
super().__init__(msg=msg, errcode="M_UNKNOWN_TOKEN")
self._soft_logout = soft_logout
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
d = super().error_dict(config)
def error_dict(self) -> "JsonDict":
d = super().error_dict()
d["soft_logout"] = self._soft_logout
return d
@@ -400,7 +362,7 @@ class ResourceLimitError(SynapseError):
self.limit_type = limit_type
super().__init__(code, msg, errcode=errcode)
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(
self.msg,
self.errcode,
@@ -435,7 +397,7 @@ class InvalidCaptchaError(SynapseError):
super().__init__(code, msg, errcode)
self.error_url = error_url
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, error_url=self.error_url)
@@ -452,7 +414,7 @@ class LimitExceededError(SynapseError):
super().__init__(code, msg, errcode)
self.retry_after_ms = retry_after_ms
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)
@@ -467,7 +429,7 @@ class RoomKeysVersionError(SynapseError):
super().__init__(403, "Wrong room_keys version", Codes.WRONG_ROOM_KEYS_VERSION)
self.current_version = current_version
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, current_version=self.current_version)
@@ -507,7 +469,7 @@ class IncompatibleRoomVersionError(SynapseError):
self._room_version = room_version
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
return cs_error(self.msg, self.errcode, room_version=self._room_version)
@@ -553,7 +515,7 @@ class UnredactedContentDeletedError(SynapseError):
)
self.content_keep_ms = content_keep_ms
def error_dict(self, config: Optional["HomeServerConfig"]) -> "JsonDict":
def error_dict(self) -> "JsonDict":
extra = {}
if self.content_keep_ms is not None:
extra = {"fi.mau.msc2815.content_keep_ms": self.content_keep_ms}

View File

@@ -140,13 +140,13 @@ USER_FILTER_SCHEMA = {
@FormatChecker.cls_checks("matrix_room_id")
def matrix_room_id_validator(room_id: object) -> bool:
return isinstance(room_id, str) and RoomID.is_valid(room_id)
def matrix_room_id_validator(room_id_str: str) -> RoomID:
return RoomID.from_string(room_id_str)
@FormatChecker.cls_checks("matrix_user_id")
def matrix_user_id_validator(user_id: object) -> bool:
return isinstance(user_id, str) and UserID.is_valid(user_id)
def matrix_user_id_validator(user_id_str: str) -> UserID:
return UserID.from_string(user_id_str)
class Filtering:

View File

@@ -17,7 +17,7 @@ from collections import OrderedDict
from typing import Hashable, Optional, Tuple
from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RatelimitSettings
from synapse.config.ratelimiting import RateLimitConfig
from synapse.storage.databases.main import DataStore
from synapse.types import Requester
from synapse.util import Clock
@@ -314,8 +314,8 @@ class RequestRatelimiter:
self,
store: DataStore,
clock: Clock,
rc_message: RatelimitSettings,
rc_admin_redaction: Optional[RatelimitSettings],
rc_message: RateLimitConfig,
rc_admin_redaction: Optional[RateLimitConfig],
):
self.store = store
self.clock = clock

View File

@@ -19,23 +19,18 @@ import attr
class EventFormatVersions:
"""This is an internal enum for tracking the version of the event format,
independently of the room version.
To reduce confusion, the event format versions are named after the room
versions that they were used or introduced in.
The concept of an 'event format version' is specific to Synapse (the
specification does not mention this term.)
independently from the room version.
"""
ROOM_V1_V2 = 1 # $id:server event id format: used for room v1 and v2
ROOM_V3 = 2 # MSC1659-style $hash event id format: used for room v3
ROOM_V4_PLUS = 3 # MSC1884-style $hash format: introduced for room v4
V1 = 1 # $id:server event id format
V2 = 2 # MSC1659-style $hash event id format: introduced for room v3
V3 = 3 # MSC1884-style $hash format: introduced for room v4
KNOWN_EVENT_FORMAT_VERSIONS = {
EventFormatVersions.ROOM_V1_V2,
EventFormatVersions.ROOM_V3,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V1,
EventFormatVersions.V2,
EventFormatVersions.V3,
}
@@ -97,7 +92,7 @@ class RoomVersions:
V1 = RoomVersion(
"1",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V1_V2,
EventFormatVersions.V1,
StateResolutionVersions.V1,
enforce_key_validity=False,
special_case_aliases_auth=True,
@@ -115,7 +110,7 @@ class RoomVersions:
V2 = RoomVersion(
"2",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V1_V2,
EventFormatVersions.V1,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
@@ -133,7 +128,7 @@ class RoomVersions:
V3 = RoomVersion(
"3",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V3,
EventFormatVersions.V2,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
@@ -151,7 +146,7 @@ class RoomVersions:
V4 = RoomVersion(
"4",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
@@ -169,7 +164,7 @@ class RoomVersions:
V5 = RoomVersion(
"5",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=True,
@@ -187,7 +182,7 @@ class RoomVersions:
V6 = RoomVersion(
"6",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -205,7 +200,7 @@ class RoomVersions:
MSC2176 = RoomVersion(
"org.matrix.msc2176",
RoomDisposition.UNSTABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -223,7 +218,7 @@ class RoomVersions:
V7 = RoomVersion(
"7",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -241,7 +236,7 @@ class RoomVersions:
V8 = RoomVersion(
"8",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -259,7 +254,7 @@ class RoomVersions:
V9 = RoomVersion(
"9",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -274,10 +269,28 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False,
)
MSC2716v3 = RoomVersion(
"org.matrix.msc2716v3",
RoomDisposition.UNSTABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
msc3375_redaction_rules=False,
msc2403_knocking=True,
msc2716_historical=True,
msc2716_redactions=True,
msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False,
)
MSC3787 = RoomVersion(
"org.matrix.msc3787",
RoomDisposition.UNSTABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -295,7 +308,7 @@ class RoomVersions:
V10 = RoomVersion(
"10",
RoomDisposition.STABLE,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
@@ -310,24 +323,6 @@ class RoomVersions:
msc3787_knock_restricted_join_rule=True,
msc3667_int_only_power_levels=True,
)
MSC2716v4 = RoomVersion(
"org.matrix.msc2716v4",
RoomDisposition.UNSTABLE,
EventFormatVersions.ROOM_V4_PLUS,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
msc3375_redaction_rules=False,
msc2403_knocking=True,
msc2716_historical=True,
msc2716_redactions=True,
msc3787_knock_restricted_join_rule=False,
msc3667_int_only_power_levels=False,
)
KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
@@ -343,9 +338,9 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
RoomVersions.V7,
RoomVersions.V8,
RoomVersions.V9,
RoomVersions.MSC2716v3,
RoomVersions.MSC3787,
RoomVersions.V10,
RoomVersions.MSC2716v4,
)
}

View File

@@ -266,48 +266,15 @@ def register_start(
reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper()))
def listen_metrics(
bind_addresses: Iterable[str], port: int, enable_legacy_metric_names: bool
) -> None:
def listen_metrics(bind_addresses: Iterable[str], port: int) -> None:
"""
Start Prometheus metrics server.
"""
from prometheus_client import start_http_server as start_http_server_prometheus
from synapse.metrics import (
RegistryProxy,
start_http_server as start_http_server_legacy,
)
from synapse.metrics import RegistryProxy, start_http_server
for host in bind_addresses:
logger.info("Starting metrics listener on %s:%d", host, port)
if enable_legacy_metric_names:
start_http_server_legacy(port, addr=host, registry=RegistryProxy)
else:
_set_prometheus_client_use_created_metrics(False)
start_http_server_prometheus(port, addr=host, registry=RegistryProxy)
def _set_prometheus_client_use_created_metrics(new_value: bool) -> None:
"""
Sets whether prometheus_client should expose `_created`-suffixed metrics for
all gauges, histograms and summaries.
There is no programmatic way to disable this without poking at internals;
the proper way is to use an environment variable which prometheus_client
loads at import time.
The motivation for disabling these `_created` metrics is that they're
not useful and needlessly take up space in Prometheus.
"""
import prometheus_client.metrics
if hasattr(prometheus_client.metrics, "_use_created"):
prometheus_client.metrics._use_created = new_value
else:
logger.error(
"Can't disable `_created` metrics in prometheus_client (brittle hack broken?)"
)
start_http_server(port, addr=host, registry=RegistryProxy)
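# Note: the documented way to disable the `_created` series is believed to be the
# PROMETHEUS_DISABLE_CREATED_SERIES environment variable, which prometheus_client
# reads at import time; the helper above instead flips the private `_use_created`
# attribute after import, hence the "brittle hack" warning when it is missing.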
def listen_manhole(
@@ -511,10 +478,9 @@ async def start(hs: "HomeServer") -> None:
setup_sentry(hs)
setup_sdnotify(hs)
# If background tasks are running on the main process or this is the worker in
# charge of them, start collecting the phone home stats and shared usage metrics.
# If background tasks are running on the main process, start collecting the
# phone home stats.
if hs.config.worker.run_background_tasks:
await hs.get_common_usage_metrics_manager().setup()
start_phone_stats_home(hs)
# We now freeze all allocated objects in the hopes that (almost)

View File

@@ -412,11 +412,7 @@ class GenericWorkerServer(HomeServer):
"enable_metrics is not True!"
)
else:
_base.listen_metrics(
listener.bind_addresses,
listener.port,
enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics,
)
_base.listen_metrics(listener.bind_addresses, listener.port)
else:
logger.warning("Unsupported listener type: %s", listener.type)
@@ -445,13 +441,6 @@ def start(config_options: List[str]) -> None:
"synapse.app.user_dir",
)
if config.experimental.faster_joins_enabled:
raise ConfigError(
"You have enabled the experimental `faster_joins` config option, but it is "
"not compatible with worker deployments yet. Please disable `faster_joins` "
"or run Synapse as a single process deployment instead."
)
synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts
synapse.util.caches.TRACK_MEMORY_USAGE = config.caches.track_memory_usage

View File

@@ -44,6 +44,7 @@ from synapse.app._base import (
register_start,
)
from synapse.config._base import ConfigError, format_config_error
from synapse.config.emailconfig import ThreepidBehaviour
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig
from synapse.federation.transport.server import TransportLayerServer
@@ -57,6 +58,7 @@ from synapse.http.site import SynapseSite
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.rest import ClientRestResource
from synapse.rest.admin import AdminRestResource
from synapse.rest.health import HealthResource
@@ -200,7 +202,7 @@ class SynapseHomeServer(HomeServer):
}
)
if self.config.email.can_verify_email:
if self.config.email.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
from synapse.rest.synapse.client.password_reset import (
PasswordResetSubmitTokenResource,
)
@@ -218,10 +220,7 @@ class SynapseHomeServer(HomeServer):
resources.update({"/_matrix/consent": consent_resource})
if name == "federation":
federation_resource: Resource = TransportLayerServer(self)
if compress:
federation_resource = gz_wrap(federation_resource)
resources.update({FEDERATION_PREFIX: federation_resource})
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
if name == "openid":
resources.update(
@@ -289,6 +288,16 @@ class SynapseHomeServer(HomeServer):
manhole_settings=self.config.server.manhole_settings,
manhole_globals={"hs": self},
)
elif listener.type == "replication":
services = listen_tcp(
listener.bind_addresses,
listener.port,
ReplicationStreamProtocolFactory(self),
)
for s in services:
self.get_reactor().addSystemEventTrigger(
"before", "shutdown", s.stopListening
)
elif listener.type == "metrics":
if not self.config.metrics.enable_metrics:
logger.warning(
@@ -296,11 +305,7 @@ class SynapseHomeServer(HomeServer):
"enable_metrics is not True!"
)
else:
_base.listen_metrics(
listener.bind_addresses,
listener.port,
enable_legacy_metric_names=self.config.metrics.enable_legacy_metrics,
)
_base.listen_metrics(listener.bind_addresses, listener.port)
else:
# this shouldn't happen, as the listener type should have been checked
# during parsing

View File

@@ -32,15 +32,15 @@ logger = logging.getLogger("synapse.app.homeserver")
_stats_process: List[Tuple[int, "resource.struct_rusage"]] = []
# Gauges to expose monthly active user control metrics
current_mau_gauge = Gauge("synapse_admin_mau_current", "Current MAU")
current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
current_mau_by_service_gauge = Gauge(
"synapse_admin_mau_current_mau_by_service",
"Current MAU by service",
["app_service"],
)
max_mau_gauge = Gauge("synapse_admin_mau_max", "MAU Limit")
max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
registered_reserved_users_mau_gauge = Gauge(
"synapse_admin_mau_registered_reserved_users",
"synapse_admin_mau:registered_reserved_users",
"Registered users with reserved threepids",
)
@@ -51,16 +51,6 @@ async def phone_stats_home(
stats: JsonDict,
stats_process: List[Tuple[int, "resource.struct_rusage"]] = _stats_process,
) -> None:
"""Collect usage statistics and send them to the configured endpoint.
Args:
hs: the HomeServer object to use for gathering usage data.
stats: the dict in which to store the statistics sent to the configured
endpoint. Mostly used in tests to figure out the data that is supposed to
be sent.
stats_process: statistics about resource usage of the process.
"""
logger.info("Gathering stats for reporting")
now = int(hs.get_clock().time())
# Ensure the homeserver has started.
@@ -93,7 +83,6 @@ async def phone_stats_home(
#
store = hs.get_datastores().main
common_metrics = await hs.get_common_usage_metrics_manager().get_metrics()
stats["homeserver"] = hs.config.server.server_name
stats["server_context"] = hs.config.server.server_context
@@ -115,7 +104,7 @@ async def phone_stats_home(
room_count = await store.get_room_count()
stats["total_room_count"] = room_count
stats["daily_active_users"] = common_metrics.daily_active_users
stats["daily_active_users"] = await store.count_daily_users()
stats["monthly_active_users"] = await store.count_monthly_users()
daily_active_e2ee_rooms = await store.count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms

View File

@@ -20,7 +20,6 @@ import logging
import os
import re
from collections import OrderedDict
from enum import Enum, auto
from hashlib import sha256
from textwrap import dedent
from typing import (
@@ -604,44 +603,18 @@ class RootConfig:
" may specify directories containing *.yaml files.",
)
# we nest the mutually-exclusive group inside another group so that the help
# text shows them in their own group.
generate_mode_group = parser.add_argument_group(
"Config generation mode",
)
generate_mode_exclusive = generate_mode_group.add_mutually_exclusive_group()
generate_mode_exclusive.add_argument(
# hidden option to make the type and default work
"--generate-mode",
help=argparse.SUPPRESS,
type=_ConfigGenerateMode,
default=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
)
generate_mode_exclusive.add_argument(
generate_group = parser.add_argument_group("Config generation")
generate_group.add_argument(
"--generate-config",
action="store_true",
help="Generate a config file, then exit.",
action="store_const",
const=_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
dest="generate_mode",
)
generate_mode_exclusive.add_argument(
generate_group.add_argument(
"--generate-missing-configs",
"--generate-keys",
action="store_true",
help="Generate any missing additional config files, then exit.",
action="store_const",
const=_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
dest="generate_mode",
)
generate_mode_exclusive.add_argument(
"--generate-missing-and-run",
help="Generate any missing additional config files, then run. This is the "
"default behaviour.",
action="store_const",
const=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
dest="generate_mode",
)
generate_group = parser.add_argument_group("Details for --generate-config")
generate_group.add_argument(
"-H", "--server-name", help="The server name to generate a config file for."
)
@@ -697,12 +670,11 @@ class RootConfig:
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
generate_missing_configs = config_args.generate_missing_configs
obj = cls(config_files)
if (
config_args.generate_mode
== _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT
):
if config_args.generate_config:
if config_args.report_stats is None:
parser.error(
"Please specify either --report-stats=yes or --report-stats=no\n\n"
@@ -760,14 +732,11 @@ class RootConfig:
)
% (config_path,)
)
generate_missing_configs = True
config_dict = read_config_files(config_files)
obj.generate_missing_files(config_dict, config_dir_path)
if config_args.generate_mode in (
_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
):
if generate_missing_configs:
obj.generate_missing_files(config_dict, config_dir_path)
return None
obj.parse_config_dict(
@@ -996,12 +965,6 @@ def read_file(file_path: Any, config_path: Iterable[str]) -> str:
raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e
class _ConfigGenerateMode(Enum):
GENERATE_MISSING_AND_RUN = auto()
GENERATE_MISSING_AND_EXIT = auto()
GENERATE_EVERYTHING_AND_EXIT = auto()
__all__ = [
"Config",
"RootConfig",

View File

@@ -23,7 +23,7 @@ LEGACY_TEMPLATE_DIR_WARNING = """
This server's configuration file is using the deprecated 'template_dir' setting in the
'account_validity' section. Support for this setting has been deprecated and will be
removed in a future version of Synapse. Server admins should instead use the new
'custom_template_directory' setting documented here:
'custom_templates_directory' setting documented here:
https://matrix-org.github.io/synapse/latest/templates.html
---------------------------------------------------------------------------------------"""

View File

@@ -18,6 +18,7 @@
import email.utils
import logging
import os
from enum import Enum
from typing import Any
import attr
@@ -52,7 +53,7 @@ LEGACY_TEMPLATE_DIR_WARNING = """
This server's configuration file is using the deprecated 'template_dir' setting in the
'email' section. Support for this setting has been deprecated and will be removed in a
future version of Synapse. Server admins should instead use the new
'custom_template_directory' setting documented here:
'custom_templates_directory' setting documented here:
https://matrix-org.github.io/synapse/latest/templates.html
---------------------------------------------------------------------------------------"""
@@ -135,22 +136,40 @@ class EmailConfig(Config):
self.email_enable_notifs = email_config.get("enable_notifs", False)
self.threepid_behaviour_email = (
# Have Synapse handle the email sending if account_threepid_delegates.email
# is not defined
# msisdn is currently always remote while Synapse does not support any method of
# sending SMS messages
ThreepidBehaviour.REMOTE
if self.root.registration.account_threepid_delegate_email
else ThreepidBehaviour.LOCAL
)
if config.get("trust_identity_server_for_password_resets"):
raise ConfigError(
'The config option "trust_identity_server_for_password_resets" '
"is no longer supported. Please remove it from the config file."
'The config option "trust_identity_server_for_password_resets" has been removed.'
"Please consult the configuration manual at docs/usage/configuration/config_documentation.md for "
"details and update your config file."
)
# If we have email config settings, assume that we can verify ownership of
# email addresses.
self.can_verify_email = email_config != {}
self.local_threepid_handling_disabled_due_to_email_config = False
if (
self.threepid_behaviour_email == ThreepidBehaviour.LOCAL
and email_config == {}
):
# We cannot warn the user this has happened here
# Instead do so when a user attempts to reset their password
self.local_threepid_handling_disabled_due_to_email_config = True
self.threepid_behaviour_email = ThreepidBehaviour.OFF
# Get lifetime of a validation token in milliseconds
self.email_validation_token_lifetime = self.parse_duration(
email_config.get("validation_token_lifetime", "1h")
)
if self.can_verify_email:
if self.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
missing = []
if not self.email_notif_from:
missing.append("email.notif_from")
@@ -341,3 +360,18 @@ class EmailConfig(Config):
"Config option email.invite_client_location must be a http or https URL",
path=("email", "invite_client_location"),
)
class ThreepidBehaviour(Enum):
"""
Enum to define the behaviour of Synapse with regards to when it contacts an identity
server for 3pid registration and password resets
REMOTE = use an external server to send tokens
LOCAL = send tokens ourselves
OFF = disable registration via 3pid and password resets
"""
REMOTE = "remote"
LOCAL = "local"
OFF = "off"

View File

@@ -32,6 +32,9 @@ class ExperimentalConfig(Config):
# MSC2716 (importing historical messages)
self.msc2716_enabled: bool = experimental.get("msc2716_enabled", False)
# MSC2285 (private read receipts)
self.msc2285_enabled: bool = experimental.get("msc2285_enabled", False)
# MSC3244 (room version capabilities)
self.msc3244_enabled: bool = experimental.get("msc3244_enabled", True)
@@ -71,9 +74,6 @@ class ExperimentalConfig(Config):
self.msc3720_enabled: bool = experimental.get("msc3720_enabled", False)
# MSC2654: Unread counts
#
# Note that enabling this will result in an incorrect unread count for
# previously calculated push actions.
self.msc2654_enabled: bool = experimental.get("msc2654_enabled", False)
# MSC2815 (allow room moderators to view redacted event content)
@@ -88,8 +88,5 @@ class ExperimentalConfig(Config):
# MSC3715: dir param on /relations.
self.msc3715_enabled: bool = experimental.get("msc3715_enabled", False)
# MSC3848: Introduce errcodes for specific event sending failures
self.msc3848_enabled: bool = experimental.get("msc3848_enabled", False)
# MSC3852: Expose last seen user agent field on /_matrix/client/v3/devices.
self.msc3852_enabled: bool = experimental.get("msc3852_enabled", False)
# MSC3827: Filtering of /publicRooms by room type
self.msc3827_enabled: bool = experimental.get("msc3827_enabled", False)

View File

@@ -217,18 +217,7 @@ class KeyConfig(Config):
signing_keys = self.read_file(signing_key_path, name)
try:
loaded_signing_keys = read_signing_keys(
[
signing_key_line
for signing_key_line in signing_keys.splitlines(keepends=False)
if signing_key_line.strip()
]
)
if not loaded_signing_keys:
raise ConfigError(f"No signing keys in file {signing_key_path}")
return loaded_signing_keys
return read_signing_keys(signing_keys.splitlines(True))
except Exception as e:
raise ConfigError("Error reading %s: %s" % (name, str(e)))

View File

@@ -42,35 +42,6 @@ class MetricsConfig(Config):
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.enable_metrics = config.get("enable_metrics", False)
"""
### `enable_legacy_metrics` (experimental)
**Experimental: this option may be removed or have its behaviour
changed at any time, with no notice.**
Set to `true` to publish both legacy and non-legacy Prometheus metric names,
or to `false` to only publish non-legacy Prometheus metric names.
Defaults to `true`. Has no effect if `enable_metrics` is `false`.
Legacy metric names include:
- metrics containing colons in the name, such as `synapse_util_caches_response_cache:hits`, because colons are supposed to be reserved for user-defined recording rules;
- counters that don't end with the `_total` suffix, such as `synapse_federation_client_sent_edus`, therefore not adhering to the OpenMetrics standard.
These legacy metric names are unconventional and not compliant with OpenMetrics standards.
They are included for backwards compatibility.
Example configuration:
```yaml
enable_legacy_metrics: false
```
See https://github.com/matrix-org/synapse/issues/11106 for context.
*Since v1.67.0.*
"""
self.enable_legacy_metrics = config.get("enable_legacy_metrics", True)
self.report_stats = config.get("report_stats", None)
self.report_stats_endpoint = config.get(
"report_stats_endpoint", "https://matrix.org/report-usage-stats/push"

View File

@@ -21,7 +21,7 @@ from synapse.types import JsonDict
from ._base import Config
class RatelimitSettings:
class RateLimitConfig:
def __init__(
self,
config: Dict[str, float],
@@ -34,7 +34,7 @@ class RatelimitSettings:
@attr.s(auto_attribs=True)
class FederationRatelimitSettings:
class FederationRateLimitConfig:
window_size: int = 1000
sleep_limit: int = 10
sleep_delay: int = 500
@@ -50,11 +50,11 @@ class RatelimitConfig(Config):
# Load the new-style messages config if it exists. Otherwise fall back
# to the old method.
if "rc_message" in config:
self.rc_message = RatelimitSettings(
self.rc_message = RateLimitConfig(
config["rc_message"], defaults={"per_second": 0.2, "burst_count": 10.0}
)
else:
self.rc_message = RatelimitSettings(
self.rc_message = RateLimitConfig(
{
"per_second": config.get("rc_messages_per_second", 0.2),
"burst_count": config.get("rc_message_burst_count", 10.0),
@@ -64,9 +64,9 @@ class RatelimitConfig(Config):
# Load the new-style federation config, if it exists. Otherwise, fall
# back to the old method.
if "rc_federation" in config:
self.rc_federation = FederationRatelimitSettings(**config["rc_federation"])
self.rc_federation = FederationRateLimitConfig(**config["rc_federation"])
else:
self.rc_federation = FederationRatelimitSettings(
self.rc_federation = FederationRateLimitConfig(
**{
k: v
for k, v in {
@@ -80,17 +80,17 @@ class RatelimitConfig(Config):
}
)
self.rc_registration = RatelimitSettings(config.get("rc_registration", {}))
self.rc_registration = RateLimitConfig(config.get("rc_registration", {}))
self.rc_registration_token_validity = RatelimitSettings(
self.rc_registration_token_validity = RateLimitConfig(
config.get("rc_registration_token_validity", {}),
defaults={"per_second": 0.1, "burst_count": 5},
)
rc_login_config = config.get("rc_login", {})
self.rc_login_address = RatelimitSettings(rc_login_config.get("address", {}))
self.rc_login_account = RatelimitSettings(rc_login_config.get("account", {}))
self.rc_login_failed_attempts = RatelimitSettings(
self.rc_login_address = RateLimitConfig(rc_login_config.get("address", {}))
self.rc_login_account = RateLimitConfig(rc_login_config.get("account", {}))
self.rc_login_failed_attempts = RateLimitConfig(
rc_login_config.get("failed_attempts", {})
)
@@ -101,20 +101,20 @@ class RatelimitConfig(Config):
rc_admin_redaction = config.get("rc_admin_redaction")
self.rc_admin_redaction = None
if rc_admin_redaction:
self.rc_admin_redaction = RatelimitSettings(rc_admin_redaction)
self.rc_admin_redaction = RateLimitConfig(rc_admin_redaction)
self.rc_joins_local = RatelimitSettings(
self.rc_joins_local = RateLimitConfig(
config.get("rc_joins", {}).get("local", {}),
defaults={"per_second": 0.1, "burst_count": 10},
)
self.rc_joins_remote = RatelimitSettings(
self.rc_joins_remote = RateLimitConfig(
config.get("rc_joins", {}).get("remote", {}),
defaults={"per_second": 0.01, "burst_count": 10},
)
# Track the rate of joins to a given room. If there are too many, temporarily
# prevent local joins and remote joins via this server.
self.rc_joins_per_room = RatelimitSettings(
self.rc_joins_per_room = RateLimitConfig(
config.get("rc_joins_per_room", {}),
defaults={"per_second": 1, "burst_count": 10},
)
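# For reference, each of these settings objects reads "per_second" and "burst_count"
# from the given dict, falling back to the supplied defaults, e.g.:
#
#     rc_joins_per_room = RatelimitSettings({}, defaults={"per_second": 1, "burst_count": 10})
#     rc_joins_per_room.per_second   # 1
#     rc_joins_per_room.burst_count  # 10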
@@ -124,31 +124,31 @@ class RatelimitConfig(Config):
# * For requests received over federation this is keyed by the origin.
#
# Note that this isn't exposed in the configuration as it is obscure.
self.rc_key_requests = RatelimitSettings(
self.rc_key_requests = RateLimitConfig(
config.get("rc_key_requests", {}),
defaults={"per_second": 20, "burst_count": 100},
)
self.rc_3pid_validation = RatelimitSettings(
self.rc_3pid_validation = RateLimitConfig(
config.get("rc_3pid_validation") or {},
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_invites_per_room = RatelimitSettings(
self.rc_invites_per_room = RateLimitConfig(
config.get("rc_invites", {}).get("per_room", {}),
defaults={"per_second": 0.3, "burst_count": 10},
)
self.rc_invites_per_user = RatelimitSettings(
self.rc_invites_per_user = RateLimitConfig(
config.get("rc_invites", {}).get("per_user", {}),
defaults={"per_second": 0.003, "burst_count": 5},
)
self.rc_invites_per_issuer = RatelimitSettings(
self.rc_invites_per_issuer = RateLimitConfig(
config.get("rc_invites", {}).get("per_issuer", {}),
defaults={"per_second": 0.3, "burst_count": 10},
)
self.rc_third_party_invite = RatelimitSettings(
self.rc_third_party_invite = RateLimitConfig(
config.get("rc_third_party_invite", {}),
defaults={
"per_second": self.rc_message.per_second,

View File

@@ -13,23 +13,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from typing import Any, Dict, Optional
import logging
from typing import Any, Optional
from synapse.api.constants import RoomCreationPreset
from synapse.config._base import Config, ConfigError, read_file
from synapse.config._base import Config, ConfigError
from synapse.types import JsonDict, RoomAlias, UserID
from synapse.util.stringutils import random_string_with_symbols, strtobool
NO_EMAIL_DELEGATE_ERROR = """\
Delegation of email verification to an identity server is no longer supported. To
logger = logging.getLogger(__name__)
LEGACY_EMAIL_DELEGATE_WARNING = """\
Delegation of email verification to an identity server is now deprecated. To
continue to allow users to add email addresses to their accounts, and use them for
password resets, configure Synapse with an SMTP server via the `email` setting, and
remove `account_threepid_delegates.email`.
"""
CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
You have configured both `registration_shared_secret` and
`registration_shared_secret_path`. These are mutually incompatible.
This will be an error in a future version.
"""
@@ -58,22 +58,15 @@ class RegistrationConfig(Config):
self.enable_registration_token_3pid_bypass = config.get(
"enable_registration_token_3pid_bypass", False
)
# read the shared secret, either inline or from an external file
self.registration_shared_secret = config.get("registration_shared_secret")
registration_shared_secret_path = config.get("registration_shared_secret_path")
if registration_shared_secret_path:
if self.registration_shared_secret:
raise ConfigError(CONFLICTING_SHARED_SECRET_OPTS_ERROR)
self.registration_shared_secret = read_file(
registration_shared_secret_path, ("registration_shared_secret_path",)
).strip()
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
account_threepid_delegates = config.get("account_threepid_delegates") or {}
if "email" in account_threepid_delegates:
raise ConfigError(NO_EMAIL_DELEGATE_ERROR)
logger.warning(LEGACY_EMAIL_DELEGATE_WARNING)
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
self.default_identity_server = config.get("default_identity_server")
self.allow_guest_access = config.get("allow_guest_access", False)
@@ -232,21 +225,6 @@ class RegistrationConfig(Config):
else:
return ""
def generate_files(self, config: Dict[str, Any], config_dir_path: str) -> None:
# if 'registration_shared_secret_path' is specified, and the target file
# does not exist, generate it.
registration_shared_secret_path = config.get("registration_shared_secret_path")
if registration_shared_secret_path and not self.path_exists(
registration_shared_secret_path
):
print(
"Generating registration shared secret file "
+ registration_shared_secret_path
)
secret = random_string_with_symbols(50)
with open(registration_shared_secret_path, "w") as f:
f.write(f"{secret}\n")
@staticmethod
def add_arguments(parser: argparse.ArgumentParser) -> None:
reg_group = parser.add_argument_group("registration")
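
The registration hunks above remove support for reading the shared secret from an external file (and for generating that file on first start). A rough, illustrative sketch of the behaviour being removed, assuming hypothetical helper names rather than Synapse's own read_file/random_string_with_symbols:

import os
import secrets
from typing import Any, Dict, Optional


def read_registration_shared_secret(config: Dict[str, Any]) -> Optional[str]:
    # The two options are mutually incompatible, matching the removed
    # ConfigError above; if the path form is used, the secret is read from disk.
    inline = config.get("registration_shared_secret")
    path = config.get("registration_shared_secret_path")
    if inline and path:
        raise ValueError(
            "registration_shared_secret and registration_shared_secret_path "
            "are mutually incompatible"
        )
    if path:
        with open(path) as f:
            return f.read().strip()
    return inline


def maybe_generate_shared_secret_file(path: str) -> None:
    # Rough analogue of the removed generate_files() step: only create the
    # file if it does not already exist (token_urlsafe stands in for
    # random_string_with_symbols).
    if not os.path.exists(path):
        with open(path, "w") as f:
            f.write(secrets.token_urlsafe(37) + "\n")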

View File

@@ -36,12 +36,6 @@ from ._util import validate_config
logger = logging.Logger(__name__)
DIRECT_TCP_ERROR = """
Using direct TCP replication for workers is no longer supported.
Please see https://matrix-org.github.io/synapse/latest/upgrade.html#direct-tcp-replication-is-no-longer-supported-migrate-to-redis
"""
# by default, we attempt to listen on both '::' *and* '0.0.0.0' because some OSes
# (Windows, macOS, other BSD/Linux where net.ipv6.bindv6only is set) will only listen
# on IPv6 when '::' is set.
@@ -171,6 +165,7 @@ KNOWN_LISTENER_TYPES = {
"http",
"metrics",
"manhole",
"replication",
}
KNOWN_RESOURCES = {
@@ -206,7 +201,6 @@ class HttpListenerConfig:
resources: List[HttpResourceConfig] = attr.Factory(list)
additional_resources: Dict[str, dict] = attr.Factory(dict)
tag: Optional[str] = None
request_id_header: Optional[str] = None
@attr.s(slots=True, frozen=True, auto_attribs=True)
@@ -521,11 +515,7 @@ class ServerConfig(Config):
):
raise ConfigError("allowed_avatar_mimetypes must be a list")
listeners = config.get("listeners", [])
if not isinstance(listeners, list):
raise ConfigError("Expected a list", ("listeners",))
self.listeners = [parse_listener_def(i, x) for i, x in enumerate(listeners)]
self.listeners = [parse_listener_def(x) for x in config.get("listeners", [])]
# no_tls is not really supported any more, but let's grandfather it in
# here.
@@ -890,15 +880,9 @@ def read_gc_thresholds(
)
def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
def parse_listener_def(listener: Any) -> ListenerConfig:
"""parse a listener config from the config file"""
if not isinstance(listener, dict):
raise ConfigError("Expected a dictionary", ("listeners", str(num)))
listener_type = listener["type"]
# Raise a helpful error if direct TCP replication is still configured.
if listener_type == "replication":
raise ConfigError(DIRECT_TCP_ERROR, ("listeners", str(num), "type"))
port = listener.get("port")
if not isinstance(port, int):
@@ -934,7 +918,6 @@ def parse_listener_def(num: int, listener: Any) -> ListenerConfig:
resources=resources,
additional_resources=listener.get("additional_resources", {}),
tag=listener.get("tag"),
request_id_header=listener.get("request_id_header"),
)
return ListenerConfig(port, bind_addresses, listener_type, tls, http_config)
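
The server hunks above drop three related things: the listener index that parse_listener_def used for error paths, the early rejection of "replication" listeners (direct TCP replication), and the request_id_header option on HTTP listeners. A hedged sketch of roughly what that removed behaviour looked like, using illustrative names rather than Synapse's real ListenerConfig types:

from typing import Any, Dict, List


def parse_listener_sketch(num: int, listener: Any) -> Dict[str, Any]:
    # The listener's index is threaded through purely so errors can point at
    # the offending entry, e.g. ("listeners", "2").
    if not isinstance(listener, dict):
        raise ValueError(f"listeners.{num}: expected a dictionary")
    if listener.get("type") == "replication":
        # Mirrors the removed DIRECT_TCP_ERROR check: direct TCP replication
        # listeners are rejected with a pointer to the Redis migration docs.
        raise ValueError(
            f"listeners.{num}.type: direct TCP replication is no longer supported"
        )
    return {
        "port": listener.get("port"),
        "type": listener["type"],
        # Optional header from which the incoming request ID is taken.
        "request_id_header": listener.get("request_id_header"),
    }


def parse_listeners_sketch(config: Dict[str, Any]) -> List[Dict[str, Any]]:
    listeners = config.get("listeners", [])
    if not isinstance(listeners, list):
        raise ValueError("listeners: expected a list")
    return [parse_listener_sketch(i, x) for i, x in enumerate(listeners)]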

Some files were not shown because too many files have changed in this diff.