Compare commits
1 commit: release-v1 ... release-v1

| Author | SHA1 | Date |
|---|---|---|
|  | dc1c5ab34e |  |
@@ -27,10 +27,10 @@ which is under the Unlicense licence.
{{- . -}}{{- "\n" -}}
{{- end -}}
{{- with .TestCases -}}
{{- /* Passing tests are first */ -}}
{{- /* Failing tests are first */ -}}
{{- range . -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
@@ -47,6 +47,7 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}

{{- /* Then skipped tests are second */ -}}
{{- range . -}}
{{- if eq .Result "SKIP" -}}
@@ -67,10 +68,11 @@ which is under the Unlicense licence.
{{- end -}}
{{- end -}}

{{- /* and failing tests are last */ -}}

{{- /* Then passing tests are last */ -}}
{{- range . -}}
{{- if and (ne .Result "PASS") (ne .Result "SKIP") -}}
::group::{{ "\033" }}[0;31m❌{{ " " }}{{- .Name -}}
{{- if eq .Result "PASS" -}}
::group::{{ "\033" }}[0;32m✅{{ " " }}{{- .Name -}}
{{- "\033" -}}[0;37m ({{if $settings.ShowTestStatus}}{{.Result}}; {{end}}{{ .Duration -}}
{{- with .Coverage -}}
, coverage: {{ . }}%
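Editor's note: for readers unfamiliar with gotestfmt templates, a minimal Python sketch of the line this template renders for a passing test. The test name and duration are invented example values; `\033[0;32m` / `\033[0;37m` are the ANSI colour codes the template emits, and `::group::` is the GitHub Actions marker for a collapsible log section.

```python
# Sketch only: reproduces the shape of one rendered template line.
GREEN = "\033[0;32m"  # colour for the pass marker
GREY = "\033[0;37m"   # colour for the duration/status suffix

# Renders as a collapsible group header, e.g. "::group::✅ TestExample (1.23s)"
print(f"::group::{GREEN}✅ TestExample{GREY} (1.23s)")
```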
@@ -1,132 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
# compatible wheel, if so rename the wheel before repairing it.

import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename
from packaging.version import Version


def check_is_abi3_compatible(wheel_file: str) -> None:
    """Check the contents of the built wheel for any `.so` files that are *not*
    abi3 compatible.
    """

    with ZipFile(wheel_file, "r") as wheel:
        for file in wheel.namelist():
            if not file.endswith(".so"):
                continue

            if not file.endswith(".abi3.so"):
                raise Exception(f"Found non-abi3 lib: {file}")


def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    """Replaces the cpython wheel file with an ABI3 compatible wheel"""

    if tag.abi == "abi3":
        # Nothing to do.
        return wheel_file

    check_is_abi3_compatible(wheel_file)

    abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)

    dirname = os.path.dirname(wheel_file)
    new_wheel_file = os.path.join(
        dirname,
        f"{name}-{version}-{abi3_tag}.whl",
    )

    os.rename(wheel_file, new_wheel_file)

    print("Renamed wheel to", new_wheel_file)

    return new_wheel_file


def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
    """Entry point"""

    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
    # normalizes the package name (i.e. it converts matrix_synapse ->
    # matrix-synapse), which is not what we want.
    _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
    name = os.path.basename(wheel_file).split("-")[0]

    if len(tags) != 1:
        # We expect a wheel file with only a single tag
        raise Exception(f"Unexpectedly found multiple tags: {tags}")

    tag = next(iter(tags))

    if build:
        # We don't use build tags in Synapse
        raise Exception(f"Unexpected build tag: {build}")

    # If the wheel is for cpython then convert it into an abi3 wheel.
    if tag.interpreter.startswith("cp"):
        wheel_file = cpython(wheel_file, name, version, tag)

    # Finally, repair the wheel.
    if archs is not None:
        # If we are given archs then we are on macos and need to use
        # `delocate-listdeps`.
        subprocess.run(["delocate-listdeps", wheel_file], check=True)
        subprocess.run(
            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
            check=True,
        )
    else:
        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")

    parser.add_argument(
        "--wheel-dir",
        "-w",
        metavar="WHEEL_DIR",
        help="Directory to store delocated wheels",
        required=True,
    )

    parser.add_argument(
        "--require-archs",
        metavar="archs",
        default=None,
    )

    parser.add_argument(
        "wheel_file",
        metavar="WHEEL_FILE",
    )

    args = parser.parse_args()

    wheel_file = args.wheel_file
    wheel_dir = args.wheel_dir
    archs = args.require_archs

    main(wheel_file, wheel_dir, archs)
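Editor's note: a small sketch of the rename step above, using `packaging`'s public API. The wheel filename here is a made-up example, not one taken from this diff.

```python
from packaging.tags import Tag
from packaging.utils import parse_wheel_filename

# Hypothetical CPython wheel whose .so files are all abi3-compatible.
fname = "matrix_synapse-1.70.0-cp37-cp37m-manylinux_2_17_x86_64.whl"
_, version, _, tags = parse_wheel_filename(fname)
tag = next(iter(tags))

# Swap the ABI component for "abi3", keeping interpreter and platform.
abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)
print(f"matrix_synapse-{version}-{abi3_tag}.whl")
# -> matrix_synapse-1.70.0-cp37-abi3-manylinux_2_17_x86_64.whl
```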
@@ -1,135 +0,0 @@
#!/usr/bin/env python
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Calculate the trial jobs to run based on if we're in a PR or not.

import json
import os


def set_output(key: str, value: str):
    # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-output-parameter
    with open(os.environ["GITHUB_OUTPUT"], "at") as f:
        print(f"{key}={value}", file=f)


IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

# First calculate the various trial jobs.
#
# For each type of test we only run on Py3.7 on PRs

trial_sqlite_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "all",
    }
]

if not IS_PR:
    trial_sqlite_tests.extend(
        {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
        for version in ("3.8", "3.9", "3.10")
    )


trial_postgres_tests = [
    {
        "python-version": "3.7",
        "database": "postgres",
        "postgres-version": "10",
        "extras": "all",
    }
]

if not IS_PR:
    trial_postgres_tests.append(
        {
            "python-version": "3.10",
            "database": "postgres",
            "postgres-version": "14",
            "extras": "all",
        }
    )

trial_no_extra_tests = [
    {
        "python-version": "3.7",
        "database": "sqlite",
        "extras": "",
    }
]

print("::group::Calculated trial jobs")
print(
    json.dumps(
        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests, indent=4
    )
)
print("::endgroup::")

test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
set_output("trial_test_matrix", test_matrix)


# Next calculate the various sytest jobs.
#
# For each type of test we only run on focal on PRs

sytest_tests = [
    {
        "sytest-tag": "focal",
    },
    {
        "sytest-tag": "focal",
        "postgres": "postgres",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
    },
]

if not IS_PR:
    sytest_tests.extend(
        [
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
            },
            {
                "sytest-tag": "buster",
                "postgres": "multi-postgres",
                "workers": "workers",
            },
        ]
    )


print("::group::Calculated sytest jobs")
print(json.dumps(sytest_tests, indent=4))
print("::endgroup::")

test_matrix = json.dumps(sytest_tests)
set_output("sytest_test_matrix", test_matrix)
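Editor's note: the matrices written by `set_output` are read back in `tests.yml` via `fromJson` (see the workflow diff further down). As a sketch, with an illustrative one-entry matrix, the line appended to the `$GITHUB_OUTPUT` file looks like this:

```python
import json

# One "key=value" line per output, in the format GitHub Actions parses.
matrix = [{"python-version": "3.7", "database": "sqlite", "extras": "all"}]
print(f"trial_test_matrix={json.dumps(matrix)}")
# trial_test_matrix=[{"python-version": "3.7", "database": "sqlite", "extras": "all"}]
```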
@@ -1,21 +0,0 @@
#!/bin/bash
#
# wraps `gotestfmt`, hiding output from successful packages unless
# all tests passed.

set -o pipefail
set -e

# tee the test results to a log, whilst also piping them into gotestfmt,
# telling it to hide successful results, so that we can clearly see
# unsuccessful results.
tee complement.log | gotestfmt -hide successful-packages

# gotestfmt will exit non-zero if there were any failures, so if we got to this
# point, we must have had a successful result.
echo "All tests successful; showing all test results"

# Pipe the test results back through gotestfmt, showing all results.
# The log file consists of JSON lines giving the test results, interspersed
# with regular stdout lines (including reports of downloaded packages).
grep '^{"Time":' complement.log | gotestfmt
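Editor's note: a rough Python equivalent of that final `grep`, to make the filtering explicit — only the go `test2json` records (lines starting with `{"Time":`) survive, dropping interleaved stdout noise. `complement.log` is the file the script tees into.

```python
import sys

# Keep only JSON test records; discard package-download chatter etc.
with open("complement.log") as log:
    for line in log:
        if line.startswith('{"Time":'):
            sys.stdout.write(line)
```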
.ci/scripts/postgres_exec.py (new executable file, 31 lines)

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

import psycopg2

# a very simple replacement for `psql`, to make up for the lack of the postgres client
# libraries in the synapse docker image.

# We use "postgres" as a database because it's bound to exist and the "synapse" one
# doesn't exist yet.
db_conn = psycopg2.connect(
    user="postgres", host="localhost", password="postgres", dbname="postgres"
)
db_conn.autocommit = True
cur = db_conn.cursor()
for c in sys.argv[1:]:
    cur.execute(c)
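Editor's note: each positional argument is executed as a separate SQL statement, so callers can batch statements in one invocation. A sketch of how the scripts later in this diff invoke it (assuming the hard-coded CI credentials above):

```python
import subprocess

# Equivalent to: poetry run .ci/scripts/postgres_exec.py "DROP ..." "CREATE ..."
subprocess.run(
    [
        ".ci/scripts/postgres_exec.py",
        "DROP DATABASE synapse",
        "CREATE DATABASE synapse",
    ],
    check=True,
)
```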
@@ -21,7 +21,7 @@ endblock

block Install Complement Dependencies
  sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
  go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
  go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
@@ -32,7 +32,7 @@ else
fi

# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite database to postgres so we can check command works against postgres
echo "+++ Port SQLite3 database to postgres"
@@ -5,8 +5,18 @@
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive

set -ex

apt-get update
apt-get install -y \
    python3 python3-dev python3-pip python3-venv pipx \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

@@ -23,6 +33,12 @@ export VIRTUALENV_NO_DOWNLOAD=1
#   a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
   -e "s/[~>]=/==/g" \
   -e '/^python = "^/!s/\^/==/g' \
@@ -39,7 +55,7 @@ sed -i \
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install toml wheel
pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
@@ -53,8 +69,8 @@ with open('pyproject.toml', 'w') as f:
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pip install poetry==1.2.0
poetry lock
pipx install poetry==1.1.14
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
@@ -62,3 +78,6 @@ echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
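Editor's note: a hedged Python sketch of what the `sed` above does to a dependency specifier — `~=`/`>=` bounds become exact `==` pins, and (per the new TODO) carets would be pinned the same way on every line except the `python = "^..."` one. The specifiers below are invented examples.

```python
import re

def pin(line: str) -> str:
    # s/[~>]=/==/g
    line = re.sub(r"[~>]=", "==", line)
    # /^python = "^/!s/\^/==/g  -- skip the python requirement line
    if not line.startswith('python = "^'):
        line = line.replace("^", "==")
    return line

print(pin('attrs = ">=19.2.0"'))    # attrs = "==19.2.0"
print(pin('Twisted = "^18.9.0"'))   # Twisted = "==18.9.0"
```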
@@ -2,27 +2,27 @@
#
# Test script for 'synapse_port_db'.
#   - configures synapse and a postgres server.
#   - runs the port script on a prepopulated test sqlite db. Checks that the
#     return code is zero.
#   - reruns the port script on the same sqlite db, targeting the same postgres db.
#     Checks that the return code is zero.
#   - runs the port script against a new sqlite db. Checks the return code is zero.
#   - runs the port script on a prepopulated test sqlite db
#   - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe -o pipefail
set -xe
cd "$(dirname "$0")/../.."

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"
# Make sure the SQLite3 database is using the latest schema and has no pending background updates.

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
psql -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
@@ -45,23 +45,9 @@ rm .ci/test_db.db
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
psql \
  -c "DROP DATABASE synapse" \
  -c "CREATE DATABASE synapse"
poetry run .ci/scripts/postgres_exec.py \
  "DROP DATABASE synapse" \
  "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

echo "--- Create a brand new postgres database from schema"
cp .ci/postgres-config.yaml .ci/postgres-config-unported.yaml
sed -i -e 's/database: synapse/database: synapse_unported/' .ci/postgres-config-unported.yaml
psql -c "CREATE DATABASE synapse_unported"
poetry run update_synapse_database --database-config .ci/postgres-config-unported.yaml --run-background-updates

echo "+++ Comparing ported schema with unported schema"
# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
psql synapse -c "DROP TABLE port_from_sqlite3;"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
# By default, `diff` returns zero if there are no changes and nonzero otherwise
diff -u unported.sql ported.sql | tee schema_diff
@@ -4,15 +4,8 @@
# things to include
!docker
!synapse
!rust
!README.rst
!pyproject.toml
!poetry.lock
!Cargo.lock
!Cargo.toml
!build_rust.py

rust/target
synapse/*.so

**/__pycache__
.github/dependabot.yml (vendored, 22 lines)

@@ -1,22 +0,0 @@
version: 2
updates:
  - # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
    package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "docker"
    directory: "/docker"
    schedule:
      interval: "weekly"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
.github/workflows/dependabot_changelog.yml (vendored, 46 lines)

@@ -1,46 +0,0 @@
name: Write changelog for dependabot PR
on:
  pull_request:
    types:
      - opened
      - reopened  # For debugging!

permissions:
  # Needed to be able to push the commit. See
  # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
  # for a similar example
  contents: write

jobs:
  add-changelog:
    runs-on: 'ubuntu-latest'
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
      - name: Write, commit and push changelog
        run: |
          echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
          git add changelog.d
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "GitHub Actions"
          git commit -m "Changelog"
          git push
        shell: bash
        # The `git push` above does not trigger CI on the dependabot PR.
        #
        # By default, workflows can't trigger other workflows when they're just using the
        # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
        # recursive workflow loops by accident, because that'll get very expensive very
        # quickly.) Instead, you have to manually call out to another workflow, or else
        # make your changes (i.e. the `git push` above) using a personal access token.
        # See
        # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
        #
        # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
        # See git commit history for previous attempts. If anyone desperately wants to try
        # again in the future, make a matrix-bot account and use its access token to git push.

# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
# are sufficiently locked down to dependabot only as above.
.github/workflows/docker.yml (vendored, 15 lines changed)

@@ -17,19 +17,19 @@ jobs:
    steps:
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v2
        uses: docker/setup-qemu-action@v1
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1

      - name: Inspect builder
        run: docker buildx inspect

      - name: Log in to DockerHub
        uses: docker/login-action@v2
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -48,15 +48,10 @@ jobs:
            type=pep440,pattern={{raw}}

      - name: Build and push all platforms
        uses: docker/build-push-action@v3
        uses: docker/build-push-action@v2
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "${{ steps.set-tag.outputs.tags }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64

          # arm64 builds OOM without the git fetch setting. c.f.
          # https://github.com/rust-lang/cargo/issues/10583
          build-args: |
            CARGO_NET_GIT_FETCH_WITH_CLI=true
.github/workflows/docs.yaml (vendored, 4 lines changed)

@@ -17,7 +17,7 @@ jobs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
@@ -54,7 +54,7 @@ jobs:
          esac

          # finally, set the 'branch-version' var.
          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
          echo "::set-output name=branch-version::$branch"

      # Deploy to the target directory.
      - name: Deploy to gh pages
.github/workflows/latest_deps.yml (vendored, 55 lines changed)

@@ -5,7 +5,7 @@
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.
@@ -25,20 +25,13 @@ jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.2.0"
          poetry-version: "1.2.0b1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
@@ -59,15 +52,7 @@ jobs:
        postgres-version: "14"

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
@@ -76,7 +61,7 @@ jobs:
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v4
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
@@ -84,12 +69,6 @@ jobs:
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
@@ -133,15 +112,7 @@ jobs:
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
@@ -155,7 +126,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -182,8 +153,8 @@ jobs:
          database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

@@ -192,7 +163,7 @@ jobs:

      - run: |
          set -o pipefail
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

@@ -201,19 +172,19 @@ jobs:
  open-issue:
    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
    needs:
      # TODO: should mypy be included here? It feels more brittle than the others.
      # TODO: should mypy be included here? It feels more brittle than the other two.
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
.github/workflows/release-artifacts.yml (vendored, 112 lines changed)

@@ -11,12 +11,11 @@ on:

  # we do the full build on tags.
  tags: ["v*"]
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

@@ -25,10 +24,8 @@ jobs:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -36,7 +33,7 @@ jobs:
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "distros=$dists" >> "$GITHUB_OUTPUT"
          echo "::set-output name=distros::$dists"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}
@@ -52,18 +49,18 @@ jobs:

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v2
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@v3
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -71,9 +68,7 @@ jobs:
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'
        uses: actions/setup-python@v2

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
@@ -89,96 +84,14 @@ jobs:
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        with:
          name: debs
          path: debs/*

  build-wheels:
    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-20.04, macos-10.15]
        arch: [x86_64, aarch64]
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
        is_pr:
          - ${{ startsWith(github.ref, 'refs/pull/') }}

        exclude:
          # Don't build macos wheels on PR CI.
          - is_pr: true
            os: "macos-10.15"
          # Don't build aarch64 wheels on mac.
          - os: "macos-10.15"
            arch: aarch64
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
            arch: aarch64

    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0

      - name: Set up QEMU to emulate aarch64
        if: matrix.arch == 'aarch64'
        uses: docker/setup-qemu-action@v2
        with:
          platforms: arm64

      - name: Build aarch64 wheels
        if: matrix.arch == 'aarch64'
        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV

      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
        run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        env:
          # Skip testing for platforms which various libraries don't have wheels
          # for, and so need extra build deps.
          CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
          CARGO_NET_GIT_FETCH_WITH_CLI: true
          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI

      - uses: actions/upload-artifact@v3
        with:
          name: Wheel
          path: ./wheelhouse/*.whl

  build-sdist:
    name: Build sdist
    runs-on: ubuntu-latest
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - run: pip install build

      - name: Build sdist
        run: python -m build --sdist

      - uses: actions/upload-artifact@v3
        with:
          name: Sdist
          path: dist/*.tar.gz

    name: "Build pypi distribution files"
    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
@@ -186,12 +99,11 @@ jobs:
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-wheels
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@v3
        uses: actions/download-artifact@v2
      - name: Build a tarball for the debs
        run: tar -cvJf debs.tar.xz debs
      - name: Attach to release
.github/workflows/tests.yml (vendored, 320 lines changed)

@@ -4,46 +4,26 @@ on:
  push:
    branches: ["develop", "release-*"]
  pull_request:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
  # don't modify Rust code.
  changes:
    runs-on: ubuntu-latest
    outputs:
      rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
    steps:
      - uses: dorny/paths-filter@v2
        id: filter
        # We only check on PRs
        if: startsWith(github.ref, 'refs/pull/')
        with:
          filters: |
            rust:
              - 'rust/**'
              - 'Cargo.toml'

  check-sampleconfig:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: matrix-org/setup-python-poetry@v1
        with:
          extras: "all"
      - run: poetry run scripts-dev/generate_sample_config.sh --check
      - run: poetry run scripts-dev/config-lint.sh
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install .
      - run: scripts-dev/generate_sample_config.sh --check
      - run: scripts-dev/config-lint.sh

  check-schema-delta:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
      - run: scripts-dev/check_schema_delta.py --force-colors

@@ -55,19 +35,19 @@ jobs:
  lint-crlf:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

  lint-newsfile:
    if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
    if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: actions/setup-python@v4
      - uses: actions/setup-python@v2
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
        env:
@@ -76,108 +56,70 @@ jobs:
  lint-pydantic:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: matrix-org/setup-python-poetry@v1
        with:
          extras: "all"
      - run: poetry run scripts-dev/check_pydantic_models.py

  lint-clippy:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.58.1
          override: true
          components: clippy
      - uses: Swatinem/rust-cache@v2

      - run: cargo clippy

  lint-rustfmt:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.58.1
          override: true
          components: rustfmt
      - uses: Swatinem/rust-cache@v2

      - run: cargo fmt --check

  # Dummy step to gate other tests on without repeating the whole list
  linting-done:
    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
    needs:
      - lint
      - lint-crlf
      - lint-newsfile
      - lint-pydantic
      - check-sampleconfig
      - check-schema-delta
      - lint-clippy
      - lint-rustfmt
    needs: [lint, lint-crlf, lint-newsfile, lint-pydantic, check-sampleconfig, check-schema-delta]
    runs-on: ubuntu-latest
    steps:
      - run: "true"

  calculate-test-jobs:
  trial:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - id: get-matrix
        run: .ci/scripts/calculate_jobs.py
    outputs:
      trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
      sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}

  trial:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: calculate-test-jobs
    runs-on: ubuntu-latest
    strategy:
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        database: ["sqlite"]
        extras: ["all"]
        include:
          # Newest Python without optional deps
          - python-version: "3.10"
            extras: ""

          # Oldest Python with PostgreSQL
          - python-version: "3.7"
            database: "postgres"
            postgres-version: "10"
            extras: "all"

          # Newest Python with newest PostgreSQL
          - python-version: "3.10"
            database: "postgres"
            postgres-version: "14"
            extras: "all"

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.job.postgres-version }}
            postgres:${{ matrix.postgres-version }}
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.job.python-version }}
          extras: ${{ matrix.job.extras }}
          python-version: ${{ matrix.python-version }}
          extras: ${{ matrix.extras }}
      - name: Await PostgreSQL
        if: ${{ matrix.job.postgres-version }}
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: poetry run trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
@@ -198,54 +140,16 @@ jobs:
    # Note: sqlite only; no postgres
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-20.04
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
      - uses: actions/checkout@v2
      - name: Test with old deps
        uses: docker://ubuntu:focal # For old python and sqlite
        # Note: focal seems to be using 3.8, but the oldest is 3.7?
        # See https://github.com/matrix-org/synapse/issues/12343
        with:
          toolchain: 1.58.1
          override: true
      - uses: Swatinem/rust-cache@v2

      # There aren't wheels for some of the older deps, so we need to install
      # their build dependencies
      - run: |
          sudo apt-get -qq install build-essential libffi-dev python-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

      - uses: actions/setup-python@v4
        with:
          python-version: '3.7'

      # Calculating the old-deps actually takes a bunch of time, so we cache the
      # pyproject.toml / poetry.lock. We need to cache pyproject.toml as
      # otherwise the `poetry install` step will error due to the poetry.lock
      # file being outdated.
      #
      # This caches the output of `Prepare old deps`, which should generate the
      # same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
      - uses: actions/cache@v3
        id: cache-poetry-old-deps
        name: Cache poetry.lock
        with:
          path: |
            poetry.lock
            pyproject.toml
          key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
      - name: Prepare old deps
        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
        run: .ci/scripts/prepare_old_deps.sh

      # We only now install poetry so that `setup-python-poetry` caches the
      # right poetry.lock's dependencies.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: '3.7'
          extras: "all test"

      - run: poetry run trial -j2 tests
          workdir: /github/workspace
          entrypoint: .ci/scripts/test_old_deps.sh
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
@@ -271,7 +175,7 @@ jobs:
        extras: ["all"]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@v1
@@ -294,37 +198,50 @@ jobs:

  sytest:
    if: ${{ !failure() && !cancelled() }}
    needs: calculate-test-jobs
    needs: linting-done
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        POSTGRES: ${{ matrix.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
        WORKERS: ${{ matrix.workers && 1 }}
        REDIS: ${{ matrix.redis && 1 }}
        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres

          - sytest-tag: testing
            postgres: postgres

          - sytest-tag: focal
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: postgres
            workers: workers
            redis: redis

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.58.1
          override: true
      - uses: Swatinem/rust-cache@v2

      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
@@ -332,10 +249,10 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*
@@ -362,23 +279,20 @@ jobs:
          --health-retries 5

    steps:
      - uses: actions/checkout@v3
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_export_data_command.sh
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres

  portdb:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}
    strategy:
      matrix:
        include:
@@ -403,28 +317,13 @@ jobs:
          --health-retries 5

    steps:
      - uses: actions/checkout@v3
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh
        id: run_tester_script
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres
      - name: "Upload schema differences"
        uses: actions/upload-artifact@v3
        if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
        with:
          name: Schema dumps
          path: |
            unported.sql
            ported.sql
            schema_diff

  complement:
    if: "${{ !failure() && !cancelled() }}"
@@ -445,57 +344,34 @@ jobs:
          database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.58.1
          override: true
      - uses: Swatinem/rust-cache@v2

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - run: |
          set -o pipefail
          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
          POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

  cargo-test:
    if: ${{ needs.changes.outputs.rust == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.58.1
          override: true
      - uses: Swatinem/rust-cache@v2

      - run: cargo test

  # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - check-sampleconfig
      - lint
      - lint-crlf
      - lint-newsfile
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
      - cargo-test
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@v2
@@ -503,7 +379,5 @@ jobs:
        needs: ${{ toJSON(needs) }}

        # The newsfile lint may be skipped on non PR builds
        # Cargo test is skipped if there are no changes to Rust code
        skippable: |
        skippable:
          lint-newsfile
          cargo-test
.github/workflows/triage-incoming.yml (vendored, 28 lines)

@@ -1,28 +0,0 @@
name: Move new issues into the issue triage board

on:
  issues:
    types: [ opened ]

jobs:
  add_new_issues:
    name: Add new issues to the triage board
    runs-on: ubuntu-latest
    steps:
      - uses: octokit/graphql-action@v2.x
        id: add_to_project
        with:
          headers: '{"GraphQL-Features": "projects_next_graphql"}'
          query: |
            mutation add_to_project($projectid:ID!,$contentid:ID!) {
              addProjectV2ItemById(input: {projectId: $projectid contentId: $contentid}) {
                item {
                  id
                }
              }
            }
          projectid: ${{ env.PROJECT_ID }}
          contentid: ${{ github.event.issue.node_id }}
        env:
          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
.github/workflows/triage_labelled.yml (vendored, 44 lines)

@@ -1,44 +0,0 @@
name: Move labelled issues to correct projects

on:
  issues:
    types: [ labeled ]

jobs:
  move_needs_info:
    name: Move X-Needs-Info on the triage board
    runs-on: ubuntu-latest
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
    steps:
      - uses: octokit/graphql-action@v2.x
        id: add_to_project
        with:
          headers: '{"GraphQL-Features": "projects_next_graphql"}'
          query: |
            mutation {
              updateProjectV2ItemFieldValue(
                input: {
                  projectId: $projectid
                  itemId: $contentid
                  fieldId: $fieldid
                  value: {
                    singleSelectOptionId: "Todo"
                  }
                }
              ) {
                projectV2Item {
                  id
                }
              }
            }

          projectid: ${{ env.PROJECT_ID }}
          contentid: ${{ github.event.issue.node_id }}
          fieldid: ${{ env.FIELD_ID }}
          optionid: ${{ env.OPTION_ID }}
        env:
          PROJECT_ID: "PVT_kwDOAIB0Bs4AFDdZ"
          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
          FIELD_ID: "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4"
          OPTION_ID: "ba22e43c"
.github/workflows/twisted_trunk.yml (vendored, 40 lines changed)

@@ -15,15 +15,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
@@ -40,16 +32,8 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
@@ -81,15 +65,7 @@ jobs:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@v3

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
      - uses: Swatinem/rust-cache@v2

      - uses: actions/checkout@v2
      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        # but the sytest-synapse container expects it to be in /venv/.
@@ -112,7 +88,7 @@ jobs:
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v3
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -138,8 +114,8 @@ jobs:
          database: Postgres

    steps:
      - name: Run actions/checkout@v3 for synapse
        uses: actions/checkout@v3
      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

@@ -161,7 +137,7 @@ jobs:

      - run: |
          set -o pipefail
          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests

@@ -177,7 +153,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
10
.gitignore
vendored
10
.gitignore
vendored
@@ -15,9 +15,8 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want the poetry and cargo lockfile.
# We do want the poetry lockfile.
!poetry.lock
!Cargo.lock

# stuff that is likely to exist when you run a server locally
/*.db
@@ -61,10 +60,3 @@ book/
# complement
/complement-*
/master.tar.gz

# rust
/target/
/synapse/*.so

# Poetry will create a setup.py, which we don't want to include.
/setup.py

@@ -1 +0,0 @@
group_imports = "StdExternalCrate"
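For context, `group_imports = "StdExternalCrate"` (removed above) is a rustfmt configuration option, unstable at the time of writing, which makes `cargo fmt` sort `use` declarations into three blocks: standard library, external crates, then `crate`/`self`/`super` imports. A minimal sketch of the grouping it enforces; the specific module and crate names below are illustrative, not taken from this repository:

```rust
// With group_imports = "StdExternalCrate", rustfmt orders imports as:

// 1. Standard library imports first.
use std::collections::HashMap;
use std::sync::Arc;

// 2. External crate imports second (assumes these crates are dependencies).
use anyhow::Context;
use serde::Deserialize;

// 3. Imports from the current crate last (hypothetical module, for illustration).
use crate::config::Settings;
```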
547 CHANGES.md
@@ -1,522 +1,3 @@
Synapse 1.70.1 (2022-10-28)
===========================

This release fixes some regressions that were discovered in 1.70.0.

[#14300](https://github.com/matrix-org/synapse/issues/14300)
was previously reported to be a regression in 1.70.0 as well. However, we have
since concluded that it was limited to the reporter and thus have not needed
to include any fix for it in 1.70.1.


Bugfixes
--------

- Fix a bug introduced in Synapse 1.70.0rc1 where the access tokens sent to application services as headers were malformed. Application services which were obtaining access tokens from query parameters were not affected. ([\#14301](https://github.com/matrix-org/synapse/issues/14301))
- Fix room creation being rate limited too aggressively since Synapse v1.69.0. ([\#14314](https://github.com/matrix-org/synapse/issues/14314))


Synapse 1.70.0 (2022-10-26)
===========================

No significant changes since 1.70.0rc2.


Synapse 1.70.0rc2 (2022-10-25)
==============================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.70.0rc1 where the information returned from the `/threads` API could be stale when threaded events are redacted. ([\#14248](https://github.com/matrix-org/synapse/issues/14248))
- Fix a bug introduced in Synapse 1.70.0rc1 leading to broken outbound federation when using Python 3.7. ([\#14280](https://github.com/matrix-org/synapse/issues/14280))
- Fix a bug introduced in Synapse 1.70.0rc1 where edits to non-message events were aggregated by the homeserver. ([\#14283](https://github.com/matrix-org/synapse/issues/14283))


Internal Changes
----------------

- Build ABI3 wheels for CPython. ([\#14253](https://github.com/matrix-org/synapse/issues/14253))
- For the aarch64 architecture, only build wheels for CPython manylinux. ([\#14259](https://github.com/matrix-org/synapse/issues/14259))


Synapse 1.70.0rc1 (2022-10-19)
==============================

Features
--------

- Support for [MSC3856](https://github.com/matrix-org/matrix-spec-proposals/pull/3856): threads list API. ([\#13394](https://github.com/matrix-org/synapse/issues/13394), [\#14171](https://github.com/matrix-org/synapse/issues/14171), [\#14175](https://github.com/matrix-org/synapse/issues/14175))
- Support for thread-specific notifications & receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771) and [MSC3773](https://github.com/matrix-org/matrix-spec-proposals/pull/3773)). ([\#13776](https://github.com/matrix-org/synapse/issues/13776), [\#13824](https://github.com/matrix-org/synapse/issues/13824), [\#13877](https://github.com/matrix-org/synapse/issues/13877), [\#13878](https://github.com/matrix-org/synapse/issues/13878), [\#14050](https://github.com/matrix-org/synapse/issues/14050), [\#14140](https://github.com/matrix-org/synapse/issues/14140), [\#14159](https://github.com/matrix-org/synapse/issues/14159), [\#14163](https://github.com/matrix-org/synapse/issues/14163), [\#14174](https://github.com/matrix-org/synapse/issues/14174), [\#14222](https://github.com/matrix-org/synapse/issues/14222))
- Stop fetching missing `prev_events` after we already know their signature is invalid. ([\#13816](https://github.com/matrix-org/synapse/issues/13816))
- Send application service access tokens as a header (and query parameter). Implements [MSC2832](https://github.com/matrix-org/matrix-spec-proposals/pull/2832). ([\#13996](https://github.com/matrix-org/synapse/issues/13996))
- Ignore server ACL changes when generating pushes. Implements [MSC3786](https://github.com/matrix-org/matrix-spec-proposals/pull/3786). ([\#13997](https://github.com/matrix-org/synapse/issues/13997))
- Experimental support for redirecting to an implementation of a [MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886) HTTP rendezvous service. ([\#14018](https://github.com/matrix-org/synapse/issues/14018))
- The `/relations` endpoint can now be used on workers. ([\#14028](https://github.com/matrix-org/synapse/issues/14028))
- Advertise support for Matrix 1.3 and 1.4 on `/_matrix/client/versions`. ([\#14032](https://github.com/matrix-org/synapse/issues/14032), [\#14184](https://github.com/matrix-org/synapse/issues/14184))
- Improve validation of request bodies for the [Device Management](https://spec.matrix.org/v1.4/client-server-api/#device-management) and [MSC2697 Device Dehydration](https://github.com/matrix-org/matrix-spec-proposals/pull/2697) client-server API endpoints. ([\#14054](https://github.com/matrix-org/synapse/issues/14054))
- Experimental support for [MSC3874](https://github.com/matrix-org/matrix-spec-proposals/pull/3874): Filtering threads from the `/messages` endpoint. ([\#14148](https://github.com/matrix-org/synapse/issues/14148))
- Improve the validation of the following PUT endpoints: [`/directory/room/{roomAlias}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directoryroomroomalias), [`/directory/list/room/{roomId}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3directorylistroomroomid) and [`/directory/list/appservice/{networkId}/{roomId}`](https://spec.matrix.org/v1.4/application-service-api/#put_matrixclientv3directorylistappservicenetworkidroomid). ([\#14179](https://github.com/matrix-org/synapse/issues/14179))
- Build and publish binary wheels for `aarch64` platforms. ([\#14212](https://github.com/matrix-org/synapse/issues/14212))

Bugfixes
--------

- Prevent device names from appearing in device list updates in some situations when `allow_device_name_lookup_over_federation` is `false`. (This is not comprehensive: see [\#13114](https://github.com/matrix-org/synapse/issues/13114).) ([\#10015](https://github.com/matrix-org/synapse/issues/10015))
- Fix a long-standing bug where redactions were not being sent over federation if we did not have the original event. ([\#13813](https://github.com/matrix-org/synapse/issues/13813))
- Fix a long-standing bug where edits of non-`m.room.message` events would not be correctly bundled or have their new content applied. ([\#14034](https://github.com/matrix-org/synapse/issues/14034))
- Fix a bug introduced in Synapse 1.53.0 when querying `/publicRooms` with both a `room_type` filter and a `third_party_instance_id`. ([\#14053](https://github.com/matrix-org/synapse/issues/14053))
- Fix a bug introduced in Synapse 1.35.0 where errors parsing a `/send_join` or `/state` response would produce excessive, low-quality Sentry events. ([\#14065](https://github.com/matrix-org/synapse/issues/14065))
- Fix a long-standing bug where Synapse would error on the optional 'invite_room_state' field not being provided to [`PUT /_matrix/federation/v2/invite/{roomId}/{eventId}`](https://spec.matrix.org/v1.4/server-server-api/#put_matrixfederationv2inviteroomideventid). ([\#14083](https://github.com/matrix-org/synapse/issues/14083))
- Fix a bug where invalid oEmbed fields would cause the entire response to be discarded. Introduced in Synapse 1.18.0. ([\#14089](https://github.com/matrix-org/synapse/issues/14089))
- Fix a bug introduced in Synapse 1.37.0 in which an incorrect key name was used for sending and receiving room metadata when knocking on a room. ([\#14102](https://github.com/matrix-org/synapse/issues/14102))
- Fix a bug introduced in v1.69.0rc1 where the joined hosts for a given event were not being properly cached. ([\#14125](https://github.com/matrix-org/synapse/issues/14125))
- Fix a bug introduced in Synapse 1.30.0 where purging and rejoining a room without restarting in-between would result in a broken room. ([\#14161](https://github.com/matrix-org/synapse/issues/14161), [\#14164](https://github.com/matrix-org/synapse/issues/14164))
- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint returning potentially inaccurate closest events with `outliers` present. ([\#14215](https://github.com/matrix-org/synapse/issues/14215))


Updates to the Docker image
---------------------------

- Update the version of frozendict in Docker images and Debian packages from 2.3.3 to 2.3.4, which may fix memory leak problems. ([\#13955](https://github.com/matrix-org/synapse/issues/13955))
- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141))
- Prevent a class of database sharding errors when using `Dockerfile-workers` to spawn multiple instances of the same worker. Contributed by Jason Little. ([\#14165](https://github.com/matrix-org/synapse/issues/14165))
- Set `LD_PRELOAD` to use jemalloc memory allocator in Dockerfile-workers. ([\#14182](https://github.com/matrix-org/synapse/issues/14182))
- Fix pre-startup logging being lost when using the `Dockerfile-workers` image. ([\#14195](https://github.com/matrix-org/synapse/issues/14195))

Improved Documentation
----------------------

- Add sample worker files for `pusher` and `federation_sender`. ([\#14077](https://github.com/matrix-org/synapse/issues/14077))
- Improve the listener example on the metrics documentation. ([\#14078](https://github.com/matrix-org/synapse/issues/14078))
- Expand Google OpenID Connect example config to map email attribute. Contributed by @ptman. ([\#14081](https://github.com/matrix-org/synapse/issues/14081))
- The changelog entry ending in a full stop or exclamation mark is not optional. ([\#14087](https://github.com/matrix-org/synapse/issues/14087))
- Fix links to jemalloc documentation, which were broken in [#13491](https://github.com/matrix-org/synapse/pull/13491). ([\#14093](https://github.com/matrix-org/synapse/issues/14093))
- Remove unneeded `replication` listener in docker compose example. ([\#14107](https://github.com/matrix-org/synapse/issues/14107))
- Fix name of `alias_creation_rules` option in the config manual documentation. ([\#14124](https://github.com/matrix-org/synapse/issues/14124))
- Clarify comment on event contexts. ([\#14145](https://github.com/matrix-org/synapse/issues/14145))
- Fix dead link to the [Admin Registration API](https://matrix-org.github.io/synapse/latest/admin_api/register_api.html). ([\#14189](https://github.com/matrix-org/synapse/issues/14189))


Deprecations and Removals
-------------------------

- Remove the experimental implementation of [MSC3772](https://github.com/matrix-org/matrix-spec-proposals/pull/3772). ([\#14094](https://github.com/matrix-org/synapse/issues/14094))
- Remove the unstable identifier for [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#14106](https://github.com/matrix-org/synapse/issues/14106), [\#14146](https://github.com/matrix-org/synapse/issues/14146))

Internal Changes
----------------

- Optimise queries used to get a user's rooms during sync. Contributed by Nick @ Beeper (@fizzadar). ([\#13991](https://github.com/matrix-org/synapse/issues/13991))
- Update authlib from 0.15.5 to 1.1.0. ([\#14006](https://github.com/matrix-org/synapse/issues/14006))
- Make `parse_server_name` consistent in handling invalid server names. ([\#14007](https://github.com/matrix-org/synapse/issues/14007))
- Don't repeatedly wake up the same users for batched events. ([\#14033](https://github.com/matrix-org/synapse/issues/14033))
- Complement test image: capture logs from nginx. ([\#14063](https://github.com/matrix-org/synapse/issues/14063))
- Don't create noisy Sentry events when a requester drops connection to the metrics server mid-request. ([\#14072](https://github.com/matrix-org/synapse/issues/14072))
- Run the integration test suites with the asyncio reactor enabled in CI. ([\#14092](https://github.com/matrix-org/synapse/issues/14092))
- Add debug logs to figure out why an event was filtered out of the client response. ([\#14095](https://github.com/matrix-org/synapse/issues/14095))
- Indicate what endpoint came back with a JSON response we were unable to parse. ([\#14097](https://github.com/matrix-org/synapse/issues/14097))
- Break up calls to fetch rooms for many users. Contributed by Nick @ Beeper (@fizzadar). ([\#14109](https://github.com/matrix-org/synapse/issues/14109))
- Faster joins: prioritise the server we joined by when restarting a partial join resync. ([\#14126](https://github.com/matrix-org/synapse/issues/14126))
- Cache Rust build cache when building docker images. ([\#14130](https://github.com/matrix-org/synapse/issues/14130))
- Enable dependabot for Rust dependencies. ([\#14132](https://github.com/matrix-org/synapse/issues/14132))
- Bump typing-extensions from 4.1.1 to 4.4.0. ([\#14134](https://github.com/matrix-org/synapse/issues/14134))
- Use the `minimal` Rust profile when building Synapse. ([\#14141](https://github.com/matrix-org/synapse/issues/14141))
- Remove unused configuration code. ([\#14142](https://github.com/matrix-org/synapse/issues/14142))
- Prepare for the [`gotestfmt` repository move](https://github.com/GoTestTools/gotestfmt/discussions/46). ([\#14144](https://github.com/matrix-org/synapse/issues/14144))
- Invalidate rooms for user caches on replicated event, fix sync cache race in synapse workers. Contributed by Nick @ Beeper (@fizzadar). ([\#14155](https://github.com/matrix-org/synapse/issues/14155))
- Enable url previews when testing with complement. ([\#14198](https://github.com/matrix-org/synapse/issues/14198))
- When authenticating batched events, check for auth events in batch as well as DB. ([\#14214](https://github.com/matrix-org/synapse/issues/14214))
- Update CI config to avoid GitHub Actions deprecation warnings. ([\#14216](https://github.com/matrix-org/synapse/issues/14216), [\#14224](https://github.com/matrix-org/synapse/issues/14224))
- Update dependency requirements to allow building with poetry-core 1.3.2. ([\#14217](https://github.com/matrix-org/synapse/issues/14217))
- Rename the `cache_memory` extra to `cache-memory`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14221](https://github.com/matrix-org/synapse/issues/14221))
- Specify dev-dependencies using lower bounds, to reduce the likelihood of a dependabot merge conflict. The lockfile continues to pin to specific versions. ([\#14227](https://github.com/matrix-org/synapse/issues/14227))


Synapse 1.69.0 (2022-10-17)
===========================

Please note that legacy Prometheus metric names are now deprecated and will be removed in Synapse 1.73.0.
Server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names.
See the [upgrade notes](https://matrix-org.github.io/synapse/v1.69/upgrade.html#upgrading-to-v1690) for more details.


No significant changes since 1.69.0rc4.


Synapse 1.69.0rc4 (2022-10-14)
==============================

Bugfixes
--------

- Fix poor performance of the `event_push_backfill_thread_id` background update, which was introduced in Synapse 1.68.0rc1. ([\#14172](https://github.com/matrix-org/synapse/issues/14172), [\#14181](https://github.com/matrix-org/synapse/issues/14181))


Updates to the Docker image
---------------------------

- Fix docker build OOMing in CI for arm64 builds. ([\#14173](https://github.com/matrix-org/synapse/issues/14173))


Synapse 1.69.0rc3 (2022-10-12)
==============================

Bugfixes
--------

- Fix an issue with Docker images causing the Rust dependencies to not be pinned correctly. Introduced in v1.68.0. ([\#14129](https://github.com/matrix-org/synapse/issues/14129))
- Fix a bug introduced in Synapse 1.69.0rc1 which would cause registration replication requests to fail if the worker sending the request is not running Synapse 1.69. ([\#14135](https://github.com/matrix-org/synapse/issues/14135))
- Fix error in background update when rotating existing notifications. Introduced in v1.69.0rc2. ([\#14138](https://github.com/matrix-org/synapse/issues/14138))


Internal Changes
----------------

- Rename the `url_preview` extra to `url-preview`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14085](https://github.com/matrix-org/synapse/issues/14085))


Synapse 1.69.0rc2 (2022-10-06)
==============================

Deprecations and Removals
-------------------------

- Deprecate the `generate_short_term_login_token` method in favor of an async `create_login_token` method in the Module API. ([\#13842](https://github.com/matrix-org/synapse/issues/13842))


Internal Changes
----------------

- Ensure Synapse v1.69 works with upcoming database changes in v1.70. ([\#14045](https://github.com/matrix-org/synapse/issues/14045))
- Fix a bug introduced in Synapse v1.68.0 where messages could not be sent in rooms with non-integer `notifications` power level. ([\#14073](https://github.com/matrix-org/synapse/issues/14073))
- Temporarily pin build-system requirements to work around an incompatibility with poetry-core 1.3.0. This will be reverted before the v1.69.0 release proper, see [\#14079](https://github.com/matrix-org/synapse/issues/14079). ([\#14080](https://github.com/matrix-org/synapse/issues/14080))


Synapse 1.69.0rc1 (2022-10-04)
==============================

Features
--------

- Allow application services to set the `origin_server_ts` of a state event by providing the query parameter `ts` in [`PUT /_matrix/client/r0/rooms/{roomId}/state/{eventType}/{stateKey}`](https://spec.matrix.org/v1.4/client-server-api/#put_matrixclientv3roomsroomidstateeventtypestatekey), per [MSC3316](https://github.com/matrix-org/matrix-doc/pull/3316). Contributed by @lukasdenk. ([\#11866](https://github.com/matrix-org/synapse/issues/11866))
- Allow server admins to require a manual approval process before new accounts can be used (using [MSC3866](https://github.com/matrix-org/matrix-spec-proposals/pull/3866)). ([\#13556](https://github.com/matrix-org/synapse/issues/13556))
- Exponentially backoff from backfilling the same event over and over. ([\#13635](https://github.com/matrix-org/synapse/issues/13635), [\#13936](https://github.com/matrix-org/synapse/issues/13936))
- Add cache invalidation across workers to module API. ([\#13667](https://github.com/matrix-org/synapse/issues/13667), [\#13947](https://github.com/matrix-org/synapse/issues/13947))
- Experimental implementation of [MSC3882](https://github.com/matrix-org/matrix-spec-proposals/pull/3882) to allow an existing device/session to generate a login token for use on a new device/session. ([\#13722](https://github.com/matrix-org/synapse/issues/13722), [\#13868](https://github.com/matrix-org/synapse/issues/13868))
- Experimental support for thread-specific receipts ([MSC3771](https://github.com/matrix-org/matrix-spec-proposals/pull/3771)). ([\#13782](https://github.com/matrix-org/synapse/issues/13782), [\#13893](https://github.com/matrix-org/synapse/issues/13893), [\#13932](https://github.com/matrix-org/synapse/issues/13932), [\#13937](https://github.com/matrix-org/synapse/issues/13937), [\#13939](https://github.com/matrix-org/synapse/issues/13939))
- Add experimental support for [MSC3881: Remotely toggle push notifications for another client](https://github.com/matrix-org/matrix-spec-proposals/pull/3881). ([\#13799](https://github.com/matrix-org/synapse/issues/13799), [\#13831](https://github.com/matrix-org/synapse/issues/13831), [\#13860](https://github.com/matrix-org/synapse/issues/13860))
- Keep track when an event pulled over federation fails its signature check so we can intelligently back-off in the future. ([\#13815](https://github.com/matrix-org/synapse/issues/13815))
- Improve validation for the unspecced, internal-only `_matrix/client/unstable/add_threepid/msisdn/submit_token` endpoint. ([\#13832](https://github.com/matrix-org/synapse/issues/13832))
- Faster remote room joins: record _when_ we first partial-join to a room. ([\#13892](https://github.com/matrix-org/synapse/issues/13892))
- Support a `dir` parameter on the `/relations` endpoint per [MSC3715](https://github.com/matrix-org/matrix-doc/pull/3715). ([\#13920](https://github.com/matrix-org/synapse/issues/13920))
- Ask mail servers receiving emails from Synapse to not send automatic replies (e.g. out-of-office responses). ([\#13957](https://github.com/matrix-org/synapse/issues/13957))

Bugfixes
--------

- Send push notifications for invites received over federation. ([\#13719](https://github.com/matrix-org/synapse/issues/13719), [\#14014](https://github.com/matrix-org/synapse/issues/14014))
- Fix a long-standing bug where typing events would be accepted from remote servers not present in a room. Also fix a bug where incoming typing events would cause other incoming events to get stuck during a fast join. ([\#13830](https://github.com/matrix-org/synapse/issues/13830))
- Fix a bug introduced in Synapse v1.53.0 where the experimental implementation of [MSC3715](https://github.com/matrix-org/matrix-spec-proposals/pull/3715) would give incorrect results when paginating forward. ([\#13840](https://github.com/matrix-org/synapse/issues/13840))
- Fix access token leak to logs from proxy agent. ([\#13855](https://github.com/matrix-org/synapse/issues/13855))
- Fix the `have_seen_event` cache not being invalidated after we persist an event, which caused inefficiencies such as extra `/state` federation calls. ([\#13863](https://github.com/matrix-org/synapse/issues/13863))
- Faster room joins: Fix a bug introduced in 1.66.0 where an error would be logged when syncing after joining a room. ([\#13872](https://github.com/matrix-org/synapse/issues/13872))
- Fix a bug introduced in 1.66.0 where some required fields in the pushrules sent to clients were not present anymore. Contributed by Nico. ([\#13904](https://github.com/matrix-org/synapse/issues/13904))
- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909))
- Fix a long-standing bug where device updates could cause delays sending out to-device messages over federation. ([\#13922](https://github.com/matrix-org/synapse/issues/13922))
- Fix a bug introduced in v1.68.0 where Synapse would require `setuptools_rust` at runtime, even though the package is only required at build time. ([\#13952](https://github.com/matrix-org/synapse/issues/13952))
- Fix a long-standing bug where `POST /_matrix/client/v3/keys/query` requests could result in excessively large SQL queries. ([\#13956](https://github.com/matrix-org/synapse/issues/13956))
- Fix a performance regression in the `get_users_in_room` database query. Introduced in v1.67.0. ([\#13972](https://github.com/matrix-org/synapse/issues/13972))
- Fix a bug introduced in v1.68.0 where the Rust extension wasn't built in `release` mode when using `poetry install`. ([\#14009](https://github.com/matrix-org/synapse/issues/14009))
- Do not return an unspecified `original_event` field when using the stable `/relations` endpoint. Introduced in Synapse v1.57.0. ([\#14025](https://github.com/matrix-org/synapse/issues/14025))
- Correctly handle a race with device lists when a remote user leaves during a partial join. ([\#13885](https://github.com/matrix-org/synapse/issues/13885))
- Correctly handle sending local device list updates to remote servers during a partial join. ([\#13934](https://github.com/matrix-org/synapse/issues/13934))

Improved Documentation
----------------------

- Add `worker_main_http_uri` for the worker generator bash script. ([\#13772](https://github.com/matrix-org/synapse/issues/13772))
- Update URL for the NixOS module for Synapse. ([\#13818](https://github.com/matrix-org/synapse/issues/13818))
- Fix a mistake in sso_mapping_providers.md: `map_user_attributes` is expected to return `display_name`, not `displayname`. ([\#13836](https://github.com/matrix-org/synapse/issues/13836))
- Fix a cross-link from the registration admin API to the `registration_shared_secret` configuration documentation. ([\#13870](https://github.com/matrix-org/synapse/issues/13870))
- Update the man page for the `hash_password` script to correct the default number of bcrypt rounds performed. ([\#13911](https://github.com/matrix-org/synapse/issues/13911), [\#13930](https://github.com/matrix-org/synapse/issues/13930))
- Emphasize the right reasons when to use `(room_id, event_id)` in a database schema. ([\#13915](https://github.com/matrix-org/synapse/issues/13915))
- Add instruction to contributing guide for running unit tests in parallel. Contributed by @ashfame. ([\#13928](https://github.com/matrix-org/synapse/issues/13928))
- Clarify that the `auto_join_rooms` config option can also be used with Space aliases. ([\#13931](https://github.com/matrix-org/synapse/issues/13931))
- Add some cross references to worker documentation. ([\#13974](https://github.com/matrix-org/synapse/issues/13974))
- Linkify urls in config documentation. ([\#14003](https://github.com/matrix-org/synapse/issues/14003))


Deprecations and Removals
-------------------------

- Remove the `complete_sso_login` method from the Module API which was deprecated in Synapse 1.13.0. ([\#13843](https://github.com/matrix-org/synapse/issues/13843))
- Announce that legacy metric names are deprecated, will be turned off by default in Synapse v1.71.0 and removed altogether in Synapse v1.73.0. See the upgrade notes for more information. ([\#14024](https://github.com/matrix-org/synapse/issues/14024))

Internal Changes
----------------

- Speed up creation of DM rooms. ([\#13487](https://github.com/matrix-org/synapse/issues/13487), [\#13800](https://github.com/matrix-org/synapse/issues/13800))
- Port push rules to using Rust. ([\#13768](https://github.com/matrix-org/synapse/issues/13768), [\#13838](https://github.com/matrix-org/synapse/issues/13838), [\#13889](https://github.com/matrix-org/synapse/issues/13889))
- Optimise get rooms for user calls. Contributed by Nick @ Beeper (@fizzadar). ([\#13787](https://github.com/matrix-org/synapse/issues/13787))
- Update the script which makes full schema dumps. ([\#13792](https://github.com/matrix-org/synapse/issues/13792))
- Use shared methods for cache invalidation when persisting events, remove duplicate codepaths. Contributed by Nick @ Beeper (@fizzadar). ([\#13796](https://github.com/matrix-org/synapse/issues/13796))
- Improve the `synapse.api.auth.Auth` mock used in unit tests. ([\#13809](https://github.com/matrix-org/synapse/issues/13809))
- Faster Remote Room Joins: tell remote homeservers that we are unable to authorise them if they query a room which has partial state on our server. ([\#13823](https://github.com/matrix-org/synapse/issues/13823))
- Carry IdP Session IDs through user-mapping sessions. ([\#13839](https://github.com/matrix-org/synapse/issues/13839))
- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850))
- Raise issue if complement fails with latest deps. ([\#13859](https://github.com/matrix-org/synapse/issues/13859))
- Correct the comments in the complement dockerfile. ([\#13867](https://github.com/matrix-org/synapse/issues/13867))
- Create a new snapshot of the database schema. ([\#13873](https://github.com/matrix-org/synapse/issues/13873))
- Faster room joins: Send device list updates to most servers in rooms with partial state. ([\#13874](https://github.com/matrix-org/synapse/issues/13874), [\#14013](https://github.com/matrix-org/synapse/issues/14013))
- Add comments to the Prometheus recording rules to make it clear which set of rules you need for Grafana or Prometheus Console. ([\#13876](https://github.com/matrix-org/synapse/issues/13876))
- Only pull relevant backfill points from the database based on the current depth and limit (instead of all) every time we want to `/backfill`. ([\#13879](https://github.com/matrix-org/synapse/issues/13879))
- Faster room joins: Avoid waiting for full state when processing `/keys/changes` requests. ([\#13888](https://github.com/matrix-org/synapse/issues/13888))
- Improve backfill robustness by trying more servers when we get a `4xx` error back. ([\#13890](https://github.com/matrix-org/synapse/issues/13890))
- Fix mypy errors with canonicaljson 1.6.3. ([\#13905](https://github.com/matrix-org/synapse/issues/13905))
- Faster remote room joins: correctly handle remote device list updates during a partial join. ([\#13913](https://github.com/matrix-org/synapse/issues/13913))
- Complement image: propagate SIGTERM to all workers. ([\#13914](https://github.com/matrix-org/synapse/issues/13914))
- Update an inaccurate comment in Synapse's upsert database helper. ([\#13924](https://github.com/matrix-org/synapse/issues/13924))
- Update mypy (0.950 -> 0.981) and mypy-zope (0.3.7 -> 0.3.11). ([\#13925](https://github.com/matrix-org/synapse/issues/13925), [\#13993](https://github.com/matrix-org/synapse/issues/13993))
- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating users to copy over during a room upgrade. ([\#13960](https://github.com/matrix-org/synapse/issues/13960))
- Refactor language in user directory `_track_user_joined_room` code to make it more clear that we use both local and remote users. ([\#13966](https://github.com/matrix-org/synapse/issues/13966))
- Revert catch-all exceptions being recorded as event pull attempt failures (only handle what we know about). ([\#13969](https://github.com/matrix-org/synapse/issues/13969))
- Speed up calculating push actions in large rooms. ([\#13973](https://github.com/matrix-org/synapse/issues/13973), [\#13992](https://github.com/matrix-org/synapse/issues/13992))
- Enable update notifications from GitHub's dependabot. ([\#13976](https://github.com/matrix-org/synapse/issues/13976))
- Prototype a workflow to automatically add changelogs to dependabot PRs. ([\#13998](https://github.com/matrix-org/synapse/issues/13998), [\#14011](https://github.com/matrix-org/synapse/issues/14011), [\#14017](https://github.com/matrix-org/synapse/issues/14017), [\#14021](https://github.com/matrix-org/synapse/issues/14021), [\#14027](https://github.com/matrix-org/synapse/issues/14027))
- Fix type annotations to be compatible with new annotations in development versions of twisted. ([\#14012](https://github.com/matrix-org/synapse/issues/14012))
- Clear out stale entries in `event_push_actions_staging` table. ([\#14020](https://github.com/matrix-org/synapse/issues/14020))
- Bump versions of GitHub actions. ([\#13978](https://github.com/matrix-org/synapse/issues/13978), [\#13979](https://github.com/matrix-org/synapse/issues/13979), [\#13980](https://github.com/matrix-org/synapse/issues/13980), [\#13982](https://github.com/matrix-org/synapse/issues/13982), [\#14015](https://github.com/matrix-org/synapse/issues/14015), [\#14019](https://github.com/matrix-org/synapse/issues/14019), [\#14022](https://github.com/matrix-org/synapse/issues/14022), [\#14023](https://github.com/matrix-org/synapse/issues/14023))


Synapse 1.68.0 (2022-09-27)
===========================

Please note that Synapse will now refuse to start if configured to use a version of SQLite older than 3.27.

In addition, please note that installing Synapse from a source checkout now requires a recent Rust compiler.
Those using packages will not be affected. On most platforms, installing with `pip install matrix-synapse` will also be unaffected.
See the [upgrade notes](https://matrix-org.github.io/synapse/v1.68/upgrade.html#upgrading-to-v1680).

Bugfixes
--------

- Fix packaging to include `Cargo.lock` in `sdist`. ([\#13909](https://github.com/matrix-org/synapse/issues/13909))


Synapse 1.68.0rc2 (2022-09-23)
==============================

Bugfixes
--------

- Fix building from packaged sdist. Broken in v1.68.0rc1. ([\#13866](https://github.com/matrix-org/synapse/issues/13866))


Internal Changes
----------------

- Fix the release script not publishing binary wheels. ([\#13850](https://github.com/matrix-org/synapse/issues/13850))
- Lower minimum supported rustc version to 1.58.1. ([\#13857](https://github.com/matrix-org/synapse/issues/13857))
- Lock Rust dependencies' versions. ([\#13858](https://github.com/matrix-org/synapse/issues/13858))


Synapse 1.68.0rc1 (2022-09-20)
==============================

Features
--------

- Keep track of when we fail to process a pulled event over federation so we can intelligently back off in the future. ([\#13589](https://github.com/matrix-org/synapse/issues/13589), [\#13814](https://github.com/matrix-org/synapse/issues/13814))
- Add an [admin API endpoint to fetch messages within a particular window of time](https://matrix-org.github.io/synapse/v1.68/admin_api/rooms.html#room-messages-api). ([\#13672](https://github.com/matrix-org/synapse/issues/13672))
- Add an [admin API endpoint to find a user based on their external ID in an auth provider](https://matrix-org.github.io/synapse/v1.68/admin_api/user_admin_api.html#find-a-user-based-on-their-id-in-an-auth-provider). ([\#13810](https://github.com/matrix-org/synapse/issues/13810))
- Cancel the processing of key query requests when they time out. ([\#13680](https://github.com/matrix-org/synapse/issues/13680))
- Improve validation of request bodies for the following client-server API endpoints: [`/account/3pid/msisdn/requestToken`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidmsisdnrequesttoken), [`/org.matrix.msc3720/account_status`](https://github.com/matrix-org/matrix-spec-proposals/blob/babolivier/user_status/proposals/3720-account-status.md#post-_matrixclientv1account_status), [`/account/3pid/add`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidadd), [`/account/3pid/bind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidbind), [`/account/3pid/delete`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3piddelete) and [`/account/3pid/unbind`](https://spec.matrix.org/v1.3/client-server-api/#post_matrixclientv3account3pidunbind). ([\#13687](https://github.com/matrix-org/synapse/issues/13687), [\#13736](https://github.com/matrix-org/synapse/issues/13736))
- Document the timestamp when a user accepts the consent, if [consent tracking](https://matrix-org.github.io/synapse/latest/consent_tracking.html) is used. ([\#13741](https://github.com/matrix-org/synapse/issues/13741))
- Add a `listeners[x].request_id_header` configuration option to specify which request header to extract and use as the request ID in order to correlate requests from a reverse proxy. ([\#13801](https://github.com/matrix-org/synapse/issues/13801))


Bugfixes
--------

- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`). ([\#13506](https://github.com/matrix-org/synapse/issues/13506))
- Fix a long-standing bug where previously rejected events could end up in room state because they pass auth checks given the current state of the room. ([\#13723](https://github.com/matrix-org/synapse/issues/13723))
- Fix a long-standing bug where Synapse fails to start if a signing key file contains an empty line. ([\#13738](https://github.com/matrix-org/synapse/issues/13738))
- Fix a long-standing bug where Synapse would fail to handle malformed user IDs or room aliases gracefully in certain cases. ([\#13746](https://github.com/matrix-org/synapse/issues/13746))
- Fix a long-standing bug where device lists would remain cached when remote users left and rejoined the last room shared with the local homeserver. ([\#13749](https://github.com/matrix-org/synapse/issues/13749), [\#13826](https://github.com/matrix-org/synapse/issues/13826))
- Fix a long-standing bug that could cause stale caches in some rare cases on the first startup of Synapse with replication. ([\#13766](https://github.com/matrix-org/synapse/issues/13766))
- Fix a long-standing spec compliance bug where Synapse would accept a trailing slash on the end of `/get_missing_events` federation requests. ([\#13789](https://github.com/matrix-org/synapse/issues/13789))
- Delete associated data from `event_failed_pull_attempts`, `insertion_events` and `insertion_event_extremities` when purging the room. ([\#13825](https://github.com/matrix-org/synapse/issues/13825))

Improved Documentation
----------------------

- Note that `libpq` is required on ARM-based Macs. ([\#13480](https://github.com/matrix-org/synapse/issues/13480))
- Fix a mistake in the config manual introduced in Synapse 1.22.0: the `event_cache_size` _is_ scaled by `caches.global_factor`. ([\#13726](https://github.com/matrix-org/synapse/issues/13726))
- Fix a typo in the documentation for the login ratelimiting configuration. ([\#13727](https://github.com/matrix-org/synapse/issues/13727))
- Define Synapse's compatibility policy for SQLite versions. ([\#13728](https://github.com/matrix-org/synapse/issues/13728))
- Add docs for the common fix of deleting the `matrix_synapse.egg-info/` directory for fixing Python dependency problems. ([\#13785](https://github.com/matrix-org/synapse/issues/13785))
- Update request log format documentation to mention the format used when the authenticated user is controlling another user. ([\#13794](https://github.com/matrix-org/synapse/issues/13794))


Deprecations and Removals
-------------------------

- Synapse will now refuse to start if configured to use SQLite < 3.27. ([\#13760](https://github.com/matrix-org/synapse/issues/13760))
- Don't include redundant `prev_state` in new events. Contributed by Denis Kariakin (@dakariakin). ([\#13791](https://github.com/matrix-org/synapse/issues/13791))

Internal Changes
----------------

- Add a stub Rust crate. ([\#12595](https://github.com/matrix-org/synapse/issues/12595), [\#13734](https://github.com/matrix-org/synapse/issues/13734), [\#13735](https://github.com/matrix-org/synapse/issues/13735), [\#13743](https://github.com/matrix-org/synapse/issues/13743), [\#13763](https://github.com/matrix-org/synapse/issues/13763), [\#13769](https://github.com/matrix-org/synapse/issues/13769), [\#13778](https://github.com/matrix-org/synapse/issues/13778))
- Bump the minimum dependency of `matrix_common` to 1.3.0 to make use of the `MXCUri` class. Use `MXCUri` to simplify media retention test code. ([\#13162](https://github.com/matrix-org/synapse/issues/13162))
- Add and populate the `event_stream_ordering` column on the `receipts` table for future optimisation of push action processing. Contributed by Nick @ Beeper (@fizzadar). ([\#13703](https://github.com/matrix-org/synapse/issues/13703))
- Rename the `EventFormatVersions` enum values so that they line up with room version numbers. ([\#13706](https://github.com/matrix-org/synapse/issues/13706))
- Update trial old deps CI to use Poetry 1.2.0. ([\#13707](https://github.com/matrix-org/synapse/issues/13707), [\#13725](https://github.com/matrix-org/synapse/issues/13725))
- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13714](https://github.com/matrix-org/synapse/issues/13714), [\#13717](https://github.com/matrix-org/synapse/issues/13717), [\#13718](https://github.com/matrix-org/synapse/issues/13718))
- Fix typechecking with latest types-jsonschema. ([\#13724](https://github.com/matrix-org/synapse/issues/13724))
- Strip number suffix from instance name to consolidate services that traces are spread over. ([\#13729](https://github.com/matrix-org/synapse/issues/13729))
- Instrument `get_metadata_for_events` for understandable traces in Jaeger. ([\#13730](https://github.com/matrix-org/synapse/issues/13730))
- Remove old queries to join room memberships to current state events. Contributed by Nick @ Beeper (@fizzadar). ([\#13745](https://github.com/matrix-org/synapse/issues/13745))
- Avoid raising an error due to malformed user IDs in `get_current_hosts_in_room`. Malformed user IDs cannot currently join a room, so this error would not be hit. ([\#13748](https://github.com/matrix-org/synapse/issues/13748))
- Update the docstrings for `get_users_in_room` and `get_current_hosts_in_room` to explain the impact of partial state. ([\#13750](https://github.com/matrix-org/synapse/issues/13750))
- Use an additional database query when persisting receipts. ([\#13752](https://github.com/matrix-org/synapse/issues/13752))
- Preparatory work for storing thread IDs for notifications and receipts. ([\#13753](https://github.com/matrix-org/synapse/issues/13753))
- Re-type hint some collections as read-only. ([\#13754](https://github.com/matrix-org/synapse/issues/13754))
- Remove unused Prometheus recording rules from `synapse-v2.rules` and add comments describing where the rest are used. ([\#13756](https://github.com/matrix-org/synapse/issues/13756))
- Add a check for editable installs if the Rust library needs rebuilding. ([\#13759](https://github.com/matrix-org/synapse/issues/13759))
- Tag traces with the instance name to be able to easily jump into the right logs and filter traces by instance. ([\#13761](https://github.com/matrix-org/synapse/issues/13761))
- Concurrently fetch room push actions when calculating badge counts. Contributed by Nick @ Beeper (@fizzadar). ([\#13765](https://github.com/matrix-org/synapse/issues/13765))
- Update the script which makes full schema dumps. ([\#13770](https://github.com/matrix-org/synapse/issues/13770))
- Deduplicate `is_server_notices_room`. ([\#13780](https://github.com/matrix-org/synapse/issues/13780))
- Simplify the dependency DAG in the tests workflow. ([\#13784](https://github.com/matrix-org/synapse/issues/13784))
- Remove an old, incorrect migration file. ([\#13788](https://github.com/matrix-org/synapse/issues/13788))
- Remove unused method in `synapse.api.auth.Auth`. ([\#13795](https://github.com/matrix-org/synapse/issues/13795))
- Fix a memory leak when running the unit tests. ([\#13798](https://github.com/matrix-org/synapse/issues/13798))
- Use partial indices on SQLite. ([\#13802](https://github.com/matrix-org/synapse/issues/13802))
- Check that portdb generates the same postgres schema as that in the source tree. ([\#13808](https://github.com/matrix-org/synapse/issues/13808))
- Fix Docker build when Rust .so has been built locally first. ([\#13811](https://github.com/matrix-org/synapse/issues/13811))
- Complement: Initialise the Postgres database directly inside the target image instead of the base Postgres image to fix building using Buildah. ([\#13819](https://github.com/matrix-org/synapse/issues/13819))
- Support providing an index predicate clause when doing upserts. ([\#13822](https://github.com/matrix-org/synapse/issues/13822))
- Minor speedups to linting in CI. ([\#13827](https://github.com/matrix-org/synapse/issues/13827))


Synapse 1.67.0 (2022-09-13)
===========================

This release removes support for the deprecated direct TCP replication
configuration for workers. Server admins should use Redis instead. See the [upgrade
notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).

The minimum version of `poetry` supported for managing source checkouts is now
1.2.0.

**Notice:** from the next major release (1.68.0) installing Synapse from a source
checkout will require a recent Rust compiler. Those using packages or
`pip install matrix-synapse` will not be affected. See the [upgrade
notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670).

**Notice:** from the next major release (1.68.0), running Synapse with a SQLite
database will require SQLite version 3.27.0 or higher. (The [current minimum
version is SQLite 3.22.0](https://github.com/matrix-org/synapse/blob/release-v1.67/synapse/storage/engines/sqlite.py#L69-L78).)
See [#12983](https://github.com/matrix-org/synapse/issues/12983) and the [upgrade notes](https://matrix-org.github.io/synapse/v1.67/upgrade.html#upgrading-to-v1670) for more details.


No significant changes since 1.67.0rc1.


Synapse 1.67.0rc1 (2022-09-06)
==============================

Features
--------

- Support setting the registration shared secret in a file, via a new `registration_shared_secret_path` configuration option. ([\#13614](https://github.com/matrix-org/synapse/issues/13614))
- Change the default startup behaviour so that any missing "additional" configuration files (signing key, etc) are generated automatically. ([\#13615](https://github.com/matrix-org/synapse/issues/13615))
- Improve performance of sending messages in rooms with thousands of local users. ([\#13634](https://github.com/matrix-org/synapse/issues/13634))

Bugfixes
--------

- Fix a bug introduced in Synapse 1.13 where the [List Rooms admin API](https://matrix-org.github.io/synapse/develop/admin_api/rooms.html#list-room-api) would return integers instead of booleans for the `federatable` and `public` fields when using a SQLite database. ([\#13509](https://github.com/matrix-org/synapse/issues/13509))
- Fix a bug where users could not `/forget` rooms after the last member had left the room. ([\#13546](https://github.com/matrix-org/synapse/issues/13546))
- Faster Room Joins: fix `/make_knock` blocking indefinitely when the room in question is a partial-stated room. ([\#13583](https://github.com/matrix-org/synapse/issues/13583))
- Fix loading the current stream position behind the actual position. ([\#13585](https://github.com/matrix-org/synapse/issues/13585))
- Fix a longstanding bug in `register_new_matrix_user` which meant it was always necessary to explicitly give a server URL. ([\#13616](https://github.com/matrix-org/synapse/issues/13616))
- Fix the running of [MSC1763](https://github.com/matrix-org/matrix-spec-proposals/pull/1763) retention purge_jobs in deployments with background jobs running on a worker by forcing them back onto the main worker. Contributed by Brad @ Beeper. ([\#13632](https://github.com/matrix-org/synapse/issues/13632))
- Fix a long-standing bug that downloaded media for URL previews was not deleted while database background updates were running. ([\#13657](https://github.com/matrix-org/synapse/issues/13657))
- Fix [MSC3030](https://github.com/matrix-org/matrix-spec-proposals/pull/3030) `/timestamp_to_event` endpoint to return the correct next event when the events have the same timestamp. ([\#13658](https://github.com/matrix-org/synapse/issues/13658))
- Fix a bug where we would wedge media plugins if clients disconnect early. Introduced in v1.22.0. ([\#13660](https://github.com/matrix-org/synapse/issues/13660))
- Fix a long-standing bug which meant that keys for unwhitelisted servers were not returned by `/_matrix/key/v2/query`. ([\#13683](https://github.com/matrix-org/synapse/issues/13683))
- Fix a bug introduced in Synapse 1.20.0 that would cause the unstable unread counts from [MSC2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654) to be calculated even if the feature is disabled. ([\#13694](https://github.com/matrix-org/synapse/issues/13694))


Updates to the Docker image
---------------------------

- Update docker image to use a stable version of poetry. ([\#13688](https://github.com/matrix-org/synapse/issues/13688))

Improved Documentation
----------------------

- Improve the description of the ["chain cover index"](https://matrix-org.github.io/synapse/latest/auth_chain_difference_algorithm.html) used internally by Synapse. ([\#13602](https://github.com/matrix-org/synapse/issues/13602))
- Document how ["monthly active users"](https://matrix-org.github.io/synapse/latest/usage/administration/monthly_active_users.html) is calculated and used. ([\#13617](https://github.com/matrix-org/synapse/issues/13617))
- Improve documentation around user registration. ([\#13640](https://github.com/matrix-org/synapse/issues/13640))
- Remove documentation of legacy `frontend_proxy` worker app. ([\#13645](https://github.com/matrix-org/synapse/issues/13645))
- Clarify documentation that HTTP replication traffic can be protected with a shared secret. ([\#13656](https://github.com/matrix-org/synapse/issues/13656))
- Remove unintentional colons from [config manual](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html) headers. ([\#13665](https://github.com/matrix-org/synapse/issues/13665))
- Update docs to make enabling metrics more clear. ([\#13678](https://github.com/matrix-org/synapse/issues/13678))
- Clarify `(room_id, event_id)` global uniqueness and how we should scope our database schemas. ([\#13701](https://github.com/matrix-org/synapse/issues/13701))


Deprecations and Removals
-------------------------

- Drop support for calling `/_matrix/client/v3/rooms/{roomId}/invite` without an `id_access_token`, which was not permitted by the spec. Contributed by @Vetchu. ([\#13241](https://github.com/matrix-org/synapse/issues/13241))
- Remove redundant `_get_joined_users_from_context` cache. Contributed by Nick @ Beeper (@fizzadar). ([\#13569](https://github.com/matrix-org/synapse/issues/13569))
- Remove the ability to use direct TCP replication with workers. Direct TCP replication was deprecated in Synapse 1.18.0. Workers now require using Redis. ([\#13647](https://github.com/matrix-org/synapse/issues/13647))
- Remove support for unstable [private read receipts](https://github.com/matrix-org/matrix-spec-proposals/pull/2285). ([\#13653](https://github.com/matrix-org/synapse/issues/13653), [\#13692](https://github.com/matrix-org/synapse/issues/13692))

Internal Changes
----------------

- Extend the release script to wait for GitHub Actions to finish and to be usable as a guide for the whole process. ([\#13483](https://github.com/matrix-org/synapse/issues/13483))
- Add experimental configuration option to allow disabling legacy Prometheus metric names. ([\#13540](https://github.com/matrix-org/synapse/issues/13540))
- Cache user IDs instead of profiles to reduce cache memory usage. Contributed by Nick @ Beeper (@fizzadar). ([\#13573](https://github.com/matrix-org/synapse/issues/13573), [\#13600](https://github.com/matrix-org/synapse/issues/13600))
- Optimize how Synapse calculates domains to fetch from during backfill. ([\#13575](https://github.com/matrix-org/synapse/issues/13575))
- Comment about a better future where we can get the state diff between two events. ([\#13586](https://github.com/matrix-org/synapse/issues/13586))
- Instrument `_check_sigs_and_hash_and_fetch` to trace time spent in child concurrent calls for understandable traces in Jaeger. ([\#13588](https://github.com/matrix-org/synapse/issues/13588))
- Improve performance of `@cachedList`. ([\#13591](https://github.com/matrix-org/synapse/issues/13591))
- Minor speed up of fetching large numbers of push rules. ([\#13592](https://github.com/matrix-org/synapse/issues/13592))
- Optimise push action fetching queries. Contributed by Nick @ Beeper (@fizzadar). ([\#13597](https://github.com/matrix-org/synapse/issues/13597))
- Rename `event_map` to `unpersisted_events` when computing the auth differences. ([\#13603](https://github.com/matrix-org/synapse/issues/13603))
- Refactor `get_users_in_room(room_id)` mis-use with dedicated `get_current_hosts_in_room(room_id)` function. ([\#13605](https://github.com/matrix-org/synapse/issues/13605))
- Use dedicated `get_local_users_in_room(room_id)` function to find local users when calculating `join_authorised_via_users_server` of a `/make_join` request. ([\#13606](https://github.com/matrix-org/synapse/issues/13606))
- Refactor `get_users_in_room(room_id)` mis-use to lookup single local user with dedicated `check_local_user_in_room(...)` function. ([\#13608](https://github.com/matrix-org/synapse/issues/13608))
- Drop unused column `application_services_state.last_txn`. ([\#13627](https://github.com/matrix-org/synapse/issues/13627))
- Improve readability of Complement CI logs by printing failure results last. ([\#13639](https://github.com/matrix-org/synapse/issues/13639))
- Generalise the `@cancellable` annotation so it can be used on functions other than just servlet methods. ([\#13662](https://github.com/matrix-org/synapse/issues/13662))
- Introduce a `CommonUsageMetrics` class to share some usage metrics between the Prometheus exporter and the phone home stats. ([\#13671](https://github.com/matrix-org/synapse/issues/13671))
- Add some logging to help track down #13444. ([\#13679](https://github.com/matrix-org/synapse/issues/13679))
- Update poetry lock file for v1.2.0. ([\#13689](https://github.com/matrix-org/synapse/issues/13689))
- Add cache to `is_partial_state_room`. ([\#13693](https://github.com/matrix-org/synapse/issues/13693))
- Update the Grafana dashboard that is included with Synapse in the `contrib` directory. ([\#13697](https://github.com/matrix-org/synapse/issues/13697))
- Only run trial CI on all python versions on non-PRs. ([\#13698](https://github.com/matrix-org/synapse/issues/13698))
- Fix typechecking with latest types-jsonschema. ([\#13712](https://github.com/matrix-org/synapse/issues/13712))
- Reduce number of CI checks we run for PRs. ([\#13713](https://github.com/matrix-org/synapse/issues/13713))

Synapse 1.66.0 (2022-08-31)
===========================

@@ -528,7 +9,7 @@ was originally planned for Synapse 1.64, but was later deferred until now. See
the [upgrade notes](https://matrix-org.github.io/synapse/v1.66/upgrade.html#upgrading-to-v1660) for more details.

Deployments with multiple workers should note that the direct TCP replication
configuration was deprecated in Synapse 1.18.0 and will be removed in Synapse
configuration was deprecated in Synapse v1.18.0 and will be removed in Synapse
v1.67.0. In particular, the TCP `replication` [listener](https://matrix-org.github.io/synapse/v1.66/usage/configuration/config_documentation.html#listeners)
type (not to be confused with the `replication` resource on the `http` listener
type) and the `worker_replication_port` config option will be removed.
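
For reference, a minimal sketch of the kind of configuration being removed; the port number is illustrative, not taken from this changelog:

```yaml
# Deprecated: TCP `replication` listener type on the main process.
listeners:
  - port: 9092
    type: replication

# Deprecated: corresponding worker-side option pointing at that listener.
worker_replication_port: 9092
```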
@@ -658,7 +139,7 @@ Bugfixes
--------

- Update the version of the LDAP3 auth provider module included in the `matrixdotorg/synapse` DockerHub images and the Debian packages hosted on packages.matrix.org to 0.2.2. This version fixes a regression in the module. ([\#13470](https://github.com/matrix-org/synapse/issues/13470))
- Fix a bug introduced in Synapse 1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365))
- Fix a bug introduced in Synapse v1.41.0 where the `/hierarchy` API returned non-standard information (a `room_id` field under each entry in `children_state`) (this was reverted in v1.65.0rc2, see changelog notes above). ([\#13365](https://github.com/matrix-org/synapse/issues/13365))
- Fix a bug introduced in Synapse 0.24.0 that would respond with the wrong error status code to `/joined_members` requests when the requester is not a current member of the room. Contributed by @andrewdoh. ([\#13374](https://github.com/matrix-org/synapse/issues/13374))
- Fix bug in handling of typing events for appservices. Contributed by Nick @ Beeper (@fizzadar). ([\#13392](https://github.com/matrix-org/synapse/issues/13392))
- Fix a bug introduced in Synapse 1.57.0 where rooms listed in `exclude_rooms_from_sync` in the configuration file would not be properly excluded from incremental syncs. ([\#13408](https://github.com/matrix-org/synapse/issues/13408))

@@ -723,7 +204,7 @@ No significant changes since 1.64.0rc2.
Deprecation Warning
-------------------

Synapse 1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.
Synapse v1.66.0 will remove the ability to delegate the tasks of verifying email address ownership, and password reset confirmation, to an identity server.

If you require your homeserver to verify e-mail addresses or to support password resets via e-mail, please configure your homeserver with SMTP access so that it can send e-mails on its own behalf.
[Consult the configuration documentation for more information.](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#email)
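
A minimal sketch of what that SMTP configuration might look like in `homeserver.yaml`; the host, port, and sender address are placeholders, and the linked configuration documentation covers the full set of `email` options:

```yaml
email:
  smtp_host: mail.example.com  # placeholder SMTP server
  smtp_port: 587
  notif_from: "Your homeserver <noreply@example.com>"
```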

@@ -732,7 +213,7 @@ If you require your homeserver to verify e-mail addresses or to support password

Synapse 1.64.0rc2 (2022-07-29)
==============================

This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse 1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406))
This RC reintroduces support for `account_threepid_delegates.email`, which was removed in 1.64.0rc1. It remains deprecated and will be removed altogether in Synapse v1.66.0. ([\#13406](https://github.com/matrix-org/synapse/issues/13406))

Synapse 1.64.0rc1 (2022-07-26)

@@ -981,7 +462,7 @@ Bugfixes

- Fix a bug introduced in Synapse 1.58 where Synapse would not report full version information when installed from a git checkout. This is a best-effort affair and not guaranteed to be stable. ([\#12973](https://github.com/matrix-org/synapse/issues/12973))
- Fix a bug introduced in Synapse 1.60 where Synapse would fail to start if the `sqlite3` module was not available. ([\#12979](https://github.com/matrix-org/synapse/issues/12979))
- Fix a bug where non-standard information was required when requesting the `/hierarchy` API over federation. Introduced
in Synapse 1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991))
in Synapse v1.41.0. ([\#12991](https://github.com/matrix-org/synapse/issues/12991))
- Fix a long-standing bug which meant that rate limiting was not restrictive enough in some cases. ([\#13018](https://github.com/matrix-org/synapse/issues/13018))
- Fix a bug introduced in Synapse 1.58 where profile requests for a malformed user ID would cause an internal error. Synapse now returns 400 Bad Request in this situation. ([\#13041](https://github.com/matrix-org/synapse/issues/13041))
- Fix some inconsistencies in the event authentication code. ([\#13087](https://github.com/matrix-org/synapse/issues/13087), [\#13088](https://github.com/matrix-org/synapse/issues/13088))

@@ -1574,7 +1055,7 @@ If you have already upgraded to Synapse 1.57.0 without problem, then you have no

Updates to the Docker image
---------------------------

- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse 1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512))
- Include version 0.2.0 of the Synapse LDAP Auth Provider module in the Docker image. This matches the version that was present in the Docker image for Synapse v1.56.0. ([\#12512](https://github.com/matrix-org/synapse/issues/12512))

Synapse 1.57.0 (2022-04-19)

@@ -1826,10 +1307,10 @@ Features

Bugfixes
--------

- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse 1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090))
- Use the proper serialization format for bundled thread aggregations. The bug has existed since Synapse v1.48.0. ([\#12090](https://github.com/matrix-org/synapse/issues/12090))
- Fix a long-standing bug when redacting events with relations. ([\#12113](https://github.com/matrix-org/synapse/issues/12113), [\#12121](https://github.com/matrix-org/synapse/issues/12121), [\#12130](https://github.com/matrix-org/synapse/issues/12130), [\#12189](https://github.com/matrix-org/synapse/issues/12189))
- Fix a bug introduced in Synapse 1.7.2 whereby background updates are never run with the default background batch size. ([\#12157](https://github.com/matrix-org/synapse/issues/12157))
- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse 1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175))
- Fix a bug where non-standard information was returned from the `/hierarchy` API. Introduced in Synapse v1.41.0. ([\#12175](https://github.com/matrix-org/synapse/issues/12175))
- Fix a bug introduced in Synapse 1.54.0 that broke background updates on sqlite homeservers while search was disabled. ([\#12215](https://github.com/matrix-org/synapse/issues/12215))
- Fix a long-standing bug when a `filter` argument with `event_fields` which did not include the `unsigned` field could result in a 500 error on `/sync`. ([\#12234](https://github.com/matrix-org/synapse/issues/12234))

@@ -2214,15 +1695,15 @@ Bugfixes

- Fix a long-standing issue which could cause Synapse to incorrectly accept data in the unsigned field of events
received over federation. ([\#11530](https://github.com/matrix-org/synapse/issues/11530))
- Fix a long-standing bug where Synapse wouldn't cache a response indicating that a remote user has no devices. ([\#11587](https://github.com/matrix-org/synapse/issues/11587))
- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse 1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593))
- Fix an error that occurs whilst trying to get the federation status of a destination server that was working normally. This admin API was newly introduced in Synapse v1.49.0. ([\#11593](https://github.com/matrix-org/synapse/issues/11593))
- Fix bundled aggregations not being included in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675). ([\#11612](https://github.com/matrix-org/synapse/issues/11612), [\#11659](https://github.com/matrix-org/synapse/issues/11659), [\#11791](https://github.com/matrix-org/synapse/issues/11791))
- Fix the `/_matrix/client/v1/room/{roomId}/hierarchy` endpoint returning incorrect fields which have been present since Synapse 1.49.0. ([\#11667](https://github.com/matrix-org/synapse/issues/11667))
- Fix preview of some GIF URLs (like tenor.com). Contributed by Philippe Daouadi. ([\#11669](https://github.com/matrix-org/synapse/issues/11669))
- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse 1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695))
- Fix a bug introduced in Synapse 1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745))
- Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse v1.41.0. ([\#11695](https://github.com/matrix-org/synapse/issues/11695))
- Fix a bug introduced in Synapse v1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan. ([\#11710](https://github.com/matrix-org/synapse/issues/11710), [\#11745](https://github.com/matrix-org/synapse/issues/11745))
- Make the 'List Rooms' Admin API sort stable. Contributed by Daniël Sonck. ([\#11737](https://github.com/matrix-org/synapse/issues/11737))
- Fix a long-standing bug where space hierarchy over federation would only work correctly some of the time. ([\#11775](https://github.com/matrix-org/synapse/issues/11775))
- Fix a bug introduced in Synapse 1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786))
- Fix a bug introduced in Synapse v1.46.0 that prevented `on_logged_out` module callbacks from being correctly awaited by Synapse. ([\#11786](https://github.com/matrix-org/synapse/issues/11786))

Improved Documentation

@@ -2302,8 +1783,8 @@ This release candidate fixes a federation-breaking regression introduced in Syna

Bugfixes
--------

- Fix a bug introduced in Synapse 1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729))
- Fix a bug introduced in Synapse 1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730))
- Fix a bug introduced in Synapse v1.0.0 whereby some device list updates would not be sent to remote homeservers if there were too many to send at once. ([\#11729](https://github.com/matrix-org/synapse/issues/11729))
- Fix a bug introduced in Synapse v1.50.0rc1 whereby outbound federation could fail because too many EDUs were produced for device updates. ([\#11730](https://github.com/matrix-org/synapse/issues/11730))

Improved Documentation

Cargo.lock (466 lines, generated)
@@ -1,466 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "aho-corasick"
version = "0.7.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
dependencies = [
 "memchr",
]

[[package]]
name = "anyhow"
version = "1.0.65"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"

[[package]]
name = "arc-swap"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164"

[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"

[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

[[package]]
name = "blake2"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9cf849ee05b2ee5fba5e36f97ff8ec2533916700fc0758d40d92136a42f3388"
dependencies = [
 "digest",
]

[[package]]
name = "block-buffer"
version = "0.10.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
dependencies = [
 "generic-array",
]

[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "crypto-common"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
 "generic-array",
 "typenum",
]

[[package]]
name = "digest"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
dependencies = [
 "block-buffer",
 "crypto-common",
 "subtle",
]

[[package]]
name = "generic-array"
version = "0.14.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
dependencies = [
 "typenum",
 "version_check",
]

[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "indoc"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"

[[package]]
name = "itoa"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"

[[package]]
name = "lazy_static"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"

[[package]]
name = "libc"
version = "0.2.135"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"

[[package]]
name = "lock_api"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
dependencies = [
 "autocfg",
 "scopeguard",
]

[[package]]
name = "log"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
 "cfg-if",
]

[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"

[[package]]
name = "memoffset"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
 "autocfg",
]

[[package]]
name = "once_cell"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"

[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
 "lock_api",
 "parking_lot_core",
]

[[package]]
name = "parking_lot_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
dependencies = [
 "cfg-if",
 "libc",
 "redox_syscall",
 "smallvec",
 "windows-sys",
]

[[package]]
name = "proc-macro2"
version = "1.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
dependencies = [
 "unicode-ident",
]

[[package]]
name = "pyo3"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "201b6887e5576bf2f945fe65172c1fcbf3fcf285b23e4d71eb171d9736e38d32"
dependencies = [
 "anyhow",
 "cfg-if",
 "indoc",
 "libc",
 "memoffset",
 "parking_lot",
 "pyo3-build-config",
 "pyo3-ffi",
 "pyo3-macros",
 "unindent",
]

[[package]]
name = "pyo3-build-config"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf0708c9ed01692635cbf056e286008e5a2927ab1a5e48cdd3aeb1ba5a6fef47"
dependencies = [
 "once_cell",
 "target-lexicon",
]

[[package]]
name = "pyo3-ffi"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90352dea4f486932b72ddf776264d293f85b79a1d214de1d023927b41461132d"
dependencies = [
 "libc",
 "pyo3-build-config",
]

[[package]]
name = "pyo3-log"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
dependencies = [
 "arc-swap",
 "log",
 "pyo3",
]

[[package]]
name = "pyo3-macros"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb24b804a2d9e88bfcc480a5a6dd76f006c1e3edaf064e8250423336e2cd79d"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
 "quote",
 "syn",
]

[[package]]
name = "pyo3-macros-backend"
version = "0.17.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f22bb49f6a7348c253d7ac67a6875f2dc65f36c2ae64a82c381d528972bea6d6"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "pythonize"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
dependencies = [
 "pyo3",
 "serde",
]

[[package]]
name = "quote"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
 "proc-macro2",
]

[[package]]
name = "redox_syscall"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
 "bitflags",
]

[[package]]
name = "regex"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-syntax",
]

[[package]]
name = "regex-syntax"
version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"

[[package]]
name = "ryu"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"

[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"

[[package]]
name = "serde"
version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.145"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "serde_json"
version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
dependencies = [
 "itoa",
 "ryu",
 "serde",
]

[[package]]
name = "smallvec"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"

[[package]]
name = "subtle"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"

[[package]]
name = "syn"
version = "1.0.102"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
dependencies = [
 "proc-macro2",
 "quote",
 "unicode-ident",
]

[[package]]
name = "synapse"
version = "0.1.0"
dependencies = [
 "anyhow",
 "blake2",
 "hex",
 "lazy_static",
 "log",
 "pyo3",
 "pyo3-log",
 "pythonize",
 "regex",
 "serde",
 "serde_json",
]

[[package]]
name = "target-lexicon"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02424087780c9b71cc96799eaeddff35af2bc513278cda5c99fc1f5d026d3c1"

[[package]]
name = "typenum"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"

[[package]]
name = "unicode-ident"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"

[[package]]
name = "unindent"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"

[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"

[[package]]
name = "windows-sys"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
dependencies = [
 "windows_aarch64_msvc",
 "windows_i686_gnu",
 "windows_i686_msvc",
 "windows_x86_64_gnu",
 "windows_x86_64_msvc",
]

[[package]]
name = "windows_aarch64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"

[[package]]
name = "windows_i686_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"

[[package]]
name = "windows_i686_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"

[[package]]
name = "windows_x86_64_gnu"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"

[[package]]
name = "windows_x86_64_msvc"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
@@ -1,5 +0,0 @@
# We make the whole Synapse folder a workspace so that we can run `cargo`
# commands from the root (rather than having to cd into rust/).

[workspace]
members = ["rust"]

@@ -3,7 +3,7 @@ Synapse |support| |development| |documentation| |license| |pypi| |python|
=========================================================================

Synapse is an open-source `Matrix <https://matrix.org/>`_ homeserver written and
maintained by the Matrix.org Foundation. We began rapid development in 2014,
maintained by the Matrix.org Foundation. We began rapid development began in 2014,
reaching v1.0.0 in 2019. Development on Synapse and the Matrix protocol itself continues
in earnest today.

@@ -1,23 +0,0 @@
# A build script for poetry that adds the rust extension.

import os
from typing import Any, Dict

from setuptools_rust import Binding, RustExtension


def build(setup_kwargs: Dict[str, Any]) -> None:
    original_project_dir = os.path.dirname(os.path.realpath(__file__))
    cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")

    extension = RustExtension(
        target="synapse.synapse_rust",
        path=cargo_toml_path,
        binding=Binding.PyO3,
        py_limited_api=True,
        # We force always building in release mode, as we can't tell the
        # difference between using `poetry` in development vs production.
        debug=False,
    )
    setup_kwargs.setdefault("rust_extensions", []).append(extension)
    setup_kwargs["zip_safe"] = False

@@ -94,6 +94,20 @@ worker_replication_host: synapse
worker_replication_http_port: 9093
```

### Add Workers to `instance_map`

Locate the `instance_map` section of your `homeserver.yaml` and populate it with your workers:

```yaml
instance_map:
  synapse-generic-worker-1:        # The worker_name setting in your worker configuration file
    host: synapse-generic-worker-1 # The name of the worker service in your Docker Compose file
    port: 8034                     # The port assigned to the replication listener in your worker config file
  synapse-federation-sender-1:
    host: synapse-federation-sender-1
    port: 8034
```

### Configure Federation Senders

This section is applicable if you are using Federation senders (synapse.app.federation_sender). Locate the `send_federation` and `federation_sender_instances` settings in your `homeserver.yaml` and configure them:
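
A minimal sketch of those two settings, assuming the single federation sender named above; check the worker names against your own deployment:

```yaml
send_federation: false  # the main process should no longer send federation traffic itself
federation_sender_instances:
  - synapse-federation-sender-1
```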

@@ -108,4 +122,4 @@ federation_sender_instances:

## Other Worker types

Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.

@@ -5,4 +5,10 @@ worker_name: synapse-federation-sender-1
worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

worker_log_config: /data/federation_sender.log.config

@@ -6,6 +6,10 @@ worker_replication_host: synapse
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]
  - type: http
    port: 8081
    x_forwarded: true

@@ -335,7 +335,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_storage_events_persisted_events{instance=\"$instance\"}[$bucket_size]))",
"hide": false,
"instant": false,
"legendFormat": "Events",

@@ -1423,7 +1423,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_background_process_ru_utime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_background_process_ru_stime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_background_process_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_background_process_ru_stime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"hide": false,
"instant": false,

@@ -1804,7 +1804,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size])) without (job,index)",
"expr": "sum(rate(synapse_storage_events_persisted_events{instance=\"$instance\"}[$bucket_size])) without (job,index)",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -2437,7 +2437,7 @@
"uid": "$datasource"
},
"exemplar": false,
"expr": "sum(rate(synapse_state_res_db_for_biggest_room_seconds_total{instance=\"$instance\"}[1m]))",
"expr": "sum(rate(synapse_state_res_db_for_biggest_room_seconds{instance=\"$instance\"}[1m]))",
"format": "time_series",
"hide": false,
"instant": false,

@@ -2451,7 +2451,7 @@
"uid": "$datasource"
},
"exemplar": false,
"expr": "sum(rate(synapse_state_res_cpu_for_biggest_room_seconds_total{instance=\"$instance\"}[1m]))",
"expr": "sum(rate(synapse_state_res_cpu_for_biggest_room_seconds{instance=\"$instance\"}[1m]))",
"format": "time_series",
"hide": false,
"instant": false,

@@ -3244,104 +3244,6 @@
"yaxis": {
"align": false
}
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"description": "Average number of hosts being rate limited across each worker type.",
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green"
}
]
},
"unit": "none"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 53
},
"id": 225,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom"
},
"tooltip": {
"mode": "single",
"sort": "desc"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"editorMode": "builder",
"expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_sleep_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})",
"hide": false,
"legendFormat": "Slept by {{job}}:{{rate_limiter_name}}",
"range": true,
"refId": "B"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"editorMode": "builder",
"expr": "avg by(job, rate_limiter_name) (synapse_rate_limit_reject_affected_hosts{instance=\"$instance\", job=~\"$job\", index=~\"$index\"})",
"legendFormat": "Rejected by {{job}}:{{rate_limiter_name}}",
"range": true,
"refId": "A"
}
],
"title": "Hosts being rate limited",
"type": "timeseries"
}
],
"targets": [

@@ -3425,7 +3327,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_background_process_ru_utime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_background_process_ru_stime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_background_process_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])+rate(synapse_background_process_ru_stime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 1,

@@ -3518,7 +3420,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_background_process_db_txn_duration_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) + rate(synapse_background_process_db_sched_duration_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_background_process_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) + rate(synapse_background_process_db_sched_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"hide": false,
"intervalFactor": 1,

@@ -3726,7 +3628,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_federation_client_sent_transactions_total{instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "successful txn rate",

@@ -3736,7 +3638,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_util_metrics_block_count_total{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions_total{instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_metrics_block_count{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
"legendFormat": "failed txn rate",
"refId": "B"
}

@@ -3826,7 +3728,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_federation_server_received_pdus_total{instance=~\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_federation_server_received_pdus{instance=~\"$instance\"}[$bucket_size]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "pdus",

@@ -3836,7 +3738,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_federation_server_received_edus_total{instance=~\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_federation_server_received_edus{instance=~\"$instance\"}[$bucket_size]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "edus",

@@ -3928,7 +3830,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_federation_client_sent_pdu_destinations:total_total{instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_federation_client_sent_pdu_destinations:total{instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 1,

@@ -3939,7 +3841,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "sum(rate(synapse_federation_client_sent_edus_total{instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_federation_client_sent_edus{instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "edus",

@@ -5042,7 +4944,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_http_httppusher_http_pushes_processed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0",
"expr": "rate(synapse_http_httppusher_http_pushes_processed{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed + synapse_http_httppusher_http_pushes_processed) > 0",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -5054,7 +4956,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_http_httppusher_http_pushes_failed_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed_total + synapse_http_httppusher_http_pushes_processed_total) > 0",
"expr": "rate(synapse_http_httppusher_http_pushes_failed{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) and on (instance, job, index) (synapse_http_httppusher_http_pushes_failed + synapse_http_httppusher_http_pushes_processed) > 0",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "failed {{job}}",

@@ -5268,12 +5170,12 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{index}}",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter_total",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_state_size_counter",
"refId": "A",
"step": 2
}

@@ -5369,12 +5271,12 @@
"uid": "$datasource"
},
"exemplar": true,
"expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "{{index}}",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter",
"refId": "A",
"step": 2
}

@@ -5475,12 +5377,12 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:hits{job=\"$job\",index=~\"$index\",name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "Hit Rate",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter",
"refId": "A",
"step": 2
},

@@ -5490,7 +5392,7 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"push_rules_delta_state_cache_metric\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -5598,12 +5500,12 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:hits{job=\"$job\",index=~\"$index\",name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "Hit Rate",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter",
"refId": "A",
"step": 2
},

@@ -5613,7 +5515,7 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"room_push_rule_cache\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -5719,12 +5621,12 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache_hits{job=\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:hits{job=\"$job\",index=~\"$index\",name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))/sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,
"legendFormat": "Hit Rate",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter_total",
"metric": "synapse_push_bulk_push_rule_evaluator_push_rules_invalidation_counter",
"refId": "A",
"step": 2
},

@@ -5734,7 +5636,7 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "sum(rate(synapse_util_caches_cache{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))",
"expr": "sum(rate(synapse_util_caches_cache:total{job=\"$job\",index=~\"$index\", name=\"_get_rules_for_room\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6087,7 +5989,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "topk(10, rate(synapse_storage_transaction_time_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
"expr": "topk(10, rate(synapse_storage_transaction_time_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6187,7 +6089,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_storage_transaction_time_sum_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_storage_transaction_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"instant": false,
"interval": "",

@@ -6287,7 +6189,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_storage_transaction_time_sum_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])/rate(synapse_storage_transaction_time_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_storage_transaction_time_sum{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])/rate(synapse_storage_transaction_time_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"instant": false,
"interval": "",

@@ -6502,7 +6404,7 @@
"h": 13,
"w": 12,
"x": 0,
"y": 35
"y": 10
},
"hiddenSeries": false,
"id": 12,

@@ -6538,7 +6440,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_metrics_block_ru_utime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\",block_name!=\"wrapped_request_handler\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds_total[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\",block_name!=\"wrapped_request_handler\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6600,7 +6502,7 @@
"h": 13,
"w": 12,
"x": 12,
"y": 35
"y": 10
},
"hiddenSeries": false,
"id": 26,

@@ -6636,7 +6538,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "(rate(synapse_util_metrics_block_ru_utime_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds_total[$bucket_size])) / rate(synapse_util_metrics_block_count_total[$bucket_size])",
"expr": "(rate(synapse_util_metrics_block_ru_utime_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) + rate(synapse_util_metrics_block_ru_stime_seconds[$bucket_size])) / rate(synapse_util_metrics_block_count[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6699,7 +6601,7 @@
"h": 13,
"w": 12,
"x": 0,
"y": 48
"y": 23
},
"hiddenSeries": false,
"id": 13,

@@ -6737,7 +6639,7 @@
"uid": "${DS_PROMETHEUS}"
},
"exemplar": true,
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6803,7 +6705,7 @@
"h": 13,
"w": 12,
"x": 12,
"y": 48
"y": 23
},
"hiddenSeries": false,
"id": 27,

@@ -6839,7 +6741,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6901,7 +6803,7 @@
"h": 13,
"w": 12,
"x": 0,
"y": 61
"y": 36
},
"hiddenSeries": false,
"id": 28,

@@ -6936,7 +6838,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_db_txn_duration_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_db_txn_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -6998,7 +6900,7 @@
"h": 13,
"w": 12,
"x": 12,
"y": 61
"y": 36
},
"hiddenSeries": false,
"id": 25,

@@ -7033,7 +6935,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_metrics_block_time_seconds_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_time_seconds{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]) / rate(synapse_util_metrics_block_count[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -7058,13 +6960,11 @@
},
"yaxes": [
{
"$$hashKey": "object:180",
"format": "s",
"format": "ms",
"logBase": 1,
"show": true
},
{
"$$hashKey": "object:181",
"format": "short",
"logBase": 1,
"show": true

@@ -7088,7 +6988,7 @@
"h": 15,
"w": 12,
"x": 0,
"y": 74
"y": 49
},
"hiddenSeries": false,
"id": 154,

@@ -7109,7 +7009,7 @@
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "9.0.4",
"pluginVersion": "8.4.3",
"pointradius": 2,
"points": false,
"renderer": "flot",

@@ -7122,7 +7022,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_metrics_block_count_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_metrics_block_count{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"interval": "",
"legendFormat": "{{job}}-{{index}} {{block_name}}",
"refId": "A"

@@ -7209,7 +7109,7 @@
"h": 10,
"w": 12,
"x": 0,
"y": 69
"y": 36
},
"hiddenSeries": false,
"id": 1,

@@ -7246,7 +7146,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])/rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"expr": "rate(synapse_util_caches_cache:hits{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])/rate(synapse_util_caches_cache:total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{name}} {{job}}-{{index}}",

@@ -7311,7 +7211,7 @@
"h": 10,
"w": 12,
"x": 12,
"y": 69
"y": 36
},
"hiddenSeries": false,
"id": 8,

@@ -7347,7 +7247,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "synapse_util_caches_cache_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
"expr": "synapse_util_caches_cache:size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
"format": "time_series",
"hide": false,
"interval": "",

@@ -7411,7 +7311,7 @@
"h": 10,
"w": 12,
"x": 0,
"y": 79
"y": 46
},
"hiddenSeries": false,
"id": 38,

@@ -7447,7 +7347,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"expr": "rate(synapse_util_caches_cache:total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -7507,7 +7407,7 @@
"h": 10,
"w": 12,
"x": 12,
"y": 79
"y": 46
},
"hiddenSeries": false,
"id": 39,

@@ -7515,16 +7415,11 @@
"alignAsTable": true,
"avg": false,
"current": false,
"hideEmpty": false,
"hideZero": false,
"max": true,
"max": false,
"min": false,
"rightSide": false,
"show": true,
"sort": "max",
"sortDesc": true,
"total": false,
"values": true
"values": false
},
"lines": true,
"linewidth": 1,

@@ -7547,7 +7442,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "topk(10, rate(synapse_util_caches_cache{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]) - rate(synapse_util_caches_cache_hits{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"expr": "topk(10, rate(synapse_util_caches_cache:total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]) - rate(synapse_util_caches_cache:hits{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"interval": "",
"intervalFactor": 2,

@@ -7572,13 +7467,11 @@
},
"yaxes": [
{
"$$hashKey": "object:101",
"format": "rps",
"logBase": 1,
"show": true
},
{
"$$hashKey": "object:102",
"format": "short",
"logBase": 1,
"show": true

@@ -7608,7 +7501,7 @@
"h": 9,
"w": 12,
"x": 0,
"y": 89
"y": 56
},
"hiddenSeries": false,
"id": 65,

@@ -7643,7 +7536,7 @@
"datasource": {
"uid": "$datasource"
},
"expr": "rate(synapse_util_caches_cache_evicted_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_util_caches_cache:evicted_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
@@ -7763,7 +7656,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "synapse_util_caches_response_cache_size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
|
||||
"expr": "synapse_util_caches_response_cache:size{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}",
|
||||
"interval": "",
|
||||
"legendFormat": "{{name}} {{job}}-{{index}}",
|
||||
"refId": "A"
|
||||
@@ -7853,7 +7746,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_util_caches_response_cache_hits{instance=\"$instance\", job=~\"$job\", index=~\"$index\"}[$bucket_size])/rate(synapse_util_caches_response_cache{instance=\"$instance\", job=~\"$job\", index=~\"$index\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_util_caches_response_cache:hits{instance=\"$instance\", job=~\"$job\", index=~\"$index\"}[$bucket_size])/rate(synapse_util_caches_response_cache:total{instance=\"$instance\", job=~\"$job\", index=~\"$index\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "{{name}} {{job}}-{{index}}",
|
||||
"refId": "A"
|
||||
@@ -9556,7 +9449,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "synapse_forward_extremities_bucket{instance=\"$instance\"} and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0)",
|
||||
"expr": "synapse_forward_extremities_bucket{instance=\"$instance\"} and on (index, instance, job) (synapse_storage_events_persisted_events > 0)",
|
||||
"format": "heatmap",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{le}}",
|
||||
@@ -9716,7 +9609,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0)",
|
||||
"expr": "rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0)",
|
||||
"format": "heatmap",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{le}}",
|
||||
@@ -9793,7 +9686,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.5, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.5, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "50%",
|
||||
@@ -9803,7 +9696,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.75, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.75, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "75%",
|
||||
@@ -9813,7 +9706,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.90, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.90, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "90%",
|
||||
@@ -9823,7 +9716,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.99, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.99, rate(synapse_storage_events_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "99%",
|
||||
@@ -9905,7 +9798,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0)",
|
||||
"expr": "rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0)",
|
||||
"format": "heatmap",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{le}}",
|
||||
@@ -9982,7 +9875,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.5, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.5, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "50%",
|
||||
@@ -9992,7 +9885,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.75, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.75, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "75%",
|
||||
@@ -10002,7 +9895,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.90, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.90, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "90%",
|
||||
@@ -10012,7 +9905,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "histogram_quantile(0.99, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events_total > 0))",
|
||||
"expr": "histogram_quantile(0.99, rate(synapse_storage_events_stale_forward_extremities_persisted_bucket{instance=\"$instance\"}[$bucket_size]) and on (index, instance, job) (synapse_storage_events_persisted_events > 0))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "99%",
|
||||
@@ -10297,7 +10190,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "sum(rate(synapse_storage_events_state_resolutions_during_persistence_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"expr": "sum(rate(synapse_storage_events_state_resolutions_during_persistence{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"interval": "",
|
||||
"legendFormat": "State res ",
|
||||
"refId": "A"
|
||||
@@ -10306,7 +10199,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "sum(rate(synapse_storage_events_potential_times_prune_extremities_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"expr": "sum(rate(synapse_storage_events_potential_times_prune_extremities{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"interval": "",
|
||||
"legendFormat": "Potential to prune",
|
||||
"refId": "B"
|
||||
@@ -10315,7 +10208,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "sum(rate(synapse_storage_events_times_pruned_extremities_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"expr": "sum(rate(synapse_storage_events_times_pruned_extremities{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size]))",
|
||||
"interval": "",
|
||||
"legendFormat": "Pruned",
|
||||
"refId": "C"
|
||||
@@ -11069,7 +10962,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_handler_presence_notified_presence_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_handler_presence_notified_presence{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "Notified",
|
||||
"refId": "A"
|
||||
@@ -11078,7 +10971,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_handler_presence_federation_presence_out_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_handler_presence_federation_presence_out{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "Remote ping",
|
||||
"refId": "B"
|
||||
@@ -11087,7 +10980,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_handler_presence_presence_updates_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_handler_presence_presence_updates{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "Total updates",
|
||||
"refId": "C"
|
||||
@@ -11096,7 +10989,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_handler_presence_federation_presence_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_handler_presence_federation_presence{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "Remote updates",
|
||||
"refId": "D"
|
||||
@@ -11105,7 +10998,7 @@
|
||||
"datasource": {
|
||||
"uid": "$datasource"
|
||||
},
|
||||
"expr": "rate(synapse_handler_presence_bump_active_time_total{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_handler_presence_bump_active_time{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"interval": "",
|
||||
"legendFormat": "Bump active time",
|
||||
"refId": "E"
|
||||
@@ -11789,7 +11682,7 @@
|
||||
"name": "instance",
|
||||
"options": [],
|
||||
"query": {
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds_total, instance)",
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds, instance)",
|
||||
"refId": "Prometheus-instance-Variable-Query"
|
||||
},
|
||||
"refresh": 2,
|
||||
@@ -11818,7 +11711,7 @@
|
||||
"name": "job",
|
||||
"options": [],
|
||||
"query": {
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds_total, job)",
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds, job)",
|
||||
"refId": "Prometheus-job-Variable-Query"
|
||||
},
|
||||
"refresh": 2,
|
||||
@@ -11848,7 +11741,7 @@
|
||||
"name": "index",
|
||||
"options": [],
|
||||
"query": {
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds_total, index)",
|
||||
"query": "label_values(synapse_util_metrics_block_ru_utime_seconds, index)",
|
||||
"refId": "Prometheus-index-Variable-Query"
|
||||
},
|
||||
"refresh": 2,
|
||||
@@ -11864,8 +11757,8 @@
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-3h",
|
||||
"to": "now"
|
||||
"from": "2022-07-22T04:08:13.716Z",
|
||||
"to": "2022-07-22T18:44:27.863Z"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
@@ -11896,6 +11789,6 @@
|
||||
"timezone": "",
|
||||
"title": "Synapse",
|
||||
"uid": "000000012",
|
||||
"version": 133,
|
||||
"version": 124,
|
||||
"weekStart": ""
|
||||
}
|
||||
}
|
||||
contrib/prometheus/synapse-v1.rules (new file, 21 lines)
@@ -0,0 +1,21 @@
synapse_federation_transaction_queue_pendingEdus:total = sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)
synapse_federation_transaction_queue_pendingPdus:total = sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)

synapse_http_server_request_count:method{servlet=""} = sum(synapse_http_server_request_count) by (method)
synapse_http_server_request_count:servlet{method=""} = sum(synapse_http_server_request_count) by (servlet)

synapse_http_server_request_count:total{servlet=""} = sum(synapse_http_server_request_count:by_method) by (servlet)

synapse_cache:hit_ratio_5m = rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])
synapse_cache:hit_ratio_30s = rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])

synapse_federation_client_sent{type="EDU"} = synapse_federation_client_sent_edus + 0
synapse_federation_client_sent{type="PDU"} = synapse_federation_client_sent_pdu_destinations:count + 0
synapse_federation_client_sent{type="Query"} = sum(synapse_federation_client_sent_queries) by (job)

synapse_federation_server_received{type="EDU"} = synapse_federation_server_received_edus + 0
synapse_federation_server_received{type="PDU"} = synapse_federation_server_received_pdus + 0
synapse_federation_server_received{type="Query"} = sum(synapse_federation_server_received_queries) by (job)

synapse_federation_transaction_queue_pending{type="EDU"} = synapse_federation_transaction_queue_pending_edus + 0
synapse_federation_transaction_queue_pending{type="PDU"} = synapse_federation_transaction_queue_pending_pdus + 0
@@ -1,20 +1,37 @@
groups:
- name: synapse
  rules:
  - record: "synapse_federation_transaction_queue_pendingEdus:total"
    expr: "sum(synapse_federation_transaction_queue_pendingEdus or absent(synapse_federation_transaction_queue_pendingEdus)*0)"
  - record: "synapse_federation_transaction_queue_pendingPdus:total"
    expr: "sum(synapse_federation_transaction_queue_pendingPdus or absent(synapse_federation_transaction_queue_pendingPdus)*0)"
  - record: 'synapse_http_server_request_count:method'
    labels:
      servlet: ""
    expr: "sum(synapse_http_server_request_count) by (method)"
  - record: 'synapse_http_server_request_count:servlet'
    labels:
      method: ""
    expr: 'sum(synapse_http_server_request_count) by (servlet)'

  - record: 'synapse_http_server_request_count:total'
    labels:
      servlet: ""
    expr: 'sum(synapse_http_server_request_count:by_method) by (servlet)'

  - record: 'synapse_cache:hit_ratio_5m'
    expr: 'rate(synapse_util_caches_cache:hits[5m]) / rate(synapse_util_caches_cache:total[5m])'
  - record: 'synapse_cache:hit_ratio_30s'
    expr: 'rate(synapse_util_caches_cache:hits[30s]) / rate(synapse_util_caches_cache:total[30s])'

  ###
  ### Prometheus Console Only
  ### The following rules are only needed if you use the Prometheus Console
  ### in contrib/prometheus/consoles/synapse.html
  ###
  - record: 'synapse_federation_client_sent'
    labels:
      type: "EDU"
    expr: 'synapse_federation_client_sent_edus_total + 0'
    expr: 'synapse_federation_client_sent_edus + 0'
  - record: 'synapse_federation_client_sent'
    labels:
      type: "PDU"
    expr: 'synapse_federation_client_sent_pdu_destinations_count_total + 0'
    expr: 'synapse_federation_client_sent_pdu_destinations:count + 0'
  - record: 'synapse_federation_client_sent'
    labels:
      type: "Query"
@@ -23,11 +40,11 @@ groups:
  - record: 'synapse_federation_server_received'
    labels:
      type: "EDU"
    expr: 'synapse_federation_server_received_edus_total + 0'
    expr: 'synapse_federation_server_received_edus + 0'
  - record: 'synapse_federation_server_received'
    labels:
      type: "PDU"
    expr: 'synapse_federation_server_received_pdus_total + 0'
    expr: 'synapse_federation_server_received_pdus + 0'
  - record: 'synapse_federation_server_received'
    labels:
      type: "Query"
@@ -41,34 +58,21 @@ groups:
    labels:
      type: "PDU"
    expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
  ###
  ### End of 'Prometheus Console Only' rules block
  ###


  ###
  ### Grafana Only
  ### The following rules are only needed if you use the Grafana dashboard
  ### in contrib/grafana/synapse.json
  ###
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
    labels:
      type: remote
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
    labels:
      type: local
  - record: synapse_storage_events_persisted_by_source_type
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
    labels:
      type: bridges

  - record: synapse_storage_events_persisted_by_event_type
    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)

    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
  - record: synapse_storage_events_persisted_by_origin
    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
  ###
  ### End of 'Grafana Only' rules block
  ###
    expr: sum without(type) (synapse_storage_events_persisted_events_sep)

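As an aside for anyone editing these recording rules: the YAML rules above can be validated before Prometheus loads them. This is a minimal sketch, assuming a Prometheus 2.x `promtool` on the PATH and that the hunk above belongs to `contrib/prometheus/synapse-v2.rules` in a local checkout (the file header for this hunk was not preserved in the page):

```bash
# Validate the recording rules file; exits non-zero on syntax errors.
promtool check rules contrib/prometheus/synapse-v2.rules

# Ask a running Prometheus to reload its configuration afterwards
# (only works when Prometheus was started with --web.enable-lifecycle).
curl -X POST http://localhost:9090/-/reload
```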
@@ -7,7 +7,7 @@ You can alternatively create multiple worker configuration files with a simple `
#!/bin/bash
for i in {1..5}
do
cat << EOF > generic_worker$i.yaml
cat << EOF >> generic_worker$i.yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker$i

@@ -15,8 +15,6 @@ worker_name: generic_worker$i
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_main_http_uri: http://localhost:8008/

worker_listeners:
  - type: http
    port: 808$i

debian/build_virtualenv (vendored, 9 lines changed)
@@ -36,7 +36,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0
pip install poetry==1.2.0b1
poetry export \
    --extras all \
    --extras test \
@@ -61,7 +61,7 @@ dh_virtualenv \
    --extras="all,systemd,test" \
    --requirements="exported_requirements.txt"

PACKAGE_BUILD_DIR="$(pwd)/debian/matrix-synapse-py3"
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"

@@ -78,14 +78,9 @@ case "$DEB_BUILD_OPTIONS" in

        cp -r tests "$tmpdir"

        # To avoid pulling in the unbuilt Synapse in the local directory
        pushd /

        PYTHONPATH="$tmpdir" \
            "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests

        popd

        ;;
esac

debian/changelog (vendored, 103 lines changed)
@@ -1,112 +1,9 @@
matrix-synapse-py3 (1.70.1) stable; urgency=medium

  * New Synapse release 1.70.1.

 -- Synapse Packaging team <packages@matrix.org> Fri, 28 Oct 2022 12:10:21 +0100

matrix-synapse-py3 (1.70.0) stable; urgency=medium

  * New Synapse release 1.70.0.

 -- Synapse Packaging team <packages@matrix.org> Wed, 26 Oct 2022 11:11:50 +0100

matrix-synapse-py3 (1.70.0~rc2) stable; urgency=medium

  * New Synapse release 1.70.0rc2.

 -- Synapse Packaging team <packages@matrix.org> Tue, 25 Oct 2022 10:59:47 +0100

matrix-synapse-py3 (1.70.0~rc1) stable; urgency=medium

  * New Synapse release 1.70.0rc1.

 -- Synapse Packaging team <packages@matrix.org> Wed, 19 Oct 2022 14:11:57 +0100

matrix-synapse-py3 (1.69.0) stable; urgency=medium

  * New Synapse release 1.69.0.

 -- Synapse Packaging team <packages@matrix.org> Mon, 17 Oct 2022 11:31:03 +0100

matrix-synapse-py3 (1.69.0~rc4) stable; urgency=medium

  * New Synapse release 1.69.0rc4.

 -- Synapse Packaging team <packages@matrix.org> Fri, 14 Oct 2022 15:04:47 +0100

matrix-synapse-py3 (1.69.0~rc3) stable; urgency=medium

  * New Synapse release 1.69.0rc3.

 -- Synapse Packaging team <packages@matrix.org> Wed, 12 Oct 2022 13:24:04 +0100

matrix-synapse-py3 (1.69.0~rc2) stable; urgency=medium

  * New Synapse release 1.69.0rc2.

 -- Synapse Packaging team <packages@matrix.org> Thu, 06 Oct 2022 14:45:00 +0100

matrix-synapse-py3 (1.69.0~rc1) stable; urgency=medium

  * The man page for the hash_password script has been updated to reflect
    the correct default value of 'bcrypt_rounds'.
  * New Synapse release 1.69.0rc1.

 -- Synapse Packaging team <packages@matrix.org> Tue, 04 Oct 2022 11:17:16 +0100

matrix-synapse-py3 (1.68.0) stable; urgency=medium

  * New Synapse release 1.68.0.

 -- Synapse Packaging team <packages@matrix.org> Tue, 27 Sep 2022 12:02:09 +0100

matrix-synapse-py3 (1.68.0~rc2) stable; urgency=medium

  * New Synapse release 1.68.0rc2.

 -- Synapse Packaging team <packages@matrix.org> Fri, 23 Sep 2022 09:40:10 +0100

matrix-synapse-py3 (1.68.0~rc1) stable; urgency=medium

  * New Synapse release 1.68.0rc1.

 -- Synapse Packaging team <packages@matrix.org> Tue, 20 Sep 2022 11:18:20 +0100

matrix-synapse-py3 (1.67.0) stable; urgency=medium

  * New Synapse release 1.67.0.

 -- Synapse Packaging team <packages@matrix.org> Tue, 13 Sep 2022 09:19:56 +0100

matrix-synapse-py3 (1.67.0~rc1) stable; urgency=medium

  [ Erik Johnston ]
  * Use stable poetry 1.2.0 version, rather than a prerelease.

  [ Synapse Packaging team ]
  * New Synapse release 1.67.0rc1.

 -- Synapse Packaging team <packages@matrix.org> Tue, 06 Sep 2022 09:01:06 +0100

matrix-synapse-py3 (1.66.0) stable; urgency=medium

  * New Synapse release 1.66.0.

 -- Synapse Packaging team <packages@matrix.org> Wed, 31 Aug 2022 11:20:17 +0100

matrix-synapse-py3 (1.66.0~rc2+nmu1) UNRELEASED; urgency=medium

  [ Jörg Behrmann ]
  * Update debhelper to compatibility level 12.
  * Drop the preinst script stopping synapse.
  * Allocate a group for the system user.
  * Change dpkg-statoverride to --force-statoverride-add.

  [ Erik Johnston ]
  * Disable `dh_auto_configure` as it broke during Rust build.

 -- Jörg Behrmann <behrmann@physik.fu-berlin.de> Tue, 23 Aug 2022 17:17:00 +0100

matrix-synapse-py3 (1.66.0~rc2) stable; urgency=medium

  * New Synapse release 1.66.0rc2.

debian/compat (vendored, new file, 1 line)
@@ -0,0 +1 @@
10
debian/control (vendored, 2 lines changed)
@@ -4,7 +4,7 @@ Priority: extra
Maintainer: Synapse Packaging team <packages@matrix.org>
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
Build-Depends:
 debhelper-compat (= 12),
 debhelper (>= 10),
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,

debian/hash_password.1 (vendored, 2 lines changed)
@@ -10,7 +10,7 @@
.P
\fBhash_password\fR takes a password as a parameter either on the command line or the \fBSTDIN\fR if not supplied\.
.P
It accepts a YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB12\fR\.
It accepts a YAML file which can be used to specify parameters like the number of rounds for bcrypt and password_config section having the pepper value used for the hashing\. By default \fBbcrypt_rounds\fR is set to \fB10\fR\.
.P
The hashed password is written on the \fBSTDOUT\fR\.
.SH "FILES"

debian/hash_password.ronn (vendored, 2 lines changed)
@@ -14,7 +14,7 @@ or the `STDIN` if not supplied.

It accepts a YAML file which can be used to specify parameters like the
number of rounds for bcrypt and password_config section having the pepper
value used for the hashing. By default `bcrypt_rounds` is set to **12**.
value used for the hashing. By default `bcrypt_rounds` is set to **10**.

The hashed password is written on the `STDOUT`.

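To make the man page's description concrete, here is a hedged sketch of a config file and invocation; the `-c`/`-p` flags follow the script's documented options, while the pepper and password values are made-up placeholders:

```bash
# Hypothetical hash_password config exercising the options described above.
cat > hash_password_config.yaml << 'EOF'
bcrypt_rounds: 12
password_config:
  pepper: "example-pepper-value"
EOF

# The bcrypt digest is printed on STDOUT.
hash_password -c hash_password_config.yaml -p "s3cr3t-password"
```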
debian/matrix-synapse-py3.postinst (vendored, 4 lines changed)
@@ -40,12 +40,12 @@ EOF
    /opt/venvs/matrix-synapse/lib/manage_debconf.pl update

    if ! getent passwd $USER >/dev/null; then
        adduser --quiet --system --group --no-create-home --home /var/lib/matrix-synapse $USER
        adduser --quiet --system --no-create-home --home /var/lib/matrix-synapse $USER
    fi

    for DIR in /var/lib/matrix-synapse /var/log/matrix-synapse /etc/matrix-synapse; do
        if ! dpkg-statoverride --list --quiet $DIR >/dev/null; then
            dpkg-statoverride --force-statoverride-add --quiet --update --add $USER "$(id -gn $USER)" 0755 $DIR
            dpkg-statoverride --force --quiet --update --add $USER nogroup 0755 $DIR
        fi
    done

debian/matrix-synapse-py3.preinst (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
#!/bin/sh -e

# Attempt to undo some of the braindamage caused by
# https://github.com/matrix-org/package-synapse-debian/issues/18.
#
# Due to reasons [1], the old python2 matrix-synapse package will not stop the
# service when the package is uninstalled. Our maintainer scripts will do the
# right thing in terms of ensuring the service is enabled and unmasked, but
# then do a `systemctl start matrix-synapse`, which of course does nothing -
# leaving the old (py2) service running.
#
# There should normally be no reason for the service to be running during our
# preinst, so we assume that if it *is* running, it's due to that situation,
# and stop it.
#
# [1] dh_systemd_start doesn't do anything because it sees that there is an
#     init.d script with the same name, so leaves it to dh_installinit.
#
#     dh_installinit doesn't do anything because somebody gave it a --no-start
#     for unknown reasons.

if [ -x /bin/systemctl ]; then
    if /bin/systemctl --quiet is-active -- matrix-synapse; then
        echo >&2 "stopping existing matrix-synapse service"
        /bin/systemctl stop matrix-synapse || true
    fi
fi

#DEBHELPER#

exit 0
debian/matrix-synapse.default (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
# Specify environment variables used when running Synapse
# SYNAPSE_CACHE_FACTOR=0.5 (default)
debian/matrix-synapse.service (vendored, 6 lines changed)
@@ -5,6 +5,7 @@ Description=Synapse Matrix homeserver
Type=notify
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=-/etc/default/matrix-synapse
ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
ExecReload=/bin/kill -HUP $MAINPID
@@ -12,10 +13,5 @@ Restart=always
RestartSec=3
SyslogIdentifier=matrix-synapse

# The environment file is not shipped by default anymore and the below directive
# is for backwards compatibility only. Please use your homeserver.yaml if
# possible.
EnvironmentFile=-/etc/default/matrix-synapse

[Install]
WantedBy=multi-user.target

debian/rules (vendored, 14 lines changed)
@@ -6,19 +6,15 @@
# assume we only have one package
PACKAGE_NAME:=`dh_listpackages`

override_dh_installsystemd:
	dh_installsystemd --name=matrix-synapse
override_dh_systemd_enable:
	dh_systemd_enable --name=matrix-synapse

override_dh_installinit:
	dh_installinit --name=matrix-synapse

# we don't really want to strip the symbols from our object files.
override_dh_strip:

override_dh_auto_configure:

# many libraries pulled from PyPI have allocatable sections after
# non-allocatable ones on which dwz errors out. For those without the issue the
# gains are only marginal
override_dh_dwz:

# dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
# (executables and shared libs) in the package, and looks for the shared
# libraries that they depend on. It then adds a dependency on the package that

@@ -31,9 +31,7 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
FROM docker.io/python:${PYTHON_VERSION}-slim as requirements

# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -48,8 +46,17 @@ RUN \

# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
# We use a specific commit from poetry's master branch instead of our usual 1.1.14,
# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
# https://github.com/python-poetry/poetry/pull/5156 and
# https://github.com/python-poetry/poetry/issues/5141 ;
# without it, we generate a requirements.txt with incorrect environment markers,
# which causes necessary packages to be omitted when we `pip install`.
#
# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install --user "poetry==1.2.0"
    pip install --user "poetry-core==1.1.0a7" "git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5"

WORKDIR /synapse

@@ -78,7 +85,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
###
### Stage 1: builder
###
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
FROM docker.io/python:${PYTHON_VERSION}-slim as builder

# install the OS build deps
RUN \
@@ -94,26 +101,11 @@ RUN \
    libxml++2.6-dev \
    libxslt1-dev \
    openssl \
    rustc \
    zlib1g-dev \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*


# Install rust and ensure its in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI

# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be
# used while you develop on the source
@@ -125,9 +117,8 @@ RUN --mount=type=cache,target=/root/.cache/pip \

# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml README.rst build_rust.py Cargo.toml Cargo.lock /synapse/
COPY pyproject.toml README.rst /synapse/

# Repeat of earlier build argument declaration, as this is a new build stage.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
@@ -135,9 +126,7 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
    --mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
    if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
    pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
  else \
    pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
@@ -147,7 +136,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \
### Stage 2: runtime
###

FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
FROM docker.io/python:${PYTHON_VERSION}-slim

LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'

@@ -72,7 +72,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
    && env DEBIAN_FRONTEND=noninteractive apt-get install \
    -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
    build-essential \
    curl \
    debhelper \
    devscripts \
    libsystemd-dev \
@@ -86,15 +85,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
    libpq-dev \
    xmlsec1

# Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /

# install dhvirtualenv. Update the apt cache again first, in case we got a

@@ -40,11 +40,7 @@ FROM matrixdotorg/synapse:$SYNAPSE_VERSION
COPY --from=deps_base /etc/nginx /etc/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/lib/nginx

# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
RUN chown www-data /var/log/nginx /var/lib/nginx

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

@@ -241,4 +241,4 @@ healthcheck:

Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read about jemalloc by reading the Synapse
[Admin FAQ](https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).
[README](https://github.com/matrix-org/synapse/blob/HEAD/README.rst#help-synapse-is-slow-and-eats-all-my-ram-cpu).

@@ -8,29 +8,35 @@

ARG SYNAPSE_VERSION=latest

FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.
# first of all, we create a base image with a postgres server and database,
# which we can copy into the target image. For repeated rebuilds, this is
# much faster than apt installing postgres each time.
#
# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).

# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
FROM postgres:13-bullseye AS postgres_base
# initialise the database cluster in /var/lib/postgresql
RUN gosu postgres initdb --locale=C --encoding=UTF-8 --auth-host password

# Configure a password and create a database for Synapse
RUN echo "ALTER USER postgres PASSWORD 'somesecret'" | gosu postgres postgres --single
RUN echo "CREATE DATABASE synapse" | gosu postgres postgres --single

# now build the final image, based on the Synapse image.

FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# copy the postgres installation over from the image we built above
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /var/lib/postgresql /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

# Extend the shared homeserver config to disable rate-limiting,
# set Complement's static shared secret, enable registration, amongst other
# tweaks to get Synapse ready for testing.

@@ -57,7 +57,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
        federation_reader, \
        federation_sender, \
        synchrotron, \
        client_reader, \
        appservice, \
        pusher"


@@ -12,8 +12,6 @@ trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4
url_preview_enabled: true
url_preview_ip_range_blacklist: []

## Registration ##


@@ -39,7 +39,6 @@
# continue to work if so.

import os
import platform
import subprocess
import sys
from pathlib import Path
@@ -108,34 +107,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "client_reader": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client"],
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/publicRooms$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/joined_members$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/context/.*$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$",
            "^/_matrix/client/v1/rooms/.*/hierarchy$",
            "^/_matrix/client/(v1|unstable)/rooms/.*/relations/",
            "^/_matrix/client/v1/rooms/.*/threads$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
            "^/_matrix/client/versions$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
            "^/_matrix/client/(r0|v3|unstable)/register$",
            "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms",
            "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
    },
    "federation_reader": {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["federation"],
@@ -230,19 +201,24 @@ upstream {upstream_worker_type} {{

# Utility functions
def log(txt: str) -> None:
    """Log something to the stdout.

    Args:
        txt: The text to log.
    """
    print(txt)


def error(txt: str) -> NoReturn:
    print(txt, file=sys.stderr)
    """Log something and exit with an error code.

    Args:
        txt: The text to log in error.
    """
    log(txt)
    sys.exit(2)


def flush_buffers() -> None:
    sys.stdout.flush()
    sys.stderr.flush()


def convert(src: str, dst: str, **template_vars: object) -> None:
    """Generate a file from a template

@@ -323,7 +299,7 @@ def generate_base_homeserver_config() -> None:
    # start.py already does this for us, so just call that.
    # note that this script is copied in in the official, monolith dockerfile
    os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
    subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])


def generate_worker_files(
@@ -397,8 +373,8 @@ def generate_worker_files(
        # No workers, just the main process
        worker_types = []
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = [x.strip() for x in worker_types_env.split(",")]
        # Split type names by comma
        worker_types = worker_types_env.split(",")

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
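For context, the `worker_types_env` value split above comes from the `SYNAPSE_WORKER_TYPES` environment variable, so the whitespace-stripping side of this change is what lets a human-friendly invocation like the following sketch work (the volume path and image tag are placeholders):

```bash
# Each comma-separated entry is .strip()'ed, so spaces after commas are fine.
docker run -d --name synapse-workers \
    -e SYNAPSE_WORKER_TYPES="federation_inbound, federation_sender, synchrotron" \
    -v /path/to/synapse/data:/data \
    matrixdotorg/synapse-workers:latest
```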
@@ -417,6 +393,8 @@

    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
        worker_type = worker_type.strip()

        worker_config = WORKERS_CONFIG.get(worker_type)
        if worker_config:
            worker_config = worker_config.copy()
@@ -626,24 +604,14 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
        with open(mark_filepath, "w") as f:
            f.write("")

    # Lifted right out of start.py
    jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)

    if os.path.isfile(jemallocpath):
        environ["LD_PRELOAD"] = jemallocpath
    else:
        log("Could not find %s, will not use" % (jemallocpath,))

    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    log("Starting supervisord")
    flush_buffers()
    os.execle(
    os.execl(
        "/usr/local/bin/supervisord",
        "supervisord",
        "-c",
        "/etc/supervisor/supervisord.conf",
        environ,
    )



@@ -13,19 +13,14 @@ import jinja2

# Utility functions
def log(txt: str) -> None:
    print(txt)
    print(txt, file=sys.stderr)


def error(txt: str) -> NoReturn:
    print(txt, file=sys.stderr)
    log(txt)
    sys.exit(2)


def flush_buffers() -> None:
    sys.stdout.flush()
    sys.stderr.flush()


def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
    """Generate a file from a template

@@ -136,10 +131,10 @@ def generate_config_from_template(

    if ownership is not None:
        log(f"Setting ownership on /data to {ownership}")
        subprocess.run(["chown", "-R", ownership, "/data"], check=True)
        subprocess.check_output(["chown", "-R", ownership, "/data"])
        args = ["gosu", ownership] + args

    subprocess.run(args, check=True)
    subprocess.check_output(args)


def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
@@ -163,7 +158,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    if ownership is not None:
        # make sure that synapse has perms to write to the data dir.
        log(f"Setting ownership on {data_dir} to {ownership}")
        subprocess.run(["chown", ownership, data_dir], check=True)
        subprocess.check_output(["chown", ownership, data_dir])

    # create a suitable log config from our template
    log_config_file = "%s/%s.log.config" % (config_dir, server_name)
@@ -190,7 +185,6 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
    flush_buffers()
    os.execv(sys.executable, args)


@@ -273,10 +267,8 @@ running with 'migrate_config'. See the README for more details.
    args = [sys.executable] + args
    if ownership is not None:
        args = ["gosu", ownership] + args
        flush_buffers()
        os.execve("/usr/sbin/gosu", args, environ)
    else:
        flush_buffers()
        os.execve(sys.executable, args, environ)



@@ -69,7 +69,6 @@
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
  - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
- [Monthly Active Users](usage/administration/monthly_active_users.md)
- [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md)
- [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md)
- [Database Maintenance Tools](usage/administration/database_maintenance_tools.md)

@@ -5,9 +5,9 @@ non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts.

To authenticate yourself to the server, you will need both the shared secret
([`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret)
in the homeserver configuration), and a one-time nonce. If the registration
shared secret is not configured, this API is not enabled.
(`registration_shared_secret` in the homeserver configuration), and a
one-time nonce. If the registration shared secret is not configured, this API
is not enabled.

To fetch the nonce, you need to request one from the API:

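As a minimal sketch of that first step (the server address is a placeholder; the endpoint is the one this page documents):

```bash
# Request a one-time nonce; no authentication is needed for this call.
curl http://localhost:8008/_synapse/admin/v1/register
# => {"nonce": "..."}
```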
@@ -393,151 +393,6 @@ A response body like the following is returned:
}
```

# Room Messages API

The Room Messages admin API allows server admins to get all messages
sent to a room in a given timeframe. There are various parameters available
that allow for filtering and ordering the returned list. This API supports pagination.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api).

This endpoint mirrors the [Matrix Spec defined Messages API](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).

The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/messages
```

**Parameters**

The following path parameters are required:

* `room_id` - The ID of the room you wish to fetch messages from.

The following query parameters are available:

* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
  or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
* `to` - The token to stop returning events at.
* `limit` - The maximum number of events to return. Defaults to `10`.
* `filter` - A JSON RoomEventFilter to filter returned events with.
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
  this value to `b` will reverse the above sort order. Defaults to `f`.

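Putting those parameters together, a request might look like this sketch, with placeholders for the server address, admin access token, room ID and pagination token:

```bash
curl --header "Authorization: Bearer <admin_access_token>" \
    "http://localhost:8008/_synapse/admin/v1/rooms/<room_id>/messages?from=<from_token>&limit=10&dir=b"
```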
**Response**

The following fields are possible in the JSON response body:

* `chunk` - A list of room events. The order depends on the dir parameter.
  Note that an empty chunk does not necessarily imply that no more events are available. Clients should continue to paginate until no end property is returned.
* `end` - A token corresponding to the end of chunk. This token can be passed back to this endpoint to request further events.
  If no further events are available, this property is omitted from the response.
* `start` - A token corresponding to the start of chunk.
* `state` - A list of state events relevant to showing the chunk.

**Example**

For more details on each chunk, read [the Matrix specification](https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3roomsroomidmessages).

```json
{
  "chunk": [
    {
      "content": {
        "body": "This is an example text message",
        "format": "org.matrix.custom.html",
        "formatted_body": "<b>This is an example text message</b>",
        "msgtype": "m.text"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "type": "m.room.message",
      "unsigned": {
        "age": 1234
      }
    },
    {
      "content": {
        "name": "The room name"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "state_key": "",
      "type": "m.room.name",
      "unsigned": {
        "age": 1234
      }
    },
    {
      "content": {
        "body": "Gangnam Style",
        "info": {
          "duration": 2140786,
          "h": 320,
          "mimetype": "video/mp4",
          "size": 1563685,
          "thumbnail_info": {
            "h": 300,
            "mimetype": "image/jpeg",
            "size": 46144,
            "w": 300
          },
          "thumbnail_url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe",
          "w": 480
        },
        "msgtype": "m.video",
        "url": "mxc://example.org/a526eYUSFFxlgbQYZmo442"
      },
      "event_id": "$143273582443PhrSn:example.org",
      "origin_server_ts": 1432735824653,
      "room_id": "!636q39766251:example.com",
      "sender": "@example:example.org",
      "type": "m.room.message",
      "unsigned": {
        "age": 1234
      }
    }
  ],
  "end": "t47409-4357353_219380_26003_2265",
  "start": "t47429-4392820_219380_26003_2265"
}
```

# Room Timestamp to Event API

The Room Timestamp to Event API endpoint fetches the `event_id` of the closest event to the given
timestamp (`ts` query parameter) in the given direction (`dir` query parameter).

Useful for cases like jump-to-date, so you can start paginating messages from
a given date in the archive.

The API is:
```
GET /_synapse/admin/v1/rooms/<room_id>/timestamp_to_event
```

**Parameters**

The following path parameters are required:

* `room_id` - The ID of the room you wish to check.

The following query parameters are available:

* `ts` - a timestamp in milliseconds where we will find the closest event in
  the given direction.
* `dir` - can be `f` or `b` to indicate forwards and backwards in time from the
  given timestamp. Defaults to `f`.

**Response**

* `event_id` - The ID of the event closest to the given timestamp.
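
As a sketch of how a jump-to-date feature might call this endpoint (homeserver URL, token and room ID are again placeholders):

```python
import json
import urllib.parse
import urllib.request

BASE_URL = "http://localhost:8008"        # placeholder homeserver
ACCESS_TOKEN = "<admin access token>"     # placeholder token
ROOM_ID = "!636q39766251:example.com"     # placeholder room

# Find the first event at or after midnight on 2022-09-01 (UTC), going forwards.
ts_ms = 1661990400000
query = urllib.parse.urlencode({"ts": str(ts_ms), "dir": "f"})
url = (
    f"{BASE_URL}/_synapse/admin/v1/rooms/"
    f"{urllib.parse.quote(ROOM_ID)}/timestamp_to_event?{query}"
)
request = urllib.request.Request(url, headers={"Authorization": f"Bearer {ACCESS_TOKEN}"})
with urllib.request.urlopen(request) as response:
    print(json.load(response)["event_id"])
```
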
# Block Room API
The Block Room admin API allows server admins to block and unblock rooms,
and query to see if a given room is blocked.

@@ -42,7 +42,6 @@ It returns a JSON body like the following:
    "appservice_id": null,
    "consent_server_notice_sent": null,
    "consent_version": null,
    "consent_ts": null,
    "external_ids": [
        {
            "auth_provider": "<provider1>",
@@ -365,7 +364,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove the user's creation (registration) timestamp
- [Remove rate limit overrides](#override-ratelimiting-for-users)
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)

## Reset password

@@ -1155,41 +1153,3 @@ GET /_synapse/admin/v1/username_available?username=$localpart

The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.

### Find a user based on their ID in an auth provider

The API is:

```
GET /_synapse/admin/v1/auth_providers/$provider/users/$external_id
```

When a user matches the given ID for the given provider, an HTTP code `200` with a response body like the following is returned:

```json
{
    "user_id": "@hello:example.org"
}
```

**Parameters**

The following parameters should be set in the URL:

- `provider` - The ID of the authentication provider, as advertised by the [`GET /_matrix/client/v3/login`](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3login) API in the `m.login.sso` authentication method.
- `external_id` - The user ID from the authentication provider. Usually corresponds to the `sub` claim for OIDC providers, or to the `uid` attestation for SAML2 providers.

The `external_id` may have characters that are not URL-safe (typically `/`, `:` or `@`), so it is advised to URL-encode those parameters.
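
For example, a caller might build the request URL with Python's standard library, quoting both path parameters (the values below are hypothetical):

```python
from urllib.parse import quote

provider = "oidc-example"                 # hypothetical provider ID
external_id = "user/with:unsafe@chars"    # hypothetical external ID

# quote() with safe="" also encodes "/" so the value stays a single path segment.
url = (
    "/_synapse/admin/v1/auth_providers/"
    f"{quote(provider, safe='')}/users/{quote(external_id, safe='')}"
)
print(url)
```
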
**Errors**

Returns a `404` HTTP status code if no user was found, with a response body like this:

```json
{
    "errcode": "M_NOT_FOUND",
    "error": "User not found"
}
```

_Added in Synapse 1.68.0._

@@ -34,45 +34,13 @@ the process of indexing it).
## Chain Cover Index

Synapse computes auth chain differences by pre-computing a "chain cover" index
for the auth chain in a room, allowing us to efficiently make reachability queries
like "is event `A` in the auth chain of event `B`?". We could do this with an index
that tracks all pairs `(A, B)` such that `A` is in the auth chain of `B`. However, this
would be prohibitively large, scaling poorly as the room accumulates more state
events.
for the auth chain in a room, allowing efficient reachability queries like "is
event A in the auth chain of event B". This is done by assigning every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
is in the auth chain of `B`) if and only if either:

Instead, we break down the graph into *chains*. A chain is a subset of a DAG
with the following property: for any pair of events `E` and `F` in the chain,
the chain contains a path `E -> F` or a path `F -> E`. This forces a chain to be
linear (without forks), e.g. `E -> F -> G -> ... -> H`. Each event in the chain
is given a *sequence number* local to that chain. The oldest event `E` in the
chain has sequence number 1. If `E` has a child `F` in the chain, then `F` has
sequence number 2. If `E` has a grandchild `G` in the chain, then `G` has
sequence number 3; and so on.

Synapse ensures that each persisted event belongs to exactly one chain, and
tracks how the chains are connected to one another. This allows us to
efficiently answer reachability queries. Doing so uses less storage than
tracking reachability on an event-by-event basis, particularly when we have
fewer and longer chains. See

> Jagadish, H. (1990). [A compression technique to materialize transitive closure](https://doi.org/10.1145/99935.99944).
> *ACM Transactions on Database Systems (TODS)*, 15*(4)*, 558-598.

for the original idea or

> Y. Chen, Y. Chen, [An efficient algorithm for answering graph
> reachability queries](https://doi.org/10.1109/ICDE.2008.4497498),
> in: 2008 IEEE 24th International Conference on Data Engineering, April 2008,
> pp. 893–902. (PDF available via [Google Scholar](https://scholar.google.com/scholar?q=Y.%20Chen,%20Y.%20Chen,%20An%20efficient%20algorithm%20for%20answering%20graph%20reachability%20queries,%20in:%202008%20IEEE%2024th%20International%20Conference%20on%20Data%20Engineering,%20April%202008,%20pp.%20893902.).)

for a more modern take.

In practical terms, the chain cover assigns every event a
*chain ID* and *sequence number* (e.g. `(5,3)`), and maintains a map of *links*
between events in chains (e.g. `(5,3) -> (2,4)`) such that `A` is reachable by `B`
(i.e. `A` is in the auth chain of `B`) if and only if either:

1. `A` and `B` have the same chain ID and `A`'s sequence number is less than `B`'s
1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
   sequence number; or
2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
   `L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
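
As an illustrative sketch of that two-condition test (a toy in-memory structure, not Synapse's actual implementation — in Synapse the chains and links live in database tables):

```python
# Each event is assigned (chain_id, seq_no). links[(b_chain, a_chain)] holds
# (start_seq_no, end_seq_no) pairs meaning: events in b_chain at start_seq_no
# or later reach events in a_chain at end_seq_no or earlier.
Chain = tuple[int, int]  # (chain_id, sequence_number)

links: dict[tuple[int, int], list[tuple[int, int]]] = {
    (5, 2): [(3, 4)],  # the link (5,3) -> (2,4) from the text above
}

def in_auth_chain(a: Chain, b: Chain) -> bool:
    """Is event A in the auth chain of event B?"""
    a_chain, a_seq = a
    b_chain, b_seq = b
    # Condition 1: same chain, and A comes earlier.
    if a_chain == b_chain:
        return a_seq < b_seq
    # Condition 2: a stored link L with L.start_seq_no <= B.seq_no
    # and A.seq_no <= L.end_seq_no.
    return any(
        start <= b_seq and a_seq <= end
        for start, end in links.get((b_chain, a_chain), [])
    )

assert in_auth_chain((2, 4), (5, 3))   # via the link (5,3) -> (2,4)
assert in_auth_chain((5, 1), (5, 3))   # same chain, earlier sequence number
```
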
@@ -81,9 +49,8 @@ There are actually two potential implementations, one where we store links from
each chain to every other reachable chain (the transitive closure of the links
graph), and one where we remove redundant links (the transitive reduction of the
links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
would not be stored. Synapse uses the former implementation so that it doesn't
need to recurse to test reachability between chains. This trades off extra storage
in order to save CPU cycles and DB queries.
would not be stored. Synapse uses the former implementation so that it doesn't
need to recurse to test reachability between chains.

### Example


@@ -1,9 +1,9 @@
Deprecation Policy for Platform Dependencies
============================================

Synapse has a number of platform dependencies, including Python, Rust,
PostgreSQL and SQLite. This document outlines the policy towards which versions
we support, and when we drop support for versions in the future.
Synapse has a number of platform dependencies, including Python and PostgreSQL.
This document outlines the policy towards which versions we support, and when we
drop support for versions in the future.


Policy
@@ -17,14 +17,6 @@ Details on the upstream support life cycles for Python and PostgreSQL are
documented at [https://endoflife.date/python](https://endoflife.date/python) and
[https://endoflife.date/postgresql](https://endoflife.date/postgresql).

A Rust compiler is required to build Synapse from source. For any given release
the minimum required version may be bumped up to a recent Rust version, and so
people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/buster/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).

Context
-------
@@ -39,15 +31,3 @@ long process.
By following the upstream support life cycles Synapse can ensure that its
dependencies continue to get security patches, while not requiring system admins
to constantly update their platform dependencies to the latest versions.

For Rust, the situation is a bit different given that a) the Rust foundation
does not generally support older Rust versions, and b) the library ecosystem
generally bumps its minimum supported Rust versions frequently. In general, the
Synapse team will try to avoid updating the dependency on Rust to the absolute
latest version, but introducing a formal policy is hard given the constraints of
the ecosystem.

On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.
@@ -28,9 +28,6 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver

For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).

A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).


# 3. Get the source.

@@ -65,8 +62,6 @@ pipx install poetry
but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
for other installation methods.

Synapse requires Poetry version 1.2.0 or later.

Next, open a terminal and install dependencies as follows:

```sh
@@ -117,11 +112,6 @@ Some documentation also exists in [Synapse's GitHub
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.

When changes are made to any Rust code then you must call either `poetry install`
or `maturin develop` (if installed) to rebuild the Rust code. Using [`maturin`](https://github.com/PyO3/maturin)
is quicker than `poetry install`, so is recommended when making frequent
changes to the Rust code.


# 8. Test, test, test!
<a name="test-test-test"></a>
@@ -167,12 +157,6 @@ was broken. They are slower than the linters but will typically catch more error
poetry run trial tests
```

You can run unit tests in parallel by specifying the `-jX` argument to `trial`, where `X` is the number of parallel runners you want. To use 4 CPU cores, you would run them like:

```sh
poetry run trial -j4 tests
```

If you wish to only run *some* unit tests, you may specify
another module instead of `tests` - or a test class or a method:

@@ -209,7 +193,7 @@ The database file can then be inspected with:
sqlite3 _trial_temp/test.db
```

Note that the database file is cleared at the beginning of each test run. Thus it
will always only contain the data generated by the *last run test*. Though generally
when debugging, one is only running a single test anyway.

@@ -333,7 +317,7 @@ SYNAPSE_TEST_LOG_LEVEL=DEBUG COMPLEMENT_DIR=../complement ./scripts-dev/compleme
### Prettier formatting with `gotestfmt`

If you want to format the output of the tests the same way as it looks in CI,
install [gotestfmt](https://github.com/GoTestTools/gotestfmt).
install [gotestfmt](https://github.com/haveyoudebuggedit/gotestfmt).

You can then use this incantation to format the tests appropriately:

@@ -390,7 +374,7 @@ This file will become part of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
release, so the content of the file should be a short description of your
change in the same style as the rest of the changelog. The file can contain Markdown
formatting, and must end with a full stop (.) or an exclamation mark (!) for
formatting, and should end with a full stop (.) or an exclamation mark (!) for
consistency.

Adding credits to the changelog is encouraged; we value your

@@ -191,28 +191,3 @@ There are three separate aspects to this:
   flavour will be accepted by SQLite 3.22, but will give a column whose
   default value is the **string** `"FALSE"` - which, when cast back to a boolean
   in Python, evaluates to `True`.
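
A quick illustration of that last pitfall:

```python
# Any non-empty string is truthy in Python, so the string default "FALSE"
# round-trips to True when naively cast back to a boolean.
assert bool("FALSE") is True
```
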

## `event_id` global uniqueness

`event_id`s can be considered globally unique, although there has been a lot of
debate on this topic in places like
[MSC2779](https://github.com/matrix-org/matrix-spec-proposals/issues/2779) and
[MSC2848](https://github.com/matrix-org/matrix-spec-proposals/pull/2848), neither
of which has been resolved yet (as of 2022-09-01). There are several places in Synapse
and even in the Matrix APIs like [`GET
/_matrix/federation/v1/event/{eventId}`](https://spec.matrix.org/v1.1/server-server-api/#get_matrixfederationv1eventeventid)
where we assume that event IDs are globally unique.

When scoping `event_id` in a database schema, it is often nice to accompany it
with `room_id` (`PRIMARY KEY (room_id, event_id)` and a `FOREIGN KEY(room_id)
REFERENCES rooms(room_id)`) which makes flexible lookups easy. For example it
makes it very easy to find and clean up everything in a room when it needs to be
purged (no need for a sub-`SELECT` query or a join from the `events` table).
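
As a sketch of that scoping pattern (using SQLite via the standard library purely for illustration; the table and column names beyond `room_id`/`event_id` are hypothetical):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("CREATE TABLE rooms (room_id TEXT PRIMARY KEY)")
conn.execute(
    """
    CREATE TABLE example_event_data (
        room_id TEXT NOT NULL,
        event_id TEXT NOT NULL,
        data TEXT,
        PRIMARY KEY (room_id, event_id),
        FOREIGN KEY (room_id) REFERENCES rooms(room_id)
    )
    """
)
# Purging a room is then a single indexed delete; no sub-SELECT or join needed.
conn.execute("DELETE FROM example_event_data WHERE room_id = ?", ("!r:example.org",))
```
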

A note on collisions: In room versions `1` and `2` it's possible to end up with
two events with the same `event_id` (in the same or different rooms). After room
version `3`, that can only happen with a hash collision, which we basically hope
will never happen (SHA256 has an enormous key space).


@@ -126,23 +126,6 @@ context of poetry's venv, without having to run `poetry shell` beforehand.
poetry install --extras all --remove-untracked
```

## ...delete everything and start over from scratch?

```shell
# Stop the current virtualenv if active
$ deactivate

# Remove all of the files from the current environment.
# Don't worry, even though it says "all", this will only
# remove the Poetry virtualenvs for the current project.
$ poetry env remove --all

# Reactivate Poetry shell to create the virtualenv again
$ poetry shell
# Install everything again
$ poetry install --extras all
```

## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.
@@ -260,11 +243,14 @@ doesn't require poetry. (It's what we use in CI too). However, you could try

## Check the version of poetry with `poetry --version`.

The minimum version of poetry supported by Synapse is 1.2.
At the time of writing, the 1.2 series is beta only. We have seen some examples
where the lockfiles generated by 1.2 prereleases aren't interpreted correctly
by poetry 1.1.x. For now, use poetry 1.1.14, which includes a critical
[change](https://github.com/python-poetry/poetry/pull/5973) needed to remain
[compatible with PyPI](https://github.com/pypi/warehouse/pull/11775).

It can also be useful to check the version of `poetry-core` in use. If you've
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep
poetry-core`.
installed `poetry` with `pipx`, try `pipx runpip poetry list | grep poetry-core`.

## Clear caches: `poetry cache clear --all pypi`.

@@ -273,16 +259,6 @@ from PyPI. (This is what makes poetry seem slow when doing the first
`poetry install`.) Try `poetry cache list` and `poetry cache clear --all
<name of cache>` to see if that fixes things.

## Remove outdated egg-info

Delete the `matrix_synapse.egg-info/` directory from the root of your Synapse
install.

This stores some cached information about dependencies and often conflicts with
letting Poetry do the right thing.


## Try `--verbose` or `--dry-run` arguments.

These are sometimes useful for seeing what poetry's internal logic is.

@@ -7,30 +7,17 @@

1. Enable Synapse metrics:

   In `homeserver.yaml`, make sure `enable_metrics` is
   set to `True`.

1. Enable the `/_synapse/metrics` Synapse endpoint that Prometheus uses to
   collect data:

   There are two methods of enabling the metrics endpoint in Synapse.
   There are two methods of enabling metrics in Synapse.

   The first serves the metrics as a part of the usual web server and
   can be enabled by adding the `metrics` resource to the existing
   listener as such, as in this example:
   can be enabled by adding the "metrics" resource to the existing
   listener as such:

   ```yaml
   listeners:
     - port: 8008
       tls: false
       type: http
       x_forwarded: true
       bind_addresses: ['::1', '127.0.0.1']

       resources:
         # added "metrics" in this line
         - names: [client, federation, metrics]
           compress: false
       resources:
         - names:
           - client
           - metrics
   ```

   This provides a simple way of adding metrics to your Synapse
@@ -44,26 +31,19 @@
   to just internal networks easier. The served metrics are available
   over HTTP only, and will be available at `/_synapse/metrics`.

   Add a new listener to homeserver.yaml as in this example:
   Add a new listener to homeserver.yaml:

   ```yaml
   listeners:
     - port: 8008
       tls: false
       type: http
       x_forwarded: true
       bind_addresses: ['::1', '127.0.0.1']

       resources:
         - names: [client, federation]
           compress: false

     # beginning of the new metrics listener
     - port: 9000
       type: metrics
       bind_addresses: ['::1', '127.0.0.1']
   listeners:
     - type: metrics
       port: 9000
       bind_addresses:
         - '0.0.0.0'
   ```

   For both options, you will need to ensure that `enable_metrics` is
   set to `True`.

1. Restart Synapse.

1. Add a Prometheus target for Synapse.
@@ -152,8 +132,6 @@ Synapse 1.2 updates the Prometheus metrics to match the naming
convention of the upstream `prometheus_client`. The old names are
considered deprecated and will be removed in a future version of
Synapse.
**The old names will be disabled by default in Synapse v1.71.0 and removed
altogether in Synapse v1.73.0.**

| New Name | Old Name |
| ---------------------------------------------------------------------------- | ---------------------------------------------------------------------- |
@@ -165,13 +143,6 @@ altogether in Synapse v1.73.0.**
| synapse_federation_client_events_processed_total | synapse_federation_client_events_processed |
| synapse_event_processing_loop_count_total | synapse_event_processing_loop_count |
| synapse_event_processing_loop_room_count_total | synapse_event_processing_loop_room_count |
| synapse_util_caches_cache_hits | synapse_util_caches_cache:hits |
| synapse_util_caches_cache_size | synapse_util_caches_cache:size |
| synapse_util_caches_cache_evicted_size | synapse_util_caches_cache:evicted_size |
| synapse_util_caches_cache | synapse_util_caches_cache:total |
| synapse_util_caches_response_cache_size | synapse_util_caches_response_cache:size |
| synapse_util_caches_response_cache_hits | synapse_util_caches_response_cache:hits |
| synapse_util_caches_response_cache_evicted_size | synapse_util_caches_response_cache:evicted_size |
| synapse_util_metrics_block_count_total | synapse_util_metrics_block_count |
| synapse_util_metrics_block_time_seconds_total | synapse_util_metrics_block_time_seconds |
| synapse_util_metrics_block_ru_utime_seconds_total | synapse_util_metrics_block_ru_utime_seconds |
@@ -287,7 +258,7 @@ Standard Metric Names

As of synapse version 0.18.2, the format of the process-wide metrics has
been changed to fit prometheus standard naming conventions. Additionally
the units have been changed to seconds, from milliseconds.
the units have been changed to seconds, from miliseconds.

| New name | Old name |
| ---------------------------------------- | --------------------------------- |

@@ -174,9 +174,7 @@ oidc_providers:

1. Create a regular web application for Synapse
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
3. Add a rule with any name to add the `preferred_username` claim.
   (See https://auth0.com/docs/customize/rules/create-rules for more information on how to create rules.)

3. Add a rule to add the `preferred_username` claim.
   <details>
    <summary>Code sample</summary>

@@ -336,12 +334,11 @@ oidc_providers:
     issuer: "https://accounts.google.com/"
     client_id: "your-client-id" # TO BE FILLED
     client_secret: "your-client-secret" # TO BE FILLED
     scopes: ["openid", "profile", "email"] # email is optional, read below
     scopes: ["openid", "profile"]
     user_mapping_provider:
       config:
         localpart_template: "{{ user.given_name|lower }}"
         display_name_template: "{{ user.name }}"
         email_template: "{{ user.email }}" # needs "email" in scopes above
   ```
4. Back in the Google console, add this Authorized redirect URI: `[synapse
   public baseurl]/_synapse/client/oidc/callback`.
@@ -424,7 +421,7 @@ Synapse config:
    user_mapping_provider:
      config:
        display_name_template: "{{ user.name }}"
        email_template: "{{ user.email }}"
        email_template: "{{ '{{ user.email }}' }}"
```

Relevant documents:

@@ -45,10 +45,6 @@ listens to traffic on localhost. (Do not change `bind_addresses` to `127.0.0.1`
when using a containerized Synapse, as that will prevent it from responding
to proxied traffic.)

Optionally, you can also set
[`request_id_header`](../usage/configuration/config_documentation.md#listeners)
so that the server extracts and re-uses the same request ID format that the
reverse proxy is using.

## Reverse-proxy configuration examples


@@ -181,7 +181,7 @@ doas pkg_add synapse
#### NixOS

Robin Lambertz has packaged Synapse for NixOS at:
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/matrix/synapse.nix>
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>


### Installing as a Python module from PyPI
@@ -196,10 +196,6 @@ System requirements:
- Python 3.7 or later, up to Python 3.10.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

If building on an uncommon architecture for which pre-built wheels are
unavailable, you will need to have a recent Rust compiler installed. The easiest
way of installing the latest version is to use [rustup](https://rustup.rs/).

To install the Synapse homeserver run:

```sh
@@ -303,10 +299,9 @@ You may need to install the latest Xcode developer tools:
xcode-select --install
```

On ARM-based Macs you may need to install libjpeg and libpq.
You can use Homebrew (https://brew.sh):
On ARM-based Macs you may need to explicitly install libjpeg, which is a Pillow dependency. You can use Homebrew (https://brew.sh):
```sh
brew install jpeg libpq
brew install jpeg
```

On macOS Catalina (10.15) you may need to explicitly install OpenSSL
@@ -511,13 +506,9 @@ email will be disabled.

### Registering a user

One way to create a new user is to do so from a client like
[Element](https://element.io/). This requires registration to be enabled via
the
[`enable_registration`](../usage/configuration/config_documentation.md#enable_registration)
setting.
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).

Alternatively, you can create new users from the command line. This can be done as follows:
Alternatively, you can do so from the command line. This can be done as follows:

1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
   installed via a prebuilt package, `register_new_matrix_user` should already be
@@ -529,7 +520,7 @@ Alternatively, you can create new users from the command line. This can be done
   ```
2. Run the following command:
   ```sh
   register_new_matrix_user -c homeserver.yaml
   register_new_matrix_user -c homeserver.yaml http://localhost:8008
   ```

This will prompt you to add details for the new user, and will then connect to
@@ -542,13 +533,12 @@ Make admin [no]:
Success!
```

This process uses a setting
[`registration_shared_secret`](../usage/configuration/config_documentation.md#registration_shared_secret),
which is shared between Synapse itself and the `register_new_matrix_user`
script. It doesn't matter what it is (a random value is generated by
`--generate-config`), but it should be kept secret, as anyone with knowledge of
it can register users, including admin accounts, on your server even if
`enable_registration` is `false`.
This process uses a setting `registration_shared_secret` in
`homeserver.yaml`, which is shared between Synapse itself and the
`register_new_matrix_user` script. It doesn't matter what it is (a random
value is generated by `--generate-config`), but it should be kept secret, as
anyone with knowledge of it can register users, including admin accounts,
on your server even if `enable_registration` is `false`.

### Setting up a TURN server


@@ -73,8 +73,8 @@ A custom mapping provider must specify the following methods:
* `async def map_user_attributes(self, userinfo, token, failures)`
  - This method must be async.
  - Arguments:
    - `userinfo` - An [`authlib.oidc.core.claims.UserInfo`](https://docs.authlib.org/en/latest/specs/oidc.html#authlib.oidc.core.UserInfo)
      object to extract user information from.
    - `userinfo` - An `authlib.oidc.core.claims.UserInfo` object to extract user
      information from.
    - `token` - A dictionary which includes information necessary to make
      further requests to the OpenID provider.
    - `failures` - An `int` that represents the amount of times the returned
@@ -91,13 +91,7 @@ A custom mapping provider must specify the following methods:
      `None`, the user is prompted to pick their own username. This is only used
      during a user's first login. Once a localpart has been associated with a
      remote user ID (see `get_remote_user_id`) it cannot be updated.
    - `confirm_localpart`: A boolean. If set to `True`, when a `localpart`
      string is returned from this method, Synapse will prompt the user to
      either accept this localpart or pick their own username. Otherwise this
      option has no effect. If omitted, defaults to `False`.
    - `display_name`: An optional string, the display name for the user.
    - `emails`: A list of strings, the email address(es) to associate with
      this user. If omitted, defaults to an empty list.
    - `displayname`: An optional string, the display name for the user.
* `async def get_extra_attributes(self, userinfo, token)`
  - This method must be async.
  - Arguments:
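
Putting those pieces together, a minimal `map_user_attributes` might look like the following sketch. The claim names used here are illustrative assumptions, and the surrounding mapping provider class is omitted; refer to the full mapping provider documentation for the complete interface.

```python
from typing import Any, Dict

async def map_user_attributes(
    self, userinfo: Any, token: Dict[str, str], failures: int
) -> Dict[str, Any]:
    # Derive a localpart from an illustrative claim; append the failure
    # counter so a retry after a collision produces a different name.
    localpart = userinfo["preferred_username"] + (str(failures) if failures else "")
    return {
        "localpart": localpart,
        "confirm_localpart": False,
        "display_name": userinfo.get("name"),
        "emails": [userinfo["email"]] if userinfo.get("email") else [],
    }
```
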
@@ -1,8 +0,0 @@
worker_app: synapse.app.federation_sender
worker_name: federation_sender1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/federation-sender-log.yaml
@@ -5,8 +5,6 @@ worker_name: generic_worker1
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_main_http_uri: http://localhost:8008/

worker_listeners:
  - type: http
    port: 8083

@@ -1,14 +0,0 @@
worker_app: synapse.app.media_repository
worker_name: media_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8085
    resources:
      - names: [media]

worker_log_config: /etc/matrix-synapse/media-worker-log.yaml
@@ -1,8 +0,0 @@
worker_app: synapse.app.pusher
worker_name: pusher_worker1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/pusher-worker-log.yaml
docs/upgrade.md
@@ -15,8 +15,9 @@ this document.
The website <https://endoflife.date> also offers convenient
summaries.

- If Synapse was installed using [prebuilt packages](setup/installation.md#prebuilt-packages),
  you will need to follow the normal process for upgrading those packages.
- If Synapse was installed using [prebuilt
  packages](setup/installation.md#prebuilt-packages), you will need to follow the
  normal process for upgrading those packages.

- If Synapse was installed using pip then upgrade to the latest
  version by running:
@@ -88,157 +89,6 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.69.0

## Changes to the receipts replication streams

Synapse now includes information indicating if a receipt applies to a thread when
replicating it to other workers. This is a forwards- and backwards-incompatible
change: v1.68 and earlier workers cannot process receipts replicated by v1.69 workers, and
vice versa.

Once all workers are upgraded to v1.69 (or downgraded to v1.68), receipts
replication will resume as normal.


## Deprecation of legacy Prometheus metric names

In current versions of Synapse, some Prometheus metrics are emitted under two different names,
with one of the names being older but non-compliant with OpenMetrics and Prometheus conventions
and one of the names being newer but compliant.

Synapse v1.71.0 will turn the old metric names off *by default*.
For administrators that still rely on them and have not had a chance to update their
uses of the metrics, it's possible to specify `enable_legacy_metrics: true` in
the configuration to re-enable them temporarily.

Synapse v1.73.0 will **remove legacy metric names altogether** and it will no longer
be possible to re-enable them.

The Grafana dashboard, Prometheus recording rules and Prometheus Consoles included
in the `contrib` directory in the Synapse repository have been updated to no longer
rely on the legacy names. These can be used on a current version of Synapse
because current versions of Synapse emit both old and new names.

You may need to update your alerting rules or any other rules that depend on
the names of Prometheus metrics.
If you want to test your changes before legacy names are disabled by default,
you may specify `enable_legacy_metrics: false` in your homeserver configuration.

A list of affected metrics is available on the [Metrics How-to page](https://matrix-org.github.io/synapse/v1.69/metrics-howto.html?highlight=metrics%20deprecated#renaming-of-metrics--deprecation-of-old-names-in-12).


## Deprecation of the `generate_short_term_login_token` module API method

The following method of the module API has been deprecated, and is scheduled to
be removed in v1.71.0:

```python
def generate_short_term_login_token(
    self,
    user_id: str,
    duration_in_ms: int = (2 * 60 * 1000),
    auth_provider_id: str = "",
    auth_provider_session_id: Optional[str] = None,
) -> str:
    ...
```

It has been replaced by an asynchronous equivalent:

```python
async def create_login_token(
    self,
    user_id: str,
    duration_in_ms: int = (2 * 60 * 1000),
    auth_provider_id: Optional[str] = None,
    auth_provider_session_id: Optional[str] = None,
) -> str:
    ...
```

Synapse will log a warning when a module uses the deprecated method, to help
administrators find modules using it.
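
In practice, a module migrating to the replacement only needs to await the new method where it previously made a synchronous call; roughly (the surrounding module code is hypothetical):

```python
from synapse.module_api import ModuleApi

async def issue_token(api: ModuleApi, user_id: str) -> str:
    # Before (deprecated, synchronous):
    #     return api.generate_short_term_login_token(user_id)
    # After: the asynchronous replacement must be awaited.
    return await api.create_login_token(user_id)
```
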
|
||||
# Upgrading to v1.68.0
|
||||
|
||||
Two changes announced in the upgrade notes for v1.67.0 have now landed in v1.68.0.
|
||||
|
||||
## SQLite version requirement
|
||||
|
||||
Synapse now requires a SQLite version of 3.27.0 or higher if SQLite is configured as
|
||||
Synapse's database.
|
||||
|
||||
Installations using
|
||||
|
||||
- Docker images [from `matrixdotorg`](https://hub.docker.com/r/matrixdotorg/synapse),
|
||||
- Debian packages [from Matrix.org](https://packages.matrix.org/), or
|
||||
- a PostgreSQL database
|
||||
|
||||
are not affected.
|
||||
|
||||
## Rust requirement when building from source.
|
||||
|
||||
Building from a source checkout of Synapse now requires a recent Rust compiler
|
||||
(currently Rust 1.58.1, but see also the
|
||||
[Platform Dependency Policy](https://matrix-org.github.io/synapse/latest/deprecation_policy.html)).
|
||||
|
||||
Installations using
|
||||
|
||||
- Docker images [from `matrixdotorg`](https://hub.docker.com/r/matrixdotorg/synapse),
|
||||
- Debian packages [from Matrix.org](https://packages.matrix.org/), or
|
||||
- PyPI wheels via `pip install matrix-synapse` (on supported platforms and architectures)
|
||||
|
||||
will not be affected.
|
||||
|
||||
# Upgrading to v1.67.0
|
||||
|
||||
## Direct TCP replication is no longer supported: migrate to Redis
|
||||
|
||||
Redis support was added in v1.13.0 with it becoming the recommended method in
|
||||
v1.18.0. It replaced the old direct TCP connections (which was deprecated as of
|
||||
v1.18.0) to the main process. With Redis, rather than all the workers connecting
|
||||
to the main process, all the workers and the main process connect to Redis,
|
||||
which relays replication commands between processes. This can give a significant
|
||||
CPU saving on the main process and is a prerequisite for upcoming
|
||||
performance improvements.
|
||||
|
||||
To migrate to Redis add the [`redis` config](./workers.md#shared-configuration),
|
||||
and remove the TCP `replication` listener from config of the master and
|
||||
`worker_replication_port` from worker config. Note that a HTTP listener with a
|
||||
`replication` resource is still required.
|
||||
|
||||
## Minimum version of Poetry is now v1.2.0
|
||||
|
||||
The minimum supported version of poetry is now 1.2. This should only affect
|
||||
those installing from a source checkout.
|
||||
|
||||
## Rust requirement in the next release
|
||||
|
||||
From the next major release (v1.68.0) installing Synapse from a source checkout
|
||||
will require a recent Rust compiler. Those using packages or
|
||||
`pip install matrix-synapse` will not be affected.
|
||||
|
||||
The simplest way of installing Rust is via [rustup.rs](https://rustup.rs/)
|
||||
|
||||
## SQLite version requirement in the next release
|
||||
|
||||
From the next major release (v1.68.0) Synapse will require SQLite 3.27.0 or
|
||||
higher. Synapse v1.67.0 will be the last major release supporting SQLite
|
||||
versions 3.22 to 3.26.
|
||||
|
||||
Those using Docker images or Debian packages from Matrix.org will not be
|
||||
affected. If you have installed from source, you should check the version of
|
||||
SQLite used by Python with:
|
||||
|
||||
```shell
|
||||
python -c "import sqlite3; print(sqlite3.sqlite_version)"
|
||||
```
|
||||
|
||||
If this is too old, refer to your distribution for advice on upgrading.
|
||||
|
||||
|
||||
# Upgrading to v1.66.0
|
||||
|
||||
## Delegation of email validation no longer supported
|
||||
|
||||
@@ -1,84 +0,0 @@
# Monthly Active Users

Synapse can be configured to record the number of monthly active users (also referred to as MAU) on a given homeserver.
For clarity's sake, MAU only tracks local users.

Please note that the metrics recorded by the [Homeserver Usage Stats](../../usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
are calculated differently. The `monthly_active_users` from the usage stats does not take into account any
of the rules below, and counts any users who have made a request to the homeserver in the last 30 days.

See the [configuration manual](../../usage/configuration/config_documentation.md#limit_usage_by_mau) for details on how to configure MAU.

## Calculating active users

Individual user activity is measured in active days. If a user performs an action, the exact time of that action is then recorded. When
calculating the MAU figure, any users with a recorded action in the last 30 days are considered part of the cohort. Days are measured
as a rolling window from the current system time to 30 days ago.

So for example, if Synapse were to calculate the active users on the 15th July at 13:25, it would include any activity from 15th June 13:25 onwards.
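
In code terms, the rolling-window rule is simply a comparison against `now - 30 days`; a sketch:

```python
from datetime import datetime, timedelta

def is_monthly_active(last_active: datetime, now: datetime) -> bool:
    # Active if the user's most recent action falls inside the rolling
    # 30-day window ending at the current system time.
    return last_active >= now - timedelta(days=30)

# The example from the text: calculating at 13:25 on 15th July includes
# any activity from 13:25 on 15th June onwards.
now = datetime(2022, 7, 15, 13, 25)
assert is_monthly_active(datetime(2022, 6, 15, 13, 25), now)
assert not is_monthly_active(datetime(2022, 6, 15, 13, 24), now)
```
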
A user is **never** considered active if they are either:
- Part of the trial day cohort (described below)
- Owned by an application service.
  - Note: This **only** covers users that are part of an application service `namespaces.users` registration. The namespace
    must also be marked as `exclusive`.

Otherwise, any request to Synapse will mark the user as active. Please note that registration will not mark a user as active *unless*
they register with a 3pid that is included in the config field `mau_limits_reserved_threepids`.

The Prometheus metric for MAU is refreshed every 5 minutes.

Once an hour, Synapse checks to see if any users are inactive (with no activity timestamps within the last 30 days). These users
are removed from the active users cohort. If they then become active, they are immediately restored to the cohort.

It is important to note that **deactivated** users are not immediately removed from the pool of active users, but as these users won't
perform actions they will eventually be removed from the cohort.

### Trial days

If the config option `mau_trial_days` is set, a user must have been active this many days **after** registration to be counted as active. A user is in the
trial period if their registration timestamp (also known as the `creation_ts`) is less than `mau_trial_days` old.

As an example, if `mau_trial_days` is set to `3` and a user is active **after** 3 days (72 hours from registration time) then they will be counted as active.

The `mau_appservice_trial_days` config further extends this rule by applying different durations depending on the `appservice_id` of the user.
Users registered by an application service will be recorded with an `appservice_id` matching the `id` key in the registration file for that service.


## Limiting usage of the homeserver when the maximum MAU is reached

If both config options `limit_usage_by_mau` and `max_mau_value` are set, and the current MAU value exceeds the maximum value, the
homeserver will begin to block some actions.

Individual users matching **any** of the below criteria never have their actions blocked:
- Considered part of the cohort of MAU users.
- Considered part of the trial period.
- Registered as a `support` user.
- Application service users if `track_appservice_user_ips` is NOT set.

Please note that server admins are **not** exempt from blocking.

The following actions are blocked when the MAU limit is exceeded:
- Logging in
- Sending events
- Creating rooms
- Syncing

Registration is also blocked for all new signups *unless* the user is registering with a threepid included in the `mau_limits_reserved_threepids`
config value.

When a request is blocked, the response will have the `errcode` `M_RESOURCE_LIMIT_EXCEEDED`.

## Metrics

Synapse records several different prometheus metrics for MAU.

`synapse_admin_mau:current` records the current MAU figure for native (non-application-service) users.

`synapse_admin_mau:max` records the maximum MAU as dictated by the `max_mau_value` config value.

`synapse_admin_mau_current_mau_by_service` records the current MAU including application service users. The label `app_service` can be used
to filter by a specific service ID. This *also* includes non-application-service users under `app_service=native`.

`synapse_admin_mau:registered_reserved_users` records the number of users specified in `mau_limits_reserved_threepids` which have
registered accounts on the homeserver.
@@ -12,14 +12,14 @@ See the following for how to decode the dense data available from the default lo

| Part | Explanation |
| ----- | ------------ |
| AAAA | Timestamp request was logged (not received) |
| AAAA | Timestamp request was logged (not recieved) |
| BBBB | Logger name (`synapse.access.(http\|https).<tag>`, where 'tag' is defined in the `listeners` config section, normally the port) |
| CCCC | Line number in code |
| DDDD | Log Level |
| EEEE | Request Identifier (This identifier is shared by related log lines)|
| FFFF | Source IP (Or X-Forwarded-For if enabled) |
| GGGG | Server Port |
| HHHH | Federated Server or Local User making request (blank if unauthenticated or not supplied).<br/>If this is of the form `@aaa:example.com|@bbb:example.com`, then that means that `@aaa:example.com` is authenticated but they are controlling `@bbb:example.com`, e.g. if `aaa` is controlling `bbb` [via the admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html#login-as-a-user). |
| HHHH | Federated Server or Local User making request (blank if unauthenticated or not supplied) |
| IIII | Total Time to process the request |
| JJJJ | Time to send response over network once generated (this may be negative if the socket is closed before the response is generated)|
| KKKK | Userland CPU time |

@@ -179,7 +179,7 @@ This will tell other servers to send traffic to port 443 instead.

This option currently defaults to false.

See [Delegation of incoming federation traffic](../../delegate.md) for more
See https://matrix-org.github.io/synapse/latest/delegate.html for more
information.

Example configuration:
@@ -431,19 +431,12 @@ Sub-options for each listener include:

* `metrics`: (see the docs [here](../../metrics-howto.md)),

* `replication`: (deprecated as of Synapse 1.18, see the docs [here](../../workers.md)).

* `tls`: set to true to enable TLS for this listener. Will use the TLS key/cert specified in tls_private_key_path / tls_certificate_path.

* `x_forwarded`: Only valid for an 'http' listener. Set to true to use the X-Forwarded-For header as the client IP. Useful when Synapse is
  behind a [reverse-proxy](../../reverse_proxy.md).

* `request_id_header`: The header extracted from each incoming request that is
  used as the basis for the request ID. The request ID is used in
  [logs](../administration/request_log.md#request-log-format) and tracing to
  correlate and match up requests. When unset, Synapse will automatically
  generate sequential request IDs. This option is useful when Synapse is behind
  a [reverse-proxy](../../reverse_proxy.md).

  _Added in Synapse 1.68.0._
  behind a reverse-proxy.

* `resources`: Only valid for an 'http' listener. A list of resources to host
  on this port. Sub-options for each resource are:
@@ -602,8 +595,6 @@ server owner wants to limit to the number of monthly active users. When enabled
reached the server returns a `ResourceLimitError` with error type `Codes.RESOURCE_LIMIT_EXCEEDED`.
Defaults to false. If this is enabled, a value for `max_mau_value` must also be set.

See [Monthly Active Users](../administration/monthly_active_users.md) for details on how to configure MAU.

Example configuration:
```yaml
limit_usage_by_mau: true
@@ -1078,10 +1069,8 @@ Options related to caching.
---
### `event_cache_size`

The number of events to cache in memory. Defaults to 10K. Like other caches,
this is affected by `caches.global_factor` (see below).

Note that this option is not part of the `caches` section.
The number of events to cache in memory. Not affected by
`caches.global_factor` and is not part of the `caches` section. Defaults to 10K.

Example configuration:
```yaml
@@ -1139,7 +1128,7 @@ number of entries that can be stored.

* `cache_autotuning` and its sub-options `max_cache_memory_usage`, `target_cache_memory_usage`, and
  `min_cache_ttl` work in conjunction with each other to maintain a balance between cache memory
  usage and cache entry availability. You must be using [jemalloc](../administration/admin_faq.md#help-synapse-is-slow-and-eats-all-my-ramcpu)
  usage and cache entry availability. You must be using [jemalloc](https://github.com/matrix-org/synapse#help-synapse-is-slow-and-eats-all-my-ramcpu)
  to utilize this option, and all three of the options must be specified for this feature to work. This option
  defaults to off; enable it by providing values for the sub-options listed below. Please note that the feature will not work
  and may cause unstable behavior (such as excessive emptying of caches or exceptions) if all of the values are not provided.
@@ -1402,7 +1391,7 @@ This option specifies several limits for login:

  client is attempting to log into. Defaults to `per_second: 0.17`,
  `burst_count: 3`.

* `failed_attempts` ratelimits login requests based on the account the
* `failted_attempts` ratelimits login requests based on the account the
  client is attempting to log into, based on the amount of failed login
  attempts for this account. Defaults to `per_second: 0.17`, `burst_count: 3`.

@@ -1884,8 +1873,8 @@ See [here](../../CAPTCHA_SETUP.md) for full details on setting up captcha.
---
### `recaptcha_public_key`

This homeserver's ReCAPTCHA public key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is enabled.
This homeserver's ReCAPTCHA public key. Must be specified if `enable_registration_captcha` is
enabled.

Example configuration:
```yaml
@@ -1894,8 +1883,7 @@ recaptcha_public_key: "YOUR_PUBLIC_KEY"
---
### `recaptcha_private_key`

This homeserver's ReCAPTCHA private key. Must be specified if
[`enable_registration_captcha`](#enable_registration_captcha) is
This homeserver's ReCAPTCHA private key. Must be specified if `enable_registration_captcha` is
enabled.

Example configuration:
@@ -1905,11 +1893,9 @@ recaptcha_private_key: "YOUR_PRIVATE_KEY"
---
### `enable_registration_captcha`

Set to `true` to require users to complete a CAPTCHA test when registering an account.
Requires a valid ReCaptcha public/private key.
Defaults to `false`.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
Set to true to enable ReCaptcha checks when registering, preventing signup
unless a captcha is answered. Requires a valid ReCaptcha public/private key.
Defaults to false.

Example configuration:
```yaml
@@ -1985,21 +1971,9 @@ Registration can be rate-limited using the parameters in the [Ratelimiting](#rat
---
### `enable_registration`

Enable registration for new users. Defaults to `false`.

It is highly recommended that if you enable registration, you set one or more
of the following options, to avoid abuse of your server by "bots":

* [`enable_registration_captcha`](#enable_registration_captcha)
* [`registrations_require_3pid`](#registrations_require_3pid)
* [`registration_requires_token`](#registration_requires_token)

(In order to enable registration without any verification, you must also set
[`enable_registration_without_verification`](#enable_registration_without_verification).)

Note that even if this setting is disabled, new accounts can still be created
via the admin API if
[`registration_shared_secret`](#registration_shared_secret) is set.
Enable registration for new users. Defaults to false. It is highly recommended that if you enable registration,
you use either captcha, email, or token-based verification to verify that new users are not bots. In order to enable registration
without any verification, you must also set `enable_registration_without_verification` to true.

Example configuration:
```yaml
@@ -2007,21 +1981,88 @@ enable_registration: true
```
---
### `enable_registration_without_verification`

Enable registration without email or captcha verification. Note: this option is *not* recommended,
as registration without verification is a known vector for spam and abuse. Defaults to `false`. Has no effect
unless [`enable_registration`](#enable_registration) is also enabled.
as registration without verification is a known vector for spam and abuse. Defaults to false. Has no effect
unless `enable_registration` is also enabled.

Example configuration:
```yaml
enable_registration_without_verification: true
```
---
### `session_lifetime`

Time that a user's session remains valid for, after they log in.

Note that this is not currently compatible with guest logins.

Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
logged in.

By default, this is infinite.

Example configuration:
```yaml
session_lifetime: 24h
```
---
### `refreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is using refresh tokens.

For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).

Note that this only applies to clients which advertise support for refresh tokens.

Note also that this is calculated at login time and refresh time: changes are not applied to
existing sessions until they are refreshed.

By default, this is 5 minutes.

Example configuration:
```yaml
refreshable_access_token_lifetime: 10m
```
---
### `refresh_token_lifetime`

Time that a refresh token remains valid for (provided that it is not
exchanged for another one first).
This option can be used to automatically log out inactive sessions.
Please see the [manual](user_authentication/refresh_tokens.md) for more information.

Note also that this is calculated at login time and refresh time:
changes are not applied to existing sessions until they are refreshed.

By default, this is infinite.

Example configuration:
```yaml
refresh_token_lifetime: 24h
```
---
### `nonrefreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is NOT
using refresh tokens.

Please note that not all clients support refresh tokens, so setting
this to a short value may be inconvenient for some users who will
then be logged out frequently.

Note also that this is calculated at login time: changes are not applied
retrospectively to existing sessions for users that have already logged in.

By default, this is infinite.

Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```
---
### `registrations_require_3pid`

If this is set, users must provide all of the specified types of 3PID when registering an account.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
If this is set, the user must provide all of the specified types of 3PID when registering.

Example configuration:
```yaml
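# (example elided by the diff; a typical value, shown here for illustration,
# since email and msisdn are the two 3PID types:)
registrations_require_3pid:
  - email
  - msisdn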
@@ -2069,11 +2110,9 @@ enable_3pid_lookup: false

Require users to submit a token during registration.
Tokens can be managed using the admin [API](../administration/admin_api/registration_tokens.md).
Note that `enable_registration` must be set to true.
Disabling this option will not delete any tokens previously generated.
Defaults to `false`. Set to `true` to enable.

Note that [`enable_registration`](#enable_registration) must also be set to allow account registration.
Defaults to false. Set to true to enable.

Example configuration:
```yaml
@@ -2082,39 +2121,13 @@ registration_requires_token: true
---
### `registration_shared_secret`

If set, allows registration of standard or admin accounts by anyone who has the
shared secret, even if [`enable_registration`](#enable_registration) is not
set.

This is primarily intended for use with the `register_new_matrix_user` script
(see [Registering a user](../../setup/installation.md#registering-a-user));
however, the interface is [documented](../../admin_api/register_api.html).

See also [`registration_shared_secret_path`](#registration_shared_secret_path).
If set, allows registration of standard or admin accounts by anyone who
has the shared secret, even if registration is otherwise disabled.

Example configuration:
```yaml
registration_shared_secret: <PRIVATE STRING>
```

---
### `registration_shared_secret_path`

An alternative to [`registration_shared_secret`](#registration_shared_secret):
allows the shared secret to be specified in an external file.

The file should be a plain text file, containing only the shared secret.

If this file does not exist, Synapse will create a new shared
secret on startup and store it in this file.

Example configuration:
```yaml
registration_shared_secret_path: /path/to/secrets/file
```

_Added in Synapse 1.67.0._

---
### `bcrypt_rounds`

@@ -2229,9 +2242,6 @@ homeserver. If the room already exists, make certain it is a publicly joinable
room, i.e. the join rule of the room must be set to 'public'. You can find more options
relating to auto-joining rooms below.

As Spaces are just rooms under the hood, Space aliases may also be
used.

Example configuration:
```yaml
auto_join_rooms:
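  # (list items elided by the diff; typically one or more room aliases, e.g.:)
  - "#exampleroom:example.com"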
@@ -2243,7 +2253,7 @@ auto_join_rooms:

Where `auto_join_rooms` are specified, setting this flag ensures that
the rooms exist by creating them when the first user on the
homeserver registers. This option will not create Spaces.
homeserver registers.

By default the auto-created rooms are publicly joinable from any federated
server. Use the `autocreate_auto_join_rooms_federated` and
@@ -2261,7 +2271,7 @@ autocreate_auto_join_rooms: false
---
### `autocreate_auto_join_rooms_federated`

Whether the rooms listed in `auto_join_rooms` that are auto-created are available
Whether the rooms listen in `auto_join_rooms` that are auto-created are available
via federation. Only has an effect if `autocreate_auto_join_rooms` is true.

Note that whether a room is federated cannot be modified after
@@ -2348,79 +2358,6 @@ Example configuration:
```yaml
inhibit_user_in_use_error: true
```
---
## User session management
---
### `session_lifetime`

Time that a user's session remains valid for, after they log in.

Note that this is not currently compatible with guest logins.

Note also that this is calculated at login time: changes are not applied retrospectively to users who have already
logged in.

By default, this is infinite.

Example configuration:
```yaml
session_lifetime: 24h
```
---
### `refreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is using refresh tokens.

For more information about refresh tokens, please see the [manual](user_authentication/refresh_tokens.md).

Note that this only applies to clients which advertise support for refresh tokens.

Note also that this is calculated at login time and refresh time: changes are not applied to
existing sessions until they are refreshed.

By default, this is 5 minutes.

Example configuration:
```yaml
refreshable_access_token_lifetime: 10m
```
---
### `refresh_token_lifetime`

Time that a refresh token remains valid for (provided that it is not
exchanged for another one first).
This option can be used to automatically log-out inactive sessions.
Please see the manual for more information.

Note also that this is calculated at login time and refresh time:
changes are not applied to existing sessions until they are refreshed.

By default, this is infinite.

Example configuration:
```yaml
refresh_token_lifetime: 24h
```
---
### `nonrefreshable_access_token_lifetime`

Time that an access token remains valid for, if the session is NOT
using refresh tokens.

Please note that not all clients support refresh tokens, so setting
this to a short value may be inconvenient for some users who will
then be logged out frequently.

Note also that this is calculated at login time: changes are not applied
retrospectively to existing sessions for users that have already logged in.

By default, this is infinite.

Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```

---
## Metrics
Config options related to metrics.
@@ -2436,31 +2373,6 @@ Example configuration:
enable_metrics: true
```
---
### `enable_legacy_metrics`

Set to `true` to publish both legacy and non-legacy Prometheus metric names,
or to `false` to only publish non-legacy Prometheus metric names.
Defaults to `true`. Has no effect if `enable_metrics` is `false`.
**In Synapse v1.71.0, this will default to `false` before being removed in Synapse v1.73.0.**

Legacy metric names include:
- metrics containing colons in the name, such as `synapse_util_caches_response_cache:hits`, because colons are supposed to be reserved for user-defined recording rules;
- counters that don't end with the `_total` suffix, such as `synapse_federation_client_sent_edus`, therefore not adhering to the OpenMetrics standard.

These legacy metric names are unconventional and not compliant with OpenMetrics standards.
They are included for backwards compatibility.

Example configuration:
```yaml
enable_legacy_metrics: false
```

See https://github.com/matrix-org/synapse/issues/11106 for context.

*Since v1.67.0.*

**Will be removed in v1.73.0.**
---
### `sentry`

Use this option to enable sentry integration. Provide the DSN assigned to you by sentry
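The rest of this sentence, and the option's example, are cut off by the next hunk. As a sketch, for illustration only (the DSN value is a placeholder you obtain from Sentry):

```yaml
sentry:
  dsn: "..."
```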
@@ -2523,7 +2435,7 @@ report_stats_endpoint: https://example.com/report-usage-stats/push
Config settings related to the client/server API

---
### `room_prejoin_state`
### `room_prejoin_state:`

Controls for the state that is shared with users who receive an invite
to a room. By default, the following state event types are shared with users who
@@ -2625,10 +2537,7 @@ Config options relating to signing keys
---
### `signing_key_path`

Path to the signing key to sign events and federation requests with.

*New in Synapse 1.67*: If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.
Path to the signing key to sign messages with.

Example configuration:
```yaml
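# (example elided by the diff; presumably the default path, where CONFDIR and
# SERVERNAME are placeholders filled in at config generation time:)
signing_key_path: "CONFDIR/SERVERNAME.signing.key"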
@@ -2663,7 +2572,7 @@ Example configuration:
key_refresh_interval: 2d
```
---
### `trusted_key_servers`
### `trusted_key_servers:`

The trusted servers to download signing keys from.
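The example for this option is elided by the next hunk. For illustration, a minimal configuration (matrix.org is the long-standing default key server) looks like:

```yaml
trusted_key_servers:
  - server_name: "matrix.org"
```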

@@ -2733,10 +2642,13 @@ key_server_signing_keys_path: "key_server_signing_keys.key"
The following settings can be used to make Synapse use a single sign-on
provider for authentication, instead of its internal password database.

You will probably also want to set the following options to `false` to
You will probably also want to set the following options to false to
disable the regular login/registration flows:
* [`enable_registration`](#enable_registration)
* [`password_config.enabled`](#password_config)
* `enable_registration`
* `password_config.enabled`

You will also want to investigate the settings under the "sso" configuration
section below.

---
### `saml2_config`
@@ -2977,7 +2889,7 @@ Options for each entry include:

* `module`: The class name of a custom mapping module. Default is
  `synapse.handlers.oidc.JinjaOidcMappingProvider`.
  See [OpenID Mapping Providers](../../sso_mapping_providers.md#openid-mapping-providers)
  See https://matrix-org.github.io/synapse/latest/sso_mapping_providers.html#openid-mapping-providers
  for information on implementing a custom mapping provider.

* `config`: Configuration for the mapping provider module. This section will
@@ -3418,15 +3330,13 @@ This option has the following sub-options:
  the user directory. If false, search results will only contain users
  visible in public rooms and users sharing a room with the requester.
  Defaults to false.

  NB. If you set this to true, and the last time the user_directory search
  indexes were (re)built was before Synapse 1.44, you'll have to
  rebuild the indexes in order to search through all known users.

  These indexes are built the first time Synapse starts; admins can
  manually trigger a rebuild via the API following the instructions
  [for running background updates](../administration/admin_api/background_updates.md#run),
  set to true to return search results containing all known users, even if that
  manually trigger a rebuild via API following the instructions at
  https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/background_updates.html#run
  Set to true to return search results containing all known users, even if that
  user does not share a room with the requester.
* `prefer_local_users`: Defines whether to prefer local users in search query results.
  If set to true, local users are more likely to appear above remote users when searching the
@@ -3541,9 +3451,9 @@ Example configuration:
enable_room_list_search: false
```
---
### `alias_creation_rules`
### `alias_creation`

The `alias_creation_rules` option controls who is allowed to create aliases
The `alias_creation` option controls who is allowed to create aliases
on this server.

The format of this option is a list of rules that contain globs that

@@ -1 +1 @@
window.SYNAPSE_VERSION = 'v1.70';
window.SYNAPSE_VERSION = 'v1.66';
128  docs/workers.md
@@ -32,8 +32,13 @@ stream between all configured Synapse processes. Additionally, processes may
make HTTP requests to each other, primarily for operations which need to wait
for a reply ─ such as sending an event.

All the workers and the main process connect to Redis, which relays replication
commands between processes.
Redis support was added in v1.13.0 with it becoming the recommended method in
v1.18.0. It replaced the old direct TCP connections (which are deprecated as of
v1.18.0) to the main process. With Redis, rather than all the workers connecting
to the main process, all the workers and the main process connect to Redis,
which relays replication commands between processes. This can give a significant
cpu saving on the main process and will be a prerequisite for upcoming
performance improvements.

If Redis support is enabled Synapse will use it as a shared cache, as well as a
pub/sub mechanism.
@@ -93,6 +98,7 @@ listener" for the main process; and secondly, you need to enable redis-based
replication. Optionally, a shared secret can be used to authenticate HTTP
traffic between workers. For example:

```yaml
# extend the existing `listeners` section. This defines the ports that the
# main process will listen on.
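# (the rest of this example is elided by the diff; a sketch of what it
# presumably contains: a replication listener, plus the optional secret)
listeners:
  # The HTTP replication port
  - port: 9093
    bind_address: '127.0.0.1'
    type: http
    resources:
      - names: [replication]

# Optional shared secret used to authenticate HTTP traffic between workers.
#worker_replication_secret: ""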
@@ -111,27 +117,23 @@ redis:
  enabled: true
```

See the [configuration manual](usage/configuration/config_documentation.html) for the full documentation of each option.
See the sample config for the full documentation of each option.

Under **no circumstances** should the replication listener be exposed to the
public internet; replication traffic is:

* always unencrypted
* unauthenticated, unless `worker_replication_secret` is configured
public internet; it has no authentication and is unencrypted.


### Worker configuration

In the config file for each worker, you must specify:
* The type of worker (`worker_app`). The currently available worker applications are listed below.
* A unique name for the worker (`worker_name`).
* The HTTP replication endpoint that it should talk to on the main synapse process
  (`worker_replication_host` and `worker_replication_http_port`)
* If handling HTTP requests, a `worker_listeners` option with an `http`
  listener, in the same way as the [`listeners`](usage/configuration/config_documentation.md#listeners)
  option in the shared config.
* If handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
  the main process (`worker_main_http_uri`).
In the config file for each worker, you must specify the type of worker
application (`worker_app`), and you should specify a unique name for the worker
(`worker_name`). The currently available worker applications are listed below.
You must also specify the HTTP replication endpoint that it should talk to on
the main synapse process. `worker_replication_host` should specify the host of
the main synapse and `worker_replication_http_port` should point to the HTTP
replication port. If the worker will handle HTTP requests then the
`worker_listeners` option should be set with a `http` listener, in the same way
as the `listeners` option in the shared config.

For example:
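The example itself is elided by the diff; a minimal sketch of a generic worker config (the worker name and port numbers here are illustrative):

```yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# Connect back to the main process for replication.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]
```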
@@ -203,8 +205,6 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/members$
    ^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/state$
    ^/_matrix/client/v1/rooms/.*/hierarchy$
    ^/_matrix/client/(v1|unstable)/rooms/.*/relations/
    ^/_matrix/client/v1/rooms/.*/threads$
    ^/_matrix/client/unstable/org.matrix.msc2716/rooms/.*/batch_send$
    ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$
    ^/_matrix/client/(r0|v3|unstable)/account/3pid$
@@ -217,12 +217,10 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/search$

    # Encryption requests
    # Note that ^/_matrix/client/(r0|v3|unstable)/keys/upload/ requires `worker_main_http_uri`
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
    ^/_matrix/client/(r0|v3|unstable)/keys/changes$
    ^/_matrix/client/(r0|v3|unstable)/keys/claim$
    ^/_matrix/client/(r0|v3|unstable)/room_keys/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload/

    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -287,9 +285,8 @@ For multiple workers not handling the SSO endpoints properly, see
[#7530](https://github.com/matrix-org/synapse/issues/7530) and
[#9427](https://github.com/matrix-org/synapse/issues/9427).

Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
with `client` and `federation` `resources` must be configured in the `worker_listeners`
option in the worker config.
Note that a HTTP listener with `client` and `federation` resources must be
configured in the `worker_listeners` option in the worker config.

#### Load balancing

@@ -328,9 +325,9 @@ effects of bursts of events from that bridge on events sent by normal users.

Additionally, the writing of specific streams (such as events) can be moved off
of the main process to a particular worker.
(This is only supported with Redis-based replication.)

To enable this, the worker must have a
[HTTP `replication` listener](usage/configuration/config_documentation.md#listeners) configured,
To enable this, the worker must have a HTTP replication listener configured,
have a `worker_name` and be listed in the `instance_map` config. The same worker
can handle multiple streams, but unless otherwise documented, each stream can only
have a single writer.
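A minimal sketch of the shared configuration this paragraph describes, assuming a stream writer named `event_persister1` (the name, host and port are illustrative):

```yaml
instance_map:
  event_persister1:
    host: localhost
    port: 8034

stream_writers:
  events: event_persister1
```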
@@ -414,7 +411,7 @@ the stream writer for the `presence` stream:
There is also support for moving background tasks to a separate
worker. Background tasks are run periodically or started via replication. Exactly
which tasks are configured to run depends on your Synapse configuration (e.g. if
stats is enabled). This worker doesn't handle any REST endpoints itself.
stats is enabled).

To enable this, the worker must have a `worker_name` and can be configured to run
background tasks. For example, to move background tasks to a dedicated worker,
@@ -461,8 +458,8 @@ worker application type.
#### Notifying Application Services

You can designate one generic worker to send output traffic to Application Services.
It doesn't handle any REST endpoints itself, but you should specify its name in the
shared configuration as follows:

Specify its name in the shared configuration as follows:

```yaml
notify_appservices_from_worker: worker_name
@@ -490,12 +487,6 @@ pusher_instances:
  - pusher_worker2
```

An example for a pusher instance:

```yaml
{{#include systemd-with-workers/workers/pusher_worker.yaml}}
```


### `synapse.app.appservice`

@@ -526,12 +517,6 @@ federation_sender_instances:
  - federation_sender2
```

An example for a federation sender instance:

```yaml
{{#include systemd-with-workers/workers/federation_sender.yaml}}
```

### `synapse.app.media_repository`

Handles the media repository. It can handle all endpoints starting with:
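(The endpoint list is elided by the diff; it begins with the `^/_matrix/media/` prefix.)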
@@ -552,12 +537,16 @@ file to stop the main synapse running background jobs related to managing the
media repository. Note that doing so will prevent the main process from being
able to handle the above endpoints.

In the `media_repository` worker configuration file, configure the
[HTTP listener](usage/configuration/config_documentation.md#listeners) to
In the `media_repository` worker configuration file, configure the http listener to
expose the `media` resource. For example:

```yaml
{{#include systemd-with-workers/workers/media_worker.yaml}}
worker_listeners:
  - type: http
    port: 8085
    resources:
      - names:
          - media
```

Note that if running multiple media repositories they must be on the same server
@@ -592,23 +581,52 @@ handle it, and are online.
If `update_user_directory` is set to `false`, and this worker is not running,
the above endpoint may give outdated results.

### `synapse.app.frontend_proxy`

Proxies some frequently-requested client endpoints to add caching and remove
load from the main synapse. It can handle REST endpoints matching the following
regular expressions:

    ^/_matrix/client/(r0|v3|unstable)/keys/upload

If `use_presence` is False in the homeserver config, it can also handle REST
endpoints matching the following regular expressions:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/[^/]+/status

This "stub" presence handler will pass through `GET` requests but make the
`PUT` effectively a no-op.

It will proxy any requests it cannot handle to the main synapse instance. It
must therefore be configured with the location of the main instance, via
the `worker_main_http_uri` setting in the `frontend_proxy` worker configuration
file. For example:

```yaml
worker_main_http_uri: http://127.0.0.1:8008
```

### Historical apps

The following used to be separate worker application types, but are now
equivalent to `synapse.app.generic_worker`:

* `synapse.app.client_reader`
* `synapse.app.event_creator`
* `synapse.app.federation_reader`
* `synapse.app.frontend_proxy`
* `synapse.app.synchrotron`
*Note:* Historically there used to be more apps, however they have been
amalgamated into a single `synapse.app.generic_worker` app. The remaining apps
are ones that do specific processing unrelated to requests, e.g. the `pusher`
that handles sending out push notifications for new events. The intention is for
all these to be folded into the `generic_worker` app and to use config to define
which processes handle the various processing such as push notifications.


## Migration from old config

A main change that has occurred is the merging of worker apps into
`synapse.app.generic_worker`. This change is backwards compatible and so no
changes to the config are required.
There are two main independent changes that have been made: introducing Redis
support and merging apps into `synapse.app.generic_worker`. Both these changes
are backwards compatible and so no changes to the config are required, however
server admins are encouraged to plan to migrate to Redis as the old style direct
TCP replication config is deprecated.

To migrate to Redis add the `redis` config as above, and optionally remove the
TCP `replication` listener from master and `worker_replication_port` from worker
config.

To migrate apps to use `synapse.app.generic_worker` simply update the
`worker_app` option in the worker configs, and where workers are started (e.g.
9  mypy.ini
@@ -16,8 +16,7 @@ files =
  docker/,
  scripts-dev/,
  synapse/,
  tests/,
  build_rust.py
  tests/

# Note: Better exclusion syntax coming in mypy > 0.910
# https://github.com/python/mypy/pull/11329
@@ -106,9 +105,6 @@ disallow_untyped_defs = False
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True

[mypy-tests.push.test_bulk_push_rule_evaluator]
disallow_untyped_defs = True

[mypy-tests.test_server]
disallow_untyped_defs = True

@@ -185,6 +181,3 @@ ignore_missing_imports = True

[mypy-incremental.*]
ignore_missing_imports = True

[mypy-setuptools_rust.*]
ignore_missing_imports = True
716  poetry.lock (generated): file diff suppressed because it is too large.
@@ -52,12 +52,9 @@ include_trailing_comma = true
combine_as_imports = true
skip_gitignore = true

[tool.maturin]
manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.70.1"
version = "1.66.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -85,17 +82,7 @@ include = [
    { path = "sytest-blacklist", format = "sdist" },
    { path = "tests", format = "sdist" },
    { path = "UPGRADE.rst", format = "sdist" },
    { path = "Cargo.toml", format = "sdist" },
    { path = "Cargo.lock", format = "sdist" },
    { path = "rust/Cargo.toml", format = "sdist" },
    { path = "rust/build.rs", format = "sdist" },
    { path = "rust/src/**", format = "sdist" },
]
exclude = [
    { path = "synapse/*.so", format = "sdist"}
]

build = "build_rust.py"

[tool.poetry.scripts]
synapse_homeserver = "synapse.app.homeserver:main"
@@ -139,7 +126,7 @@ pyOpenSSL = ">=16.0.0"
PyYAML = ">=3.11"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
bcrypt = ">=3.1.0"
Pillow = ">=5.4.0"
sortedcontainers = ">=1.4.4"
pymacaroons = ">=0.13.0"
@@ -165,7 +152,7 @@ typing-extensions = ">=3.10.0.1"
cryptography = ">=3.4.7"
# ijson 3.1.4 fixes a bug with "." in property names
ijson = ">=3.1.4"
matrix-common = "^1.3.0"
matrix-common = "^1.2.1"
# We need packaging.requirements.Requirement, added in 16.1.
packaging = ">=16.1"
# At the time of writing, we only use functions from the version `importlib.metadata`
@@ -174,15 +161,6 @@ importlib_metadata = { version = ">=1.4", python = "<3.8" }
# This is the most recent version of Pydantic available on common distros.
pydantic = ">=1.7.4"

# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"


# Optional Dependencies
@@ -219,7 +197,7 @@ oidc = ["authlib"]
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
systemd = ["systemd-python"]
url-preview = ["lxml"]
url_preview = ["lxml"]
sentry = ["sentry-sdk"]
opentracing = ["jaeger-client", "opentracing"]
jwt = ["authlib"]
@@ -227,7 +205,7 @@ jwt = ["authlib"]
# (if it is not installed, we fall back to slow code.)
redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
cache_memory = ["pympler"]
test = ["parameterized", "idna"]

# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
@@ -250,7 +228,7 @@ all = [
    "pysaml2",
    # oidc and jwt
    "authlib",
    # url-preview
    # url_preview
    "lxml",
    # sentry
    "sentry-sdk",
@@ -258,7 +236,7 @@ all = [
    "jaeger-client", "opentracing",
    # redis
    "txredisapi", "hiredis",
    # cache-memory
    # cache_memory
    "pympler",
    # omitted:
    #   - test: it's useful to have this separate from dev deps in the olddeps job
@@ -267,10 +245,10 @@ all = [

[tool.poetry.dev-dependencies]
## We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
isort = "==5.7.0"
black = "==22.3.0"
flake8-comprehensions = "*"
flake8-bugbear = ">=21.3.2"
flake8-bugbear = "==21.3.2"
flake8 = "*"

# Typechecking
@@ -296,46 +274,16 @@ parameterized = ">=0.7.4"
idna = ">=2.5"

# The following are used by the release script
click = ">=8.1.3"
click = "==8.1.1"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
commonmark = ">=0.9.1"
pygithub = ">=1.55"
commonmark = "==0.9.1"
pygithub = "==1.55"
# The following are executed as commands by the release script.
twine = "*"
# Towncrier min version comes from #3425. Rationale unclear.
towncrier = ">=18.6.0rc1"

[build-system]
# The upper bounds here are defensive, intended to prevent situations like
# #13849 and #14079 where we see buildtime or runtime errors caused by build
# system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.0.0,<=1.3.2", "setuptools_rust>=1.3,<=1.5.2"]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"


[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
skip = "cp36* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"

# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }

# For some reason if we don't manually clean the build directory we
# can end up polluting the next build with a .so that is for the wrong
# Python version.
before-build = "rm -rf {project}/build"
build-frontend = "build"
test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print(sum_as_string(1, 2))'"


[tool.cibuildwheel.linux]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py -w {dest_dir} {wheel}"

[tool.cibuildwheel.macos]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py --require-archs {delocate_archs} -w {dest_dir} {wheel}"

@@ -1,35 +0,0 @@
[package]
# We name the package `synapse` so that things like logging have the right
# logging target.
name = "synapse"

# dummy version. See pyproject.toml for the Synapse's version number.
version = "0.1.0"

edition = "2021"
rust-version = "1.58.1"

[lib]
name = "synapse"
# We generate a `cdylib` for Python and a standard `lib` for running
# tests/benchmarks.
crate-type = ["lib", "cdylib"]

[package.metadata.maturin]
# This is where we tell maturin where to place the built library.
name = "synapse.synapse_rust"

[dependencies]
anyhow = "1.0.63"
lazy_static = "1.4.0"
log = "0.4.17"
pyo3 = { version = "0.17.1", features = ["extension-module", "macros", "anyhow", "abi3", "abi3-py37"] }
pyo3-log = "0.7.0"
pythonize = "0.17.0"
regex = "1.6.0"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"

[build-dependencies]
blake2 = "0.10.4"
hex = "0.4.3"
@@ -1,149 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![feature(test)]
use synapse::push::{
    evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, PushRules,
};
use test::Bencher;

extern crate test;

#[bench]
fn bench_match_exact(b: &mut Bencher) {
    let flattened_keys = [
        ("type".to_string(), "m.text".to_string()),
        ("room_id".to_string(), "!room:server".to_string()),
        ("content.body".to_string(), "test message".to_string()),
    ]
    .into_iter()
    .collect();

    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        10,
        0,
        Default::default(),
        Default::default(),
        true,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "room_id".into(),
            pattern: Some("!room:server".into()),
            pattern_type: None,
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    assert!(matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}

#[bench]
fn bench_match_word(b: &mut Bencher) {
    let flattened_keys = [
        ("type".to_string(), "m.text".to_string()),
        ("room_id".to_string(), "!room:server".to_string()),
        ("content.body".to_string(), "test message".to_string()),
    ]
    .into_iter()
    .collect();

    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        10,
        0,
        Default::default(),
        Default::default(),
        true,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("test".into()),
            pattern_type: None,
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    assert!(matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}

#[bench]
fn bench_match_word_miss(b: &mut Bencher) {
    let flattened_keys = [
        ("type".to_string(), "m.text".to_string()),
        ("room_id".to_string(), "!room:server".to_string()),
        ("content.body".to_string(), "test message".to_string()),
    ]
    .into_iter()
    .collect();

    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        10,
        0,
        Default::default(),
        Default::default(),
        true,
    )
    .unwrap();

    let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
        EventMatchCondition {
            key: "content.body".into(),
            pattern: Some("foobar".into()),
            pattern_type: None,
        },
    ));

    let matched = eval.match_condition(&condition, None, None).unwrap();
    assert!(!matched, "Didn't match");

    b.iter(|| eval.match_condition(&condition, None, None).unwrap());
}

#[bench]
fn bench_eval_message(b: &mut Bencher) {
    let flattened_keys = [
        ("type".to_string(), "m.text".to_string()),
        ("room_id".to_string(), "!room:server".to_string()),
        ("content.body".to_string(), "test message".to_string()),
    ]
    .into_iter()
    .collect();

    let eval = PushRuleEvaluator::py_new(
        flattened_keys,
        10,
        0,
        Default::default(),
        Default::default(),
        true,
    )
    .unwrap();

    let rules =
        FilteredPushRules::py_new(PushRules::new(Vec::new()), Default::default(), false, false);

    b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
}
@@ -1,40 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#![feature(test)]

use synapse::push::utils::{glob_to_regex, GlobMatchType};
use test::Bencher;

extern crate test;

#[bench]
fn bench_whole(b: &mut Bencher) {
    b.iter(|| glob_to_regex("test", GlobMatchType::Whole));
}

#[bench]
fn bench_word(b: &mut Bencher) {
    b.iter(|| glob_to_regex("test", GlobMatchType::Word));
}

#[bench]
fn bench_whole_wildcard_run(b: &mut Bencher) {
    b.iter(|| glob_to_regex("test***??*?*?foo", GlobMatchType::Whole));
}

#[bench]
fn bench_word_wildcard_run(b: &mut Bencher) {
    // Benchmarks `Word` matching, unlike the copy-pasted `Whole` above.
    b.iter(|| glob_to_regex("test***??*?*?foo", GlobMatchType::Word));
}
@@ -1,45 +0,0 @@
//! This build script calculates the hash of all files in the `src/`
//! directory and adds it as an environment variable during build time.
//!
//! This is used so that the python code can detect when the built native module
//! does not match the source in-tree, helping to detect the case where the
//! source has been updated but the library hasn't been rebuilt.

use std::path::PathBuf;

use blake2::{Blake2b512, Digest};

fn main() -> Result<(), std::io::Error> {
    let mut dirs = vec![PathBuf::from("src")];

    let mut paths = Vec::new();
    while let Some(path) = dirs.pop() {
        let mut entries = std::fs::read_dir(path)?
            .map(|res| res.map(|e| e.path()))
            .collect::<Result<Vec<_>, std::io::Error>>()?;

        entries.sort();

        for entry in entries {
            if entry.is_dir() {
                dirs.push(entry);
            } else {
                paths.push(entry.to_str().expect("valid rust paths").to_string());
            }
        }
    }

    paths.sort();

    let mut hasher = Blake2b512::new();

    for path in paths {
        let bytes = std::fs::read(path)?;
        hasher.update(bytes);
    }

    let hex_digest = hex::encode(hasher.finalize());
    println!("cargo:rustc-env=SYNAPSE_RUST_DIGEST={hex_digest}");

    Ok(())
}
@@ -1,31 +0,0 @@
use pyo3::prelude::*;

pub mod push;

/// Returns the hash of all the rust source files at the time it was compiled.
///
/// Used by python to detect if the rust library is outdated.
#[pyfunction]
fn get_rust_file_digest() -> &'static str {
    env!("SYNAPSE_RUST_DIGEST")
}

/// Formats the sum of two numbers as string.
#[pyfunction]
#[pyo3(text_signature = "(a, b, /)")]
fn sum_as_string(a: usize, b: usize) -> PyResult<String> {
    Ok((a + b).to_string())
}

/// The entry point for defining the Python module.
#[pymodule]
fn synapse_rust(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    pyo3_log::init();

    m.add_function(wrap_pyfunction!(sum_as_string, m)?)?;
    m.add_function(wrap_pyfunction!(get_rust_file_digest, m)?)?;

    push::register_module(py, m)?;

    Ok(())
}
@@ -1,323 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Contains the definitions of the "base" push rules.

use std::borrow::Cow;
use std::collections::HashMap;

use lazy_static::lazy_static;
use serde_json::Value;

use super::KnownCondition;
use crate::push::Action;
use crate::push::Condition;
use crate::push::EventMatchCondition;
use crate::push::PushRule;
use crate::push::SetTweak;
use crate::push::TweakValue;

const HIGHLIGHT_ACTION: Action = Action::SetTweak(SetTweak {
    set_tweak: Cow::Borrowed("highlight"),
    value: None,
    other_keys: Value::Null,
});

const HIGHLIGHT_FALSE_ACTION: Action = Action::SetTweak(SetTweak {
    set_tweak: Cow::Borrowed("highlight"),
    value: Some(TweakValue::Other(Value::Bool(false))),
    other_keys: Value::Null,
});

const SOUND_ACTION: Action = Action::SetTweak(SetTweak {
    set_tweak: Cow::Borrowed("sound"),
    value: Some(TweakValue::String(Cow::Borrowed("default"))),
    other_keys: Value::Null,
});

const RING_ACTION: Action = Action::SetTweak(SetTweak {
    set_tweak: Cow::Borrowed("sound"),
    value: Some(TweakValue::String(Cow::Borrowed("ring"))),
    other_keys: Value::Null,
});

pub const BASE_PREPEND_OVERRIDE_RULES: &[PushRule] = &[PushRule {
    rule_id: Cow::Borrowed("global/override/.m.rule.master"),
    priority_class: 5,
    conditions: Cow::Borrowed(&[]),
    actions: Cow::Borrowed(&[Action::DontNotify]),
    default: true,
    default_enabled: false,
}];

pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("content.msgtype"),
                pattern: Some(Cow::Borrowed("m.notice")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.invite_for_me"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.member")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("content.membership"),
                pattern: Some(Cow::Borrowed("invite")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: None,
                pattern_type: Some(Cow::Borrowed("user_id")),
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION, SOUND_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.member_event"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.member")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.contains_display_name"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::ContainsDisplayName)]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.roomnotif"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::SenderNotificationPermission {
                key: Cow::Borrowed("room"),
            }),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("content.body"),
                pattern: Some(Cow::Borrowed("@room")),
                pattern_type: None,
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.tombstone"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.tombstone")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: Some(Cow::Borrowed("")),
                pattern_type: None,
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.reaction"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.reaction")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::DontNotify]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.room.server_acl"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.server_acl")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: Some(Cow::Borrowed("")),
                pattern_type: None,
            })),
        ]),
        actions: Cow::Borrowed(&[]),
        default: true,
        default_enabled: true,
    },
];

pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
    rule_id: Cow::Borrowed("global/content/.m.rule.contains_user_name"),
    priority_class: 4,
    conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
        EventMatchCondition {
            key: Cow::Borrowed("content.body"),
            pattern: None,
            pattern_type: Some(Cow::Borrowed("user_localpart")),
        },
    ))]),
    actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
    default: true,
    default_enabled: true,
}];

pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.call.invite")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::Notify, RING_ACTION, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.room_one_to_one"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.message")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
            }),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.encrypted_room_one_to_one"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.encrypted")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::RoomMemberCount {
                is: Some(Cow::Borrowed("2")),
            }),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.message"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.message")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.m.rule.encrypted"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
            EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("m.room.encrypted")),
                pattern_type: None,
            },
        ))]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/underride/.im.vector.jitsi"),
        priority_class: 1,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("type"),
                pattern: Some(Cow::Borrowed("im.vector.modular.widgets")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("content.type"),
                pattern: Some(Cow::Borrowed("jitsi")),
                pattern_type: None,
            })),
            Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
                key: Cow::Borrowed("state_key"),
                pattern: Some(Cow::Borrowed("*")),
                pattern_type: None,
            })),
        ]),
        actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
        default: true,
        default_enabled: true,
    },
];

lazy_static! {
    pub static ref BASE_RULES_BY_ID: HashMap<&'static str, &'static PushRule> =
        BASE_PREPEND_OVERRIDE_RULES
            .iter()
            .chain(BASE_APPEND_OVERRIDE_RULES.iter())
            .chain(BASE_APPEND_CONTENT_RULES.iter())
            .chain(BASE_APPEND_UNDERRIDE_RULES.iter())
            .map(|rule| { (&*rule.rule_id, rule) })
            .collect();
}
@@ -1,275 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::BTreeMap;

use anyhow::{Context, Error};
use lazy_static::lazy_static;
use log::warn;
use pyo3::prelude::*;
use regex::Regex;

use super::{
    utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
    Action, Condition, EventMatchCondition, FilteredPushRules, KnownCondition,
};

lazy_static! {
    /// Used to parse the `is` clause in the room member count condition.
    static ref INEQUALITY_EXPR: Regex = Regex::new(r"^([=<>]*)([0-9]+)$").expect("valid regex");
}

/// Allows running a set of push rules against a particular event.
#[pyclass]
pub struct PushRuleEvaluator {
    /// A mapping of "flattened" keys to string values in the event, e.g.
    /// includes things like "type" and "content.msgtype".
    flattened_keys: BTreeMap<String, String>,

    /// The "content.body", if any.
    body: String,

    /// The number of users in the room.
    room_member_count: u64,

    /// The `notifications` section of the current power levels in the room.
    notification_power_levels: BTreeMap<String, i64>,

    /// The power level of the sender of the event, or None if event is an
    /// outlier.
    sender_power_level: Option<i64>,
}

#[pymethods]
impl PushRuleEvaluator {
    /// Create a new `PushRuleEvaluator`. See struct docstring for details.
    #[new]
    pub fn py_new(
        flattened_keys: BTreeMap<String, String>,
        room_member_count: u64,
        sender_power_level: Option<i64>,
        notification_power_levels: BTreeMap<String, i64>,
    ) -> Result<Self, Error> {
        let body = flattened_keys
            .get("content.body")
            .cloned()
            .unwrap_or_default();

        Ok(PushRuleEvaluator {
            flattened_keys,
            body,
            room_member_count,
            notification_power_levels,
            sender_power_level,
        })
    }

    /// Run the evaluator with the given push rules, for the given user ID and
    /// display name of the user.
    ///
    /// Passing in None will skip evaluating rules matching user ID and display
    /// name.
    ///
    /// Returns the set of actions, if any, that match (filtering out any
    /// `dont_notify` actions).
    pub fn run(
        &self,
        push_rules: &FilteredPushRules,
        user_id: Option<&str>,
        display_name: Option<&str>,
    ) -> Vec<Action> {
        'outer: for (push_rule, enabled) in push_rules.iter() {
            if !enabled {
                continue;
            }

            for condition in push_rule.conditions.iter() {
                match self.match_condition(condition, user_id, display_name) {
                    Ok(true) => {}
                    Ok(false) => continue 'outer,
                    Err(err) => {
                        warn!("Condition match failed {err}");
                        continue 'outer;
                    }
                }
            }

            let actions = push_rule
                .actions
                .iter()
                // Filter out "dont_notify" actions, as we don't store them.
                .filter(|a| **a != Action::DontNotify)
                .cloned()
                .collect();

            return actions;
        }

        Vec::new()
    }

    /// Check if the given condition matches.
    fn matches(
        &self,
        condition: Condition,
        user_id: Option<&str>,
        display_name: Option<&str>,
    ) -> bool {
        match self.match_condition(&condition, user_id, display_name) {
            Ok(true) => true,
            Ok(false) => false,
            Err(err) => {
                warn!("Condition match failed {err}");
                false
            }
        }
    }
}

impl PushRuleEvaluator {
    /// Match a given `Condition` for a push rule.
    pub fn match_condition(
        &self,
        condition: &Condition,
        user_id: Option<&str>,
        display_name: Option<&str>,
    ) -> Result<bool, Error> {
        let known_condition = match condition {
            Condition::Known(known) => known,
            Condition::Unknown(_) => {
                return Ok(false);
            }
        };

        let result = match known_condition {
            KnownCondition::EventMatch(event_match) => {
                self.match_event_match(event_match, user_id)?
            }
            KnownCondition::ContainsDisplayName => {
                if let Some(dn) = display_name {
                    if !dn.is_empty() {
                        get_glob_matcher(dn, GlobMatchType::Word)?.is_match(&self.body)?
                    } else {
                        // We specifically ignore empty display names, as otherwise
                        // they would always match.
                        false
                    }
                } else {
                    false
                }
            }
            KnownCondition::RoomMemberCount { is } => {
                if let Some(is) = is {
                    self.match_member_count(is)?
                } else {
                    false
                }
            }
            KnownCondition::SenderNotificationPermission { key } => {
                if let Some(sender_power_level) = &self.sender_power_level {
                    let required_level = self
                        .notification_power_levels
                        .get(key.as_ref())
                        .copied()
                        .unwrap_or(50);

                    *sender_power_level >= required_level
                } else {
                    false
                }
            }
        };

        Ok(result)
    }
    /// Evaluates an `event_match` condition.
    fn match_event_match(
        &self,
        event_match: &EventMatchCondition,
        user_id: Option<&str>,
    ) -> Result<bool, Error> {
        let pattern = if let Some(pattern) = &event_match.pattern {
            pattern
        } else if let Some(pattern_type) = &event_match.pattern_type {
            // The `pattern_type` can either be "user_id" or "user_localpart",
            // either way if we don't have a `user_id` then the condition can't
            // match.
            let user_id = if let Some(user_id) = user_id {
                user_id
            } else {
                return Ok(false);
            };

            match &**pattern_type {
                "user_id" => user_id,
                "user_localpart" => get_localpart_from_id(user_id)?,
                _ => return Ok(false),
            }
        } else {
            return Ok(false);
        };

        let haystack = if let Some(haystack) = self.flattened_keys.get(&*event_match.key) {
            haystack
        } else {
            return Ok(false);
        };

        // For the content.body we match against "words", but for everything
        // else we match against the entire value.
        let match_type = if event_match.key == "content.body" {
            GlobMatchType::Word
        } else {
            GlobMatchType::Whole
        };

        let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
        compiled_pattern.is_match(haystack)
    }

    /// Match the member count against an 'is' condition.
    /// The `is` condition can be things like '>2', '==3' or even just '4'.
    fn match_member_count(&self, is: &str) -> Result<bool, Error> {
        let captures = INEQUALITY_EXPR.captures(is).context("bad 'is' clause")?;
        let ineq = captures.get(1).map_or("==", |m| m.as_str());
        let rhs: u64 = captures
            .get(2)
            .context("missing number")?
            .as_str()
            .parse()?;

        let matches = match ineq {
            "" | "==" => self.room_member_count == rhs,
            "<" => self.room_member_count < rhs,
            ">" => self.room_member_count > rhs,
            ">=" => self.room_member_count >= rhs,
            "<=" => self.room_member_count <= rhs,
            _ => false,
        };

        Ok(matches)
    }
}

#[test]
fn push_rule_evaluator() {
    let mut flattened_keys = BTreeMap::new();
    flattened_keys.insert("content.body".to_string(), "foo bar bob hello".to_string());
    let evaluator =
        PushRuleEvaluator::py_new(flattened_keys, 10, Some(0), BTreeMap::new()).unwrap();

    let result = evaluator.run(&FilteredPushRules::default(), None, Some("bob"));
    assert_eq!(result.len(), 3);
}
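As an aside, the `is` clause accepted by `match_member_count` above follows a tiny grammar: an optional comparison operator followed by an integer, with a bare number meaning equality. A self-contained sketch of that parsing logic (re-implemented here with the `regex` crate, independent of the evaluator):

```rust
use regex::Regex;

// Standalone re-implementation of the 'is' clause handling shown above:
// an optional [=<>]* operator followed by digits, e.g. ">2", "==3" or "4".
fn member_count_matches(is: &str, count: u64) -> Option<bool> {
    let re = Regex::new(r"^([=<>]*)([0-9]+)$").ok()?;
    let caps = re.captures(is)?;
    // Group 1 matches the empty string for a bare number, which the
    // "" | "==" arm below treats as equality.
    let ineq = caps.get(1).map_or("==", |m| m.as_str());
    let rhs: u64 = caps.get(2)?.as_str().parse().ok()?;
    Some(match ineq {
        "" | "==" => count == rhs,
        "<" => count < rhs,
        ">" => count > rhs,
        ">=" => count >= rhs,
        "<=" => count <= rhs,
        _ => false,
    })
}

fn main() {
    assert_eq!(member_count_matches(">2", 10), Some(true));
    assert_eq!(member_count_matches("==3", 10), Some(false));
    assert_eq!(member_count_matches("4", 4), Some(true));
    assert_eq!(member_count_matches("nonsense", 4), None);
}
```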
@@ -1,477 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! An implementation of Matrix push rules.
//!
//! The `Cow<_>` type is used extensively within this module to allow creating
//! the base rules as constants (in Rust constants can't require explicit
//! allocation atm).
//!
//! ---
//!
//! Push rules is the system used to determine which events trigger a push (and a
//! bump in notification counts).
//!
//! This consists of a list of "push rules" for each user, where a push rule is a
//! pair of "conditions" and "actions". When a user receives an event Synapse
//! iterates over the list of push rules until it finds one where all the conditions
//! match the event, at which point "actions" describe the outcome (e.g. notify,
//! highlight, etc).
//!
//! Push rules are split up into 5 different "kinds" (aka "priority classes"), which
//! are run in order:
//!     1. Override — highest priority rules, e.g. always ignore notices
//!     2. Content — content specific rules, e.g. @ notifications
//!     3. Room — per room rules, e.g. enable/disable notifications for all messages
//!        in a room
//!     4. Sender — per sender rules, e.g. never notify for messages from a given
//!        user
//!     5. Underride — the lowest priority "default" rules, e.g. notify for every
//!        message.
//!
//! The set of "base rules" are the list of rules that every user has by default. A
//! user can modify their copy of the push rules in one of three ways:
//!     1. Adding a new push rule of a certain kind
//!     2. Changing the actions of a base rule
//!     3. Enabling/disabling a base rule.
//!
//! The base rules are split into whether they come before or after a particular
//! kind, so the order of push rule evaluation would be: base rules for before
//! "override" kind, user defined "override" rules, base rules after "override"
//! kind, etc, etc.

use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap, HashSet};

use anyhow::{Context, Error};
use log::warn;
use pyo3::prelude::*;
use pythonize::{depythonize, pythonize};
use serde::de::Error as _;
use serde::{Deserialize, Serialize};
use serde_json::Value;

use self::evaluator::PushRuleEvaluator;

mod base_rules;
pub mod evaluator;
pub mod utils;

/// Called when registering modules with python.
pub fn register_module(py: Python<'_>, m: &PyModule) -> PyResult<()> {
    let child_module = PyModule::new(py, "push")?;
    child_module.add_class::<PushRule>()?;
    child_module.add_class::<PushRules>()?;
    child_module.add_class::<FilteredPushRules>()?;
    child_module.add_class::<PushRuleEvaluator>()?;
    child_module.add_function(wrap_pyfunction!(get_base_rule_ids, m)?)?;

    m.add_submodule(child_module)?;

    // We need to manually add the module to sys.modules to make `from
    // synapse.synapse_rust import push` work.
    py.import("sys")?
        .getattr("modules")?
        .set_item("synapse.synapse_rust.push", child_module)?;

    Ok(())
}

#[pyfunction]
fn get_base_rule_ids() -> HashSet<&'static str> {
    base_rules::BASE_RULES_BY_ID.keys().copied().collect()
}

/// A single push rule for a user.
#[derive(Debug, Clone)]
#[pyclass(frozen)]
pub struct PushRule {
    /// A unique ID for this rule
    pub rule_id: Cow<'static, str>,
    /// The "kind" of push rule this is (see `PRIORITY_CLASS_MAP` in Python)
    #[pyo3(get)]
    pub priority_class: i32,
    /// The conditions that must all match for actions to be applied
    pub conditions: Cow<'static, [Condition]>,
    /// The actions to apply if all conditions are met
    pub actions: Cow<'static, [Action]>,
    /// Whether this is a base rule
    #[pyo3(get)]
    pub default: bool,
    /// Whether this is enabled by default
    #[pyo3(get)]
    pub default_enabled: bool,
}

#[pymethods]
impl PushRule {
    #[staticmethod]
    pub fn from_db(
        rule_id: String,
        priority_class: i32,
        conditions: &str,
        actions: &str,
    ) -> Result<PushRule, Error> {
        let conditions = serde_json::from_str(conditions).context("parsing conditions")?;
        let actions = serde_json::from_str(actions).context("parsing actions")?;

        Ok(PushRule {
            rule_id: Cow::Owned(rule_id),
            priority_class,
            conditions,
            actions,
            default: false,
            default_enabled: true,
        })
    }

    #[getter]
    fn rule_id(&self) -> &str {
        &self.rule_id
    }

    #[getter]
    fn actions(&self) -> Vec<Action> {
        self.actions.clone().into_owned()
    }

    #[getter]
    fn conditions(&self) -> Vec<Condition> {
        self.conditions.clone().into_owned()
    }

    fn __repr__(&self) -> String {
        format!(
            "<PushRule rule_id={}, conditions={:?}, actions={:?}>",
            self.rule_id, self.conditions, self.actions
        )
    }
}

/// The "action" Synapse should perform for a matching push rule.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Action {
    DontNotify,
    Notify,
    Coalesce,
    SetTweak(SetTweak),

    // An unrecognized custom action.
    Unknown(Value),
}

impl IntoPy<PyObject> for Action {
    fn into_py(self, py: Python<'_>) -> PyObject {
        // When we pass the `Action` struct to Python we want it to be converted
        // to a dict. We use `pythonize`, which converts the struct using the
        // `serde` serialization.
        pythonize(py, &self).expect("valid action")
    }
}

/// The body of a `SetTweak` push action.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SetTweak {
    set_tweak: Cow<'static, str>,

    #[serde(skip_serializing_if = "Option::is_none")]
    value: Option<TweakValue>,

    // This picks up any other fields that may have been added by clients.
    // These get added when we convert the `Action` to a python object.
    #[serde(flatten)]
    other_keys: Value,
}

/// The value of a `set_tweak`.
///
/// We need this (rather than using `TweakValue` directly) so that we can use
/// `&'static str` in the value when defining the constant base rules.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum TweakValue {
    String(Cow<'static, str>),
    Other(Value),
}

impl Serialize for Action {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            Action::DontNotify => serializer.serialize_str("dont_notify"),
            Action::Notify => serializer.serialize_str("notify"),
            Action::Coalesce => serializer.serialize_str("coalesce"),
            Action::SetTweak(tweak) => tweak.serialize(serializer),
            Action::Unknown(value) => value.serialize(serializer),
        }
    }
}

/// Simple helper class for deserializing Action from JSON.
#[derive(Deserialize)]
#[serde(untagged)]
enum ActionDeserializeHelper {
    Str(String),
    SetTweak(SetTweak),
    Unknown(Value),
}

impl<'de> Deserialize<'de> for Action {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let helper: ActionDeserializeHelper = Deserialize::deserialize(deserializer)?;
        match helper {
            ActionDeserializeHelper::Str(s) => match &*s {
                "dont_notify" => Ok(Action::DontNotify),
                "notify" => Ok(Action::Notify),
                "coalesce" => Ok(Action::Coalesce),
                _ => Err(D::Error::custom("unrecognized action")),
            },
            ActionDeserializeHelper::SetTweak(set_tweak) => Ok(Action::SetTweak(set_tweak)),
            ActionDeserializeHelper::Unknown(value) => Ok(Action::Unknown(value)),
        }
    }
}

/// A condition used in push rules to match against an event.
///
/// We need this split as `serde` doesn't give us the ability to have a
/// "catchall" variant in tagged enums.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(untagged)]
pub enum Condition {
    /// A recognized condition that we can match against
    Known(KnownCondition),
    /// An unrecognized condition that we ignore.
    Unknown(Value),
}

/// The set of "known" conditions that we can handle.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "kind")]
pub enum KnownCondition {
    EventMatch(EventMatchCondition),
    ContainsDisplayName,
    RoomMemberCount {
        #[serde(skip_serializing_if = "Option::is_none")]
        is: Option<Cow<'static, str>>,
    },
    SenderNotificationPermission {
        key: Cow<'static, str>,
    },
}

impl IntoPy<PyObject> for Condition {
    fn into_py(self, py: Python<'_>) -> PyObject {
        pythonize(py, &self).expect("valid condition")
    }
}

impl<'source> FromPyObject<'source> for Condition {
    fn extract(ob: &'source PyAny) -> PyResult<Self> {
        Ok(depythonize(ob)?)
    }
}

/// The body of a [`Condition::EventMatch`]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct EventMatchCondition {
    pub key: Cow<'static, str>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pattern: Option<Cow<'static, str>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pattern_type: Option<Cow<'static, str>>,
}

/// The collection of push rules for a user.
#[derive(Debug, Clone, Default)]
#[pyclass(frozen)]
pub struct PushRules {
    /// Custom push rules that override a base rule.
    overridden_base_rules: HashMap<Cow<'static, str>, PushRule>,

    /// Custom rules that come between the prepend/append override base rules.
    override_rules: Vec<PushRule>,
    /// Custom rules that come before the base content rules.
    content: Vec<PushRule>,
    /// Custom rules that come before the base room rules.
    room: Vec<PushRule>,
    /// Custom rules that come before the base sender rules.
    sender: Vec<PushRule>,
    /// Custom rules that come before the base underride rules.
    underride: Vec<PushRule>,
}

#[pymethods]
impl PushRules {
    #[new]
    pub fn new(rules: Vec<PushRule>) -> PushRules {
        let mut push_rules: PushRules = Default::default();

        for rule in rules {
            if let Some(&o) = base_rules::BASE_RULES_BY_ID.get(&*rule.rule_id) {
                push_rules.overridden_base_rules.insert(
                    rule.rule_id.clone(),
                    PushRule {
                        actions: rule.actions.clone(),
                        ..o.clone()
                    },
                );

                continue;
            }

            match rule.priority_class {
                5 => push_rules.override_rules.push(rule),
                4 => push_rules.content.push(rule),
                3 => push_rules.room.push(rule),
                2 => push_rules.sender.push(rule),
                1 => push_rules.underride.push(rule),
                _ => {
                    warn!(
                        "Unrecognized priority class for rule {}: {}",
                        rule.rule_id, rule.priority_class
                    );
                }
            }
        }

        push_rules
    }

    /// Returns the list of all rules, including base rules, in the order they
    /// should be executed in.
    fn rules(&self) -> Vec<PushRule> {
        self.iter().cloned().collect()
    }
}

impl PushRules {
    /// Iterates over all the rules, including base rules, in the order they
    /// should be executed in.
    pub fn iter(&self) -> impl Iterator<Item = &PushRule> {
        base_rules::BASE_PREPEND_OVERRIDE_RULES
            .iter()
            .chain(self.override_rules.iter())
            .chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter())
            .chain(self.content.iter())
            .chain(base_rules::BASE_APPEND_CONTENT_RULES.iter())
            .chain(self.room.iter())
            .chain(self.sender.iter())
            .chain(self.underride.iter())
            .chain(base_rules::BASE_APPEND_UNDERRIDE_RULES.iter())
            .map(|rule| {
                self.overridden_base_rules
                    .get(&*rule.rule_id)
                    .unwrap_or(rule)
            })
    }
}

/// A wrapper around `PushRules` that checks the enabled state of rules and
/// filters out disabled experimental rules.
#[derive(Debug, Clone, Default)]
#[pyclass(frozen)]
pub struct FilteredPushRules {
    push_rules: PushRules,
    enabled_map: BTreeMap<String, bool>,
}

#[pymethods]
impl FilteredPushRules {
    #[new]
    pub fn py_new(push_rules: PushRules, enabled_map: BTreeMap<String, bool>) -> Self {
        Self {
            push_rules,
            enabled_map,
        }
    }

    /// Returns the list of all rules and their enabled state, including base
    /// rules, in the order they should be executed in.
    fn rules(&self) -> Vec<(PushRule, bool)> {
        self.iter().map(|(r, e)| (r.clone(), e)).collect()
    }
}

impl FilteredPushRules {
    /// Iterates over all the rules and their enabled state, including base
    /// rules, in the order they should be executed in.
    fn iter(&self) -> impl Iterator<Item = (&PushRule, bool)> {
        self.push_rules.iter().map(|r| {
            let enabled = *self
                .enabled_map
                .get(&*r.rule_id)
                .unwrap_or(&r.default_enabled);
            (r, enabled)
        })
    }
}

#[test]
fn test_serialize_condition() {
    let condition = Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
        key: "content.body".into(),
        pattern: Some("coffee".into()),
        pattern_type: None,
    }));

    let json = serde_json::to_string(&condition).unwrap();
    assert_eq!(
        json,
        r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#
    )
}

#[test]
fn test_deserialize_condition() {
    let json = r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#;

    let _: Condition = serde_json::from_str(json).unwrap();
}

#[test]
fn test_deserialize_custom_condition() {
    let json = r#"{"kind":"custom_tag"}"#;

    let condition: Condition = serde_json::from_str(json).unwrap();
    assert!(matches!(condition, Condition::Unknown(_)));

    let new_json = serde_json::to_string(&condition).unwrap();
    assert_eq!(json, new_json);
}

#[test]
fn test_deserialize_action() {
    let _: Action = serde_json::from_str(r#""notify""#).unwrap();
    let _: Action = serde_json::from_str(r#""dont_notify""#).unwrap();
    let _: Action = serde_json::from_str(r#""coalesce""#).unwrap();
    let _: Action = serde_json::from_str(r#"{"set_tweak": "highlight"}"#).unwrap();
}

#[test]
fn test_custom_action() {
    let json = r#"{"some_custom":"action_fields"}"#;

    let action: Action = serde_json::from_str(json).unwrap();
    assert!(matches!(action, Action::Unknown(_)));

    let new_json = serde_json::to_string(&action).unwrap();
    assert_eq!(json, new_json);
}
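The `Condition` type above uses a serde pattern worth calling out: an outer `#[serde(untagged)]` enum whose first variant is an internally tagged enum of known kinds, and whose second variant is a raw `serde_json::Value` catch-all, so unrecognized conditions survive a round-trip instead of failing to parse. A standalone sketch of the pattern (toy types, not the Synapse ones):

```rust
use serde::{Deserialize, Serialize};
use serde_json::Value;

// Known kinds, internally tagged on "kind" like KnownCondition above.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
#[serde(tag = "kind")]
enum Known {
    EventMatch { key: String },
}

// Untagged wrapper: serde tries Known first, then falls back to raw JSON,
// mirroring Condition::Known / Condition::Unknown.
#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
enum Cond {
    Known(Known),
    Unknown(Value),
}

fn main() {
    let known: Cond =
        serde_json::from_str(r#"{"kind":"event_match","key":"content.body"}"#).unwrap();
    assert!(matches!(known, Cond::Known(_)));

    // Unrecognized kinds survive a round-trip untouched.
    let custom: Cond = serde_json::from_str(r#"{"kind":"custom_tag"}"#).unwrap();
    assert!(matches!(custom, Cond::Unknown(_)));
    assert_eq!(
        serde_json::to_string(&custom).unwrap(),
        r#"{"kind":"custom_tag"}"#
    );
}
```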
@@ -1,215 +0,0 @@
// Copyright 2022 The Matrix.org Foundation C.I.C.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use anyhow::bail;
use anyhow::Context;
use anyhow::Error;
use lazy_static::lazy_static;
use regex;
use regex::Regex;
use regex::RegexBuilder;

lazy_static! {
    /// Matches runs of non-wildcard characters followed by wildcard characters.
    static ref WILDCARD_RUN: Regex = Regex::new(r"([^\?\*]*)([\?\*]*)").expect("valid regex");
}

/// Extract the localpart from a Matrix style ID
pub(crate) fn get_localpart_from_id(id: &str) -> Result<&str, Error> {
    let (localpart, _) = id
        .split_once(':')
        .with_context(|| format!("ID does not contain colon: {id}"))?;

    // We need to strip off the first character, which is the ID type.
    if localpart.is_empty() {
        bail!("Invalid ID {id}");
    }

    Ok(&localpart[1..])
}

/// Used by `glob_to_regex` to specify what to match the regex against.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum GlobMatchType {
    /// The generated regex will match against the entire input.
    Whole,
    /// The generated regex will match against words.
    Word,
}

/// Convert a "glob" style expression to a regex, anchoring either to the entire
/// input or to individual words.
pub fn glob_to_regex(glob: &str, match_type: GlobMatchType) -> Result<Regex, Error> {
    let mut chunks = Vec::new();

    // Patterns with wildcards must be simplified to avoid performance cliffs
    // - The glob `?**?**?` is equivalent to the glob `???*`
    // - The glob `???*` is equivalent to the regex `.{3,}`
    for captures in WILDCARD_RUN.captures_iter(glob) {
        if let Some(chunk) = captures.get(1) {
            chunks.push(regex::escape(chunk.as_str()));
        }

        if let Some(wildcards) = captures.get(2) {
            if wildcards.as_str() == "" {
                continue;
            }

            let question_marks = wildcards.as_str().chars().filter(|c| *c == '?').count();

            if wildcards.as_str().contains('*') {
                chunks.push(format!(".{{{question_marks},}}"));
            } else {
                chunks.push(format!(".{{{question_marks}}}"));
            }
        }
    }

    let joined = chunks.join("");

    let regex_str = match match_type {
        GlobMatchType::Whole => format!(r"\A{joined}\z"),

        // `^|\W` and `\W|$` handle the case where `pattern` starts or ends with a non-word
        // character.
        GlobMatchType::Word => format!(r"(?:^|\b|\W){joined}(?:\b|\W|$)"),
    };

    Ok(RegexBuilder::new(&regex_str)
        .case_insensitive(true)
        .build()?)
}

/// Compiles the glob into a `Matcher`.
pub fn get_glob_matcher(glob: &str, match_type: GlobMatchType) -> Result<Matcher, Error> {
    // There are a number of shortcuts we can make if the glob doesn't contain a
    // wild card.
    let matcher = if glob.contains(['*', '?']) {
        let regex = glob_to_regex(glob, match_type)?;
        Matcher::Regex(regex)
    } else if match_type == GlobMatchType::Whole {
        // If there aren't any wildcards and we're matching the whole thing,
        // then we simply can do a case-insensitive string match.
        Matcher::Whole(glob.to_lowercase())
    } else {
        // Otherwise, if we're matching against words then we can first check
        // if the haystack contains the glob at all.
        Matcher::Word {
            word: glob.to_lowercase(),
            regex: None,
        }
    };

    Ok(matcher)
}

/// Matches against a glob
pub enum Matcher {
    /// Plain regex matching.
    Regex(Regex),

    /// Case-insensitive equality.
    Whole(String),

    /// Word matching. `regex` is a cache of calling [`glob_to_regex`] on word.
    Word { word: String, regex: Option<Regex> },
}

impl Matcher {
    /// Checks if the glob matches the given haystack.
    pub fn is_match(&mut self, haystack: &str) -> Result<bool, Error> {
        // We want to do case-insensitive matching, so we convert to
        // lowercase first.
        let haystack = haystack.to_lowercase();

        match self {
            Matcher::Regex(regex) => Ok(regex.is_match(&haystack)),
            Matcher::Whole(whole) => Ok(whole == &haystack),
            Matcher::Word { word, regex } => {
                // If we're looking for a literal word, then we first check if
                // the haystack contains the word as a substring.
                if !haystack.contains(&*word) {
                    return Ok(false);
                }

                // If it does contain the word as a substring, then we need to
                // check if it is an actual word by testing it against the regex.
                let regex = if let Some(regex) = regex {
                    regex
                } else {
                    let compiled_regex = glob_to_regex(word, GlobMatchType::Word)?;
                    regex.insert(compiled_regex)
                };

                Ok(regex.is_match(&haystack))
            }
        }
    }
}

#[test]
fn test_get_domain_from_id() {
    get_localpart_from_id("").unwrap_err();
    get_localpart_from_id(":").unwrap_err();
    get_localpart_from_id(":asd").unwrap_err();
    get_localpart_from_id("::as::asad").unwrap_err();

    assert_eq!(get_localpart_from_id("@test:foo").unwrap(), "test");
    assert_eq!(get_localpart_from_id("@:").unwrap(), "");
    assert_eq!(get_localpart_from_id("@test:foo:907").unwrap(), "test");
}

#[test]
fn test_glob() -> Result<(), Error> {
    assert_eq!(
        glob_to_regex("simple", GlobMatchType::Whole)?.as_str(),
        r"\Asimple\z"
    );
    assert_eq!(
        glob_to_regex("simple*", GlobMatchType::Whole)?.as_str(),
        r"\Asimple.{0,}\z"
    );
    assert_eq!(
        glob_to_regex("simple?", GlobMatchType::Whole)?.as_str(),
        r"\Asimple.{1}\z"
    );
    assert_eq!(
        glob_to_regex("simple?*?*", GlobMatchType::Whole)?.as_str(),
        r"\Asimple.{2,}\z"
    );
    assert_eq!(
        glob_to_regex("simple???", GlobMatchType::Whole)?.as_str(),
        r"\Asimple.{3}\z"
    );

    assert_eq!(
        glob_to_regex("escape.", GlobMatchType::Whole)?.as_str(),
        r"\Aescape\.\z"
    );

    assert!(glob_to_regex("simple", GlobMatchType::Whole)?.is_match("simple"));
    assert!(!glob_to_regex("simple", GlobMatchType::Whole)?.is_match("simples"));
    assert!(glob_to_regex("simple*", GlobMatchType::Whole)?.is_match("simples"));
    assert!(glob_to_regex("simple?", GlobMatchType::Whole)?.is_match("simples"));
    assert!(glob_to_regex("simple*", GlobMatchType::Whole)?.is_match("simple"));

    assert!(glob_to_regex("simple", GlobMatchType::Word)?.is_match("some simple."));
    assert!(glob_to_regex("simple", GlobMatchType::Word)?.is_match("simple"));
    assert!(!glob_to_regex("simple", GlobMatchType::Word)?.is_match("simples"));

    assert!(glob_to_regex("@user:foo", GlobMatchType::Word)?.is_match("Some @user:foo test"));
    assert!(glob_to_regex("@user:foo", GlobMatchType::Word)?.is_match("@user:foo"));

    Ok(())
}
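One design point in `Matcher::Word` above deserves a note: the cheap substring test runs first, and the word-boundary regex is compiled lazily and cached in the `regex` field only when a haystack actually contains the word. A standalone sketch of that fast path (the anchor pattern here is assumed from the `Word` arm of `glob_to_regex`; the real code derives it via `glob_to_regex(word, GlobMatchType::Word)`):

```rust
use regex::Regex;

// Standalone sketch of the Matcher::Word fast path shown above.
struct WordMatcher {
    word: String,
    regex: Option<Regex>,
}

impl WordMatcher {
    fn is_match(&mut self, haystack: &str) -> bool {
        let haystack = haystack.to_lowercase();
        // Fast path: if the word isn't even a substring, no regex is needed.
        if !haystack.contains(&self.word) {
            return false;
        }
        // Slow path: compile (once) a word-anchored regex and check that the
        // substring is an actual word. Assumed pattern, mirroring the Word
        // arm of glob_to_regex for a wildcard-free word.
        if self.regex.is_none() {
            let pattern = format!(r"(?:^|\b|\W){}(?:\b|\W|$)", regex::escape(&self.word));
            self.regex = Some(Regex::new(&pattern).expect("valid regex"));
        }
        self.regex.as_ref().expect("just set").is_match(&haystack)
    }
}

fn main() {
    let mut m = WordMatcher { word: "simple".to_string(), regex: None };
    assert!(m.is_match("some simple."));
    assert!(!m.is_match("simples")); // substring, but not a whole word
}
```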
@@ -88,9 +88,10 @@ def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:

    @functools.wraps(factory)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        if "strict" not in kwargs:
        # type-ignore: should be redundant once we can use https://github.com/python/mypy/pull/12668
        if "strict" not in kwargs:  # type: ignore[attr-defined]
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        if not kwargs["strict"]:
        if not kwargs["strict"]:  # type: ignore[index]
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        return factory(*args, **kwargs)

@@ -126,7 +126,7 @@ export COMPLEMENT_BASE_IMAGE=complement-synapse

extra_test_args=()

test_tags="synapse_blacklist,msc3787"
test_tags="synapse_blacklist,msc2716,msc3030,msc3787"

# All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.)
@@ -158,10 +158,7 @@ else

    # We only test faster room joins on monoliths, because they are purposefully
    # being developed without worker support to start with.
    #
    # The tests for importing historical messages (MSC2716) and jump to date (MSC3030)
    # also only pass with monoliths, currently.
    test_tags="$test_tags,faster_joins,msc2716,msc3030"
    test_tags="$test_tags,faster_joins"
fi

@@ -2,37 +2,34 @@
#
# This script generates SQL files for creating a brand new Synapse DB with the latest
# schema, on both SQLite3 and Postgres.
#
# It does so by having Synapse generate an up-to-date SQLite DB, then running
# synapse_port_db to convert it to Postgres. It then dumps the contents of both.

export PGHOST="localhost"
POSTGRES_MAIN_DB_NAME="synapse_full_schema_main.$$"
POSTGRES_COMMON_DB_NAME="synapse_full_schema_common.$$"
POSTGRES_STATE_DB_NAME="synapse_full_schema_state.$$"
POSTGRES_DB_NAME="synapse_full_schema.$$"

SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"

REQUIRED_DEPS=("matrix-synapse" "psycopg2")

usage() {
  echo
  echo "Usage: $0 -p <postgres_username> -o <path> [-c] [-n <schema number>] [-h]"
  echo "Usage: $0 -p <postgres_username> -o <path> [-c] [-n] [-h]"
  echo
  echo "-p <postgres_username>"
  echo "  Username to connect to local postgres instance. The password will be requested"
  echo "  during script execution."
  echo "-c"
  echo "  CI mode. Prints every command that the script runs."
  echo "  CI mode. Enables coverage tracking and prints every command that the script runs."
  echo "-o <path>"
  echo "  Directory to output full schema files to."
  echo "-n <schema number>"
  echo "  Schema number for the new snapshot. Used to set the location of files within "
  echo "  the output directory, mimicking that of synapse/storage/schemas."
  echo "  Defaults to 9999."
  echo "-h"
  echo "  Display this help text."
  echo ""
  echo "  NB: make sure to run this against the *oldest* supported version of postgres,"
  echo "  or else pg_dump might output non-backwards-compatible syntax."
}

SCHEMA_NUMBER="9999"
while getopts "p:co:hn:" opt; do
while getopts "p:co:h" opt; do
  case $opt in
    p)
      export PGUSER=$OPTARG
@@ -40,6 +37,11 @@ while getopts "p:co:hn:" opt; do
    c)
      # Print all commands that are being executed
      set -x

      # Modify required dependencies for coverage
      REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")

      COVERAGE=1
      ;;
    o)
      command -v realpath > /dev/null || (echo "The -o flag requires the 'realpath' binary to be installed" && exit 1)
@@ -49,9 +51,6 @@ while getopts "p:co:hn:" opt; do
      usage
      exit
      ;;
    n)
      SCHEMA_NUMBER="$OPTARG"
      ;;
    \?)
      echo "ERROR: Invalid option: -$OPTARG" >&2
      usage
@@ -99,21 +98,11 @@ cd "$(dirname "$0")/.."
TMPDIR=$(mktemp -d)
KEY_FILE=$TMPDIR/test.signing.key # default Synapse signing key path
SQLITE_CONFIG=$TMPDIR/sqlite.conf
SQLITE_MAIN_DB=$TMPDIR/main.db
SQLITE_STATE_DB=$TMPDIR/state.db
SQLITE_COMMON_DB=$TMPDIR/common.db
SQLITE_DB=$TMPDIR/homeserver.db
POSTGRES_CONFIG=$TMPDIR/postgres.conf

# Ensure these files are deleted on script exit
cleanup() {
  echo "Cleaning up temporary sqlite database and config files..."
  rm -r "$TMPDIR"
  echo "Cleaning up temporary Postgres database..."
  dropdb --if-exists "$POSTGRES_COMMON_DB_NAME"
  dropdb --if-exists "$POSTGRES_MAIN_DB_NAME"
  dropdb --if-exists "$POSTGRES_STATE_DB_NAME"
}
trap 'cleanup' EXIT
trap 'rm -rf $TMPDIR' EXIT

cat > "$SQLITE_CONFIG" <<EOF
server_name: "test"
@@ -123,22 +112,10 @@ macaroon_secret_key: "abcde"

report_stats: false

databases:
  common:
    name: "sqlite3"
    data_stores: []
    args:
      database: "$SQLITE_COMMON_DB"
  main:
    name: "sqlite3"
    data_stores: ["main"]
    args:
      database: "$SQLITE_MAIN_DB"
  state:
    name: "sqlite3"
    data_stores: ["state"]
    args:
      database: "$SQLITE_STATE_DB"
database:
  name: "sqlite3"
  args:
    database: "$SQLITE_DB"

# Suppress the key server warning.
trusted_key_servers: []
@@ -152,32 +129,13 @@ macaroon_secret_key: "abcde"

report_stats: false

databases:
  common:
    name: "psycopg2"
    data_stores: []
    args:
      user: "$PGUSER"
      host: "$PGHOST"
      password: "$PGPASSWORD"
      database: "$POSTGRES_COMMON_DB_NAME"
  main:
    name: "psycopg2"
    data_stores: ["main"]
    args:
      user: "$PGUSER"
      host: "$PGHOST"
      password: "$PGPASSWORD"
      database: "$POSTGRES_MAIN_DB_NAME"
  state:
    name: "psycopg2"
    data_stores: ["state"]
    args:
      user: "$PGUSER"
      host: "$PGHOST"
      password: "$PGPASSWORD"
      database: "$POSTGRES_STATE_DB_NAME"

database:
  name: "psycopg2"
  args:
    user: "$PGUSER"
    host: "$PGHOST"
    password: "$PGPASSWORD"
    database: "$POSTGRES_DB_NAME"

# Suppress the key server warning.
trusted_key_servers: []
@@ -189,46 +147,29 @@ python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
synapse/_scripts/update_synapse_database.py --database-config "$SQLITE_CONFIG" --run-background-updates
synapse/_scripts/update_synapse_database.py --database-config --run-background-updates "$SQLITE_CONFIG"

# Create the PostgreSQL database.
echo "Creating postgres databases..."
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_COMMON_DB_NAME"
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_MAIN_DB_NAME"
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_STATE_DB_NAME"

echo "Running db background jobs..."
synapse/_scripts/update_synapse_database.py --database-config "$POSTGRES_CONFIG" --run-background-updates
echo "Creating postgres database..."
createdb --lc-collate=C --lc-ctype=C --template=template0 "$POSTGRES_DB_NAME"

echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
if [ -z "$COVERAGE" ]; then
  # No coverage needed
  synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
else
  # Coverage desired
  coverage run synapse/_scripts/synapse_port_db.py --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
fi

# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
# Also delete any shadow tables from fts4
# This needs to be done after synapse_port_db is run
echo "Dropping unwanted db tables..."

# Some common tables are created and updated by Synapse itself and do not belong in the
# schema.
DROP_APP_MANAGED_TABLES="
SQL="
DROP TABLE schema_version;
DROP TABLE schema_compat_version;
DROP TABLE applied_schema_deltas;
DROP TABLE applied_module_schemas;
"
# Other common tables are not created by Synapse and do belong in the schema.
# TODO: we could derive DROP_COMMON_TABLES from the dump of the common-only DB. But
# since there's only one table there, I haven't bothered to do so.
DROP_COMMON_TABLES="$DROP_APP_MANAGED_TABLES
DROP TABLE background_updates;
"

sqlite3 "$SQLITE_COMMON_DB" <<< "$DROP_APP_MANAGED_TABLES"
sqlite3 "$SQLITE_MAIN_DB" <<< "$DROP_COMMON_TABLES"
sqlite3 "$SQLITE_STATE_DB" <<< "$DROP_COMMON_TABLES"
psql "$POSTGRES_COMMON_DB_NAME" -w <<< "$DROP_APP_MANAGED_TABLES"
psql "$POSTGRES_MAIN_DB_NAME" -w <<< "$DROP_COMMON_TABLES"
psql "$POSTGRES_STATE_DB_NAME" -w <<< "$DROP_COMMON_TABLES"

# For Reasons(TM), SQLite's `.schema` also dumps out "shadow tables", the implementation
# details behind full text search tables. Omit these from the dumps.

sqlite3 "$SQLITE_MAIN_DB" <<< "
DROP TABLE event_search_content;
DROP TABLE event_search_segments;
DROP TABLE event_search_segdir;
@@ -240,57 +181,16 @@ DROP TABLE user_directory_search_segdir;
DROP TABLE user_directory_search_docsize;
DROP TABLE user_directory_search_stat;
"
sqlite3 "$SQLITE_DB" <<< "$SQL"
psql "$POSTGRES_DB_NAME" -w <<< "$SQL"

echo "Dumping SQLite3 schema..."
echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"

mkdir -p "$OUTPUT_DIR/"{common,main,state}"/full_schemas/$SCHEMA_NUMBER"
sqlite3 "$SQLITE_COMMON_DB" ".schema" > "$OUTPUT_DIR/common/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
sqlite3 "$SQLITE_COMMON_DB" ".dump --data-only --nosys" >> "$OUTPUT_DIR/common/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
sqlite3 "$SQLITE_MAIN_DB" ".schema" > "$OUTPUT_DIR/main/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
sqlite3 "$SQLITE_MAIN_DB" ".dump --data-only --nosys" >> "$OUTPUT_DIR/main/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
sqlite3 "$SQLITE_STATE_DB" ".schema" > "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
sqlite3 "$SQLITE_STATE_DB" ".dump --data-only --nosys" >> "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.sqlite"
echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
pg_dump --format=plain --no-tablespaces --no-acl --no-owner $POSTGRES_DB_NAME | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"

cleanup_pg_schema() {
  # Cleanup as follows:
  # - Remove empty lines. pg_dump likes to output a lot of these.
  # - Remove comment-only lines. pg_dump also likes to output a lot of these to visually
  #   separate tables etc.
  # - Remove "public." prefix --- the schema name.
  # - Remove "SET" commands. Last time I ran this, the output commands were
  #     SET statement_timeout = 0;
  #     SET lock_timeout = 0;
  #     SET idle_in_transaction_session_timeout = 0;
  #     SET client_encoding = 'UTF8';
  #     SET standard_conforming_strings = on;
  #     SET check_function_bodies = false;
  #     SET xmloption = content;
  #     SET client_min_messages = warning;
  #     SET row_security = off;
  #     SET default_table_access_method = heap;
  # - Very carefully remove specific SELECT statements. We CANNOT blanket remove all
  #   SELECT statements because some of those have side-effects which we do want in the
  #   schema. Last time I ran this, the only SELECTS were
  #     SELECT pg_catalog.set_config('search_path', '', false);
  #   and
  #     SELECT pg_catalog.setval(text, bigint, bool);
  #   We do want to remove the former, but the latter is important. If the last argument
  #   is `true` or omitted, this marks the given integer as having been consumed and
  #   will NOT appear as the nextval.
  sed -e '/^$/d' \
      -e '/^--/d' \
      -e 's/public\.//g' \
      -e '/^SET /d' \
      -e '/^SELECT pg_catalog.set_config/d'
}

echo "Dumping Postgres schema..."

pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_COMMON_DB_NAME" | cleanup_pg_schema > "$OUTPUT_DIR/common/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_COMMON_DB_NAME" | cleanup_pg_schema >> "$OUTPUT_DIR/common/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_MAIN_DB_NAME" | cleanup_pg_schema > "$OUTPUT_DIR/main/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_MAIN_DB_NAME" | cleanup_pg_schema >> "$OUTPUT_DIR/main/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner "$POSTGRES_STATE_DB_NAME" | cleanup_pg_schema > "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
pg_dump --format=plain --data-only --inserts --no-tablespaces --no-acl --no-owner "$POSTGRES_STATE_DB_NAME" | cleanup_pg_schema >> "$OUTPUT_DIR/state/full_schemas/$SCHEMA_NUMBER/full.sql.postgres"
echo "Cleaning up temporary Postgres database..."
dropdb $POSTGRES_DB_NAME

echo "Done! Files dumped to: $OUTPUT_DIR"

@@ -29,7 +29,7 @@ class SynapsePlugin(Plugin):
        self, fullname: str
    ) -> Optional[Callable[[MethodSigContext], CallableType]]:
        if fullname.startswith(
            "synapse.util.caches.descriptors.CachedFunction.__call__"
            "synapse.util.caches.descriptors._CachedFunction.__call__"
        ) or fullname.startswith(
            "synapse.util.caches.descriptors._LruCachedFunction.__call__"
        ):
@@ -38,7 +38,7 @@ class SynapsePlugin(Plugin):


def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
    """Fixes the `CachedFunction.__call__` signature to be correct.
    """Fixes the `_CachedFunction.__call__` signature to be correct.

    It already has *almost* the correct signature, except:

@@ -18,12 +18,10 @@
"""

import glob
import json
import os
import re
import subprocess
import sys
import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
@@ -73,21 +71,18 @@ def cli() -> None:

        ./scripts-dev/release.py tag

        # wait for assets to build, either manually or with:
        ./scripts-dev/release.py wait-for-actions
        # ... wait for assets to build ...

        ./scripts-dev/release.py publish

        ./scripts-dev/release.py upload

        # Optional: generate some nice links for the announcement

        ./scripts-dev/release.py merge-back

        # Optional: generate some nice links for the announcement
        ./scripts-dev/release.py announce

    Alternatively, `./scripts-dev/release.py full` will do all the above
    as well as guiding you through the manual steps.

    If the env var GH_TOKEN (or GITHUB_TOKEN) is set, or passed into the
    `tag`/`publish` command, then a new draft release will be created/published.
    """
@@ -95,10 +90,6 @@ def cli() -> None:

@cli.command()
def prepare() -> None:
    _prepare()


def _prepare() -> None:
    """Do the initial stages of creating a release, including creating release
    branch, updating changelog and pushing to GitHub.
    """
@@ -293,10 +284,6 @@ def _prepare() -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: Optional[str]) -> None:
    _tag(gh_token)


def _tag(gh_token: Optional[str]) -> None:
    """Tags the release and generates a draft GitHub release"""

    # Make sure we're in a git repo.
@@ -387,10 +374,6 @@ def _tag(gh_token: Optional[str]) -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def publish(gh_token: str) -> None:
    _publish(gh_token)


def _publish(gh_token: str) -> None:
    """Publish release on GitHub."""

    # Make sure we're in a git repo.
@@ -427,12 +410,7 @@ def _publish(gh_token: str) -> None:


@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def upload(gh_token: Optional[str]) -> None:
    _upload(gh_token)


def _upload(gh_token: Optional[str]) -> None:
def upload() -> None:
    """Upload release to pypi."""

    current_version = get_package_version()
@@ -445,40 +423,18 @@ def _upload(gh_token: Optional[str]) -> None:
        click.echo(f"Tag {tag_name} (tag.commit) is not currently checked out!")
        click.get_current_context().abort()

    # Query all the assets corresponding to this release.
    gh = Github(gh_token)
    gh_repo = gh.get_repo("matrix-org/synapse")
    gh_release = gh_repo.get_release(tag_name)

    all_assets = set(gh_release.get_assets())

    # Only accept the wheels and sdist.
    # Notably: we don't care about debs.tar.xz.
    asset_names_and_urls = sorted(
        (asset.name, asset.browser_download_url)
        for asset in all_assets
        if asset.name.endswith((".whl", ".tar.gz"))
    )

    # Print out what we've determined.
    print("Found relevant assets:")
    for asset_name, _ in asset_names_and_urls:
        print(f" - {asset_name}")

    ignored_asset_names = sorted(
        {asset.name for asset in all_assets}
        - {asset_name for asset_name, _ in asset_names_and_urls}
    )
    print("\nIgnoring irrelevant assets:")
    for asset_name in ignored_asset_names:
        print(f" - {asset_name}")
    pypi_asset_names = [
        f"matrix_synapse-{current_version}-py3-none-any.whl",
        f"matrix-synapse-{current_version}.tar.gz",
    ]

    with TemporaryDirectory(prefix=f"synapse_upload_{tag_name}_") as tmpdir:
        for name, asset_download_url in asset_names_and_urls:
        for name in pypi_asset_names:
            filename = path.join(tmpdir, name)
            url = f"https://github.com/matrix-org/synapse/releases/download/{tag_name}/{name}"

            click.echo(f"Downloading {name} into {filename}")
            urllib.request.urlretrieve(asset_download_url, filename=filename)
            urllib.request.urlretrieve(url, filename=filename)

        if click.confirm("Upload to PyPI?", default=True):
            subprocess.run("twine upload *", shell=True, cwd=tmpdir)
@@ -523,75 +479,8 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:
    repo.remote().push()


@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def wait_for_actions(gh_token: Optional[str]) -> None:
    _wait_for_actions(gh_token)


def _wait_for_actions(gh_token: Optional[str]) -> None:
    # Find out the version and tag name.
    current_version = get_package_version()
    tag_name = f"v{current_version}"

    # Authentication is optional on this endpoint,
    # but use a token if we have one to reduce the chance of being rate-limited.
    url = f"https://api.github.com/repos/matrix-org/synapse/actions/runs?branch={tag_name}"
    headers = {"Accept": "application/vnd.github+json"}
    if gh_token is not None:
        headers["authorization"] = f"token {gh_token}"
    req = urllib.request.Request(url, headers=headers)

    time.sleep(10 * 60)
    while True:
        time.sleep(5 * 60)
        response = urllib.request.urlopen(req)
        resp = json.loads(response.read())

        if len(resp["workflow_runs"]) == 0:
            continue

        if all(
            workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
        ):
            success = all(
                workflow["status"] == "completed" for workflow in resp["workflow_runs"]
            )
            if success:
                _notify("Workflows successful. You can now continue the release.")
            else:
                _notify("Workflows failed.")
                click.confirm("Continue anyway?", abort=True)

            break


def _notify(message: str) -> None:
    # Send a bell character. Most terminals will play a sound or show a notification
    # for this.
    click.echo(f"\a{message}")

    # Try and run notify-send, but don't raise an Exception if this fails
    # (This is best-effort)
    # TODO Support other platforms?
    subprocess.run(
        [
            "notify-send",
            "--app-name",
            "Synapse Release Script",
            "--expire-time",
            "3600000",
            message,
        ]
    )


@cli.command()
def merge_back() -> None:
    _merge_back()


def _merge_back() -> None:
    """Merge the release branch back into the appropriate branches.
    All branches will be automatically pulled from the remote and the results
    will be pushed to the remote."""
@@ -630,10 +519,6 @@ def _merge_back() -> None:

@cli.command()
def announce() -> None:
    _announce()


def _announce() -> None:
    """Generate markdown to announce the release."""

    current_version = get_package_version()
@@ -663,56 +548,10 @@ Announce the release in
- #homeowners:matrix.org (Synapse Announcements), bumping the version in the topic
- #synapse:matrix.org (Synapse Admins), bumping the version in the topic
- #synapse-dev:matrix.org
- #synapse-package-maintainers:matrix.org

Ask the designated people to do the blog and tweets."""
- #synapse-package-maintainers:matrix.org"""
    )


@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=True)
def full(gh_token: str) -> None:
    click.echo("1. If this is a security release, read the security wiki page.")
    click.echo("2. Check for any release blockers before proceeding.")
    click.echo("   https://github.com/matrix-org/synapse/labels/X-Release-Blocker")

    click.confirm("Ready?", abort=True)

    click.echo("\n*** prepare ***")
    _prepare()

    click.echo("Deploy to matrix.org and ensure that it hasn't fallen over.")
    click.echo("Remember to silence the alerts to prevent alert spam.")
    click.confirm("Deployed?", abort=True)

    click.echo("\n*** tag ***")
    _tag(gh_token)

    click.echo("\n*** wait for actions ***")
    _wait_for_actions(gh_token)

    click.echo("\n*** publish ***")
    _publish(gh_token)

    click.echo("\n*** upload ***")
    _upload(gh_token)

    click.echo("\n*** merge back ***")
    _merge_back()

    click.echo("\nUpdate the Debian repository")
    click.confirm("Started updating Debian repository?", abort=True)

    click.echo("\nWait for all release methods to be ready.")
    # Docker should be ready because it was done by the workflows earlier
    # PyPI should be ready because we just ran upload().
    # TODO Automatically poll until the Debs have made it to packages.matrix.org
    click.confirm("Debs ready?", abort=True)

    click.echo("\n*** announce ***")
    _announce()


def get_package_version() -> version.Version:
    version_string = subprocess.check_output(["poetry", "version", "--short"]).decode(
        "utf-8"
@@ -1,2 +0,0 @@
|
||||
def sum_as_string(a: int, b: int) -> str: ...
|
||||
def get_rust_file_digest() -> str: ...
|
||||
@@ -1,46 +0,0 @@
|
||||
from typing import Any, Collection, Dict, Mapping, Optional, Sequence, Set, Tuple, Union

from synapse.types import JsonDict

class PushRule:
    @property
    def rule_id(self) -> str: ...
    @property
    def priority_class(self) -> int: ...
    @property
    def conditions(self) -> Sequence[Mapping[str, str]]: ...
    @property
    def actions(self) -> Sequence[Union[Mapping[str, Any], str]]: ...
    @property
    def default(self) -> bool: ...
    @property
    def default_enabled(self) -> bool: ...
    @staticmethod
    def from_db(
        rule_id: str, priority_class: int, conditions: str, actions: str
    ) -> "PushRule": ...

class PushRules:
    def __init__(self, rules: Collection[PushRule]): ...
    def rules(self) -> Collection[PushRule]: ...

class FilteredPushRules:
    def __init__(self, push_rules: PushRules, enabled_map: Dict[str, bool]): ...
    def rules(self) -> Collection[Tuple[PushRule, bool]]: ...

def get_base_rule_ids() -> Collection[str]: ...

class PushRuleEvaluator:
    def __init__(
        self,
        flattened_keys: Mapping[str, str],
        room_member_count: int,
        sender_power_level: Optional[int],
        notification_power_levels: Mapping[str, int],
    ): ...
    def run(
        self,
        push_rules: FilteredPushRules,
        user_id: Optional[str],
        display_name: Optional[str],
    ) -> Collection[dict]: ...
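These stubs describe classes implemented in the Rust extension, so the `...` bodies carry no behaviour of their own. Purely as an illustration of how the signatures above fit together, here is a hypothetical call sequence; all of the rule data and values are made up:

# Hypothetical usage inferred from the stub signatures above; in real code
# these names come from the synapse_rust extension, and the data is invented.
rule = PushRule.from_db(
    rule_id="global/override/.m.rule.suppress_notices",
    priority_class=5,
    conditions='[{"kind": "event_match", "key": "content.msgtype", "pattern": "m.notice"}]',
    actions='["dont_notify"]',
)
rules = PushRules([rule])
filtered = FilteredPushRules(rules, {rule.rule_id: True})

evaluator = PushRuleEvaluator(
    flattened_keys={"content.msgtype": "m.notice"},
    room_member_count=10,
    sender_power_level=50,
    notification_power_levels={"room": 50},
)
actions = evaluator.run(filtered, user_id="@alice:example.org", display_name="Alice")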
@@ -20,31 +20,31 @@ import json
import os
import sys

from synapse.util.rust import check_rust_lib_up_to_date
from synapse.util.stringutils import strtobool

# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 7):
    print("Synapse requires Python 3.7 or above.")
    sys.exit(1)

# Allow using the asyncio reactor via env var.
if strtobool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", "0")):
    from incremental import Version
if bool(os.environ.get("SYNAPSE_ASYNC_IO_REACTOR", False)):
    try:
        from incremental import Version

    import twisted
        import twisted

    # We need a bugfix that is included in Twisted 21.2.0:
    # https://twistedmatrix.com/trac/ticket/9787
    if twisted.version < Version("Twisted", 21, 2, 0):
        print("Using asyncio reactor requires Twisted>=21.2.0")
        sys.exit(1)
        # We need a bugfix that is included in Twisted 21.2.0:
        # https://twistedmatrix.com/trac/ticket/9787
        if twisted.version < Version("Twisted", 21, 2, 0):
            print("Using asyncio reactor requires Twisted>=21.2.0")
            sys.exit(1)

    import asyncio
        import asyncio

    from twisted.internet import asyncioreactor
        from twisted.internet import asyncioreactor

    asyncioreactor.install(asyncio.get_event_loop())
        asyncioreactor.install(asyncio.get_event_loop())
    except ImportError:
        pass

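The ordering in this block is the important part: a Twisted reactor can only be installed before anything imports `twisted.internet.reactor`, which is why the check sits at the top of the package. The same pattern in a self-contained form:

# Standalone sketch of installing Twisted's asyncio reactor. Mirrors the
# pattern above: install must happen before any twisted.internet.reactor import.
import asyncio

from twisted.internet import asyncioreactor

asyncioreactor.install(asyncio.get_event_loop())

# Only now is it safe to import the reactor; it will be the asyncio-backed one.
from twisted.internet import reactor

print(type(reactor))  # e.g. AsyncioSelectorReactor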
# Twisted and canonicaljson will fail to import when this file is executed to
# get the __version__ during a fresh install. That's OK and subsequent calls to
@@ -78,6 +78,3 @@ if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
    from synapse.util.patch_inline_callbacks import do_patch

    do_patch()


check_rust_lib_up_to_date()
@@ -1,6 +1,6 @@
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector
# Copyright 2021-22 The Matrix.org Foundation C.I.C.
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,22 +20,11 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Dict, Optional
from typing import Callable, Optional

import requests
import yaml

_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
are both defined in config file.
"""

_NO_SHARED_SECRET_OPTS_ERROR = """\
No 'registration_shared_secret' or 'registration_shared_secret_path' defined in config.
"""

_DEFAULT_SERVER_URL = "http://localhost:8008"


def request_registration(
    user: str,
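The diff view truncates `request_registration` here. For context, Synapse's shared-secret registration endpoint (`/_synapse/admin/v1/register`) expects an HMAC-SHA1, keyed with the shared secret, over a server-supplied nonce and the registration details. A minimal sketch of that MAC computation, with placeholder values:

import hashlib
import hmac

# Placeholder inputs; in the real flow the nonce comes from a GET to
# /_synapse/admin/v1/register on the homeserver.
shared_secret = "not-a-real-secret"
nonce = "abcdef0123456789"
user = "alice"
password = "hunter2"
admin = False

mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)
mac.update(nonce.encode("utf8"))
mac.update(b"\x00")
mac.update(user.encode("utf8"))
mac.update(b"\x00")
mac.update(password.encode("utf8"))
mac.update(b"\x00")
mac.update(b"admin" if admin else b"notadmin")
print(mac.hexdigest())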
@@ -214,104 +203,31 @@ def main() -> None:
    parser.add_argument(
        "server_url",
        default="https://localhost:8448",
        nargs="?",
        help="URL to use to talk to the homeserver. By default, tries to find a "
        "suitable URL from the configuration file. Otherwise, defaults to "
        f"'{_DEFAULT_SERVER_URL}'.",
        help="URL to use to talk to the homeserver. Defaults to "
        " 'https://localhost:8448'.",
    )

    args = parser.parse_args()

    if "config" in args and args.config:
        config = yaml.safe_load(args.config)

    if args.shared_secret:
        secret = args.shared_secret
    else:
        # argparse should check that we have either config or shared secret
        assert config

        secret = config.get("registration_shared_secret")
        secret_file = config.get("registration_shared_secret_path")
        if secret_file:
            if secret:
                print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
                sys.exit(1)
            secret = _read_file(secret_file, "registration_shared_secret_path").strip()
        secret = config.get("registration_shared_secret", None)
        if not secret:
            print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
            print("No 'registration_shared_secret' defined in config.")
            sys.exit(1)

    if args.server_url:
        server_url = args.server_url
    elif config:
        server_url = _find_client_listener(config)
        if not server_url:
            server_url = _DEFAULT_SERVER_URL
            print(
                "Unable to find a suitable HTTP listener in the configuration file. "
                f"Trying {server_url} as a last resort.",
                file=sys.stderr,
            )
    else:
        server_url = _DEFAULT_SERVER_URL
        print(
            f"No server url or configuration file given. Defaulting to {server_url}.",
            file=sys.stderr,
        )
    secret = args.shared_secret

    admin = None
    if args.admin or args.no_admin:
        admin = args.admin

    register_new_user(
        args.user, args.password, server_url, secret, admin, args.user_type
        args.user, args.password, args.server_url, secret, admin, args.user_type
    )


def _read_file(file_path: Any, config_path: str) -> str:
    """Check the given file exists, and read it into a string

    If it does not, exit with an error indicating the problem

    Args:
        file_path: the file to be read
        config_path: where in the configuration file_path came from, so that a useful
            error can be emitted if it does not exist.
    Returns:
        content of the file.
    """
    if not isinstance(file_path, str):
        print(f"{config_path} setting is not a string", file=sys.stderr)
        sys.exit(1)

    try:
        with open(file_path) as file_stream:
            return file_stream.read()
    except OSError as e:
        print(f"Error accessing file {file_path}: {e}", file=sys.stderr)
        sys.exit(1)


def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
    # Try to find a suitable listener in the config. Returns a base URL.
    for listener in config.get("listeners", []):
        if listener.get("type") != "http" or listener.get("tls", False):
            continue

        if not any(
            name == "client"
            for resource in listener.get("resources", [])
            for name in resource.get("names", [])
        ):
            continue

        # TODO: consider bind_addresses
        return f"http://localhost:{listener['port']}"

    # no suitable listeners?
    return None
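To make the listener-selection rules concrete, here is what `_find_client_listener` returns for a made-up `listeners` section: TLS listeners and listeners without a `client` resource are skipped, and the first plain HTTP client listener wins.

# Example input mirroring a typical homeserver.yaml listeners section;
# the values are made up for illustration.
example_config = {
    "listeners": [
        # Skipped: it is a TLS listener.
        {"port": 8448, "type": "http", "tls": True,
         "resources": [{"names": ["client", "federation"]}]},
        # Selected: plain HTTP with a "client" resource.
        {"port": 8008, "type": "http", "tls": False,
         "resources": [{"names": ["client"]}]},
    ]
}

assert _find_client_listener(example_config) == "http://localhost:8008"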

if __name__ == "__main__":
    main()
Some files were not shown because too many files have changed in this diff.