Compare commits

5 Commits: shay/add_c...erikj/tree

| Author | SHA1 | Date |
|---|---|---|
|  | dcd574b89c |  |
|  | c03785e121 |  |
|  | b9cdf3d85e |  |
|  | 18ac015ecd |  |
|  | 4874d6320a |  |
@@ -50,16 +50,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    check_is_abi3_compatible(wheel_file)

    # HACK: it seems that some older versions of pip will consider a wheel marked
    # as macosx_11_0 as incompatible with Big Sur. I haven't done the full archaeology
    # here; there are some clues in
    #     https://github.com/pantsbuild/pants/pull/12857
    #     https://github.com/pypa/pip/issues/9138
    #     https://github.com/pypa/packaging/pull/319
    # Empirically this seems to work, note that macOS 11 and 10.16 are the same,
    # both versions are valid for backwards compatibility.
    platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
    abi3_tag = Tag(tag.interpreter, "abi3", platform)
    abi3_tag = Tag(tag.interpreter, "abi3", tag.platform)

    dirname = os.path.dirname(wheel_file)
    new_wheel_file = os.path.join(
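The two adjacent `abi3_tag = ...` lines are the before/after of this hunk: the removed workaround rewrote the platform tag before building the abi3 tag, while the replacement uses the wheel's platform unchanged. A minimal sketch of the idea, assuming `packaging.tags.Tag` (the surrounding helpers such as `check_is_abi3_compatible` belong to the script itself):

```python
# Sketch only: how an abi3 wheel tag is derived; apply_macos_hack reproduces the removed workaround.
from packaging.tags import Tag

def abi3_tag_for(tag: Tag, apply_macos_hack: bool = False) -> Tag:
    platform = tag.platform
    if apply_macos_hack:
        # macOS 11 and 10.16 name the same release, so older pips that reject
        # macosx_11_0 wheels still accept the macosx_10_16 spelling.
        platform = platform.replace("macosx_11_0", "macosx_10_16")
    return Tag(tag.interpreter, "abi3", platform)

# e.g. abi3_tag_for(Tag("cp38", "cp38", "macosx_11_0_x86_64"))
```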
@@ -109,26 +109,11 @@ sytest_tests = [
        "postgres": "multi-postgres",
        "workers": "workers",
    },
    {
        "sytest-tag": "focal",
        "postgres": "multi-postgres",
        "workers": "workers",
        "reactor": "asyncio",
    },
]

if not IS_PR:
    sytest_tests.extend(
        [
            {
                "sytest-tag": "focal",
                "reactor": "asyncio",
            },
            {
                "sytest-tag": "focal",
                "postgres": "postgres",
                "reactor": "asyncio",
            },
            {
                "sytest-tag": "testing",
                "postgres": "postgres",
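These dicts are entries in a SyTest job matrix that the CI script serialises for GitHub Actions to consume. A rough sketch of that hand-off (the function and output names here are assumptions, not the script's actual code):

```python
# Sketch: publish the sytest_tests list as a job-matrix output for GitHub Actions.
import json
import os

def emit_sytest_matrix(sytest_tests: list) -> None:
    # GitHub Actions reads step/job outputs from the file named by $GITHUB_OUTPUT.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"sytest_test_matrix={json.dumps(sytest_tests)}\n")
```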
@@ -1,23 +0,0 @@
#! /usr/bin/env python
import sys

if sys.version_info < (3, 11):
    raise RuntimeError("Requires at least Python 3.11, to import tomllib")

import tomllib

with open("poetry.lock", "rb") as f:
    lockfile = tomllib.load(f)

try:
    lock_version = lockfile["metadata"]["lock-version"]
    assert lock_version == "2.0"
except Exception:
    print(
        """\
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
https://matrix-org.github.io/synapse/develop/development/dependencies.html
"""
    )
    raise
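For reference, the `[metadata]` table this script reads sits at the end of `poetry.lock`; a self-contained sketch of the same check against an inline example (keys other than `lock-version` are illustrative):

```python
# Sketch: the lockfile shape the check above expects (Python 3.11+ for tomllib).
import tomllib

EXAMPLE_METADATA = """
[metadata]
lock-version = "2.0"
python-versions = "^3.7.1"
"""

assert tomllib.loads(EXAMPLE_METADATA)["metadata"]["lock-version"] == "2.0"
```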
@@ -31,6 +31,34 @@ sed -i \
   -e '/systemd/d' \
   pyproject.toml

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install toml wheel

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pip install poetry==1.2.0
poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"
@@ -9,9 +9,19 @@ set -eu
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'

block Set Go Version
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path

# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
# Add the Go path to the PATH: We need this so we can call gotestfmt
echo "~/go/bin" >> $GITHUB_PATH
endblock

block Install Complement Dependencies
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
go get -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
endblock

block Install custom gotestfmt template
@@ -23,9 +23,8 @@ poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-dat
  --output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
dir_r="/tmp/export_data/rooms"
dir_u="/tmp/export_data/user_data"
if [ -d "$dir_r" ] && [ -d "$dir_u" ]; then
dir="/tmp/export_data/rooms"
if [ -d "$dir" ]; then
  echo "Command successful, this test passes"
else
  echo "No output directories found, the command fails against a sqlite database."
@@ -44,9 +43,8 @@ poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-d
  --output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
dir_r2="/tmp/export_data2/rooms"
dir_u2="/tmp/export_data2/user_data"
if [ -d "$dir_r2" ] && [ -d "$dir_u2" ]; then
dir2="/tmp/export_data2/rooms"
if [ -d "$dir2" ]; then
  echo "Command successful, this test passes"
else
  echo "No output directories found, the command fails against a postgres database."
18  .flake8  Normal file
@@ -0,0 +1,18 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
#  W503: line break before binary operator
#  W504: line break after binary operator
#  E203: whitespace before ':' (which is contrary to pep8?)
#  E731: do not assign a lambda expression, use a def
#  E501: Line too long (black enforces this for us)
#
# flake8-bugbear runs extra checks. Its error codes are described at
# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
#  B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
#  B023: Functions defined inside a loop must not use variables redefined in the loop
#  B024: Abstract base class with no abstract method.

ignore=W503,W504,E203,E731,E501,B019,B023,B024
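W503 and E203 are ignored because black's output routinely triggers them; a small illustration (not from this repo) of black-formatted code that flake8 would otherwise reject:

```python
# Both functions are formatted the way black writes them.

def total(first_value: int, second_value: int, third_value: int) -> int:
    return (
        first_value
        + second_value  # W503: line break *before* the binary operator
        + third_value
    )

def middle(items: list, offset: int) -> list:
    return items[offset + 1 : len(items) - 1]  # E203: whitespace before ':'
```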
@@ -21,8 +21,4 @@ aff1eb7c671b0a3813407321d2702ec46c71fa56
0a00b7ff14890987f09112a2ae696c61001e6cf1

# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7

# Update black to 23.1.0 (#15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11

c4268e3da64f1abb5b31deaeb5769adb6510c0a7
2  .github/ISSUE_TEMPLATE/BUG_REPORT.yml  vendored
@@ -129,7 +129,7 @@ body:
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output as text (not images), ideally at INFO or DEBUG log level.
        Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
        This will be automatically formatted into code, so there is no need for backticks (`\``).

        Please be careful to remove any personal or private data.
46  .github/workflows/dependabot_changelog.yml  vendored  Normal file
@@ -0,0 +1,46 @@
name: Write changelog for dependabot PR
on:
  pull_request:
    types:
      - opened
      - reopened # For debugging!

permissions:
  # Needed to be able to push the commit. See
  #     https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
  # for a similar example
  contents: write

jobs:
  add-changelog:
    runs-on: 'ubuntu-latest'
    if: ${{ github.actor == 'dependabot[bot]' }}
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
      - name: Write, commit and push changelog
        run: |
          echo "${{ github.event.pull_request.title }}." > "changelog.d/${{ github.event.pull_request.number }}".misc
          git add changelog.d
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "GitHub Actions"
          git commit -m "Changelog"
          git push
        shell: bash
      # The `git push` above does not trigger CI on the dependabot PR.
      #
      # By default, workflows can't trigger other workflows when they're just using the
      # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
      # recursive workflow loops by accident, because that'll get very expensive very
      # quickly.) Instead, you have to manually call out to another workflow, or else
      # make your changes (i.e. the `git push` above) using a personal access token.
      # See
      # https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
      #
      # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
      # See git commit history for previous attempts. If anyone desperately wants to try
      # again in the future, make a matrix-bot account and use its access token to git push.

# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
# are sufficiently locked down to dependabot only as above.
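The run step above amounts to "write `changelog.d/<PR number>.misc` containing the PR title, then commit and push it". An equivalent sketch in Python, purely illustrative (the workflow really does this with `echo` and `git`):

```python
# Sketch of the changelog-writing step; the ".misc" suffix and trailing "." mirror the workflow.
from pathlib import Path

def write_dependabot_changelog(pr_number: int, pr_title: str, repo_root: Path = Path(".")) -> Path:
    entry = repo_root / "changelog.d" / f"{pr_number}.misc"
    entry.write_text(f"{pr_title}.\n")
    return entry
```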
14  .github/workflows/docker.yml  vendored
@@ -10,7 +10,6 @@ on:

permissions:
  contents: read
  packages: write

jobs:
  build:
@@ -35,20 +34,11 @@ jobs:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Calculate docker image tag
        id: set-tag
        uses: docker/metadata-action@master
        with:
          images: |
            docker.io/matrixdotorg/synapse
            ghcr.io/matrix-org/synapse
          images: matrixdotorg/synapse
          flavor: |
            latest=false
          tags: |
@@ -58,7 +48,7 @@ jobs:
            type=pep440,pattern={{raw}}

      - name: Build and push all platforms
        uses: docker/build-push-action@v4
        uses: docker/build-push-action@v3
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
4  .github/workflows/docs-pr-netlify.yaml  vendored
@@ -14,7 +14,7 @@ jobs:
      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
      - name: 📥 Download artifact
        uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0
        uses: dawidd6/action-download-artifact@e6e25ac3a2b93187502a8be1ef9e9603afc34925 # v2.24.2
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
          path: book

      - name: 📤 Deploy to Netlify
        uses: matrix-org/netlify-pr-preview@v2
        uses: matrix-org/netlify-pr-preview@v1
        with:
          path: book
          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
28  .github/workflows/docs-pr.yaml  vendored
@@ -4,15 +4,13 @@ on:
  pull_request:
    paths:
      - docs/**
      - book.toml
      - .github/workflows/docs-pr.yaml

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -34,27 +32,3 @@ jobs:
          path: book
          # We'll only use this in a workflow_run, then we're done with it
          retention-days: 1

  link-check:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup htmltest
        run: |
          wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
          echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
          tar zxf htmltest_0.17.0_linux_amd64.tar.gz

      - name: Test links with htmltest
        # Build the book with `./` as the site URL (to make checks on 404.html possible)
        # Then run htmltest (without checking external links since that involves the network and is slow).
        run: |
          MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
          ./htmltest book --skip-external
79  .github/workflows/docs.yaml  vendored
@@ -13,10 +13,25 @@ on:
  workflow_dispatch:

jobs:
  pre:
    name: Calculate variables for GitHub Pages deployment
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      # Figure out the target directory.
      #
      # The target directory depends on the name of the branch
@@ -40,65 +55,11 @@ jobs:

          # finally, set the 'branch-version' var.
          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
    outputs:
      branch-version: ${{ steps.vars.outputs.branch-version }}

  ################################################################################
  pages-docs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    needs:
      - pre
    steps:
      - uses: actions/checkout@v3

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
        with:
          mdbook-version: '0.4.17'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
        uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
          destination_dir: ./${{ needs.pre.outputs.branch-version }}

  ################################################################################
  pages-devdocs:
    name: GitHub Pages (developer docs)
    runs-on: ubuntu-latest
    needs:
      - pre
    steps:
      - uses: actions/checkout@v3

      - name: "Set up Sphinx"
        uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.3.2"
          groups: "dev-docs"
          extras: ""

      - name: Build the documentation
        run: |
          cd dev-docs
          poetry run make html

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@373f7f263a76c20808c831209c920827a82a2847 # v3.9.3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./dev-docs/_build/html
          destination_dir: ./dev-docs/${{ needs.pre.outputs.branch-version }}
          destination_dir: ./${{ steps.vars.outputs.branch-version }}
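The `pre` job's shell step distils a `branch-version` output from the git ref, and both deploy jobs use it as their target directory. A rough Python rendering of that mapping (the exact branch-name rules are an assumption from context, not a copy of the shell):

```python
# Sketch: git ref -> docs deploy directory, mirroring the branch-version output above.
def branch_version(github_ref: str) -> str:
    ref = github_ref.removeprefix("refs/heads/").removeprefix("refs/tags/")
    if ref.startswith("release-"):
        ref = ref.removeprefix("release-")  # e.g. release-v1.91 -> v1.91
    return ref  # develop stays "develop"; tags such as v1.91.0 keep their own name

# The deploy step then uses it as: destination_dir: ./${{ needs.pre.outputs.branch-version }}
```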
41  .github/workflows/latest_deps.yml  vendored
@@ -22,26 +22,14 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check_repo:
|
||||
# Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
|
||||
# only useful to the Synapse core team.
|
||||
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
|
||||
# of the workflow will be skipped as well.
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
|
||||
steps:
|
||||
- id: check_condition
|
||||
run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
mypy:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||
@@ -49,7 +37,7 @@ jobs:
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "1.3.2"
|
||||
poetry-version: "1.2.0"
|
||||
extras: "all"
|
||||
# Dump installed versions for debugging.
|
||||
- run: poetry run pip list > before.txt
|
||||
@@ -61,8 +49,6 @@ jobs:
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
trial:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -75,7 +61,9 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
@@ -121,8 +109,6 @@ jobs:
|
||||
|
||||
|
||||
sytest:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:testing
|
||||
@@ -148,7 +134,9 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Ensure sytest runs `pip install`
|
||||
@@ -174,8 +162,7 @@ jobs:
|
||||
|
||||
|
||||
complement:
|
||||
needs: check_repo
|
||||
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
@@ -197,8 +184,6 @@ jobs:
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- uses: actions/setup-go@v4
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
@@ -211,7 +196,7 @@ jobs:
|
||||
# Open an issue if the build fails, so we know about it.
|
||||
# Only do this if we're not experimenting with this action in a PR.
|
||||
open-issue:
|
||||
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
|
||||
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
|
||||
needs:
|
||||
# TODO: should mypy be included here? It feels more brittle than the others.
|
||||
- mypy
|
||||
@@ -223,7 +208,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
||||
- uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
|
||||
24  .github/workflows/poetry_lockfile.yaml  vendored
@@ -1,24 +0,0 @@
on:
  push:
    branches: ["develop", "release-*"]
    paths:
      - poetry.lock
  pull_request:
    paths:
      - poetry.lock

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  check-sdists:
    name: "Check locked dependencies have sdists"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.x'
      - run: pip install tomli
      - run: ./scripts-dev/check_locked_deps_have_sdists.py
2  .github/workflows/push_complement_image.yml  vendored
@@ -48,7 +48,7 @@ jobs:
        with:
          ref: master
      - name: Login to registry
        uses: docker/login-action@v2
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
9  .github/workflows/release-artifacts.yml  vendored
@@ -4,15 +4,13 @@ name: Build release artifacts
|
||||
|
||||
on:
|
||||
# we build on PRs and develop to (hopefully) get early warning
|
||||
# of things breaking (but only build one set of debs). PRs skip
|
||||
# building wheels on macOS & ARM.
|
||||
# of things breaking (but only build one set of debs)
|
||||
pull_request:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
|
||||
# we do the full build on tags.
|
||||
tags: ["v*"]
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
@@ -34,7 +32,6 @@ jobs:
|
||||
- id: set-distros
|
||||
run: |
|
||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
||||
# NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
|
||||
dists='["debian:sid"]'
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
|
||||
@@ -130,7 +127,7 @@ jobs:
|
||||
python-version: "3.x"
|
||||
|
||||
- name: Install cibuildwheel
|
||||
run: python -m pip install cibuildwheel==2.9.0
|
||||
run: python -m pip install cibuildwheel==2.9.0 poetry==1.2.0
|
||||
|
||||
- name: Set up QEMU to emulate aarch64
|
||||
if: matrix.arch == 'aarch64'
|
||||
@@ -151,7 +148,7 @@ jobs:
|
||||
env:
|
||||
# Skip testing for platforms which various libraries don't have wheels
|
||||
# for, and so need extra build deps.
|
||||
CIBW_TEST_SKIP: pp3*-* *i686* *musl*
|
||||
CIBW_TEST_SKIP: pp39-* *i686* *musl* pp37-macosx*
|
||||
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
||||
|
||||
218  .github/workflows/tests.yml  vendored
@@ -4,7 +4,6 @@ on:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
pull_request:
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
@@ -34,13 +33,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "1.3.2"
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||
- run: poetry run scripts-dev/config-lint.sh
|
||||
@@ -55,70 +52,10 @@ jobs:
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
check-lockfile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: .ci/scripts/check_lockfile.py
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
install-project: "false"
|
||||
|
||||
- name: Import order (isort)
|
||||
run: poetry run isort --check --diff .
|
||||
|
||||
- name: Code style (black)
|
||||
run: poetry run black --check --diff .
|
||||
|
||||
- name: Semantic checks (ruff)
|
||||
# --quiet suppresses the update check.
|
||||
run: poetry run ruff --quiet .
|
||||
|
||||
lint-mypy:
|
||||
runs-on: ubuntu-latest
|
||||
name: Typechecking
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
# We want to make use of type hints in optional dependencies too.
|
||||
extras: all
|
||||
# We have seen odd mypy failures that were resolved when we started
|
||||
# installing the project again:
|
||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
||||
# To make CI green, err towards caution and install the project.
|
||||
install-project: "true"
|
||||
|
||||
# Cribbed from
|
||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
||||
- name: Restore/persist mypy's cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
.mypy_cache
|
||||
key: mypy-cache-${{ github.context.sha }}
|
||||
restore-keys: mypy-cache-
|
||||
|
||||
- name: Run mypy
|
||||
run: poetry run mypy
|
||||
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
||||
with:
|
||||
typechecking-extras: "all"
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -149,12 +86,8 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
poetry-version: "1.3.2"
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/check_pydantic_models.py
|
||||
|
||||
@@ -167,8 +100,12 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
components: clippy
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -185,7 +122,10 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: nightly-2022-12-01
|
||||
components: clippy
|
||||
@@ -202,10 +142,12 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
# We use nightly so that it correctly groups together imports
|
||||
toolchain: nightly-2022-12-01
|
||||
toolchain: 1.58.1
|
||||
components: rustfmt
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -216,13 +158,11 @@ jobs:
|
||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||
needs:
|
||||
- lint
|
||||
- lint-mypy
|
||||
- lint-crlf
|
||||
- lint-newsfile
|
||||
- lint-pydantic
|
||||
- check-sampleconfig
|
||||
- check-schema-delta
|
||||
- check-lockfile
|
||||
- lint-clippy
|
||||
- lint-rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
@@ -257,33 +197,33 @@ jobs:
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
        # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
        # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
        run: |
          docker run -d -p 5432:5432 \
            --tmpfs /var/lib/postgres:rw,size=6144m \
            --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.job.postgres-version }}

      - name: Install Rust
        uses: dtolnay/rust-toolchain@1.60.0
        # There don't seem to be versioned releases of this action per se: for each rust
        # version there is a branch which gets constantly rebased on top of master.
        # We pin to a specific commit for paranoia's sake.
        uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
        with:
          toolchain: 1.58.1
      - uses: Swatinem/rust-cache@v2

      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: ${{ matrix.job.python-version }}
          poetry-version: "1.3.2"
          extras: ${{ matrix.job.extras }}
      - name: Await PostgreSQL
        if: ${{ matrix.job.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: poetry run trial --jobs=6 tests
      - run: poetry run trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: /var/run/postgresql
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
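The point of bind-mounting `/var/run/postgresql` and pointing `SYNAPSE_POSTGRES_HOST` at it is that libpq treats a host beginning with `/` as a unix-socket directory, bypassing docker-proxy TCP. A small sketch outside CI (assuming `psycopg2` and the credentials set above):

```python
# Sketch: connect over the unix socket exposed by the postgres container above.
import psycopg2

conn = psycopg2.connect(
    host="/var/run/postgresql",  # a path, not a hostname: use the unix socket
    user="postgres",
    password="postgres",
    dbname="postgres",
)
print(conn.server_version)
conn.close()
```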
@@ -308,39 +248,51 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
# There aren't wheels for some of the older deps, so we need to install
|
||||
# their build dependencies
|
||||
- run: |
|
||||
sudo apt-get -qq update
|
||||
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.7'
|
||||
|
||||
# Calculating the old-deps actually takes a bunch of time, so we cache the
|
||||
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
|
||||
# otherwise the `poetry install` step will error due to the poetry.lock
|
||||
# file being outdated.
|
||||
#
|
||||
# This caches the output of `Prepare old deps`, which should generate the
|
||||
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
|
||||
- uses: actions/cache@v3
|
||||
id: cache-poetry-old-deps
|
||||
name: Cache poetry.lock
|
||||
with:
|
||||
path: |
|
||||
poetry.lock
|
||||
pyproject.toml
|
||||
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
|
||||
- name: Prepare old deps
|
||||
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
||||
run: .ci/scripts/prepare_old_deps.sh
|
||||
|
||||
# Note: we install using `pip` here, not poetry. `poetry install` ignores the
|
||||
# build-system section (https://github.com/python-poetry/poetry/issues/6154), but
|
||||
# we explicitly want to test that you can `pip install` using the oldest version
|
||||
# of poetry-core and setuptools-rust.
|
||||
- run: pip install .[all,test]
|
||||
# We only now install poetry so that `setup-python-poetry` caches the
|
||||
# right poetry.lock's dependencies.
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: '3.7'
|
||||
extras: "all test"
|
||||
|
||||
# We nuke the local copy, as we've installed synapse into the virtualenv
|
||||
# (rather than use an editable install, which we no longer support). If we
|
||||
# don't do this then python can't find the native lib.
|
||||
- run: rm -rf synapse/
|
||||
|
||||
# Sanity check we can import/run Synapse
|
||||
- run: python -m synapse.app.homeserver --help
|
||||
|
||||
- run: python -m twisted.trial -j6 tests
|
||||
- run: poetry run trial -j2 tests
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
@@ -372,7 +324,6 @@ jobs:
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
poetry-version: "1.3.2"
|
||||
extras: ${{ matrix.extras }}
|
||||
- run: poetry run trial --jobs=2 tests
|
||||
- name: Dump logs
|
||||
@@ -399,8 +350,7 @@ jobs:
|
||||
env:
|
||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
||||
POSTGRES: ${{ matrix.job.postgres && 1}}
|
||||
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
|
||||
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
|
||||
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
|
||||
WORKERS: ${{ matrix.job.workers && 1 }}
|
||||
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
|
||||
TOP: ${{ github.workspace }}
|
||||
@@ -416,7 +366,12 @@ jobs:
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Run SyTest
|
||||
@@ -460,7 +415,6 @@ jobs:
|
||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
poetry-version: "1.3.2"
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_export_data_command.sh
|
||||
env:
|
||||
@@ -512,7 +466,6 @@ jobs:
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
poetry-version: "1.3.2"
|
||||
extras: "postgres"
|
||||
- run: .ci/scripts/test_synapse_port_db.sh
|
||||
id: run_tester_script
|
||||
@@ -556,21 +509,21 @@ jobs:
|
||||
path: synapse
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- uses: actions/setup-go@v4
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
- run: |
|
||||
set -o pipefail
|
||||
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
||||
POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
||||
shell: bash
|
||||
env:
|
||||
POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
|
||||
WORKERS: ${{ (matrix.arrangement == 'workers') && 1 || '' }}
|
||||
name: Run Complement Tests
|
||||
|
||||
cargo-test:
|
||||
@@ -584,31 +537,16 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@1.60.0
|
||||
# There don't seem to be versioned releases of this action per se: for each rust
|
||||
# version there is a branch which gets constantly rebased on top of master.
|
||||
# We pin to a specific commit for paranoia's sake.
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: 1.58.1
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: cargo test
|
||||
|
||||
# We want to ensure that the cargo benchmarks still compile, which requires a
|
||||
# nightly compiler.
|
||||
cargo-bench:
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- linting-done
|
||||
- changes
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: nightly-2022-12-01
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- run: cargo bench --no-run
|
||||
|
||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||
tests-done:
|
||||
if: ${{ always() }}
|
||||
@@ -620,7 +558,6 @@ jobs:
|
||||
- portdb
|
||||
- complement
|
||||
- cargo-test
|
||||
- cargo-bench
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: matrix-org/done-action@v2
|
||||
@@ -632,4 +569,3 @@ jobs:
|
||||
skippable: |
|
||||
lint-newsfile
|
||||
cargo-test
|
||||
cargo-bench
|
||||
|
||||
2  .github/workflows/triage-incoming.yml  vendored
@@ -6,7 +6,7 @@ on:

jobs:
  triage:
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
51  .github/workflows/twisted_trunk.yml  vendored
@@ -5,42 +5,22 @@ on:
|
||||
- cron: 0 8 * * *
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
twisted_ref:
|
||||
description: Commit, branch or tag to checkout from upstream Twisted.
|
||||
required: false
|
||||
default: 'trunk'
|
||||
type: string
|
||||
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check_repo:
|
||||
# Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
|
||||
# only useful to the Synapse core team.
|
||||
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
|
||||
# of the workflow will be skipped as well.
|
||||
if: github.repository == 'matrix-org/synapse'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
|
||||
steps:
|
||||
- id: check_condition
|
||||
run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
mypy:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
@@ -49,15 +29,13 @@ jobs:
|
||||
extras: "all"
|
||||
- run: |
|
||||
poetry remove twisted
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
|
||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
poetry install --no-interaction --extras "all test"
|
||||
- name: Remove warn_unused_ignores from mypy config
|
||||
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||
- run: poetry run mypy
|
||||
|
||||
trial:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -65,7 +43,9 @@ jobs:
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@v1
|
||||
@@ -92,8 +72,6 @@ jobs:
|
||||
|| true
|
||||
|
||||
sytest:
|
||||
needs: check_repo
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:buster
|
||||
@@ -104,7 +82,9 @@ jobs:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e645b0cf01249a964ec099494d38d2da0f0b349f
|
||||
with:
|
||||
toolchain: stable
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
- name: Patch dependencies
|
||||
@@ -138,8 +118,7 @@ jobs:
|
||||
/logs/**/*.log*
|
||||
|
||||
complement:
|
||||
needs: check_repo
|
||||
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
|
||||
if: "${{ !failure() && !cancelled() }}"
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
@@ -161,8 +140,6 @@ jobs:
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- uses: actions/setup-go@v4
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
@@ -171,7 +148,7 @@ jobs:
|
||||
run: |
|
||||
set -x
|
||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
||||
pipx install poetry==1.3.2
|
||||
pipx install poetry==1.2.0
|
||||
|
||||
poetry remove -n twisted
|
||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||
@@ -186,7 +163,7 @@ jobs:
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
if: failure()
|
||||
needs:
|
||||
- mypy
|
||||
- trial
|
||||
@@ -197,7 +174,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
||||
- uses: JasonEtco/create-an-issue@77399b6110ef82b94c1c9f9f615acf9e604f7f56 # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
|
||||
13  .gitignore  vendored
@@ -15,10 +15,9 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want poetry, cargo and flake lockfiles.
# We do want the poetry and cargo lockfile.
!poetry.lock
!Cargo.lock
!flake.lock

# stuff that is likely to exist when you run a server locally
/*.db
@@ -37,10 +36,6 @@ __pycache__/

# For direnv users
/.envrc
.direnv/

# For nix/devenv users
.devenv/

# IDEs
/.idea/
@@ -57,7 +52,6 @@ __pycache__/
/coverage.*
/dist/
/docs/build/
/dev-docs/_build/
/htmlcov
/pip-wheel-metadata/

@@ -66,7 +60,7 @@ book/

# complement
/complement-*
/main.tar.gz
/master.tar.gz

# rust
/target/
@@ -74,6 +68,3 @@ book/

# Poetry will create a setup.py, which we don't want to include.
/setup.py

# Don't include users' poetry configs
/poetry.toml
1292  CHANGES.md  (file diff suppressed because it is too large)
68  Cargo.lock  generated
@@ -4,18 +4,18 @@ version = 3
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.0.2"
|
||||
version = "0.7.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
|
||||
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyhow"
|
||||
version = "1.0.71"
|
||||
version = "1.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
|
||||
checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6"
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
@@ -37,9 +37,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "blake2"
|
||||
version = "0.10.6"
|
||||
version = "0.10.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe"
|
||||
checksum = "b12e5fd123190ce1c2e559308a94c9bacad77907d4c6005d9e58fe1a0689e55e"
|
||||
dependencies = [
|
||||
"digest",
|
||||
]
|
||||
@@ -132,9 +132,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.19"
|
||||
version = "0.4.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
|
||||
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
@@ -182,9 +185,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.52"
|
||||
version = "1.0.46"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
|
||||
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
@@ -229,9 +232,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-log"
|
||||
version = "0.8.2"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c94ff6535a6bae58d7d0b85e60d4c53f7f84d0d0aa35d6a28c3f3e70bfe51444"
|
||||
checksum = "e5695ccff5060c13ca1751cf8c857a12da9b0bf0378cb071c5e0326f7c7e4c1b"
|
||||
dependencies = [
|
||||
"arc-swap",
|
||||
"log",
|
||||
@@ -247,7 +250,7 @@ dependencies = [
|
||||
"proc-macro2",
|
||||
"pyo3-macros-backend",
|
||||
"quote",
|
||||
"syn 1.0.104",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -258,7 +261,7 @@ checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.104",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -273,9 +276,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.26"
|
||||
version = "1.0.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
|
||||
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
@@ -291,9 +294,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.8.4"
|
||||
version = "1.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f"
|
||||
checksum = "e076559ef8e241f2ae3479e36f97bd5741c0330689e217ad51ce2c76808b868a"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
@@ -302,9 +305,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.7.2"
|
||||
version = "0.6.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
|
||||
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
|
||||
|
||||
[[package]]
|
||||
name = "ryu"
|
||||
@@ -320,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.164"
|
||||
version = "1.0.148"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d"
|
||||
checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.164"
|
||||
version = "1.0.148"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68"
|
||||
checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.10",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.99"
|
||||
version = "1.0.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3"
|
||||
checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"ryu",
|
||||
@@ -372,17 +375,6 @@ dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "synapse"
|
||||
version = "0.1.0"
|
||||
|
||||
@@ -3,3 +3,7 @@

[workspace]
members = ["rust"]

[profile.dbgrelease]
inherits = "release"
debug = true
1  changelog.d/14255.misc  Normal file
@@ -0,0 +1 @@
Optimise push badge count calculations. Contributed by Nick @ Beeper (@fizzadar).
1  changelog.d/14490.feature  Normal file
@@ -0,0 +1 @@
Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.
1  changelog.d/14493.doc  Normal file
@@ -0,0 +1 @@
Update worker settings for `pusher` and `federation_sender` functionality.
1  changelog.d/14517.doc  Normal file
@@ -0,0 +1 @@
Add links to third party package repositories, and point to the bug which highlights Ubuntu's out-of-date packages.
1  changelog.d/14525.feature  Normal file
@@ -0,0 +1 @@
Stop using deprecated `keyIds` parameter when calling `/_matrix/key/v2/server`.
1  changelog.d/14528.misc  Normal file
@@ -0,0 +1 @@
Share the `ClientRestResource` for both workers and the main process.
1  changelog.d/14549.misc  Normal file
@@ -0,0 +1 @@
Faster joins: use servers list approximation to send read receipts when in partial state instead of waiting for the full state of the room.
1  changelog.d/14551.feature  Normal file
@@ -0,0 +1 @@
Add new `push.enabled` config option to allow opting out of push notification calculation.
1  changelog.d/14568.misc  Normal file
@@ -0,0 +1 @@
Modernize unit tests configuration related to workers.
1  changelog.d/14576.feature  Normal file
@@ -0,0 +1 @@
Advertise support for Matrix 1.5 on `/_matrix/client/versions`.
1  changelog.d/14591.misc  Normal file
@@ -0,0 +1 @@
Bump jsonschema from 4.17.0 to 4.17.3.
1  changelog.d/14592.bugfix  Normal file
@@ -0,0 +1 @@
Fix a long-standing bug where a device list update might not be sent to clients in certain circumstances.
1  changelog.d/14597.misc  Normal file
@@ -0,0 +1 @@
Add missing type hints.
1  changelog.d/14602.misc  Normal file
@@ -0,0 +1 @@
Fix Rust lint CI.
1  changelog.d/14607.misc  Normal file
@@ -0,0 +1 @@
Bump JasonEtco/create-an-issue from 2.5.0 to 2.8.1.
@@ -1 +0,0 @@
Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas`.
@@ -1 +0,0 @@
Fix a long-standing bug where media files were served in an unsafe manner. Contributed by @joshqou.
@@ -1 +0,0 @@
Improve `/messages` response time by avoiding backfill when we already have messages to return.
@@ -1 +0,0 @@
Regularly try to send transactions to other servers after they failed instead of waiting for a new event to be available before trying.
@@ -1 +0,0 @@
Remove experimental [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to incrementally import history into existing rooms.
@@ -1 +0,0 @@
Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983).
@@ -1 +0,0 @@
Avoid invalidating a cache that was just prefilled.
@@ -1 +0,0 @@
Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983).
@@ -1 +0,0 @@
Document `looping_call()` functionality that will wait for the given function to finish before scheduling another.
@@ -1 +0,0 @@
Fix joining rooms through aliases where the alias server isn't a real homeserver. Contributed by @tulir @ Beeper.
@@ -1 +0,0 @@
Fix a bug in push rules handling leading to an invalid (per spec) `is_user_mention` rule sent to clients. Also fix wrong rule names for `is_user_mention` and `is_room_mention`.
@@ -1 +0,0 @@
Allow for the configuration of max request retries and min/max retry delays in the matrix federation client.
@@ -1 +0,0 @@
Fix a bug introduced in 1.57.0 where the wrong table would be locked on updating database rows when using SQLite as the database backend.
@@ -1 +0,0 @@
Fix Sytest environmental variable evaluation in CI.
@@ -1 +0,0 @@
Fix a typo in the [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html).
@@ -1 +0,0 @@
Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic.
@@ -1 +0,0 @@
Fix typo in MSC number in faster remote room join architecture doc.
@@ -1 +0,0 @@
Fix harmless exceptions being printed when running the port DB script.
@@ -1 +0,0 @@
Fix forgotten rooms missing from initial sync after rejoining them. Contributed by Nico from Famedly.
@@ -1 +0,0 @@
Fix sqlite `user_filters` upgrade introduced in v1.86.0.
@@ -1 +0,0 @@
Add spam checker module API for logins.
@@ -1 +0,0 @@
Add check constraint to current_state_delta_stream (#15849).
@@ -1,28 +0,0 @@
|
||||
# Schema symlinks
|
||||
|
||||
This directory contains symlinks to the latest dump of the postgres full schema. This is useful to have, as it allows IDEs to understand our schema and provide autocomplete, linters, inspections, etc.
|
||||
|
||||
In particular, the DataGrip functionality in IntelliJ's products seems to only consider files called `*.sql` when defining a schema from DDL; `*.sql.postgres` will be ignored. To get around this we symlink those files to ones ending in `.sql`. We've chosen to ignore the `.sql.sqlite` schema dumps here, as they're not intended for production use (and are much quicker to test against).
|
||||
|
||||
## Example
|
||||

|
||||
|
||||
## Caveats
|
||||
|
||||
- Doesn't include temporary tables created ad-hoc by Synapse.
|
||||
- Postgres only. IDEs will likely be confused by SQLite-specific queries.
|
||||
- Will not include migrations created after the latest schema dump.
|
||||
- Symlinks might confuse checkouts on Windows systems.
|
||||
|
||||
## Instructions
|
||||
|
||||
### Jetbrains IDEs with DataGrip plugin
|
||||
|
||||
- View -> Tool Windows -> Database
|
||||
- `+` Icon -> DDL Data Source
|
||||
- Pick a name, e.g. `Synapse schema dump`
|
||||
- Under sources, click `+`.
|
||||
- Add an entry with Path pointing to this directory, and dialect set to PostgreSQL.
|
||||
- OK, and OK.
|
||||
- IDE should now be aware of the schema.
|
||||
- Try control-clicking on a table name in a bit of SQL e.g. in `_get_forgotten_rooms_for_user_txn`.
|
||||
@@ -1 +0,0 @@
|
||||
../../synapse/storage/schema/common/full_schemas/72/full.sql.postgres
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 13 KiB |
@@ -1 +0,0 @@
|
||||
../../synapse/storage/schema/main/full_schemas/72/full.sql.postgres
|
||||
@@ -1 +0,0 @@
|
||||
../../synapse/storage/schema/common/schema_version.sql
|
||||
@@ -1 +0,0 @@
|
||||
../../synapse/storage/schema/state/full_schemas/72/full.sql.postgres
|
||||
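For illustration only: the symlinks described in this README could be (re)created with a short Python snippet like the one below, run from the directory containing them. It is a hedged sketch based on the relative targets listed above; the local file names are hypothetical, and no such script exists in the repository.

```python
from pathlib import Path

# Relative targets taken from the symlink entries shown above; the local
# names ("common.sql", "main.sql", ...) are hypothetical choices.
SYMLINKS = {
    "common.sql": "../../synapse/storage/schema/common/full_schemas/72/full.sql.postgres",
    "main.sql": "../../synapse/storage/schema/main/full_schemas/72/full.sql.postgres",
    "schema_version.sql": "../../synapse/storage/schema/common/schema_version.sql",
    "state.sql": "../../synapse/storage/schema/state/full_schemas/72/full.sql.postgres",
}

for link_name, target in SYMLINKS.items():
    link = Path(link_name)
    if not link.exists():
        # symlink_to records the relative target without resolving it.
        link.symlink_to(target)
```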
@@ -68,12 +68,7 @@ redis:
  enabled: true
  host: redis
  port: 6379
  # dbid: <redis_logical_db_id>
  # password: <secret_password>
  # use_tls: True
  # certificate_file: <path_to_certificate>
  # private_key_file: <path_to_private_key>
  # ca_file: <path_to_ca_certificate>
```

This assumes that your Redis service is called `redis` in your Docker Compose file.

File diff suppressed because it is too large.
@@ -1,47 +0,0 @@
# `lnav` config for Synapse logs

[lnav](https://lnav.org/) is a log-viewing tool. It is particularly useful when
you need to interleave multiple log files, or for exploring a large log file
with regex filters. The downside is that it is not as ubiquitous as tools like
`less`, `grep`, etc.

This directory contains an `lnav` [log format definition](https://docs.lnav.org/en/v0.10.1/formats.html#defining-a-new-format) for Synapse logs as
emitted by Synapse with the default [logging configuration](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#log_config).
It supports lnav 0.10.1 because that's what's packaged by my distribution.

This should allow lnav:

- to interpret timestamps, allowing log interleaving;
- to interpret log severity levels, allowing colouring by log level(!!!);
- to interpret request IDs, allowing you to skip through a specific request; and
- to highlight room, event and user IDs in logs.

See also https://gist.github.com/benje/e2ab750b0a81d11920d83af637d289f7 for a
similar example.

## Example

[](https://asciinema.org/a/556133)

## Tips

- `lnav -i /path/to/synapse/checkout/contrib/lnav/synapse-log-format.json`
- `lnav my_synapse_log_file` or `lnav synapse_log_files.*`, etc.
- `lnav --help` for CLI help.

Within lnav itself:

- `?` for help within lnav itself.
- `q` to quit.
- `/` to search a-la `less` and `vim`, then `n` and `N` to continue searching
  down and up.
- Use `o` and `O` to skip through logs based on the request ID (`POST-1234`, or
  else the value of the [`request_id_header`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html?highlight=request_id_header#listeners) header).
  This may get confused if the same request ID is repeated among
  multiple files or process restarts.
- ???
- Profit
@@ -1,67 +0,0 @@
{
  "$schema": "https://lnav.org/schemas/format-v1.schema.json",
  "synapse": {
    "title": "Synapse logs",
    "description": "Logs output by Synapse, a Matrix homeserver, under its default logging config.",
    "regex": {
      "log": {
        "pattern": ".*(?<timestamp>\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}) - (?<logger>.+) - (?<lineno>\\d+) - (?<level>\\w+) - (?<context>.+) - (?<body>.*)"
      }
    },
    "json": false,
    "timestamp-field": "timestamp",
    "timestamp-format": [
      "%Y-%m-%d %H:%M:%S,%L"
    ],
    "level-field": "level",
    "body-field": "body",
    "opid-field": "context",
    "level": {
      "critical": "CRITICAL",
      "error": "ERROR",
      "warning": "WARNING",
      "info": "INFO",
      "debug": "DEBUG"
    },
    "sample": [
      {
        "line": "my-matrix-server-generic-worker-4 | 2023-01-27 09:47:09,818 - synapse.replication.tcp.client - 381 - ERROR - PUT-32992 - Timed out waiting for stream receipts",
        "level": "error"
      },
      {
        "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
        "level": "warning"
      },
      {
        "line": "my-matrix-server | 2023-01-25 20:55:54,433 - synapse.storage.databases - 66 - INFO - main - [database config 'master']: Checking database server",
        "level": "info"
      },
      {
        "line": "my-matrix-server | 2023-01-26 15:08:40,447 - synapse.access.http.8008 - 460 - INFO - PUT-74929 - 0.0.0.0 - 8008 - {@alice:example.com} Processed request: 0.011sec/0.000sec (0.000sec, 0.000sec) (0.001sec/0.008sec/3) 2B 200 \"PUT /_matrix/client/r0/user/%40alice%3Atexample.com/account_data/im.vector.setting.breadcrumbs HTTP/1.0\" \"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Element/1.11.20 Chrome/108.0.5359.179 Electron/22.0.3 Safari/537.36\" [0 dbevts]",
        "level": "info"
      }
    ],
    "highlights": {
      "user_id": {
        "pattern": "(@|%40)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
        "underline": true
      },
      "room_id": {
        "pattern": "(!|%21)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
        "underline": true
      },
      "room_alias": {
        "pattern": "(#|%23)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
        "underline": true
      },
      "event_id_v1_v2": {
        "pattern": "(\\$|%25)[^:% ]+(:|%3A)[\\[\\]0-9a-zA-Z.\\-:]+(:\\d{1,5})?(?<!:)",
        "underline": true
      },
      "event_id_v3_plus": {
        "pattern": "(\\$|%25)([A-Za-z0-9+/_]|-){43}",
        "underline": true
      }
    }
  }
}
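As a quick sanity check of the format definition above, the log regex can be exercised with Python's `re` module. This is an illustrative sketch only (the pattern is converted to Python's named-group syntax); the sample line is taken from the `sample` entries above.

```python
import re

# Same pattern as in synapse-log-format.json, with Python-style named groups.
LOG_PATTERN = re.compile(
    r".*(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) - "
    r"(?P<logger>.+) - (?P<lineno>\d+) - (?P<level>\w+) - "
    r"(?P<context>.+) - (?P<body>.*)"
)

line = (
    "my-matrix-server-generic-worker-4 | 2023-01-27 09:47:09,818 - "
    "synapse.replication.tcp.client - 381 - ERROR - PUT-32992 - "
    "Timed out waiting for stream receipts"
)

match = LOG_PATTERN.match(line)
assert match is not None
print(match.group("level"), match.group("context"))  # ERROR PUT-32992
```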
@@ -15,19 +15,19 @@ worker_name: generic_worker$i
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_main_http_uri: http://localhost:8008/

worker_listeners:
  - type: http
    port: 808$i
    x_forwarded: true
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
#worker_pid_file: DATADIR/generic_worker$i.pid
EOF
done
```

This would create five generic workers with a unique `worker_name` field in each file and listening on ports 8081-8085.

Customise the script to your needs. Note that `worker_pid_file` is required if `worker_daemonize` is `true`. Uncomment and/or modify the line if needed.
Customise the script to your needs.

@@ -8,9 +8,7 @@ It also prints out the example lines for Synapse main configuration file.

Remember to route necessary endpoints directly to a worker associated with it.

If you run the script as-is, it will create workers with the replication listener starting from port 8034 and another, regular http listener starting from 8044. If you don't need all of the stream writers listed in the script, just remove them from the ```STREAM_WRITERS``` array.

Hint: Note that `worker_pid_file` is required if `worker_daemonize` is `true`. Uncomment and/or modify the line if needed.
If you run the script as-is, it will create workers with the replication listener starting from port 8034 and another, regular http listener starting from 8044. If you don't need all of the stream writers listed in the script, just remove them from the ```STREAM_WRITERS``` array.

```sh
#!/bin/bash
@@ -48,11 +46,9 @@ worker_listeners:

  - type: http
    port: $(expr $HTTP_START_PORT + $i)
    x_forwarded: true
    resources:
      - names: [client]

#worker_pid_file: DATADIR/${STREAM_WRITERS[$i]}.pid
worker_log_config: /etc/matrix-synapse/stream-writer-log.yaml
EOF
HOMESERVER_YAML_INSTANCE_MAP+=$" ${STREAM_WRITERS[$i]}_stream_writer:

@@ -95,9 +91,7 @@ Simply run the script to create YAML files in the current folder and print out t

```console
$ ./create_stream_writers.sh
```

You should receive an output similar to the following:

```console

# Add these lines to your homeserver.yaml.
# Don't forget to configure your reverse proxy and
# necessary endpoints to their respective worker.
3 debian/build_virtualenv vendored
@@ -31,11 +31,12 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
esac

# Manually install Poetry and export a pip-compatible `requirements.txt`
# We need a Poetry pre-release as the export command is buggy in < 1.2
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.3.2
pip install poetry==1.2.0
poetry export \
    --extras all \
    --extras test \
232
debian/changelog
vendored
232
debian/changelog
vendored
@@ -1,235 +1,3 @@
|
||||
matrix-synapse-py3 (1.86.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.86.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Jun 2023 17:22:46 +0200
|
||||
|
||||
matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.86.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 14 Jun 2023 12:16:27 +0200
|
||||
|
||||
matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.86.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Jun 2023 14:30:45 +0200
|
||||
|
||||
matrix-synapse-py3 (1.85.2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.85.2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 08 Jun 2023 13:04:18 +0100
|
||||
|
||||
matrix-synapse-py3 (1.85.1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.85.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jun 2023 10:51:12 +0100
|
||||
|
||||
matrix-synapse-py3 (1.85.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.85.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Jun 2023 09:39:29 +0100
|
||||
|
||||
matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.85.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 01 Jun 2023 09:16:18 -0700
|
||||
|
||||
matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.85.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 30 May 2023 13:56:54 +0100
|
||||
|
||||
matrix-synapse-py3 (1.84.1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.84.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 26 May 2023 16:15:30 +0100
|
||||
|
||||
matrix-synapse-py3 (1.84.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.84.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 May 2023 10:57:22 +0100
|
||||
|
||||
matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.84.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 May 2023 11:12:02 +0100
|
||||
|
||||
matrix-synapse-py3 (1.83.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.83.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 09 May 2023 18:13:37 +0200
|
||||
|
||||
matrix-synapse-py3 (1.83.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.83.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 02 May 2023 15:56:38 +0100
|
||||
|
||||
matrix-synapse-py3 (1.82.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.82.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Apr 2023 11:56:06 +0100
|
||||
|
||||
matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.82.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Apr 2023 09:47:30 +0100
|
||||
|
||||
matrix-synapse-py3 (1.81.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.81.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Apr 2023 14:18:35 +0100
|
||||
|
||||
matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.81.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 06 Apr 2023 16:07:54 +0100
|
||||
|
||||
matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.81.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Apr 2023 14:29:03 +0100
|
||||
|
||||
matrix-synapse-py3 (1.80.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.80.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Mar 2023 11:10:33 +0100
|
||||
|
||||
matrix-synapse-py3 (1.80.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.80.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 22 Mar 2023 08:30:16 -0700
|
||||
|
||||
matrix-synapse-py3 (1.80.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.80.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Mar 2023 10:56:08 -0700
|
||||
|
||||
matrix-synapse-py3 (1.79.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.79.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Mar 2023 16:14:50 +0100
|
||||
|
||||
matrix-synapse-py3 (1.79.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.79.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 13 Mar 2023 12:54:21 +0000
|
||||
|
||||
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.79.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Mar 2023 12:03:49 +0000
|
||||
|
||||
matrix-synapse-py3 (1.78.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.78.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Feb 2023 08:56:03 -0800
|
||||
|
||||
matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium
|
||||
|
||||
* Add `matrix-org-archive-keyring` package as recommended.
|
||||
* New Synapse release 1.78.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Feb 2023 14:29:19 +0000
|
||||
|
||||
matrix-synapse-py3 (1.77.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.77.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Feb 2023 12:59:02 +0100
|
||||
|
||||
matrix-synapse-py3 (1.77.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.77.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Feb 2023 12:44:21 +0000
|
||||
|
||||
matrix-synapse-py3 (1.77.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.77.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Feb 2023 13:45:14 +0000
|
||||
|
||||
matrix-synapse-py3 (1.76.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.76.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Jan 2023 08:21:47 -0800
|
||||
|
||||
matrix-synapse-py3 (1.76.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.76.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 27 Jan 2023 11:17:57 +0000
|
||||
|
||||
matrix-synapse-py3 (1.76.0~rc1) stable; urgency=medium
|
||||
|
||||
* Use Poetry 1.3.2 to manage the bundled virtualenv included with this package.
|
||||
* New Synapse release 1.76.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 25 Jan 2023 16:21:16 +0000
|
||||
|
||||
matrix-synapse-py3 (1.75.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.75.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jan 2023 11:36:02 +0000
|
||||
|
||||
matrix-synapse-py3 (1.75.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.75.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 12 Jan 2023 10:30:15 -0800
|
||||
|
||||
matrix-synapse-py3 (1.75.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.75.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jan 2023 12:18:27 +0000
|
||||
|
||||
matrix-synapse-py3 (1.74.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.74.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Dec 2022 16:07:38 +0000
|
||||
|
||||
matrix-synapse-py3 (1.74.0~rc1) stable; urgency=medium
|
||||
|
||||
* New dependency on libicu-dev to provide improved results for user
|
||||
search.
|
||||
* New Synapse release 1.74.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Dec 2022 13:30:01 +0000
|
||||
|
||||
matrix-synapse-py3 (1.73.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.73.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Dec 2022 11:48:56 +0000
|
||||
|
||||
matrix-synapse-py3 (1.73.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.73.0rc2.
|
||||
|
||||
3 debian/control vendored
@@ -8,8 +8,6 @@ Build-Depends:
 dh-virtualenv (>= 1.1),
 libsystemd-dev,
 libpq-dev,
 libicu-dev,
 pkg-config,
 lsb-release,
 python3-dev,
 python3,
@@ -37,7 +35,6 @@ Depends:
# so we put perl:Depends in Suggests rather than Depends.
Recommends:
 ${shlibs1:Recommends},
 matrix-org-archive-keyring,
Suggests:
 sqlite3,
 ${perl:Depends},
@@ -46,7 +46,7 @@ for port in 8080 8081 8082; do
echo ''

# Warning, this heredoc depends on the interaction of tabs and spaces.
# Please don't accidentally bork me with your fancy settings.
# Please don't accidentaly bork me with your fancy settings.
listeners=$(cat <<-PORTLISTENERS
# Configure server to listen on both $https_port and $port
# This overides some of the default settings above
@@ -80,8 +80,12 @@ for port in 8080 8081 8082; do
echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""

# Request keys directly from servers contacted over federation
echo 'trusted_key_servers: []'
# Ignore keys from the trusted keys server
echo '# Ignore keys from the trusted keys server'
echo 'trusted_key_servers:'
echo '  - server_name: "matrix.org"'
echo '    accept_keys_insecurely: true'
echo ''

# Allow the servers to communicate over localhost.
allow_list=$(cat <<-ALLOW_LIST
@@ -1,20 +0,0 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = .
|
||||
BUILDDIR = _build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
@@ -1,50 +0,0 @@
|
||||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# For the full list of built-in configuration values, see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||
|
||||
project = "Synapse development"
|
||||
copyright = "2023, The Matrix.org Foundation C.I.C."
|
||||
author = "The Synapse Maintainers and Community"
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||
|
||||
extensions = [
|
||||
"autodoc2",
|
||||
"myst_parser",
|
||||
]
|
||||
|
||||
templates_path = ["_templates"]
|
||||
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
|
||||
|
||||
|
||||
# -- Options for Autodoc2 ----------------------------------------------------
|
||||
|
||||
autodoc2_docstring_parser_regexes = [
|
||||
# this will render all docstrings as 'MyST' Markdown
|
||||
(r".*", "myst"),
|
||||
]
|
||||
|
||||
autodoc2_packages = [
|
||||
{
|
||||
"path": "../synapse",
|
||||
# Don't render documentation for everything as a matter of course
|
||||
"auto_mode": False,
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
# -- Options for MyST (Markdown) ---------------------------------------------
|
||||
|
||||
# myst_heading_anchors = 2
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||
|
||||
html_theme = "furo"
|
||||
html_static_path = ["_static"]
|
||||
@@ -1,22 +0,0 @@
|
||||
.. Synapse Developer Documentation documentation master file, created by
|
||||
sphinx-quickstart on Mon Mar 13 08:59:51 2023.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to the Synapse Developer Documentation!
|
||||
===========================================================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contents:
|
||||
|
||||
modules/federation_sender
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
@@ -1,5 +0,0 @@
|
||||
Federation Sender
|
||||
=================
|
||||
|
||||
```{autodoc2-docstring} synapse.federation.sender
|
||||
```
|
||||
@@ -17,48 +17,39 @@
|
||||
|
||||
# Irritatingly, there is no blessed guide on how to distribute an application with its
|
||||
# poetry-managed environment in a docker image. We have opted for
|
||||
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
|
||||
# in `poetry export` in the past.
|
||||
# `poetry export | pip install -r /dev/stdin`, but there are known bugs in
|
||||
# in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
|
||||
# In case we get bitten by those bugs in the future, the recommendations here might
|
||||
# be useful:
|
||||
# https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
|
||||
# https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
|
||||
|
||||
ARG PYTHON_VERSION=3.11
|
||||
|
||||
|
||||
ARG PYTHON_VERSION=3.9
|
||||
|
||||
###
|
||||
### Stage 0: generate requirements.txt
|
||||
###
|
||||
# We hardcode the use of Debian bullseye here because this could change upstream
|
||||
# and other Dockerfiles used for testing are expecting bullseye.
|
||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as requirements
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
|
||||
|
||||
# RUN --mount is specific to buildkit and is documented at
|
||||
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||
# Here we use it to set up a cache for apt (and below for pip), to improve
|
||||
# rebuild speeds on slow connections.
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update -qq && apt-get install -yqq \
|
||||
build-essential curl git libffi-dev libssl-dev pkg-config \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install rust and ensure its in the PATH.
|
||||
# (Rust may be needed to compile `cryptography`---which is one of poetry's
|
||||
# dependencies---on platforms that don't have a `cryptography` wheel.
|
||||
ENV RUSTUP_HOME=/rust
|
||||
ENV CARGO_HOME=/cargo
|
||||
ENV PATH=/cargo/bin:/rust/bin:$PATH
|
||||
RUN mkdir /rust /cargo
|
||||
|
||||
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
||||
|
||||
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
||||
# set to true, so we expose it as a build-arg.
|
||||
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
||||
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update -qq && apt-get install -yqq \
|
||||
build-essential cargo git libffi-dev libssl-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
||||
# synapse's dependencies.
|
||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||
pip install --user "poetry==1.3.2"
|
||||
pip install --user "poetry==1.2.0"
|
||||
|
||||
WORKDIR /synapse
|
||||
|
||||
@@ -79,36 +70,34 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
|
||||
# Otherwise, just create an empty requirements file so that the Dockerfile can
|
||||
# proceed.
|
||||
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
||||
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
|
||||
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
|
||||
else \
|
||||
touch /synapse/requirements.txt; \
|
||||
touch /synapse/requirements.txt; \
|
||||
fi
|
||||
|
||||
###
|
||||
### Stage 1: builder
|
||||
###
|
||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as builder
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
|
||||
|
||||
# install the OS build deps
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update -qq && apt-get install -yqq \
|
||||
build-essential \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libpq-dev \
|
||||
libssl-dev \
|
||||
libwebp-dev \
|
||||
libxml++2.6-dev \
|
||||
libxslt1-dev \
|
||||
openssl \
|
||||
zlib1g-dev \
|
||||
git \
|
||||
curl \
|
||||
libicu-dev \
|
||||
pkg-config \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update -qq && apt-get install -yqq \
|
||||
build-essential \
|
||||
libffi-dev \
|
||||
libjpeg-dev \
|
||||
libpq-dev \
|
||||
libssl-dev \
|
||||
libwebp-dev \
|
||||
libxml++2.6-dev \
|
||||
libxslt1-dev \
|
||||
openssl \
|
||||
zlib1g-dev \
|
||||
git \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
|
||||
# Install rust and ensure its in the PATH
|
||||
@@ -149,16 +138,16 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
|
||||
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
|
||||
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
|
||||
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
||||
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
|
||||
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
|
||||
else \
|
||||
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
|
||||
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
|
||||
fi
|
||||
|
||||
###
|
||||
### Stage 2: runtime
|
||||
###
|
||||
|
||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
|
||||
|
||||
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
||||
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
|
||||
@@ -166,20 +155,19 @@ LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git
|
||||
LABEL org.opencontainers.image.licenses='Apache-2.0'
|
||||
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update -qq && apt-get install -yqq \
|
||||
curl \
|
||||
gosu \
|
||||
libjpeg62-turbo \
|
||||
libpq5 \
|
||||
libwebp6 \
|
||||
xmlsec1 \
|
||||
libjemalloc2 \
|
||||
libicu67 \
|
||||
libssl-dev \
|
||||
openssl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
curl \
|
||||
gosu \
|
||||
libjpeg62-turbo \
|
||||
libpq5 \
|
||||
libwebp6 \
|
||||
xmlsec1 \
|
||||
libjemalloc2 \
|
||||
libssl-dev \
|
||||
openssl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /install /usr/local
|
||||
COPY ./docker/start.py /start.py
|
||||
@@ -190,4 +178,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp
|
||||
ENTRYPOINT ["/start.py"]
|
||||
|
||||
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
|
||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
||||
|
||||
@@ -24,7 +24,7 @@ ARG distro=""
|
||||
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
|
||||
# it's not obviously easier to use that than to build our own.)
|
||||
|
||||
FROM docker.io/library/${distro} as builder
|
||||
FROM ${distro} as builder
|
||||
|
||||
RUN apt-get update -qq -o Acquire::Languages=none
|
||||
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
@@ -36,10 +36,8 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
wget
|
||||
|
||||
# fetch and unpack the package
|
||||
# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
|
||||
# Debian sid. TODO: Switch back to upstream once https://github.com/spotify/dh-virtualenv/pull/354 has merged.
|
||||
RUN mkdir /dh-virtualenv
|
||||
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/matrix-org/dh-virtualenv/archive/refs/tags/matrixorg-2023010302.tar.gz
|
||||
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
|
||||
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
|
||||
|
||||
# install its build deps. We do another apt-cache-update here, because we might
|
||||
@@ -55,7 +53,7 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
|
||||
###
|
||||
### Stage 1
|
||||
###
|
||||
FROM docker.io/library/${distro}
|
||||
FROM ${distro}
|
||||
|
||||
# Get the distro we want to pull from as a dynamic build variable
|
||||
# (We need to define it in each build stage)
|
||||
@@ -86,8 +84,6 @@ RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
python3-venv \
|
||||
sqlite3 \
|
||||
libpq-dev \
|
||||
libicu-dev \
|
||||
pkg-config \
|
||||
xmlsec1
|
||||
|
||||
# Install rust and ensure it's in the PATH
|
||||
|
||||
@@ -1,13 +1,12 @@
# syntax=docker/dockerfile:1

ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION

# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.

FROM docker.io/library/debian:bullseye-slim AS deps_base
FROM debian:bullseye-slim AS deps_base
RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -21,10 +20,10 @@ FROM docker.io/library/debian:bullseye-slim AS deps_base
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-bullseye AS redis_base
FROM redis:6-bullseye AS redis_base

# now build the final image, based on the the regular Synapse docker image
FROM $FROM
FROM matrixdotorg/synapse:$SYNAPSE_VERSION

# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
|
||||
will log sensitive information such as access tokens.
|
||||
This should not be needed unless you are a developer attempting to debug something
|
||||
particularly tricky.
|
||||
* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
|
||||
for testing.
|
||||
|
||||
|
||||
## Postgres
|
||||
|
||||
|
||||
@@ -7,10 +7,8 @@
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse

ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION

FROM $FROM
FROM matrixdotorg/synapse-workers:$SYNAPSE_VERSION
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.
@@ -20,8 +18,8 @@ FROM $FROM
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=docker.io/library/postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
@@ -6,7 +6,7 @@ set -e

echo "Complement Synapse launcher"
echo " Args: $@"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS"

function log {
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
@@ -51,7 +51,8 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
    # -z True if the length of string is zero.
    if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
        export SYNAPSE_WORKER_TYPES="\
            event_persister:2, \
            event_persister, \
            event_persister, \
            background_worker, \
            frontend_proxy, \
            event_creator, \
@@ -63,8 +64,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
            synchrotron, \
            client_reader, \
            appservice, \
            pusher, \
            stream_writers=account_data+presence+receipts+to_device+typing"
            pusher"

    fi
    log "Workers requested: $SYNAPSE_WORKER_TYPES"
@@ -76,17 +76,6 @@ else
fi


if [[ -n "$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR" ]]; then
    if [[ -n "$SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER" ]]; then
        export SYNAPSE_COMPLEMENT_FORKING_LAUNCHER_ASYNC_IO_REACTOR="1"
    else
        export SYNAPSE_ASYNC_IO_REACTOR="1"
    fi
else
    export SYNAPSE_ASYNC_IO_REACTOR="0"
fi


# Add Complement's appservice registration directory, if there is one
# (It can be absent when there are no application services in this test!)
if [ -d /complement/appservice ]; then
@@ -92,14 +92,14 @@ allow_device_name_lookup_over_federation: true
## Experimental Features ##

experimental_features:
  # Enable history backfilling support
  msc2716_enabled: true
  # server-side support for partial state in /send_join responses
  msc3706_enabled: true
  {% if not workers_in_use %}
  # client-side support for partial state in /send_join responses
  faster_joins: true
  # Enable support for polls
  msc3381_polls_enabled: true
  # Enable deleting device-specific notification settings stored in account data
  msc3890_enabled: true
  # Enable removing account data support
  msc3391_enabled: true
  {% endif %}
  # Filtering /messages by relation type.
  msc3874_enabled: true
@@ -6,6 +6,10 @@
worker_app: "{{ app }}"
worker_name: "{{ name }}"

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: {{ port }}
@@ -49,35 +49,17 @@ handlers:
class: logging.StreamHandler
formatter: precise

{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
loggers:
# This is just here so we can leave `loggers` in the config regardless of whether
# we configure other loggers below (avoid empty yaml dict error).
_placeholder:
level: "INFO"

{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
synapse.storage.SQL:
# beware: increasing this to DEBUG will make synapse log sensitive
# information such as access tokens.
level: INFO
{% endif %}

{% if SYNAPSE_LOG_TESTING %}
{#
If Synapse is under test, log a few more useful things for a developer
attempting to debug something particularly tricky.

With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
unexpectedly) filtered out of responses in tests. It's just nice to be able to
look at the CI log and figure out why an event isn't being returned.
#}
synapse.visibility.filtered_event_debug:
level: DEBUG
{% endif %}
{% endif %}

root:
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
@@ -19,15 +19,8 @@
# The environment variables it reads are:
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
# * SYNAPSE_REPORT_STATS: Whether to report stats.
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
#   below. Leave empty for no workers. Add a ':' and a number at the end to
#   multiply that worker. Append multiple worker types with '+' to merge the
#   worker types into a single worker. Add a name and a '=' to the front of a
#   worker type to give this instance a name in logs and nginx.
#   Examples:
#   SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
#   SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
#   SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
#   below. Leave empty for no workers.
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
#   will be treated as Application Service registration files.
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
@@ -40,8 +33,6 @@
#   log level. INFO is the default.
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
#   regardless of the SYNAPSE_LOG_LEVEL setting.
# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
#   for testing.
#
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
# in the project's README), this script may be run multiple times, and functionality should
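The `SYNAPSE_WORKER_TYPES` grammar sketched in the comments above (an optional `name=` prefix, worker types merged with `+`, and an optional `:count` multiplier) can be illustrated with a small standalone helper. This is not the script's own parser, which lives in `parse_worker_types` and `apply_requested_multiplier_for_worker` further down; the helper name and the exact precedence of the separators are assumptions made for the example.

```python
from typing import Optional, Set, Tuple


def describe_worker_request(entry: str) -> Tuple[Optional[str], Set[str], int]:
    """Decompose one SYNAPSE_WORKER_TYPES entry, e.g. 'event_persister:2'."""
    name: Optional[str] = None
    count = 1
    if "=" in entry:
        # Optional instance name used in logs and nginx.
        name, entry = (part.strip() for part in entry.split("=", 1))
    if ":" in entry:
        # Optional multiplier suffix.
        entry, count_str = (part.strip() for part in entry.rsplit(":", 1))
        count = int(count_str)
    # '+' merges several worker types into a single worker.
    types = {part.strip() for part in entry.split("+")}
    return name, types, count


print(describe_worker_request("event_persister:2"))
# (None, {'event_persister'}, 2)
print(describe_worker_request("stream_writers=account_data+presence+typing"))
# ('stream_writers', {'account_data', 'presence', 'typing'}, 1)
```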
@@ -49,35 +40,15 @@

import os
import platform
import re
import subprocess
import sys
from collections import defaultdict
from itertools import chain
from pathlib import Path
from typing import (
    Any,
    Dict,
    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
    Set,
    SupportsIndex,
)
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Set

import yaml
from jinja2 import Environment, FileSystemLoader

MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
MAIN_PROCESS_INSTANCE_NAME = "main"
MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
MAIN_PROCESS_REPLICATION_PORT = 9093

# A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
# during processing with the name of the worker.
WORKER_PLACEHOLDER_NAME = "placeholder_name"

# Workers with exposed endpoints needs either "client", "federation", or "media" listener_resources
# Watching /_matrix/client needs a "client" listener
@@ -99,13 +70,11 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
        ],
        "shared_extra_conf": {
            "update_user_directory_from_worker": WORKER_PLACEHOLDER_NAME
        },
        "shared_extra_conf": {"update_user_directory_from_worker": "user_dir1"},
        "worker_extra_conf": "",
    },
    "media_repository": {
        "app": "synapse.app.generic_worker",
        "app": "synapse.app.media_repository",
        "listener_resources": ["media"],
        "endpoint_patterns": [
            "^/_matrix/media/",
@@ -118,7 +87,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        # The first configured media worker will run the media background jobs
        "shared_extra_conf": {
            "enable_media_repo": False,
            "media_instance_running_background_jobs": WORKER_PLACEHOLDER_NAME,
            "media_instance_running_background_jobs": "media_repository1",
        },
        "worker_extra_conf": "enable_media_repo: true",
    },
@@ -126,9 +95,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
        "endpoint_patterns": [],
        "shared_extra_conf": {
            "notify_appservices_from_worker": WORKER_PLACEHOLDER_NAME
        },
        "shared_extra_conf": {"notify_appservices_from_worker": "appservice1"},
        "worker_extra_conf": "",
    },
    "federation_sender": {
@@ -168,7 +135,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "^/_matrix/client/versions$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
        "^/_matrix/client/(r0|v3|unstable)/register$",
        "^/_matrix/client/(r0|v3|unstable)/register/available$",
        "^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
@@ -176,10 +142,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
        "^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/search",
        "^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
        "^/_matrix/client/(r0|v3|unstable)/password_policy$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
        "^/_matrix/client/(r0|v3|unstable)/capabilities$",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -229,9 +191,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
        "endpoint_patterns": [],
        # This worker cannot be sharded. Therefore, there should only ever be one
        # background worker. This is enforced for the safety of your database.
        "shared_extra_conf": {"run_background_tasks_on": WORKER_PLACEHOLDER_NAME},
        # This worker cannot be sharded. Therefore there should only ever be one background
        # worker, and it should be named background_worker1
        "shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
        "worker_extra_conf": "",
    },
    "event_creator": {
@@ -242,8 +204,8 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
        "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
        "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
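For a concrete sense of how the `endpoint_patterns` above are used, each entry is a regular expression matched against a request path when generating the nginx routing rules. The snippet below is a standalone illustration, not part of the script itself.

```python
import re

# One of the endpoint_patterns listed above for the client_reader worker.
MESSAGES_PATTERN = r"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$"

path = "/_matrix/client/v3/rooms/!example:example.com/messages"
print(bool(re.match(MESSAGES_PATTERN, path)))  # True
```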
@@ -311,7 +273,7 @@ NGINX_LOCATION_CONFIG_BLOCK = """
|
||||
"""
|
||||
|
||||
NGINX_UPSTREAM_CONFIG_BLOCK = """
|
||||
upstream {upstream_worker_base_name} {{
|
||||
upstream {upstream_worker_type} {{
|
||||
{body}
|
||||
}}
|
||||
"""
|
||||
@@ -362,7 +324,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
|
||||
|
||||
def add_worker_roles_to_shared_config(
|
||||
shared_config: dict,
|
||||
worker_types_set: Set[str],
|
||||
worker_type: str,
|
||||
worker_name: str,
|
||||
worker_port: int,
|
||||
) -> None:
|
||||
@@ -370,36 +332,22 @@ def add_worker_roles_to_shared_config(
|
||||
append appropriate worker information to it for the current worker_type instance.
|
||||
|
||||
Args:
|
||||
shared_config: The config dict that all worker instances share (after being
|
||||
converted to YAML)
|
||||
worker_types_set: The type of worker (one of those defined in WORKERS_CONFIG).
|
||||
This list can be a single worker type or multiple.
|
||||
shared_config: The config dict that all worker instances share (after being converted to YAML)
|
||||
worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
|
||||
worker_name: The name of the worker instance.
|
||||
worker_port: The HTTP replication port that the worker instance is listening on.
|
||||
"""
|
||||
# The instance_map config field marks the workers that write to various replication
|
||||
# streams
|
||||
# The instance_map config field marks the workers that write to various replication streams
|
||||
instance_map = shared_config.setdefault("instance_map", {})
|
||||
|
||||
# This is a list of the stream_writers that there can be only one of. Events can be
|
||||
# sharded, and therefore doesn't belong here.
|
||||
singular_stream_writers = [
|
||||
"account_data",
|
||||
"presence",
|
||||
"receipts",
|
||||
"to_device",
|
||||
"typing",
|
||||
]
|
||||
|
||||
# Worker-type specific sharding config. Now a single worker can fulfill multiple
|
||||
# roles, check each.
|
||||
if "pusher" in worker_types_set:
|
||||
# Worker-type specific sharding config
|
||||
if worker_type == "pusher":
|
||||
shared_config.setdefault("pusher_instances", []).append(worker_name)
|
||||
|
||||
if "federation_sender" in worker_types_set:
|
||||
elif worker_type == "federation_sender":
|
||||
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
|
||||
|
||||
if "event_persister" in worker_types_set:
|
||||
elif worker_type == "event_persister":
|
||||
# Event persisters write to the events stream, so we need to update
|
||||
# the list of event stream writers
|
||||
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
|
||||
@@ -412,154 +360,19 @@ def add_worker_roles_to_shared_config(
|
||||
"port": worker_port,
|
||||
}
|
||||
|
||||
# Update the list of stream writers. It's convenient that the name of the worker
|
||||
# type is the same as the stream to write. Iterate over the whole list in case there
|
||||
# is more than one.
|
||||
for worker in worker_types_set:
|
||||
if worker in singular_stream_writers:
|
||||
shared_config.setdefault("stream_writers", {}).setdefault(
|
||||
worker, []
|
||||
).append(worker_name)
|
||||
elif worker_type in ["account_data", "presence", "receipts", "to_device", "typing"]:
|
||||
# Update the list of stream writers
|
||||
# It's convenient that the name of the worker type is the same as the stream to write
|
||||
shared_config.setdefault("stream_writers", {}).setdefault(
|
||||
worker_type, []
|
||||
).append(worker_name)
|
||||
|
||||
# Map of stream writer instance names to host/ports combos
|
||||
# For now, all stream writers need http replication ports
|
||||
instance_map[worker_name] = {
|
||||
"host": "localhost",
|
||||
"port": worker_port,
|
||||
}
|
||||
|
||||
|
||||
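To make the effect of `add_worker_roles_to_shared_config` concrete, the following standalone sketch reproduces roughly what the shared config contains after registering one event persister and one pusher. The dict shapes mirror the assignments above; the worker names and port number are invented for the example.

```python
shared_config: dict = {}
instance_map = shared_config.setdefault("instance_map", {})

# An event persister writes to the "events" stream and needs a replication port...
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
    "event_persister1"
)
instance_map["event_persister1"] = {"host": "localhost", "port": 18081}

# ...while a pusher only needs to be listed among the pusher instances.
shared_config.setdefault("pusher_instances", []).append("pusher1")

print(shared_config)
# {'instance_map': {'event_persister1': {'host': 'localhost', 'port': 18081}},
#  'stream_writers': {'events': ['event_persister1']},
#  'pusher_instances': ['pusher1']}
```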
def merge_worker_template_configs(
    existing_dict: Optional[Dict[str, Any]],
    to_be_merged_dict: Dict[str, Any],
) -> Dict[str, Any]:
    """When given an existing dict of worker template configuration consisting with both
    dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
    return new dict.

    Args:
        existing_dict: Either an existing worker template or a fresh blank one.
        to_be_merged_dict: The template from WORKERS_CONFIGS to be merged into
            existing_dict.
    Returns: The newly merged together dict values.
    """
    new_dict: Dict[str, Any] = {}
    if not existing_dict:
        # It doesn't exist yet, just use the new dict(but take a copy not a reference)
        new_dict = to_be_merged_dict.copy()
    else:
        for i in to_be_merged_dict.keys():
            if (i == "endpoint_patterns") or (i == "listener_resources"):
                # merge the two lists, remove duplicates
                new_dict[i] = list(set(existing_dict[i] + to_be_merged_dict[i]))
            elif i == "shared_extra_conf":
                # merge dictionary's, the worker name will be replaced later
                new_dict[i] = {**existing_dict[i], **to_be_merged_dict[i]}
            elif i == "worker_extra_conf":
                # There is only one worker type that has a 'worker_extra_conf' and it is
                # the media_repo. Since duplicate worker types on the same worker don't
                # work, this is fine.
                new_dict[i] = existing_dict[i] + to_be_merged_dict[i]
            else:
                # Everything else should be identical, like "app", which only works
                # because all apps are now generic_workers.
                new_dict[i] = to_be_merged_dict[i]
    return new_dict
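A minimal usage sketch for the merge above, assuming the `merge_worker_template_configs` definition is in scope; the two cut-down templates and their values are invented for illustration.

```python
existing = {
    "app": "synapse.app.generic_worker",
    "listener_resources": ["client"],
    "endpoint_patterns": ["^/_matrix/client/v3/sync$"],
    "shared_extra_conf": {},
    "worker_extra_conf": "",
}
incoming = {
    "app": "synapse.app.generic_worker",
    "listener_resources": ["client", "federation"],
    "endpoint_patterns": ["^/_matrix/client/versions$"],
    "shared_extra_conf": {"update_user_directory_from_worker": "placeholder_name"},
    "worker_extra_conf": "",
}

merged = merge_worker_template_configs(existing, incoming)
print(sorted(merged["listener_resources"]))  # ['client', 'federation']
print(sorted(merged["endpoint_patterns"]))
# ['^/_matrix/client/v3/sync$', '^/_matrix/client/versions$']
print(merged["shared_extra_conf"])
# {'update_user_directory_from_worker': 'placeholder_name'}
```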
def insert_worker_name_for_worker_config(
|
||||
existing_dict: Dict[str, Any], worker_name: str
|
||||
) -> Dict[str, Any]:
|
||||
"""Insert a given worker name into the worker's configuration dict.
|
||||
|
||||
Args:
|
||||
existing_dict: The worker_config dict that is imported into shared_config.
|
||||
worker_name: The name of the worker to insert.
|
||||
Returns: Copy of the dict with newly inserted worker name
|
||||
"""
|
||||
dict_to_edit = existing_dict.copy()
|
||||
for k, v in dict_to_edit["shared_extra_conf"].items():
|
||||
# Only proceed if it's the placeholder name string
|
||||
if v == WORKER_PLACEHOLDER_NAME:
|
||||
dict_to_edit["shared_extra_conf"][k] = worker_name
|
||||
return dict_to_edit


def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
    """
    Apply multiplier(if found) by returning a new expanded list with some basic error
    checking.

    Args:
        worker_types: The unprocessed List of requested workers
    Returns:
        A new list with all requested workers expanded.
    """
    # Checking performed:
    # 1. if worker:2 or more is declared, it will create additional workers up to number
    # 2. if worker:1, it will create a single copy of this worker as if no number was
    #    given
    # 3. if worker:0 is declared, this worker will be ignored. This is to allow for
    #    scripting and automated expansion and is intended behaviour.
    # 4. if worker:NaN or is a negative number, it will error and log it.
    new_worker_types = []
    for worker_type in worker_types:
        if ":" in worker_type:
            worker_type_components = split_and_strip_string(worker_type, ":", 1)
            worker_count = 0
            # Should only be 2 components, a type of worker(s) and an integer as a
            # string. Cast the number as an int then it can be used as a counter.
            try:
                worker_count = int(worker_type_components[1])
            except ValueError:
                error(
                    f"Bad number in worker count for '{worker_type}': "
                    f"'{worker_type_components[1]}' is not an integer"
                )

            # As long as there are more than 0, we add one to the list to make below.
            for _ in range(worker_count):
                new_worker_types.append(worker_type_components[0])

        else:
            # If it's not a real worker_type, it will error out later.
            new_worker_types.append(worker_type)
    return new_worker_types
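
For example, the expansion behaviour described in the comments above plays out like this (a sketch):

```python
requested = ["event_persister:2", "synchrotron", "background_worker:0"]

print(apply_requested_multiplier_for_worker(requested))
# ['event_persister', 'event_persister', 'synchrotron']
# 'background_worker:0' expands to nothing, which is the documented way to skip a worker.
```
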


def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
    """Helper to check to make sure worker types that cannot have multiples do not.

    Args:
        worker_type: The type of worker to check against.
    Returns: True if allowed, False if not
    """
    return worker_type not in [
        "background_worker",
        "account_data",
        "presence",
        "receipts",
        "typing",
        "to_device",
    ]


def split_and_strip_string(
    given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> List[str]:
    """
    Helper to split a string on split_char and strip whitespace from each end of each
    element.
    Args:
        given_string: The string to split
        split_char: The character to split the string on
        max_split: kwarg for split() to limit how many times the split() happens
    Returns:
        A List of strings
    """
    # Removes whitespace from ends of result strings before adding to list. Allow for
    # overriding 'maxsplit' kwarg, default being -1 to signify no maximum.
    return [x.strip() for x in given_string.split(split_char, maxsplit=max_split)]
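
Two quick usage sketches:

```python
print(split_and_strip_string(" federation_reader , synchrotron ", ","))
# ['federation_reader', 'synchrotron']

print(split_and_strip_string("event_persister:2", ":", 1))
# ['event_persister', '2']
```
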

def generate_base_homeserver_config() -> None:
@@ -574,157 +387,33 @@ def generate_base_homeserver_config() -> None:
    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)

def parse_worker_types(
    requested_worker_types: List[str],
) -> Dict[str, Set[str]]:
    """Read the desired list of requested workers and prepare the data for use in
    generating worker config files while also checking for potential gotchas.

    Args:
        requested_worker_types: The list formed from the split environment variable
            containing the unprocessed requests for workers.

    Returns: A dict of worker names to set of worker types. Format:
        {'worker_name':
            {'worker_type', 'worker_type2'}
        }
    """
    # A counter of worker_base_name -> int. Used for determining the name for a given
    # worker when generating its config file, as each worker's name is just
    # worker_base_name followed by instance number
    worker_base_name_counter: Dict[str, int] = defaultdict(int)

    # Similar to above, but more finely grained. This is used to determine we don't have
    # more than a single worker for cases where multiples would be bad(e.g. presence).
    worker_type_shard_counter: Dict[str, int] = defaultdict(int)

    # The final result of all this processing
    dict_to_return: Dict[str, Set[str]] = {}

    # Handle any multipliers requested for given workers.
    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
        requested_worker_types
    )

    # Process each worker_type_string
    # Examples of expected formats:
    # - requested_name=type1+type2+type3
    # - synchrotron
    # - event_creator+event_persister
    for worker_type_string in multiple_processed_worker_types:
        # First, if a name is requested, use that — otherwise generate one.
        worker_base_name: str = ""
        if "=" in worker_type_string:
            # Split on "=", remove extra whitespace from ends then make list
            worker_type_split = split_and_strip_string(worker_type_string, "=")
            if len(worker_type_split) > 2:
                error(
                    "There should only be one '=' in the worker type string. "
                    f"Please fix: {worker_type_string}"
                )

            # Assign the name
            worker_base_name = worker_type_split[0]

            if not re.match(r"^[a-zA-Z0-9_+-]*[a-zA-Z_+-]$", worker_base_name):
                # Apply a fairly narrow regex to the worker names. Some characters
                # aren't safe for use in file paths or nginx configurations.
                # Don't allow to end with a number because we'll add a number
                # ourselves in a moment.
                error(
                    "Invalid worker name; please choose a name consisting of "
                    "alphanumeric letters, _ + -, but not ending with a digit: "
                    f"{worker_base_name!r}"
                )

            # Continue processing the remainder of the worker_type string
            # with the name override removed.
            worker_type_string = worker_type_split[1]

        # Split the worker_type_string on "+", remove whitespace from ends then make
        # the list a set so it's deduplicated.
        worker_types_set: Set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )

        if not worker_base_name:
            # No base name specified: generate one deterministically from set of
            # types
            worker_base_name = "+".join(sorted(worker_types_set))

        # At this point, we have:
        # worker_base_name which is the name for the worker, without counter.
        # worker_types_set which is the set of worker types for this worker.

        # Validate worker_type and make sure we don't allow sharding for a worker type
        # that doesn't support it. Will error and stop if it is a problem,
        # e.g. 'background_worker'.
        for worker_type in worker_types_set:
            # Verify this is a real defined worker type. If it's not, stop everything so
            # it can be fixed.
            if worker_type not in WORKERS_CONFIG:
                error(
                    f"{worker_type} is an unknown worker type! Was found in "
                    f"'{worker_type_string}'. Please fix!"
                )

            if worker_type in worker_type_shard_counter:
                if not is_sharding_allowed_for_worker_type(worker_type):
                    error(
                        f"There can be only a single worker with {worker_type} "
                        "type. Please recount and remove."
                    )
            # Not in shard counter, must not have seen it yet, add it.
            worker_type_shard_counter[worker_type] += 1

        # Generate the number for the worker using incrementing counter
        worker_base_name_counter[worker_base_name] += 1
        worker_number = worker_base_name_counter[worker_base_name]
        worker_name = f"{worker_base_name}{worker_number}"

        if worker_number > 1:
            # If this isn't the first worker, check that we don't have a confusing
            # mixture of worker types with the same base name.
            first_worker_with_base_name = dict_to_return[f"{worker_base_name}1"]
            if first_worker_with_base_name != worker_types_set:
                error(
                    f"Can not use worker_name: '{worker_name}' for worker_type(s): "
                    f"{worker_types_set!r}. It is already in use by "
                    f"worker_type(s): {first_worker_with_base_name!r}"
                )

        dict_to_return[worker_name] = worker_types_set

    return dict_to_return
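
To make the return format concrete, a sketch of a typical call (worker type names as used elsewhere in this script):

```python
result = parse_worker_types(
    ["event_persister:2", "fed_senders=federation_sender", "synchrotron"]
)
# {
#     "event_persister1": {"event_persister"},
#     "event_persister2": {"event_persister"},
#     "fed_senders1": {"federation_sender"},
#     "synchrotron1": {"synchrotron"},
# }
```
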


def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
    requested_worker_types: Dict[str, Set[str]],
    environ: Mapping[str, str], config_path: str, data_dir: str
) -> None:
    """Read the desired workers(if any) that is passed in and generate shared
    homeserver, nginx and supervisord configs.
    """Read the desired list of workers from environment variables and generate
    shared homeserver, nginx and supervisord configs.

    Args:
        environ: os.environ instance.
        config_path: The location of the generated Synapse main worker config file.
        data_dir: The location of the synapse data directory. Where log and
            user-facing config files live.
        requested_worker_types: A Dict containing requested workers in the format of
            {'worker_name1': {'worker_type', ...}}
    """
    # Note that yaml cares about indentation, so care should be taken to insert lines
    # into files at the correct indentation below.

    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
    # shared_config is the contents of a Synapse config file that will be shared amongst
    # the main Synapse process as well as all workers.
    # It is intended mainly for disabling functionality when certain workers are spun up,
    # and adding a replication listener.

    # First read the original config file and extract the listeners block. Then we'll add
    # another listener for replication. Later we'll write out the result to the shared
    # config file.
    listeners = [
        {
            "port": MAIN_PROCESS_REPLICATION_PORT,
            "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
            "port": 9093,
            "bind_address": "127.0.0.1",
            "type": "http",
            "resources": [{"names": ["replication"]}],
        }
@@ -736,9 +425,9 @@ def generate_worker_files(
    listeners += original_listeners

    # The shared homeserver config. The contents of which will be inserted into the
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, included Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
    # base shared worker jinja2 template.
    #
    # This config file will be passed to all workers, included Synapse's main process.
    shared_config: Dict[str, Any] = {"listeners": listeners}

    # List of dicts that describe workers.
@@ -746,20 +435,31 @@ def generate_worker_files(
    # program blocks.
    worker_descriptors: List[Dict[str, Any]] = []

    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
    # Upstreams for load-balancing purposes. This dict takes the form of a worker type to the
    # ports of each worker. For example:
    # {
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams: Dict[str, Set[int]] = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
    nginx_locations: Dict[str, str] = {}
    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
    # placed after the proxy_pass directive. The main benefit to representing this data as a
    # dict over a str is that we can easily deduplicate endpoints across multiple instances
    # of the same worker.
    #
    # An nginx site config that will be amended to depending on the workers that are
    # spun up. To be placed in /etc/nginx/conf.d.
    nginx_locations = {}

    # Read the desired worker configuration from the environment
    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = [x.strip() for x in worker_types_env.split(",")]

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -767,57 +467,66 @@ def generate_worker_files(
    # Start worker ports from this arbitrary port
    worker_port = 18009

    # A counter of worker_type -> int. Used for determining the name for a given
    # worker type when generating its config file, as each worker's name is just
    # worker_type + instance #
    worker_type_counter: Dict[str, int] = {}

    # A list of internal endpoints to healthcheck, starting with the main process
    # which exists even if no workers do.
    healthcheck_urls = ["http://localhost:8080/health"]

    # Get the set of all worker types that we have configured
    all_worker_types_in_use = set(chain(*requested_worker_types.values()))
    # Map locations to upstreams (corresponding to worker types) in Nginx
    # but only if we use the appropriate worker type
    for worker_type in all_worker_types_in_use:
        for endpoint_pattern in WORKERS_CONFIG[worker_type]["endpoint_patterns"]:
            nginx_locations[endpoint_pattern] = f"http://{worker_type}"
    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
        worker_config = WORKERS_CONFIG.get(worker_type)
        if worker_config:
            worker_config = worker_config.copy()
        else:
            error(worker_type + " is an unknown worker type! Please fix!")

    # For each worker type specified by the user, create config values and write it's
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
        worker_config: Dict[str, Any] = {}

        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
            copy_of_template_config = WORKERS_CONFIG[worker_type].copy()

            # Merge worker type template configuration data. It's a combination of lists
            # and dicts, so use this helper.
            worker_config = merge_worker_template_configs(
                worker_config, copy_of_template_config
            )

        # Replace placeholder names in the config template with the actual worker name.
        worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)
        new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
        worker_type_counter[worker_type] = new_worker_count

        # Name workers by their type concatenated with an incrementing number
        # e.g. federation_reader1
        worker_name = worker_type + str(new_worker_count)
        worker_config.update(
            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
        )

        # Update the shared config with any worker_type specific options. The first of a
        # given worker_type needs to stay assigned and not be replaced.
        worker_config["shared_extra_conf"].update(shared_config)
        shared_config = worker_config["shared_extra_conf"]
        # Update the shared config with any worker-type specific options
        shared_config.update(worker_config["shared_extra_conf"])

        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

        # Check if more than one instance of this worker type has been specified
        worker_type_total_count = worker_types.count(worker_type)

        # Update the shared config with sharding-related options if necessary
        add_worker_roles_to_shared_config(
            shared_config, worker_types_set, worker_name, worker_port
            shared_config, worker_type, worker_name, worker_port
        )

        # Enable the worker in supervisord
        worker_descriptors.append(worker_config)

        # Add nginx location blocks for this worker's endpoints (if any are defined)
        for pattern in worker_config["endpoint_patterns"]:
            # Determine whether we need to load-balance this worker
            if worker_type_total_count > 1:
                # Create or add to a load-balanced upstream for this worker
                nginx_upstreams.setdefault(worker_type, set()).add(worker_port)

                # Upstreams are named after the worker_type
                upstream = "http://" + worker_type
            else:
                upstream = "http://localhost:%d" % (worker_port,)

            # Note that this endpoint should proxy to this upstream
            nginx_locations[pattern] = upstream

        # Write out the worker's logging config file

        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

        # Then a worker config file
@@ -828,10 +537,6 @@ def generate_worker_files(
            worker_log_config_filepath=log_config_filepath,
        )

        # Save this worker's port number to the correct nginx upstreams
        for worker_type in worker_types_set:
            nginx_upstreams.setdefault(worker_type, set()).add(worker_port)

        worker_port += 1

    # Build the nginx location config blocks
@@ -844,14 +549,15 @@ def generate_worker_files(

    # Determine the load-balancing upstreams to configure
    nginx_upstream_config = ""
    for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():

    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
        body = ""
        for port in upstream_worker_ports:
            body += f" server localhost:{port};\n"
            body += " server localhost:%d;\n" % (port,)

        # Add to the list of configured upstreams
        nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
            upstream_worker_base_name=upstream_worker_base_name,
            upstream_worker_type=upstream_worker_type,
            body=body,
        )

|
||||
if reg_path.suffix.lower() in (".yaml", ".yml")
|
||||
]
|
||||
|
||||
workers_in_use = len(requested_worker_types) > 0
|
||||
|
||||
# If there are workers, add the main process to the instance_map too.
|
||||
if workers_in_use:
|
||||
instance_map = shared_config.setdefault("instance_map", {})
|
||||
instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
|
||||
"host": MAIN_PROCESS_LOCALHOST_ADDRESS,
|
||||
"port": MAIN_PROCESS_REPLICATION_PORT,
|
||||
}
|
||||
workers_in_use = len(worker_types) > 0
|
||||
|
||||
# Shared homeserver config
|
||||
convert(
|
||||
@@ -948,7 +646,6 @@ def generate_worker_log_config(
|
||||
extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
|
||||
"SYNAPSE_LOG_SENSITIVE"
|
||||
)
|
||||
extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
|
||||
|
||||
# Render and write the file
|
||||
log_config_filepath = f"/conf/workers/{worker_name}.log.config"
|
||||
@@ -977,34 +674,17 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
||||
if not os.path.exists(config_path):
|
||||
log("Generating base homeserver config")
|
||||
generate_base_homeserver_config()
|
||||
else:
|
||||
log("Base homeserver config exists—not regenerating")
|
||||
# This script may be run multiple times (mostly by Complement, see note at top of
|
||||
# file). Don't re-configure workers in this instance.
|
||||
|
||||
# This script may be run multiple times (mostly by Complement, see note at top of file).
|
||||
# Don't re-configure workers in this instance.
|
||||
mark_filepath = "/conf/workers_have_been_configured"
|
||||
if not os.path.exists(mark_filepath):
|
||||
# Collect and validate worker_type requests
|
||||
# Read the desired worker configuration from the environment
|
||||
worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
|
||||
# Only process worker_types if they exist
|
||||
if not worker_types_env:
|
||||
# No workers, just the main process
|
||||
worker_types = []
|
||||
requested_worker_types: Dict[str, Any] = {}
|
||||
else:
|
||||
# Split type names by comma, ignoring whitespace.
|
||||
worker_types = split_and_strip_string(worker_types_env, ",")
|
||||
requested_worker_types = parse_worker_types(worker_types)
|
||||
|
||||
# Always regenerate all other config files
|
||||
log("Generating worker config files")
|
||||
generate_worker_files(environ, config_path, data_dir, requested_worker_types)
|
||||
generate_worker_files(environ, config_path, data_dir)
|
||||
|
||||
# Mark workers as being configured
|
||||
with open(mark_filepath, "w") as f:
|
||||
f.write("")
|
||||
else:
|
||||
log("Worker config exists—not regenerating")
|
||||
|
||||
# Lifted right out of start.py
|
||||
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
|
||||
|
||||
@@ -1,75 +0,0 @@
# syntax=docker/dockerfile:1
# This dockerfile builds an editable install of Synapse.
#
# Used by `complement.sh`. Not suitable for production use.

ARG PYTHON_VERSION=3.9

###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye

# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && apt-get install -yqq \
    build-essential \
    libffi-dev \
    libjpeg-dev \
    libpq-dev \
    libssl-dev \
    libwebp-dev \
    libxml++2.6-dev \
    libxslt1-dev \
    openssl \
    zlib1g-dev \
    git \
    curl \
    gosu \
    libjpeg62-turbo \
    libpq5 \
    libwebp6 \
    xmlsec1 \
    libjemalloc2 \
  && rm -rf /var/lib/apt/lists/*
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal


# Make a base copy of the editable source tree, so that we have something to
# install and build now — even though it's going to be covered up by a mount
# at runtime.
COPY synapse /editable-src/synapse/
COPY rust /editable-src/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml poetry.lock README.rst build_rust.py Cargo.toml Cargo.lock /editable-src/

RUN pip install poetry
RUN poetry config virtualenvs.create false
RUN cd /editable-src && poetry install --extras all

# Make copies of useful things for inspection:
# - the Rust module (must be copied to the editable source tree before startup)
# - poetry.lock is useful for checking if dependencies have changed.
RUN cp /editable-src/synapse/synapse_rust.abi3.so /synapse_rust.abi3.so.bak
RUN cp /editable-src/poetry.lock /poetry.lock.bak


### Extra setup from original Dockerfile
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

EXPOSE 8008/tcp 8009/tcp 8448/tcp

ENTRYPOINT ["/start.py"]

HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD curl -fSs http://localhost:8008/health || exit 1
@@ -57,7 +57,6 @@
- [Account Validity](admin_api/account_validity.md)
- [Background Updates](usage/administration/admin_api/background_updates.md)
- [Event Reports](admin_api/event_reports.md)
- [Experimental Features](admin_api/experimental_features.md)
- [Media](admin_api/media_admin_api.md)
- [Purge History](admin_api/purge_history_api.md)
- [Register Users](admin_api/register_api.md)
@@ -98,7 +97,6 @@
- [Log Contexts](log_contexts.md)
- [Replication](replication.md)
- [TCP Replication](tcp_replication.md)
- [Faster remote joins](development/synapse_architecture/faster_joins.md)
- [Internal Documentation](development/internal_documentation/README.md)
- [Single Sign-On]()
  - [SAML](development/saml.md)

@@ -5,7 +5,7 @@ use it, you must enable the account validity feature (under
`account_validity`) in Synapse's configuration.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
for a server admin: see [Admin API](../usage/administration/admin_api).

## Renew account


@@ -3,7 +3,7 @@
This API returns information about reported events.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
for a server admin: see [Admin API](../usage/administration/admin_api).

The api is:
```
@@ -169,17 +169,3 @@ The following fields are returned in the JSON response body:
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
  have a canonical alias set.
* `event_json`: object - Details of the original event that was reported.

# Delete a specific event report

This API deletes a specific event report. If the request is successful, the response body
will be an empty JSON object.

The api is:
```
DELETE /_synapse/admin/v1/event_reports/<report_id>
```

**URL parameters:**

* `report_id`: string - The ID of the event report.
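
A minimal sketch of calling this endpoint (hypothetical homeserver URL, report ID and token; the `requests` library is used purely for illustration):

```python
import requests

# Delete event report 42 on an example homeserver, authenticating as a server admin.
resp = requests.delete(
    "https://synapse.example.com/_synapse/admin/v1/event_reports/42",
    headers={"Authorization": "Bearer <admin_access_token>"},
)
resp.raise_for_status()
print(resp.json())  # {} on success
```
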

@@ -1,55 +0,0 @@
# Experimental Features API

This API allows a server administrator to enable or disable some experimental features on a per-user
basis. The currently supported features are:
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
UIA when first uploading cross-signing keys.


To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

## Enabling/Disabling Features

This API allows a server administrator to enable experimental features for a given user. The request must
provide a body containing the user id and listing the features to enable/disable in the following format:
```json
{
   "features": {
      "msc3026":true,
      "msc3881":true
   }
}
```
where true is used to enable the feature, and false is used to disable the feature.


The API is:

```
PUT /_synapse/admin/v1/experimental_features/<user_id>
```

## Listing Enabled Features

To list which features are enabled/disabled for a given user send a request to the following API:

```
GET /_synapse/admin/v1/experimental_features/<user_id>
```

It will return a list of possible features and indicate whether they are enabled or disabled for the
user like so:
```json
{
   "features": {
      "msc3026": true,
      "msc3881": false,
      "msc3967": false
   }
}
```
@@ -6,7 +6,7 @@ Details about the format of the `media_id` and storage of the media in the file
are documented under [media repository](../media_repository.md).

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
for a server admin: see [Admin API](../usage/administration/admin_api).

## List all media in a room

@@ -235,14 +235,6 @@ The following fields are returned in the JSON response body:

Request:

```
POST /_synapse/admin/v1/media/delete?before_ts=<before_ts>

{}
```

*Deprecated in Synapse v1.78.0:* This API is available at the deprecated endpoint:

```
POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>

@@ -251,7 +243,7 @@ POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>

URL Parameters

* `server_name`: string - The name of your local server (e.g `matrix.org`). *Deprecated in Synapse v1.78.0.*
* `server_name`: string - The name of your local server (e.g `matrix.org`).
* `before_ts`: string representing a positive integer - Unix timestamp in milliseconds.
  Files that were last used before this timestamp will be deleted. It is the timestamp of
  last access, not the timestamp when the file was created.

@@ -11,7 +11,7 @@ Note that Synapse requires at least one message in each room, so it will never
delete the last message in a room.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
for a server admin: see [Admin API](../usage/administration/admin_api).

The API is:


@@ -6,7 +6,7 @@ local users. The server administrator must be in the room and have permission to
invite users.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
for a server admin: see [Admin API](../usage/administration/admin_api).

## Parameters
