
Compare commits


1 Commit

Author: Action Bot | SHA1: 0529c9fd9a | Message: Version picker added for v1.83 docs | Date: 2023-12-11 14:51:47 +00:00

676 changed files with 19,822 additions and 42,204 deletions


@@ -29,12 +29,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For PRs, we only run each type of test with the oldest Python version supported (which
# is Python 3.8 right now)
# For each type of test we only run on Py3.7 on PRs
trial_sqlite_tests = [
{
"python-version": "3.8",
"python-version": "3.7",
"database": "sqlite",
"extras": "all",
}
@@ -47,12 +46,13 @@ if not IS_PR:
"database": "sqlite",
"extras": "all",
}
for version in ("3.9", "3.10", "3.11", "3.12")
for version in ("3.8", "3.9", "3.10", "3.11")
)
trial_postgres_tests = [
{
"python-version": "3.8",
"python-version": "3.7",
"database": "postgres",
"postgres-version": "11",
"extras": "all",
@@ -62,16 +62,16 @@ trial_postgres_tests = [
if not IS_PR:
trial_postgres_tests.append(
{
"python-version": "3.12",
"python-version": "3.11",
"database": "postgres",
"postgres-version": "16",
"postgres-version": "15",
"extras": "all",
}
)
trial_no_extra_tests = [
{
"python-version": "3.8",
"python-version": "3.7",
"database": "sqlite",
"extras": "",
}
@@ -133,6 +133,11 @@ if not IS_PR:
"sytest-tag": "testing",
"postgres": "postgres",
},
{
"sytest-tag": "buster",
"postgres": "multi-postgres",
"workers": "workers",
},
]
)
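
A matrix such as trial_sqlite_tests is only useful to the workflow if the script publishes it as a step output. A minimal Python sketch of that hand-off, assuming the script runs under GitHub Actions (so GITHUB_OUTPUT is set) and that trial_test_matrix is the output name consumed by tests.yml further down:

    import json
    import os

    # Sketch: serialise the PR-only job matrix shown above and expose it as a
    # step output by appending a KEY=value line to the GITHUB_OUTPUT file.
    trial_sqlite_tests = [
        {"python-version": "3.7", "database": "sqlite", "extras": "all"},
    ]
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"trial_test_matrix={json.dumps(trial_sqlite_tests)}\n")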


@@ -31,6 +31,35 @@ sed -i \
-e '/systemd/d' \
pyproject.toml
# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far as the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies and the dev-docs
# group from the toml file. This means we don't have to ensure compatibility between
# old deps and dev tools.
pip install toml wheel
REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
data = toml.loads(f.read())
del data['tool']['poetry']['dev-dependencies']
del data['tool']['poetry']['group']['dev-docs']
with open('pyproject.toml', 'w') as f:
toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"
pip install poetry==1.3.2
poetry lock
echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"


@@ -8,21 +8,21 @@
# If ignoring a pull request that was not squash merged, only the merge
# commit needs to be put here. Child commits will be resolved from it.
# Run black (https://github.com/matrix-org/synapse/pull/3679).
# Run black (#3679).
8b3d9b6b199abb87246f982d5db356f1966db925
# Black reformatting (https://github.com/matrix-org/synapse/pull/5482).
# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
# Target Python 3.5 with black (https://github.com/matrix-org/synapse/pull/8664).
# Target Python 3.5 with black (#8664).
aff1eb7c671b0a3813407321d2702ec46c71fa56
# Update black to 20.8b1 (https://github.com/matrix-org/synapse/pull/9381).
# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1
# Convert tests/rest/admin/test_room.py to unix file endings (https://github.com/matrix-org/synapse/pull/7953).
# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
# Update black to 23.1.0 (#15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11
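
(For these hashes to take effect in local blame output, git has to be pointed at the file, e.g. `git config blame.ignoreRevsFile .git-blame-ignore-revs`; GitHub's blame view picks the file up automatically.)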


@@ -0,0 +1,49 @@
name: Write changelog for dependabot PR
on:
pull_request:
types:
- opened
- reopened # For debugging!
permissions:
# Needed to be able to push the commit. See
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
# for a similar example
contents: write
jobs:
add-changelog:
runs-on: 'ubuntu-latest'
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.ref }}
- name: Write, commit and push changelog
env:
PR_TITLE: ${{ github.event.pull_request.title }}
PR_NUMBER: ${{ github.event.pull_request.number }}
run: |
echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
git add changelog.d
git config user.email "github-actions[bot]@users.noreply.github.com"
git config user.name "GitHub Actions"
git commit -m "Changelog"
git push
shell: bash
# The `git push` above does not trigger CI on the dependabot PR.
#
# By default, workflows can't trigger other workflows when they're just using the
# default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
# recursive workflow loops by accident, because that'll get very expensive very
# quickly.) Instead, you have to manually call out to another workflow, or else
# make your changes (i.e. the `git push` above) using a personal access token.
# See
# https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
#
# I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
# See git commit history for previous attempts. If anyone desperately wants to try
# again in the future, make a matrix-bot account and use its access token to git push.
# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
# are sufficiently locked down to dependabot only as above.
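
The changelog step itself is tiny; restated as a Python sketch (same environment variables as the workflow step above) to make the file-naming convention explicit:

    import os
    from pathlib import Path

    # Same effect as `echo "${PR_TITLE}." > changelog.d/${PR_NUMBER}.misc`:
    # one `.misc` changelog entry named after the PR number.
    pr_title = os.environ["PR_TITLE"]
    pr_number = os.environ["PR_NUMBER"]
    Path(f"changelog.d/{pr_number}.misc").write_text(f"{pr_title}.\n")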


@@ -18,35 +18,25 @@ jobs:
steps:
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
with:
platforms: arm64
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: Inspect builder
run: docker buildx inspect
- name: Checkout repository
uses: actions/checkout@v4
- name: Extract version from pyproject.toml
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
shell: bash
run: |
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
- name: Log in to DockerHub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to GHCR
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -68,12 +58,10 @@ jobs:
type=pep440,pattern={{raw}}
- name: Build and push all platforms
uses: docker/build-push-action@v5
uses: docker/build-push-action@v4
with:
push: true
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
labels: "gitsha1=${{ github.sha }}"
tags: "${{ steps.set-tag.outputs.tags }}"
file: "docker/Dockerfile"
platforms: linux/amd64,linux/arm64
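
The grep/sed pipeline in the version-extraction step is dense; here is the same computation re-expressed as a Python sketch, printing the KEY=value line the step appends to $GITHUB_ENV:

    import re

    # Equivalent of:
    #   grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/'
    with open("pyproject.toml") as f:
        match = re.search(r'^version\s*=\s*"([^"]*)"', f.read(), re.MULTILINE)
    if match:
        print(f"SYNAPSE_VERSION={match.group(1)}")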


@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@268677152d06ba59fcec7a7f0b5d961b6ccd7e1e # v2.28.0
uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
path: book
- name: 📤 Deploy to Netlify
uses: matrix-org/netlify-pr-preview@v2
uses: matrix-org/netlify-pr-preview@v1
with:
path: book
owner: ${{ github.event.workflow_run.head_repository.owner.login }}


@@ -12,7 +12,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -39,7 +39,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0


@@ -50,7 +50,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -80,7 +80,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: "Set up Sphinx"
uses: matrix-org/setup-python-poetry@v1


@@ -22,24 +22,10 @@ concurrency:
cancel-in-progress: true
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
# only useful to the Synapse core team.
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
# of the workflow will be skipped as well.
runs-on: ubuntu-latest
outputs:
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
steps:
- id: check_condition
run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
mypy:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
@@ -57,12 +43,10 @@ jobs:
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove unhelpful options from mypy config
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
trial:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
strategy:
matrix:
@@ -72,7 +56,7 @@ jobs:
postgres-version: "14"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@@ -121,8 +105,6 @@ jobs:
sytest:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:testing
@@ -145,7 +127,7 @@ jobs:
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@@ -174,8 +156,7 @@ jobs:
complement:
needs: check_repo
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
if: "${{ !failure() && !cancelled() }}"
runs-on: ubuntu-latest
strategy:
@@ -192,19 +173,16 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
with:
path: synapse
- uses: actions/setup-go@v4
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@v4
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
- run: |
set -o pipefail
TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
@@ -214,7 +192,7 @@ jobs:
# Open an issue if the build fails, so we know about it.
# Only do this if we're not experimenting with this action in a PR.
open-issue:
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
needs:
# TODO: should mypy be included here? It feels more brittle than the others.
- mypy
@@ -225,7 +203,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
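
The check_repo job shown above gates the rest of the workflow on a string-valued output. A sketch of that gate outside workflow syntax, using the standard Actions environment variables GITHUB_REPOSITORY and GITHUB_OUTPUT:

    import os

    # ${{ github.repository == 'matrix-org/synapse' }} renders to the *string*
    # "true" or "false"; downstream jobs then compare it against 'true'.
    should_run = str(os.environ.get("GITHUB_REPOSITORY") == "matrix-org/synapse").lower()
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"should_run_workflow={should_run}\n")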


@@ -16,7 +16,7 @@ jobs:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.x'
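
The check script itself isn't part of this diff; hypothetically, an sdist check over poetry.lock can look like the following (assuming the modern lockfile layout where each [[package]] carries a files table):

    import toml

    lock = toml.load("poetry.lock")
    for package in lock["package"]:
        files = package.get("files", [])
        # Wheels end in .whl; the sdist is the .tar.gz upload.
        assert any(f["file"].endswith(".tar.gz") for f in files), (
            f"{package['name']} has no sdist"
        )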


@@ -33,29 +33,29 @@ jobs:
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@v4
uses: actions/checkout@v3
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@v4
uses: actions/checkout@v3
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@v4
uses: actions/checkout@v3
if: github.event_name == 'push'
with:
ref: master
- name: Login to registry
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Work out labels for complement image
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: ghcr.io/${{ github.repository }}/complement-synapse
tags: |
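
The three mutually exclusive checkout steps above form a small dispatch on the trigger type; hypothetically, in Python (function and parameter names invented for illustration):

    def ref_for(event_name: str, input_branch: str) -> str:
        # Mirrors the `if:` guards on the three checkout steps above.
        if event_name == "workflow_dispatch":
            return input_branch  # debug build of a caller-chosen branch
        if event_name == "schedule":
            return "develop"     # nightly build
        return "master"          # push-triggered build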


@@ -27,14 +27,13 @@ jobs:
name: "Calculate list of debian distros"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- id: set-distros
run: |
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
# NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
dists='["debian:sid"]'
if [[ $GITHUB_REF == refs/tags/* ]]; then
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
@@ -55,13 +54,13 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
path: src
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
with:
install: true
@@ -121,7 +120,7 @@ jobs:
arch: aarch64
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
@@ -134,7 +133,7 @@ jobs:
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
with:
platforms: arm64
@@ -144,7 +143,7 @@ jobs:
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
@@ -167,7 +166,7 @@ jobs:
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
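
The set-distros step earlier in this file is tag-gated; a Python sketch of the same decision, assuming scripts-dev/build_debian_packages.py prints a JSON list as the comment suggests:

    import json
    import subprocess

    def distros_for(ref: str) -> str:
        # Full distro list only when building from a tag; debian:sid otherwise.
        if ref.startswith("refs/tags/"):
            return subprocess.check_output(
                ["scripts-dev/build_debian_packages.py", "--show-dists-json"],
                text=True,
            ).strip()
        return json.dumps(["debian:sid"])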


@@ -12,19 +12,12 @@ concurrency:
cancel-in-progress: true
jobs:
check-signoff:
if: "github.event_name == 'pull_request'"
uses: "matrix-org/backend-meta/.github/workflows/sign-off.yml@v2"
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
# don't modify Rust code.
changes:
runs-on: ubuntu-latest
outputs:
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
steps:
- uses: dorny/paths-filter@v2
id: filter
@@ -36,56 +29,13 @@ jobs:
- 'rust/**'
- 'Cargo.toml'
- 'Cargo.lock'
- '.rustfmt.toml'
- '.github/workflows/tests.yml'
trial:
- 'synapse/**'
- 'tests/**'
- 'rust/**'
- '.ci/scripts/calculate_jobs.py'
- 'Cargo.toml'
- 'Cargo.lock'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/tests.yml'
integration:
- 'synapse/**'
- 'rust/**'
- 'docker/**'
- 'Cargo.toml'
- 'Cargo.lock'
- 'pyproject.toml'
- 'poetry.lock'
- 'docker/**'
- '.ci/**'
- 'scripts-dev/complement.sh'
- '.github/workflows/tests.yml'
linting:
- 'synapse/**'
- 'docker/**'
- 'tests/**'
- 'scripts-dev/**'
- 'contrib/**'
- 'synmark/**'
- 'stubs/**'
- '.ci/**'
- 'mypy.ini'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/tests.yml'
check-sampleconfig:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
@@ -97,11 +47,8 @@ jobs:
check-schema-delta:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@@ -111,7 +58,7 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@@ -119,12 +66,9 @@ jobs:
lint:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@v1
@@ -144,16 +88,9 @@ jobs:
lint-mypy:
runs-on: ubuntu-latest
name: Typechecking
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
- uses: Swatinem/rust-cache@v2
uses: actions/checkout@v3
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@v1
@@ -166,15 +103,18 @@ jobs:
# To make CI green, err towards caution and install the project.
install-project: "true"
# Cribbed from
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
- name: Install Rust
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
# NB: I have two concerns with this action:
# 1. We occasionally see odd mypy problems that aren't reproducible
# locally with clean caches. I suspect some dodgy caching behaviour.
# 2. The action uses GHA machinery that's deprecated
# (https://github.com/AustinScola/mypy-cache-github-action/issues/277)
# It may be simpler to use actions/cache ourselves to restore .mypy_cache.
- name: Restore/persist mypy's cache
uses: actions/cache@v3
with:
path: |
.mypy_cache
key: mypy-cache-${{ github.context.sha }}
restore-keys: mypy-cache-
uses: AustinScola/mypy-cache-github-action@df56268388422ee282636ee2c7a9cc55ec644a41
- name: Run mypy
run: poetry run mypy
@@ -182,7 +122,7 @@ jobs:
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
@@ -190,7 +130,7 @@ jobs:
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
@@ -204,15 +144,12 @@ jobs:
lint-pydantic:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
@@ -226,10 +163,10 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
with:
components: clippy
- uses: Swatinem/rust-cache@v2
@@ -244,7 +181,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -261,7 +198,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -296,7 +233,7 @@ jobs:
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.x"
@@ -307,17 +244,15 @@ jobs:
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
trial:
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
needs:
- calculate-test-jobs
- changes
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: calculate-test-jobs
runs-on: ubuntu-latest
strategy:
matrix:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
@@ -332,7 +267,7 @@ jobs:
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
@@ -365,48 +300,54 @@ jobs:
trial-olddeps:
# Note: sqlite only; no postgres
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
needs:
- linting-done
- changes
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
- run: |
sudo apt-get -qq update
sudo apt-get -qq install build-essential libffi-dev python-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@v4
with:
python-version: '3.8'
python-version: '3.7'
# Calculating the old-deps actually takes a bunch of time, so we cache the
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
# otherwise the `poetry install` step will error due to the poetry.lock
# file being outdated.
#
# This caches the output of `Prepare old deps`, which should generate the
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
- uses: actions/cache@v3
id: cache-poetry-old-deps
name: Cache poetry.lock
with:
path: |
poetry.lock
pyproject.toml
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
- name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
run: .ci/scripts/prepare_old_deps.sh
# Note: we install using `pip` here, not poetry. `poetry install` ignores the
# build-system section (https://github.com/python-poetry/poetry/issues/6154), but
# we explicitly want to test that you can `pip install` using the oldest version
# of poetry-core and setuptools-rust.
- run: pip install .[all,test]
# We only now install poetry so that `setup-python-poetry` caches the
# right poetry.lock's dependencies.
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: '3.7'
poetry-version: "1.3.2"
extras: "all test"
# We nuke the local copy, as we've installed synapse into the virtualenv
# (rather than use an editable install, which we no longer support). If we
# don't do this then python can't find the native lib.
- run: rm -rf synapse/
# Sanity check we can import/run Synapse
- run: python -m synapse.app.homeserver --help
- run: python -m twisted.trial -j6 tests
- run: poetry run trial -j6 tests
- name: Dump logs
# Logs are most useful when the command fails, always include them.
if: ${{ always() }}
@@ -423,18 +364,16 @@ jobs:
trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
# Note: sqlite only; no postgres. Completely untested since poetry move.
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
needs:
- linting-done
- changes
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.8"]
python-version: ["pypy-3.7"]
extras: ["all"]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@v1
@@ -457,10 +396,8 @@ jobs:
|| true
sytest:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
needs:
- calculate-test-jobs
- changes
if: ${{ !failure() && !cancelled() }}
needs: calculate-test-jobs
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
@@ -469,8 +406,8 @@ jobs:
env:
SYTEST_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.job.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
WORKERS: ${{ matrix.job.workers && 1 }}
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
TOP: ${{ github.workspace }}
@@ -481,12 +418,12 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- name: Run SyTest
@@ -505,8 +442,8 @@ jobs:
/logs/**/*.log*
export-data:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb, changes]
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: [linting-done, portdb]
runs-on: ubuntu-latest
env:
TOP: ${{ github.workspace }}
@@ -526,7 +463,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@v1
with:
@@ -541,15 +478,13 @@ jobs:
portdb:
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
needs:
- linting-done
- changes
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
include:
- python-version: "3.8"
- python-version: "3.7"
postgres-version: "11"
- python-version: "3.11"
@@ -570,7 +505,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
@@ -604,10 +539,8 @@ jobs:
schema_diff
complement:
if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
needs:
- linting-done
- changes
if: "${{ !failure() && !cancelled() }}"
needs: linting-done
runs-on: ubuntu-latest
strategy:
@@ -624,27 +557,23 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
with:
path: synapse
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- uses: actions/setup-go@v4
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@v4
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
# use p=1 concurrency as GHA boxes are underpowered and don't like running tons of synapses at once.
- run: |
set -o pipefail
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -p 1 -json 2>&1 | synapse/.ci/scripts/gotestfmt
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
shell: bash
env:
POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
@@ -659,10 +588,10 @@ jobs:
- changes
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@1.61.0
uses: dtolnay/rust-toolchain@1.58.1
- uses: Swatinem/rust-cache@v2
- run: cargo test
@@ -677,7 +606,7 @@ jobs:
- changes
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@master
@@ -705,16 +634,9 @@ jobs:
with:
needs: ${{ toJSON(needs) }}
# Various bits are skipped if there was no applicable changes.
# The newsfile and signoff lint may be skipped on non PR builds.
# The newsfile lint may be skipped on non PR builds
# Cargo test is skipped if there is no changes on Rust code
skippable: |
trial
trial-olddeps
sytest
portdb
export-data
complement
check-signoff
lint-newsfile
cargo-test
cargo-bench
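
Many jobs in this file key off the changes job's outputs, and the expression `!startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.X` is easy to misparse. Its truth table, sketched:

    def suite_should_run(ref: str, filter_matched: bool) -> bool:
        # Non-PR refs (pushes, tags) run everything; PRs respect the paths filter.
        return not ref.startswith("refs/pull/") or filter_matched

    assert suite_should_run("refs/heads/develop", False)       # push: always runs
    assert not suite_should_run("refs/pull/123/merge", False)  # PR, no match: skip
    assert suite_should_run("refs/pull/123/merge", True)       # PR, match: runs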


@@ -5,9 +5,6 @@ on:
- cron: 0 8 * * *
workflow_dispatch:
# NB: inputs are only present when this workflow is dispatched manually.
# (The default below is the default field value in the form to trigger
# a manual dispatch). Otherwise the inputs will evaluate to null.
inputs:
twisted_ref:
description: Commit, branch or tag to checkout from upstream Twisted.
@@ -21,26 +18,11 @@ concurrency:
cancel-in-progress: true
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
# only useful to the Synapse core team.
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
# of the workflow will be skipped as well.
if: github.repository == 'matrix-org/synapse'
runs-on: ubuntu-latest
outputs:
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
steps:
- id: check_condition
run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
mypy:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@@ -52,19 +34,17 @@ jobs:
extras: "all"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref }}
poetry install --no-interaction --extras "all test"
- name: Remove unhelpful options from mypy config
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
- name: Remove warn_unused_ignores from mypy config
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
- run: poetry run mypy
trial:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
@@ -95,20 +75,14 @@ jobs:
|| true
sytest:
needs: check_repo
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
container:
# We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
image: matrixdotorg/sytest-synapse:focal
image: matrixdotorg/sytest-synapse:buster
volumes:
- ${{ github.workspace }}:/src
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install Rust
uses: dtolnay/rust-toolchain@stable
@@ -145,8 +119,7 @@ jobs:
/logs/**/*.log*
complement:
needs: check_repo
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
if: "${{ !failure() && !cancelled() }}"
runs-on: ubuntu-latest
strategy:
@@ -163,19 +136,16 @@ jobs:
database: Postgres
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
- name: Run actions/checkout@v3 for synapse
uses: actions/checkout@v3
with:
path: synapse
- uses: actions/setup-go@v4
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@v4
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
# This step is specific to the 'Twisted trunk' test run:
- name: Patch dependencies
run: |
@@ -196,7 +166,7 @@ jobs:
# open an issue if the build fails, so we know about it.
open-issue:
if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
if: failure()
needs:
- mypy
- trial
@@ -206,7 +176,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
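
One behavioural detail in this file: per the comment near the top, `inputs` are only present on manual dispatch, so `inputs.twisted_ref || 'trunk'` falls back on scheduled runs. The semantics, sketched:

    from typing import Optional

    def twisted_ref(dispatch_input: Optional[str]) -> str:
        # `inputs.twisted_ref || 'trunk'`: scheduled runs carry no inputs,
        # so the expression falls back to Twisted's trunk branch.
        return dispatch_input or "trunk"

    assert twisted_ref(None) == "trunk"             # nightly scheduled run
    assert twisted_ref("my-branch") == "my-branch"  # manual workflow_dispatch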

.gitignore (vendored): 1 line changed

@@ -34,7 +34,6 @@ __pycache__/
/logs
/media_store/
/uploads
/homeserver-config-overrides.d
# For direnv users
/.envrc

CHANGES.md: 3,920 lines changed (file diff suppressed because it is too large)

Cargo.lock (generated): 97 lines changed

@@ -4,18 +4,18 @@ version = 3
[[package]]
name = "aho-corasick"
version = "1.0.2"
version = "0.7.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
dependencies = [
"memchr",
]
[[package]]
name = "anyhow"
version = "1.0.75"
version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "arc-swap"
@@ -132,21 +132,24 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.20"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
"cfg-if",
]
[[package]]
name = "memchr"
version = "2.6.3"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
version = "0.9.0"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
dependencies = [
"autocfg",
]
@@ -182,18 +185,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.64"
version = "1.0.52"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da"
checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
version = "0.19.2"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e681a6cfdc4adcc93b4d3cf993749a4552018ee0a9b65fc0ccfad74352c72a38"
checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
dependencies = [
"anyhow",
"cfg-if",
@@ -209,9 +212,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.19.2"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "076c73d0bc438f7a4ef6fdd0c3bb4732149136abd952b110ac93e4edb13a6ba5"
checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
dependencies = [
"once_cell",
"target-lexicon",
@@ -219,9 +222,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
version = "0.19.2"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e53cee42e77ebe256066ba8aa77eff722b3bb91f3419177cf4cd0f304d3284d9"
checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
dependencies = [
"libc",
"pyo3-build-config",
@@ -229,9 +232,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.8.4"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c09c2b349b6538d8a73d436ca606dab6ce0aaab4dad9e6b7bdd57a4f556c3bc3"
checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c"
dependencies = [
"arc-swap",
"log",
@@ -240,9 +243,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.19.2"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dfeb4c99597e136528c6dd7d5e3de5434d1ceaf487436a3f03b2d56b6fc9efd1"
checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -252,9 +255,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
version = "0.19.2"
version = "0.17.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "947dc12175c254889edc0c02e399476c2f652b4b9ebd123aa655c224de259536"
checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
dependencies = [
"proc-macro2",
"quote",
@@ -263,9 +266,9 @@ dependencies = [
[[package]]
name = "pythonize"
version = "0.19.0"
version = "0.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e35b716d430ace57e2d1b4afb51c9e5b7c46d2bce72926e07f9be6a98ced03e"
checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
dependencies = [
"pyo3",
"serde",
@@ -273,9 +276,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.29"
version = "1.0.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
dependencies = [
"proc-macro2",
]
@@ -291,21 +294,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.9.6"
version = "1.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9"
checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
dependencies = [
"aho-corasick",
"memchr",
@@ -314,9 +305,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.7.5"
version = "0.6.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da"
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
[[package]]
name = "ryu"
@@ -332,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "serde"
version = "1.0.192"
version = "1.0.160"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001"
checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.192"
version = "1.0.160"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1"
checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.28",
"syn 2.0.10",
]
[[package]]
name = "serde_json"
version = "1.0.108"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
"itoa",
"ryu",
@@ -386,9 +377,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.28"
version = "2.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
dependencies = [
"proc-macro2",
"quote",


@@ -3,4 +3,3 @@
[workspace]
members = ["rust"]
resolver = "2"


@@ -122,7 +122,7 @@ You will need to change the server you are logging into from ``matrix.org``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
If all goes well you should at least be able to log in, create a room, and
start sending messages.


@@ -34,6 +34,14 @@ additional-css = [
"docs/website_files/table-of-contents.css",
"docs/website_files/remove-nav-buttons.css",
"docs/website_files/indent-section-headers.css",
"docs/website_files/version-picker.css",
]
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
additional-js = [
"docs/website_files/table-of-contents.js",
"docs/website_files/version-picker.js",
"docs/website_files/version.js",
]
theme = "docs/website_files/theme"
[preprocessor.schema_versions]
command = "./scripts-dev/schema_versions.py"


@@ -1 +0,0 @@
Add support for asynchronous uploads as defined by [MSC2246](https://github.com/matrix-org/matrix-spec-proposals/pull/2246). Contributed by @sumnerevans at @beeper.


@@ -1 +0,0 @@
Remove whole table locks on push rule modifications. Contributed by Nick @ Beeper (@fizzadar).


@@ -1 +0,0 @@
Add a Postgres `REPLICA IDENTITY` to tables that do not have an implicit one. This should allow use of Postgres logical replication.


@@ -1 +0,0 @@
Support reactor tick timings on more types of event loops.


@@ -1 +0,0 @@
Improve type hints.


@@ -1 +0,0 @@
Avoid executing no-op queries.


@@ -1 +0,0 @@
Simplify persistence code to be per-room.


@@ -1 +0,0 @@
Use standard SQL helpers in persistence code.


@@ -1 +0,0 @@
Avoid updating the stream cache unnecessarily.


@@ -1 +0,0 @@
Bump twisted from 23.8.0 to 23.10.0.


@@ -1 +0,0 @@
Improve performance when using opentracing.


@@ -1 +0,0 @@
Run push rule evaluator setup in parallel.


@@ -1 +0,0 @@
Improve tests of the SQL generator.


@@ -1 +0,0 @@
Bump setuptools-rust from 1.8.0 to 1.8.1.


@@ -1 +0,0 @@
Fix a long-standing bug where some queries updated the same row twice. Introduced in Synapse 1.57.0.


@@ -1 +0,0 @@
Improve type hints.


@@ -1 +0,0 @@
Improve type hints.


@@ -1 +0,0 @@
Improve the performance of some operations in multi-worker deployments.


@@ -1 +0,0 @@
Use more generic database methods.


@@ -1 +0,0 @@
Improve the performance of some operations in multi-worker deployments.


@@ -1 +0,0 @@
Fix a long-standing bug where Synapse would not unbind third-party identifiers for Application Service users when deactivated and would not emit a compliant response.


@@ -1 +0,0 @@
Use `dbname` instead of the deprecated `database` connection parameter for psycopg2.


@@ -1 +0,0 @@
Note that the option [`outbound_federation_restricted_to`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#outbound_federation_restricted_to) was added in Synapse 1.89.0, and fix a nearby formatting error.


@@ -1 +0,0 @@
Update parameter information for the `/timestamp_to_event` admin API.


@@ -1 +0,0 @@
Add an internal [Admin API endpoint](https://matrix-org.github.io/synapse/v1.97/usage/configuration/config_documentation.html#allow-replacing-master-cross-signing-key-without-user-interactive-auth) to temporarily grant the ability to update an existing cross-signing key without UIA.


@@ -1 +0,0 @@
Improve references to GitHub issues.


@@ -1 +0,0 @@
Improve references to GitHub issues.


@@ -769,7 +769,7 @@ def main(server_url, identity_server_url, username, token, config_path):
global CONFIG_JSON
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
try:
with open(config_path) as config:
with open(config_path, "r") as config:
syn_cmd.config = json.load(config)
try:
http_client.verbose = "on" == syn_cmd.config["verbose"]


@@ -37,6 +37,7 @@ class HttpClient:
Deferred: Succeeds when we get a 2xx HTTP response. The result
will be the decoded JSON body.
"""
pass
def get_json(self, url, args=None):
"""Gets some json from the given host homeserver and path
@@ -52,6 +53,7 @@ class HttpClient:
Deferred: Succeeds when we get a 2xx HTTP response. The result
will be the decoded JSON body.
"""
pass
class TwistedHttpClient(HttpClient):


@@ -70,10 +70,6 @@ redis:
port: 6379
# dbid: <redis_logical_db_id>
# password: <secret_password>
# use_tls: True
# certificate_file: <path_to_certificate>
# private_key_file: <path_to_private_key>
# ca_file: <path_to_ca_certificate>
```
This assumes that your Redis service is called `redis` in your Docker Compose file.

File diff suppressed because it is too large.


@@ -29,7 +29,7 @@
"level": "error"
},
{
"line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
"line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
"level": "warning"
},
{

debian/changelog (vendored): 218 lines changed

@@ -1,219 +1,3 @@
matrix-synapse-py3 (1.96.0~rc1) stable; urgency=medium
* New Synapse release 1.96.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:09:09 +0000
matrix-synapse-py3 (1.95.1) stable; urgency=medium
* New Synapse release 1.95.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:00:00 +0000
matrix-synapse-py3 (1.95.0) stable; urgency=medium
* New Synapse release 1.95.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Oct 2023 13:00:46 +0100
matrix-synapse-py3 (1.95.0~rc1) stable; urgency=medium
* New synapse release 1.95.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Oct 2023 15:50:17 +0000
matrix-synapse-py3 (1.94.0) stable; urgency=medium
* New Synapse release 1.94.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Oct 2023 10:57:41 +0100
matrix-synapse-py3 (1.94.0~rc1) stable; urgency=medium
* New Synapse release 1.94.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Oct 2023 11:48:18 +0100
matrix-synapse-py3 (1.93.0) stable; urgency=medium
* New Synapse release 1.93.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Sep 2023 15:54:40 +0100
matrix-synapse-py3 (1.93.0~rc1) stable; urgency=medium
* New synapse release 1.93.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Sep 2023 11:55:00 +0000
matrix-synapse-py3 (1.92.3) stable; urgency=medium
* New Synapse release 1.92.3.
-- Synapse Packaging team <packages@matrix.org> Mon, 18 Sep 2023 15:05:04 +0200
matrix-synapse-py3 (1.92.2) stable; urgency=medium
* New Synapse release 1.92.2.
-- Synapse Packaging team <packages@matrix.org> Fri, 15 Sep 2023 13:17:41 +0100
matrix-synapse-py3 (1.92.1) stable; urgency=medium
* New Synapse release 1.92.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 13:19:42 +0200
matrix-synapse-py3 (1.92.0) stable; urgency=medium
* New Synapse release 1.92.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 11:59:23 +0200
matrix-synapse-py3 (1.91.2) stable; urgency=medium
* New synapse release 1.91.2.
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Sep 2023 14:59:30 +0000
matrix-synapse-py3 (1.92.0~rc1) stable; urgency=medium
* New Synapse release 1.92.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Sep 2023 11:21:43 +0100
matrix-synapse-py3 (1.91.1) stable; urgency=medium
* New Synapse release 1.91.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 04 Sep 2023 14:03:18 +0100
matrix-synapse-py3 (1.91.0) stable; urgency=medium
* New Synapse release 1.91.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Aug 2023 11:18:10 +0100
matrix-synapse-py3 (1.91.0~rc1) stable; urgency=medium
* New Synapse release 1.91.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 23 Aug 2023 09:47:18 -0700
matrix-synapse-py3 (1.90.0) stable; urgency=medium
* New Synapse release 1.90.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Aug 2023 11:17:34 +0100
matrix-synapse-py3 (1.90.0~rc1) stable; urgency=medium
* New Synapse release 1.90.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Aug 2023 15:29:34 +0100
matrix-synapse-py3 (1.89.0) stable; urgency=medium
* New Synapse release 1.89.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Aug 2023 11:07:15 +0100
matrix-synapse-py3 (1.89.0~rc1) stable; urgency=medium
* New Synapse release 1.89.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Jul 2023 14:31:07 +0200
matrix-synapse-py3 (1.88.0) stable; urgency=medium
* New Synapse release 1.88.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Jul 2023 13:59:28 +0100
matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
* New Synapse release 1.88.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jul 2023 10:20:19 +0100
matrix-synapse-py3 (1.87.0) stable; urgency=medium
* New Synapse release 1.87.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Jul 2023 16:24:00 +0100
matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium
* New synapse release 1.87.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Jun 2023 15:27:04 +0000
matrix-synapse-py3 (1.86.0) stable; urgency=medium
* New Synapse release 1.86.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Jun 2023 17:22:46 +0200
matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
* New Synapse release 1.86.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 14 Jun 2023 12:16:27 +0200
matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
* New Synapse release 1.86.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Jun 2023 14:30:45 +0200
matrix-synapse-py3 (1.85.2) stable; urgency=medium
* New Synapse release 1.85.2.
-- Synapse Packaging team <packages@matrix.org> Thu, 08 Jun 2023 13:04:18 +0100
matrix-synapse-py3 (1.85.1) stable; urgency=medium
* New Synapse release 1.85.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jun 2023 10:51:12 +0100
matrix-synapse-py3 (1.85.0) stable; urgency=medium
* New Synapse release 1.85.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Jun 2023 09:39:29 +0100
matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
* New Synapse release 1.85.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 01 Jun 2023 09:16:18 -0700
matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
* New Synapse release 1.85.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 May 2023 13:56:54 +0100
matrix-synapse-py3 (1.84.1) stable; urgency=medium
* New Synapse release 1.84.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 26 May 2023 16:15:30 +0100
matrix-synapse-py3 (1.84.0) stable; urgency=medium
* New Synapse release 1.84.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 May 2023 10:57:22 +0100
matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
* New Synapse release 1.84.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 16 May 2023 11:12:02 +0100
matrix-synapse-py3 (1.83.0) stable; urgency=medium
* New Synapse release 1.83.0.
@@ -1637,7 +1421,7 @@ matrix-synapse-py3 (0.99.3.1) stable; urgency=medium
matrix-synapse-py3 (0.99.3) stable; urgency=medium
[ Richard van der Hoff ]
* Fix warning during preconfiguration. (Fixes: https://github.com/matrix-org/synapse/issues/4819)
* Fix warning during preconfiguration. (Fixes: #4819)
[ Synapse Packaging team ]
* New synapse release 0.99.3.


@@ -46,7 +46,7 @@ for port in 8080 8081 8082; do
echo ''
# Warning, this heredoc depends on the interaction of tabs and spaces.
# Please don't accidentally bork me with your fancy settings.
# Please don't accidentaly bork me with your fancy settings.
listeners=$(cat <<-PORTLISTENERS
# Configure server to listen on both $https_port and $port
# This overides some of the default settings above
@@ -80,8 +80,12 @@ for port in 8080 8081 8082; do
echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
# Request keys directly from servers contacted over federation
echo 'trusted_key_servers: []'
# Ignore keys from the trusted keys server
echo '# Ignore keys from the trusted keys server'
echo 'trusted_key_servers:'
echo ' - server_name: "matrix.org"'
echo ' accept_keys_insecurely: true'
echo ''
# Allow the servers to communicate over localhost.
allow_list=$(cat <<-ALLOW_LIST


@@ -25,9 +25,9 @@ ARG PYTHON_VERSION=3.11
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
@@ -37,7 +37,7 @@ RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential curl git libffi-dev libssl-dev pkg-config \
build-essential curl git libffi-dev libssl-dev \
&& rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH.
@@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
###
### Stage 1: builder
###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
# install the OS build deps
RUN \
@@ -158,7 +158,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \
### Stage 2: runtime
###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
@@ -173,10 +173,10 @@ RUN \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
libwebp6 \
xmlsec1 \
libjemalloc2 \
libicu72 \
libicu67 \
libssl-dev \
openssl \
&& rm -rf /var/lib/apt/lists/*


@@ -24,16 +24,16 @@ ARG distro=""
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
# it's not obviously easier to use that than to build our own.)
FROM docker.io/library/${distro} as builder
FROM ${distro} as builder
RUN apt-get update -qq -o Acquire::Languages=none
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
-yqq --no-install-recommends \
build-essential \
ca-certificates \
devscripts \
equivs \
wget
# fetch and unpack the package
# We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with
@@ -55,36 +55,40 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
###
### Stage 1
###
FROM docker.io/library/${distro}
FROM ${distro}
# Get the distro we want to pull from as a dynamic build variable
# (We need to define it in each build stage)
ARG distro=""
ENV distro ${distro}
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
# http://bugs.python.org/issue19846
ENV LANG C.UTF-8
# Install the build dependencies
#
# NB: keep this list in sync with the list of build-deps in debian/control
# TODO: it would be nice to do that automatically.
RUN apt-get update -qq -o Acquire::Languages=none \
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
build-essential \
curl \
debhelper \
devscripts \
libsystemd-dev \
lsb-release \
pkg-config \
python3-dev \
python3-pip \
python3-setuptools \
python3-venv \
sqlite3 \
libpq-dev \
libicu-dev \
pkg-config \
xmlsec1
# Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust

View File

@@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.
FROM docker.io/library/debian:bookworm-slim AS deps_base
FROM debian:bullseye-slim AS deps_base
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -21,7 +21,7 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-bookworm AS redis_base
FROM redis:6-bullseye AS redis_base
# now build the final image, based on the the regular Synapse docker image
FROM $FROM

View File

@@ -73,8 +73,7 @@ The following environment variables are supported in `generate` mode:
will log sensitive information such as access tokens.
This should not be needed unless you are a developer attempting to debug something
particularly tricky.
* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
for testing.
## Postgres

View File

@@ -7,7 +7,6 @@
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
FROM $FROM
@@ -20,8 +19,8 @@ FROM $FROM
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

View File

@@ -68,11 +68,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
fi
log "Workers requested: $SYNAPSE_WORKER_TYPES"
# adjust connection pool limits on worker mode as otherwise running lots of worker synapses
# can make docker unhappy (in GHA)
export POSTGRES_CP_MIN=1
export POSTGRES_CP_MAX=3
echo "using reduced connection pool limits for worker mode"
# Improve startup times by using a launcher based on fork()
export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1
else

View File

@@ -92,6 +92,8 @@ allow_device_name_lookup_over_federation: true
## Experimental Features ##
experimental_features:
# Enable history backfilling support
msc2716_enabled: true
# client-side support for partial state in /send_join responses
faster_joins: true
# Enable support for polls

View File

@@ -35,11 +35,7 @@ server {
# Send all other traffic to the main process
location ~* ^(\\/_matrix|\\/_synapse) {
{% if using_unix_sockets %}
proxy_pass http://unix:/run/main_public.sock;
{% else %}
proxy_pass http://localhost:8080;
{% endif %}
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;

View File

@@ -6,9 +6,6 @@
{% if enable_redis %}
redis:
enabled: true
{% if using_unix_sockets %}
path: /tmp/redis.sock
{% endif %}
{% endif %}
{% if appservice_registrations is not none %}

View File

@@ -19,11 +19,7 @@ username=www-data
autorestart=true
[program:redis]
{% if using_unix_sockets %}
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server --unixsocket /tmp/redis.sock
{% else %}
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
{% endif %}
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0

View File

@@ -6,13 +6,13 @@
worker_app: "{{ app }}"
worker_name: "{{ name }}"
# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093
worker_listeners:
- type: http
{% if using_unix_sockets %}
path: "/run/worker.{{ port }}"
{% else %}
port: {{ port }}
{% endif %}
{% if listener_resources %}
resources:
- names:

View File

@@ -36,17 +36,12 @@ listeners:
# Allow configuring in case we want to reverse proxy 8008
# using another process in the same container
{% if SYNAPSE_USE_UNIX_SOCKET %}
# Unix sockets don't care about TLS or IP addresses or ports
- path: '/run/main_public.sock'
type: http
{% else %}
- port: {{ SYNAPSE_HTTP_PORT or 8008 }}
tls: false
bind_addresses: ['::']
type: http
x_forwarded: false
{% endif %}
resources:
- names: [client]
compress: true
@@ -62,13 +57,10 @@ database:
user: "{{ POSTGRES_USER or "synapse" }}"
password: "{{ POSTGRES_PASSWORD }}"
database: "{{ POSTGRES_DB or "synapse" }}"
{% if not SYNAPSE_USE_UNIX_SOCKET %}
{# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
host: "{{ POSTGRES_HOST or "db" }}"
port: "{{ POSTGRES_PORT or "5432" }}"
{% endif %}
cp_min: {{ POSTGRES_CP_MIN or 5 }}
cp_max: {{ POSTGRES_CP_MAX or 10 }}
cp_min: 5
cp_max: 10
{% else %}
database:
name: "sqlite3"

View File

@@ -49,35 +49,17 @@ handlers:
class: logging.StreamHandler
formatter: precise
{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
loggers:
# This is just here so we can leave `loggers` in the config regardless of whether
# we configure other loggers below (avoid empty yaml dict error).
_placeholder:
level: "INFO"
{% if not SYNAPSE_LOG_SENSITIVE %}
{#
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
so that DEBUG entries (containing sensitive information) are not emitted.
#}
synapse.storage.SQL:
# beware: increasing this to DEBUG will make synapse log sensitive
# information such as access tokens.
level: INFO
{% endif %}
{% if SYNAPSE_LOG_TESTING %}
{#
If Synapse is under test, log a few more useful things for a developer
attempting to debug something particularly tricky.
With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
unexpectedly) filtered out of responses in tests. It's just nice to be able to
look at the CI log and figure out why an event isn't being returned.
#}
synapse.visibility.filtered_event_debug:
level: DEBUG
{% endif %}
{% endif %}
root:
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}

View File

@@ -40,8 +40,6 @@
# log level. INFO is the default.
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
# regardless of the SYNAPSE_LOG_LEVEL setting.
# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
# for testing.
#
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
# in the project's README), this script may be run multiple times, and functionality should
@@ -71,12 +69,6 @@ import yaml
from jinja2 import Environment, FileSystemLoader
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
MAIN_PROCESS_INSTANCE_NAME = "main"
MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
MAIN_PROCESS_REPLICATION_PORT = 9093
# Obviously, these would only be used with the UNIX socket option
MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH = "/run/main_public.sock"
MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH = "/run/main_private.sock"
# A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
# during processing with the name of the worker.
@@ -183,7 +175,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(r0|v3|unstable)/password_policy$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
"^/_matrix/client/(r0|v3|unstable)/capabilities$",
"^/_matrix/client/(r0|v3|unstable)/notifications$",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@@ -248,6 +239,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
"^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
"^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
"^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@@ -411,15 +403,11 @@ def add_worker_roles_to_shared_config(
)
# Map of stream writer instance names to host/ports combos
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
instance_map[worker_name] = {
"path": f"/run/worker.{worker_port}",
}
else:
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
# Update the list of stream writers. It's convenient that the name of the worker
# type is the same as the stream to write. Iterate over the whole list in case there
# is more than one.
@@ -431,15 +419,10 @@ def add_worker_roles_to_shared_config(
# Map of stream writer instance names to host/ports combos
# For now, all stream writers need http replication ports
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
instance_map[worker_name] = {
"path": f"/run/worker.{worker_port}",
}
else:
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
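
Both branches of this hunk reduce to one decision: what shape an `instance_map` entry takes. A minimal, hypothetical illustration (the function and the worker name are invented for the example):

```python
def instance_map_entry(worker_name: str, worker_port: int, use_unix_socket: bool = False) -> dict:
    # Unix-socket transport is addressed by filesystem path; TCP by host and port.
    if use_unix_socket:
        return {worker_name: {"path": f"/run/worker.{worker_port}"}}
    return {worker_name: {"host": "localhost", "port": worker_port}}

assert instance_map_entry("event_persister1", 18009) == {
    "event_persister1": {"host": "localhost", "port": 18009}
}
```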
def merge_worker_template_configs(
@@ -731,29 +714,17 @@ def generate_worker_files(
# Note that yaml cares about indentation, so care should be taken to insert lines
# into files at the correct indentation below.
# Convenience helper for if using unix sockets instead of host:port
using_unix_sockets = environ.get("SYNAPSE_USE_UNIX_SOCKET", False)
# First read the original config file and extract the listeners block. Then we'll
# add another listener for replication. Later we'll write out the result to the
# shared config file.
listeners: List[Any]
if using_unix_sockets:
listeners = [
{
"path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
"type": "http",
"resources": [{"names": ["replication"]}],
}
]
else:
listeners = [
{
"port": MAIN_PROCESS_REPLICATION_PORT,
"bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
"type": "http",
"resources": [{"names": ["replication"]}],
}
]
listeners = [
{
"port": 9093,
"bind_address": "127.0.0.1",
"type": "http",
"resources": [{"names": ["replication"]}],
}
]
with open(config_path) as file_stream:
original_config = yaml.safe_load(file_stream)
original_listeners = original_config.get("listeners")
@@ -794,17 +765,7 @@ def generate_worker_files(
# A list of internal endpoints to healthcheck, starting with the main process
# which exists even if no workers do.
# This list ends up being part of the command line to curl, (curl added support for
# Unix sockets in version 7.40).
if using_unix_sockets:
healthcheck_urls = [
f"--unix-socket {MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH} "
# The scheme and hostname from the following URL are ignored.
# The only thing that matters is the path `/health`
"http://localhost/health"
]
else:
healthcheck_urls = ["http://localhost:8080/health"]
healthcheck_urls = ["http://localhost:8080/health"]
# Get the set of all worker types that we have configured
all_worker_types_in_use = set(chain(*requested_worker_types.values()))
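
The healthcheck hunk above hinges on one curl detail: with `--unix-socket` (supported since curl 7.40), the URL's scheme and host are ignored and only the `/health` path matters. A hypothetical sketch of assembling such a list, not the project's actual code:

```python
def healthcheck_args(worker_ports, main_socket=None):
    # Start with the main process, which exists even when no workers do.
    if main_socket is not None:
        checks = [f"--unix-socket {main_socket} http://localhost/health"]
        checks += [f"--unix-socket /run/worker.{p} http://localhost/health" for p in worker_ports]
    else:
        checks = ["http://localhost:8080/health"]
        checks += [f"http://localhost:{p}/health" for p in worker_ports]
    return checks
```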
@@ -841,12 +802,8 @@ def generate_worker_files(
# given worker_type needs to stay assigned and not be replaced.
worker_config["shared_extra_conf"].update(shared_config)
shared_config = worker_config["shared_extra_conf"]
if using_unix_sockets:
healthcheck_urls.append(
f"--unix-socket /run/worker.{worker_port} http://localhost/health"
)
else:
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
# Update the shared config with sharding-related options if necessary
add_worker_roles_to_shared_config(
@@ -862,10 +819,9 @@ def generate_worker_files(
# Then a worker config file
convert(
"/conf/worker.yaml.j2",
f"/conf/workers/{worker_name}.yaml",
"/conf/workers/{name}.yaml".format(name=worker_name),
**worker_config,
worker_log_config_filepath=log_config_filepath,
using_unix_sockets=using_unix_sockets,
)
# Save this worker's port number to the correct nginx upstreams
@@ -886,13 +842,8 @@ def generate_worker_files(
nginx_upstream_config = ""
for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
body = ""
if using_unix_sockets:
for port in upstream_worker_ports:
body += f" server unix:/run/worker.{port};\n"
else:
for port in upstream_worker_ports:
body += f" server localhost:{port};\n"
for port in upstream_worker_ports:
body += f" server localhost:{port};\n"
# Add to the list of configured upstreams
nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
@@ -919,19 +870,6 @@ def generate_worker_files(
workers_in_use = len(requested_worker_types) > 0
# If there are workers, add the main process to the instance_map too.
if workers_in_use:
instance_map = shared_config.setdefault("instance_map", {})
if using_unix_sockets:
instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
"path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
}
else:
instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
"host": MAIN_PROCESS_LOCALHOST_ADDRESS,
"port": MAIN_PROCESS_REPLICATION_PORT,
}
# Shared homeserver config
convert(
"/conf/shared.yaml.j2",
@@ -940,7 +878,6 @@ def generate_worker_files(
appservice_registrations=appservice_registrations,
enable_redis=workers_in_use,
workers_in_use=workers_in_use,
using_unix_sockets=using_unix_sockets,
)
# Nginx config
@@ -951,7 +888,6 @@ def generate_worker_files(
upstream_directives=nginx_upstream_config,
tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
using_unix_sockets=using_unix_sockets,
)
# Supervisord config
@@ -961,7 +897,6 @@ def generate_worker_files(
"/etc/supervisor/supervisord.conf",
main_config_path=config_path,
enable_redis=workers_in_use,
using_unix_sockets=using_unix_sockets,
)
convert(
@@ -1001,7 +936,6 @@ def generate_worker_log_config(
extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
"SYNAPSE_LOG_SENSITIVE"
)
extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
# Render and write the file
log_config_filepath = f"/conf/workers/{worker_name}.log.config"

View File

@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# We hardcode the use of Debian bullseye here because this could change upstream
# and other Dockerfiles used for testing are expecting bullseye.
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
@@ -33,7 +33,7 @@ RUN \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
libwebp6 \
xmlsec1 \
libjemalloc2 \
&& rm -rf /var/lib/apt/lists/*

View File

@@ -82,7 +82,7 @@ def generate_config_from_template(
with open(filename) as handle:
value = handle.read()
else:
log(f"Generating a random secret for {secret}")
log("Generating a random secret for {}".format(secret))
value = codecs.encode(os.urandom(32), "hex").decode()
with open(filename, "w") as handle:
handle.write(value)
@@ -239,7 +239,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
log("Could not find %s, will not use" % (jemallocpath,))
# if there are no config files passed to synapse, try adding the default file
if not any(p.startswith(("--config-path", "-c")) for p in args):
if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
config_path = environ.get(
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"

View File

@@ -19,7 +19,7 @@
# Usage
- [Federation](federate.md)
- [Configuration](usage/configuration/README.md)
- [Configuration Manual](usage/configuration/config_documentation.md)
- [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
- [Logging Sample Config File](usage/configuration/logging_sample_config.md)
- [Structured Logging](structured_logging.md)
@@ -48,7 +48,6 @@
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
- [Account data callbacks](modules/account_data_callbacks.md)
- [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
- [Workers](workers.md)
- [Using `synctl` with Workers](synctl_workers.md)
@@ -98,7 +97,6 @@
- [Cancellation](development/synapse_architecture/cancellation.md)
- [Log Contexts](log_contexts.md)
- [Replication](replication.md)
- [Streams](development/synapse_architecture/streams.md)
- [TCP Replication](tcp_replication.md)
- [Faster remote joins](development/synapse_architecture/faster_joins.md)
- [Internal Documentation](development/internal_documentation/README.md)

View File

@@ -1,7 +1,5 @@
# Account validity API
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
This API allows a server administrator to manage the validity of an account. To
use it, you must enable the account validity feature (under
`account_validity`) in Synapse's configuration.

View File

@@ -1,12 +1,10 @@
# Experimental Features API
This API allows a server administrator to enable or disable some experimental features on a per-user
basis. The currently supported features are:
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
basis. Currently supported features are [msc3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
presence state enabled, [msc2654](https://github.com/matrix-org/matrix-spec-proposals/pull/2654): enable unread counts,
[msc3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
for another client, and [msc3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
UIA when first uploading cross-signing keys.
@@ -21,7 +19,7 @@ provide a body containing the user id and listing the features to enable/disable
{
"features": {
"msc3026":true,
"msc3881":true
"msc2654":true
}
}
```
@@ -48,6 +46,7 @@ user like so:
{
"features": {
"msc3026": true,
"msc2654": true,
"msc3881": false,
"msc3967": false
}

View File

@@ -1,7 +1,5 @@
# Shared-Secret Registration
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
This API allows for the creation of users in an administrative and
non-interactive way. This is generally used for bootstrapping a Synapse
instance with administrator accounts.

View File

@@ -419,7 +419,7 @@ The following query parameters are available:
* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
* `to` - The token to stop returning events at.
* `limit` - The maximum number of events to return. Defaults to `10`.
* `filter` - A JSON RoomEventFilter to filter returned events with.
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
@@ -536,8 +536,7 @@ The following query parameters are available:
**Response**
* `event_id` - The event ID closest to the given timestamp.
* `origin_server_ts` - The timestamp of the event in milliseconds since the Unix epoch.
* `event_id` - converted from timestamp
# Block Room API
The Block Room admin API allows server admins to block and unblock rooms,

View File

@@ -100,7 +100,7 @@ it should be enough to get a rough idea of where database disk space is going.
The API is:
```
GET /_synapse/admin/v1/statistics/database/rooms
```
A response body like the following is returned:

View File

@@ -54,8 +54,7 @@ It returns a JSON body like the following:
"external_id": "<user_id_provider_2>"
}
],
"user_type": null,
"locked": false
"user_type": null
}
```
@@ -63,7 +62,7 @@ URL parameters:
- `user_id`: fully-qualified user id: for example, `@user:server.com`.
## Create or modify account
This API allows an administrator to create or modify a user account with a
specific `user_id`.
@@ -79,33 +78,31 @@ with a body of:
```json
{
"password": "user_password",
"logout_devices": false,
"displayname": "Alice Marigold",
"avatar_url": "mxc://example.com/abcde12345",
"displayname": "User",
"threepids": [
{
"medium": "email",
"address": "alice@example.com"
"address": "<user_mail_1>"
},
{
"medium": "email",
"address": "alice@domain.org"
"address": "<user_mail_2>"
}
],
"external_ids": [
{
"auth_provider": "example",
"external_id": "12345"
"auth_provider": "<provider1>",
"external_id": "<user_id_provider_1>"
},
{
"auth_provider": "example2",
"external_id": "abc54321"
"auth_provider": "<provider2>",
"external_id": "<user_id_provider_2>"
}
],
"avatar_url": "<avatar_url>",
"admin": false,
"deactivated": false,
"user_type": null,
"locked": false
"user_type": null
}
```
@@ -115,52 +112,41 @@ Returns HTTP status code:
URL parameters:
- `user_id` - A fully-qualified user id. For example, `@user:server.com`.
- `user_id`: fully-qualified user id: for example, `@user:server.com`.
Body parameters:
- `password` - **string**, optional. If provided, the user's password is updated and all
- `password` - string, optional. If provided, the user's password is updated and all
devices are logged out, unless `logout_devices` is set to `false`.
- `logout_devices` - **bool**, optional, defaults to `true`. If set to `false`, devices aren't
- `logout_devices` - bool, optional, defaults to `true`. If set to false, devices aren't
logged out even when `password` is provided.
- `displayname` - **string**, optional. If set to an empty string (`""`), the user's display name
will be removed.
- `avatar_url` - **string**, optional. Must be a
[MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
If set to an empty string (`""`), the user's avatar is removed.
- `threepids` - **array**, optional. If provided, the user's third-party IDs (email, msisdn) are
entirely replaced with the given list. Each item in the array is an object with the following
fields:
- `medium` - **string**, required. The type of third-party ID, either `email` or `msisdn` (phone number).
- `address` - **string**, required. The third-party ID itself, e.g. `alice@example.com` for `email` or
`447470274584` (for a phone number with country code "44") and `19254857364` (for a phone number
with country code "1") for `msisdn`.
Note: If a threepid is removed from a user via this option, Synapse will also attempt to remove
that threepid from any identity servers it is aware has a binding for it.
- `external_ids` - **array**, optional. Allow setting the identifier of the external identity
provider for SSO (Single sign-on). More details are in the configuration manual under the
- `displayname` - string, optional, defaults to the value of `user_id`.
- `threepids` - array, optional, allows setting the third-party IDs (email, msisdn)
  belonging to a user.
  - `medium` - string. Kind of third-party ID, either `email` or `msisdn`.
  - `address` - string. Value of third-party ID.
- `external_ids` - array, optional. Allow setting the identifier of the external identity
provider for SSO (Single sign-on). Details in the configuration manual under the
sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
- `auth_provider` - **string**, required. The unique, internal ID of the external identity provider.
The same as `idp_id` from the homeserver configuration. Note that no error is raised if the
provided value is not in the homeserver configuration.
- `external_id` - **string**, required. An identifier for the user in the external identity provider.
When the user logs in to the identity provider, this must be the unique ID that they map to.
- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
granting them access to the Admin API, among other things.
- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.
- `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
in the homeserver configuration. Note that no error is raised if the provided
value is not in the homeserver configuration.
- `external_id` - string, user ID in the external identity provider.
- `avatar_url` - string, optional, must be a
[MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
- `admin` - bool, optional, defaults to `false`.
- `deactivated` - bool, optional. If unspecified, deactivation state will be left
unchanged on existing accounts and set to `false` for new accounts.
A user cannot be erased by deactivating with this API. For details on
deactivating users see [Deactivate Account](#deactivate-account).
- `user_type` - string or null, optional. If provided, the user type will be
adjusted. If `null` given, the user type will be cleared. Other
allowed options are: `bot` and `support`.
Note: the `password` field must also be set if both of the following are true:
- `deactivated` is set to `false` and the user was previously deactivated (you are reactivating this user)
- Users are allowed to set their password on this homeserver (both `password_config.enabled` and
`password_config.localdb_enabled` config options are set to `true`).
Users' passwords are wiped upon account deactivation, hence the need to set a new one here.
If the user already exists then optional parameters default to the current value.
Note: a user cannot be erased with this API. For more details on
deactivating and erasing users see [Deactivate Account](#deactivate-account).
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
- `user_type` - **string** or null, optional. If not provided, the user type will
not be changed. If `null` is given, the user type will be cleared.
Other allowed options are: `bot` and `support`.
In order to re-activate an account `deactivated` must be set to `false`. If
users do not login via single-sign-on, a new `password` must be provided.
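
Putting the parameters above together, a hedged example of calling the create-or-modify endpoint. The host, token, and the `requests` dependency are placeholders; the v2 users path matches the API described in this section:

```python
import requests  # assumed dependency; host and token are placeholders

resp = requests.put(
    "https://synapse.example.com/_synapse/admin/v2/users/@alice:example.com",
    headers={"Authorization": "Bearer <admin_access_token>"},
    json={"displayname": "Alice Marigold", "admin": False},
)
# 201 indicates a newly created account, 200 an update of an existing one.
print(resp.status_code, resp.json())
```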
## List Accounts
@@ -186,8 +172,7 @@ A response body like the following is returned:
"shadow_banned": 0,
"displayname": "<User One>",
"avatar_url": null,
"creation_ts": 1560432668000,
"locked": false
"creation_ts": 1560432668000
}, {
"name": "<user_id2>",
"is_guest": 0,
@@ -198,8 +183,7 @@ A response body like the following is returned:
"shadow_banned": 0,
"displayname": "<User Two>",
"avatar_url": "<avatar_url>",
"creation_ts": 1561550621000,
"locked": false
"creation_ts": 1561550621000
}
],
"next_token": "100",
@@ -222,9 +206,7 @@ The following parameters should be set in the URL:
- `name` - Is optional and filters to only return users with user ID localparts
**or** displaynames that contain this value.
- `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users.
Defaults to `true` to include guest users. This parameter is not supported when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
- `admins` - Optional flag to filter admins. If `true`, only admins are queried. If `false`, admins are excluded from
the query. When the flag is absent (the default), **both** admins and non-admins are included in the search results.
Defaults to `true` to include guest users.
- `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users.
Defaults to `false` to exclude deactivated users.
- `limit` - string representing a positive integer - Is optional but is used for pagination,
@@ -246,15 +228,9 @@ The following parameters should be set in the URL:
- `displayname` - Users are ordered alphabetically by `displayname`.
- `avatar_url` - Users are ordered alphabetically by avatar URL.
- `creation_ts` - Users are ordered by when the users was created in ms.
- `last_seen_ts` - Users are ordered by when the user was last seen in ms.
- `dir` - Direction of the sort order. Either `f` for forwards or `b` for backwards.
Setting this value to `b` will reverse the above sort order. Defaults to `f`.
- `not_user_type` - Exclude certain user types, such as bot users, from the request.
Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
"empty string" here means to exclude users without a type.
- `locked` - string representing a bool - Is optional and if `true` will **include** locked users.
Defaults to `false` to exclude locked users. Note: Introduced in v1.93.
Caution. The database only has indexes on the columns `name` and `creation_ts`.
This means that if a different sort order is used (`is_guest`, `admin`,
@@ -279,12 +255,10 @@ The following fields are returned in the JSON response body:
- `displayname` - string - The user's display name if they have set one.
- `avatar_url` - string - The user's avatar URL if they have set one.
- `creation_ts` - integer - The user's creation timestamp in ms.
- `last_seen_ts` - integer - The user's last activity timestamp in ms.
- `locked` - bool - Status if that user has been marked as locked. Note: Introduced in v1.93.
- `next_token`: string representing a positive integer - Indication for pagination. See above.
- `total` - integer - Total number of users.
*Added in Synapse 1.93:* the `locked` query parameter and response field.
## Query current sessions for a user
@@ -399,8 +373,6 @@ The following actions are **NOT** performed. The list may be incomplete.
## Reset password
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Changes the password of another user. This will automatically log the user out of all their devices.
The api is:
@@ -424,8 +396,6 @@ The parameter `logout_devices` is optional and defaults to `true`.
## Get whether a user is a server administrator or not
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
The api is:
```
@@ -443,8 +413,6 @@ A response body like the following is returned:
## Change whether a user is a server administrator or not
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Note that you cannot demote yourself.
The api is:
@@ -738,8 +706,6 @@ delete largest/smallest or newest/oldest files first.
## Login as a user
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
Get an access token that can be used to authenticate as that user. Useful for
when admins wish to do actions on behalf of a user.
@@ -752,8 +718,7 @@ POST /_synapse/admin/v1/users/<user_id>/login
An optional `valid_until_ms` field can be specified in the request body as an
integer timestamp that specifies when the token should expire. By default tokens
do not expire. Note that this API does not allow a user to login as themselves
(to create more tokens).
do not expire.
A response body like the following is returned:
@@ -773,43 +738,6 @@ Note: The token will expire if the *admin* user calls `/logout/all` from any
of their devices, but the token will *not* expire if the target user does the
same.
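
For illustration, a hypothetical call to this endpoint with an expiry one hour out (the host, token, and `requests` are placeholders; the path is the one shown above):

```python
import time
import requests  # assumed dependency

valid_until_ms = int(time.time() * 1000) + 60 * 60 * 1000  # one hour from now

resp = requests.post(
    "https://synapse.example.com/_synapse/admin/v1/users/@user:example.com/login",
    headers={"Authorization": "Bearer <admin_access_token>"},
    json={"valid_until_ms": valid_until_ms},  # omit for a non-expiring token
)
puppet_token = resp.json()["access_token"]
```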
## Allow replacing master cross-signing key without User-Interactive Auth
This endpoint is not intended for server administrator usage;
we describe it here for completeness.
This API temporarily permits a user to replace their master cross-signing key
without going through
[user-interactive authentication](https://spec.matrix.org/v1.8/client-server-api/#user-interactive-authentication-api) (UIA).
This is useful when Synapse has delegated its authentication to the
[Matrix Authentication Service](https://github.com/matrix-org/matrix-authentication-service/);
as Synapse cannot perform UIA in these circumstances.
The API is
```http request
POST /_synapse/admin/v1/users/<user_id>/_allow_cross_signing_replacement_without_uia
{}
```
If the user does not exist, or does exist but has no master cross-signing key,
this will return with status code `404 Not Found`.
Otherwise, a response body like the following is returned, with status `200 OK`:
```json
{
"updatable_without_uia_before_ms": 1234567890
}
```
The response body is a JSON object with a single field:
- `updatable_without_uia_before_ms`: integer. The timestamp in milliseconds
before which the user is permitted to replace their cross-signing key without
going through UIA.
_Added in Synapse 1.97.0._
## User devices
@@ -874,33 +802,6 @@ The following fields are returned in the JSON response body:
- `total` - Total number of user's devices.
### Create a device
Creates a new device for a specific `user_id` and `device_id`. Does nothing if the `device_id`
exists already.
The API is:
```
POST /_synapse/admin/v2/users/<user_id>/devices
{
"device_id": "QBUAZIFURK"
}
```
An empty JSON dict is returned.
**Parameters**
The following parameters should be set in the URL:
- `user_id` - fully qualified: for example, `@user:server.com`.
The following fields are required in the JSON request body:
- `device_id` - The device ID to create.
### Delete multiple devices
Deletes the given devices for a specific `user_id`, and invalidates
any access token associated with them.
@@ -1241,7 +1142,7 @@ The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
be local.
## Check username availability
Checks to see if a username is available, and valid, for the server. See [the client-server
API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)
@@ -1259,7 +1160,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart
The request and response format is the same as the
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
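
A quick sketch of querying it (the host and token are placeholders; the path is the one shown above):

```python
import requests  # assumed dependency

resp = requests.get(
    "https://synapse.example.com/_synapse/admin/v1/username_available",
    params={"username": "alice"},  # the localpart only, not a full MXID
    headers={"Authorization": "Bearer <admin_access_token>"},
)
print(resp.status_code, resp.json())  # e.g. 200 {"available": true}
```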
## Find a user based on their ID in an auth provider
The API is:
@@ -1298,7 +1199,7 @@ Returns a `404` HTTP status code if no user was found, with a response body like
_Added in Synapse 1.68.0._
## Find a user based on their Third Party ID (ThreePID or 3PID)
The API is:

View File

@@ -1,7 +1,7 @@
# Version API
This API returns the running Synapse version.
This is useful when a Synapse instance
This API returns the running Synapse version and the Python version
on which Synapse is being run. This is useful when a Synapse instance
is behind a proxy that does not forward the 'Server' header (which also
contains Synapse version information).
@@ -15,9 +15,7 @@ It returns a JSON body like the following:
```json
{
"server_version": "0.99.2rc1 (b=develop, abcdef123)"
"server_version": "0.99.2rc1 (b=develop, abcdef123)",
"python_version": "3.7.8"
}
```
*Changed in Synapse 1.94.0:* The `python_version` key was removed from the
response body.
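
A minimal sketch of querying it (the host is a placeholder, and the path is assumed to be the usual `/_synapse/admin/v1/server_version`, which has historically not required authentication):

```python
import requests  # assumed dependency

resp = requests.get("https://synapse.example.com/_synapse/admin/v1/server_version")
print(resp.json()["server_version"])  # e.g. "0.99.2rc1 (b=develop, abcdef123)"
```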

View File

@@ -24,7 +24,7 @@ Server with a domain specific API.
1. **Messaging Layer**
This is what the rest of the homeserver hits to send messages, join rooms,
etc. It also allows you to register callbacks for when it gets notified by
lower levels that e.g. a new message has been received.
It is responsible for serializing requests to send to the data

View File

@@ -164,7 +164,7 @@ Synapse 1.6.0rc2 (2019-11-25)
Bugfixes
--------
- Fix a bug which could cause the background database update handler for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))
Synapse 1.6.0rc1 (2019-11-20)
@@ -191,7 +191,7 @@ Bugfixes
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
- Fix bug which caused rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
@@ -232,7 +232,7 @@ Internal Changes
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
- Add optional python dependencies and dependent binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
@@ -653,7 +653,7 @@ Internal Changes
- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
- Whitelist history visibility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
Synapse 1.2.1 (2019-07-26)
@@ -817,7 +817,7 @@ See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
Features
--------
- Added possibility to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
@@ -850,7 +850,7 @@ Bugfixes
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
- Fixed m.login.jwt using unregistered user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))

View File

@@ -251,7 +251,7 @@ Internal Changes
- Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559))
- Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595))
- Don't instantiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
- Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615))
- Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616))
- Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621))
@@ -518,7 +518,7 @@ Bugfixes
- Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278))
- Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287))
- Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))
- Fix `UnboundLocalError` from occurring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
- Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353))
- Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362))
- Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364))
@@ -815,7 +815,7 @@ Bugfixes
- Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977))
- Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978))
- Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980))
- Fix various comments and minor discrepancies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
- Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999))
- Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012))
@@ -1460,7 +1460,7 @@ Bugfixes
- Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946))
- Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455))
- Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089))
- Return the proper error (`M_BAD_ALIAS`) when a non-existent canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
- Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117))
- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133))
- Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150))
@@ -1482,7 +1482,7 @@ Bugfixes
- Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
- Fix incorrect metrics reporting for `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
- Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
- Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
- Fix bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
@@ -1638,7 +1638,7 @@ Security advisory
-----------------
Synapse may be vulnerable to request-smuggling attacks when it is used with a
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
described in
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
and
@@ -1748,7 +1748,7 @@ Internal Changes
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
@@ -1809,7 +1809,7 @@ Bugfixes
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))
Updates to the Docker image
@@ -1889,7 +1889,7 @@ Bugfixes
Synapse 1.10.0rc4 (2020-02-11)
==============================
This release candidate was built incorrectly and is superseded by 1.10.0rc5.
Synapse 1.10.0rc3 (2020-02-10)
==============================

View File

@@ -2270,7 +2270,7 @@ Features
Bugfixes
--------
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
@@ -2522,7 +2522,7 @@ Bugfixes
- Fix a long-standing bug where a `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
- Fix a small bug in v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))
Improved Documentation

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -8,9 +8,9 @@ to the server until they have.
There are several parts to this functionality; each requires some specific
configuration in `homeserver.yaml` to be enabled.
Note that various parts of the configuration and this document refer to the
"privacy policy": agreement with a privacy policy is one particular use of this
feature, but of course administrators can specify other terms and conditions
unrelated to "privacy" per se.
Collecting policy agreement from a user

View File

@@ -23,7 +23,7 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).
The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[provided](https://packages.debian.org/buster/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
Context

View File

@@ -22,9 +22,6 @@ on Windows is not officially supported.
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://www.python.org/downloads/). Your Python also needs support for [virtual environments](https://docs.python.org/3/library/venv.html). This is usually built-in, but some Linux distributions like Debian and Ubuntu split it out into its own package. Running `sudo apt install python3-venv` should be enough.
A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
@@ -33,6 +30,9 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
A recent version of the Rust compiler is needed to build the native modules. The
easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
# 3. Get the source.
@@ -53,11 +53,6 @@ can find many good git tutorials on the web.
# 4. Install the dependencies
Before installing the Python dependencies, make sure you have installed a recent version
of Rust (see the "What do I need?" section above). The easiest way of installing the
latest version is to use [rustup](https://rustup.rs/).
Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
and development environment. Once you have installed Python 3 and added the
source, you should install `poetry`.
@@ -66,7 +61,7 @@ Of their installation methods, we recommend
```shell
pip install --user pipx
pipx install poetry==1.5.2 # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147
pipx install poetry
```
but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
@@ -81,8 +76,7 @@ cd path/where/you/have/cloned/the/repository
poetry install --extras all
```
This will install the runtime and developer dependencies for the project. Be sure to check
that the `poetry install` step completed cleanly.
This will install the runtime and developer dependencies for the project.
## Running Synapse via poetry
@@ -90,31 +84,14 @@ To start a local instance of Synapse in the locked poetry environment, create a
```sh
cp docs/sample_config.yaml homeserver.yaml
cp docs/sample_log_config.yaml log_config.yaml
```
Now edit `homeserver.yaml`; things you might want to change include:
- Set a `server_name`
- Adjusting paths to be correct for your system, such as pointing `log_config` at the log config you just copied
- Using a [PostgreSQL database instead of SQLite](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database)
- Adding a [`registration_shared_secret`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration_shared_secret) so you can use [`register_new_matrix_user` command](https://matrix-org.github.io/synapse/latest/setup/installation.html#registering-a-user).
And then run Synapse with the following command:
Now edit homeserver.yaml, and run Synapse with:
```sh
poetry run python -m synapse.app.homeserver -c homeserver.yaml
```
If you get an error like the following:
```
importlib.metadata.PackageNotFoundError: matrix-synapse
```
this probably indicates that the `poetry install` step did not complete cleanly - go back and
resolve any issues and re-run until successful.
# 5. Get in touch.
Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!
@@ -266,7 +243,7 @@ The easiest way to do so is to run Postgres via a docker container. In one
terminal:
```shell
docker run --rm -e POSTGRES_PASSWORD=mysecretpassword -e POSTGRES_USER=postgres -e POSTGRES_DB=postgres -p 5432:5432 postgres:14
```
If you see an error like
@@ -322,7 +299,7 @@ The following command will let you run the integration test with the most common
configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@@ -369,8 +346,6 @@ The above will run a monolithic (single-process) Synapse with SQLite as the data
A safe example would be `WORKER_TYPES="federation_inbound, federation_sender, synchrotron"`.
See the [worker documentation](../workers.md) for additional information on workers.
- Passing `ASYNCIO_REACTOR=1` as an environment variable to use the Twisted asyncio reactor instead of the default one.
- Passing `PODMAN=1` will use the [podman](https://podman.io/) container runtime, instead of docker.
- Passing `UNIX_SOCKETS=1` will utilise Unix socket functionality for Synapse, Redis, and Postgres (when applicable).
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`, e.g:
```sh

View File

@@ -150,67 +150,6 @@ def run_upgrade(
...
```
## Background updates
It is sometimes appropriate to perform database migrations as part of a background
process (instead of blocking Synapse until the migration is done). In particular,
this is useful for migrating data when adding new columns or tables.
Pending background updates are stored in the `background_updates` table and are denoted
by a unique name, the current status (stored in JSON), and some dependency information:
* Whether the update requires a previous update to be complete.
* A rough ordering for which to complete updates.
A new background update needs to be added to the `background_updates` table:
```sql
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
    (7706, 'my_background_update', 'a_previous_background_update', '{}');
```
And then needs an associated handler in the appropriate datastore:
```python
self.db_pool.updates.register_background_update_handler(
"my_background_update",
update_handler=self._my_background_update,
)
```
There are a few types of updates that can be performed; see the `BackgroundUpdater`:
* `register_background_update_handler`: A generic handler for custom SQL
* `register_background_index_update`: Create an index in the background (see the sketch after this list)
* `register_background_validate_constraint`: Validate a constraint in the background
(PostgreSQL-only)
* `register_background_validate_constraint_and_delete_rows`: Similar to
`register_background_validate_constraint`, but deletes rows which don't fit
the constraint.
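
For example, a hypothetical index update from the list above might be registered as in the following sketch. The update, index, table and column names are made up, and the exact keyword arguments should be double-checked against `BackgroundUpdater`:

```python
# Sketch: build an index in the background rather than blocking startup.
# A matching row still needs to be inserted into `background_updates`,
# exactly as for the generic handler shown above.
self.db_pool.updates.register_background_index_update(
    "my_index_update",  # must match `update_name` in `background_updates`
    index_name="mytable_new_column_idx",
    table="mytable",
    columns=["new_column"],
)
```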
For `register_background_update_handler`, the generic handler must track progress
and then finalize the background update:
```python
async def _my_background_update(self, progress: JsonDict, batch_size: int) -> int:
    def _do_something(txn: LoggingTransaction) -> int:
        # Process up to `batch_size` entries, resuming from the position
        # recorded in `progress`.
        ...
        # Persist how far we got, so the next batch can resume from here.
        self.db_pool.updates._background_update_progress_txn(
            txn, "my_background_update", {"last_processed": last_processed}
        )
        # Report how many entries were processed in this batch.
        return last_processed - prev_last_processed

    num_processed = await self.db_pool.runInteraction("_do_something", _do_something)
    # Once all data has been processed, mark the background update as complete
    # (the check for completion is elided in this example).
    await self.db_pool.updates._end_background_update("my_background_update")
    return num_processed
```
Synapse will attempt to rate-limit how often background updates are run via the
given batch-size and the returned number of processed entries (and how long the
function took to run). See
[background update controller callbacks](../modules/background_update_controller_callbacks.md).
## Boolean columns
Boolean columns require special treatment, since SQLite treats booleans the
@@ -245,160 +184,3 @@ version `3`, that can only happen with a hash collision, which we basically hope
will never happen (SHA256 has a massive key space).
## Worked examples of gradual migrations
Some migrations need to be performed gradually. A prime example of this is anything
which would need to do a large table scan, including adding columns, indices or
`NOT NULL` constraints to non-empty tables. Such a migration should be done as a
background update where possible, at least on Postgres.
We can afford to be more relaxed about SQLite databases since they are usually
used on smaller deployments and SQLite does not support the same concurrent
DDL operations as Postgres.
We also typically insist on having at least one Synapse version's worth of
backwards compatibility, so that administrators can roll back Synapse if an upgrade
did not go smoothly.
This sometimes results in having to plan a migration across multiple versions
of Synapse.
This section includes an example and may include more in the future.
### Transforming a column into another one, with `NOT NULL` constraints
This example illustrates how you would introduce a new column, write data into it
based on data from an old column and then drop the old column.
We are aiming for semantic equivalence to:
```sql
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
UPDATE mytable SET new_column = old_column * 100;
ALTER TABLE mytable ALTER COLUMN new_column SET NOT NULL;
ALTER TABLE mytable DROP COLUMN old_column;
```
#### Synapse version `N`
```python
SCHEMA_VERSION = S
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
```
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
#### Synapse version `N + 1`
```python
SCHEMA_VERSION = S + 1
SCHEMA_COMPAT_VERSION = ... # unimportant at this stage
```
**Changes:**
1.
```sql
ALTER TABLE mytable ADD COLUMN new_column INTEGER;
```
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
2. `new_column` is written to by Synapse.
**Notes:**
1. `new_column` can't have a `NOT NULL NOT VALID` constraint yet, because the previous Synapse version did not write to the new column (since we haven't bumped the `SCHEMA_COMPAT_VERSION` yet, we still need to be compatible with the previous version).
#### Synapse version `N + 2`
```python
SCHEMA_VERSION = S + 2
SCHEMA_COMPAT_VERSION = S + 1 # this signals that we can't roll back to a time before new_column existed
```
**Changes:**
1. On Postgres, add a `NOT VALID` constraint to ensure new rows are compliant. *SQLite does not have such a construct, but it would be unnecessary anyway since there is no way to concurrently perform this migration on SQLite.*
```sql
ALTER TABLE mytable ADD CONSTRAINT new_column_not_null CHECK (new_column IS NOT NULL) NOT VALID;
```
2. Start a background update to perform the migration: it should gradually run e.g.
```sql
UPDATE mytable SET new_column = old_column * 100 WHERE 0 < mytable_id AND mytable_id <= 5;
```
This background update is technically pointless on SQLite, but you must schedule it anyway so that the `portdb` script to migrate to Postgres still works.
3. Upon completion of the background update, you should run `VALIDATE CONSTRAINT` on Postgres to turn the `NOT VALID` constraint into a valid one.
```sql
ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
```
This will take some time but does **NOT** hold an exclusive lock over the table.
**Invariants:**
1. `old_column` is read by Synapse and written to by Synapse.
2. `new_column` is written to by Synapse and new rows always have a non-`NULL` value in this field.
**Notes:**
1. If you wish, you can convert the `CHECK (new_column IS NOT NULL)` to a `NOT NULL` constraint free of charge in Postgres by adding the `NOT NULL` constraint and then dropping the `CHECK` constraint, because Postgres can statically verify that the `NOT NULL` constraint is implied by the `CHECK` constraint without performing a table scan (see the sketch after these notes).
2. It might be tempting to make version `N + 2` redundant by moving the background update to `N + 1` and delaying adding the `NOT NULL` constraint to `N + 3`, but that would mean the constraint would always be validated in the foreground in `N + 3`. Whereas if the `N + 2` step is kept, the migration in `N + 3` would be fast in the happy case.
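
To illustrate note 1, a sketch of a hypothetical Postgres-only schema delta performing that conversion is below. The `run_upgrade` hook matches the delta convention shown earlier, `PostgresEngine` is Synapse's engine marker class, and the table and constraint names are the ones from this example:

```python
from synapse.storage.engines import PostgresEngine


def run_upgrade(cur, database_engine, config):
    # SQLite never had the CHECK constraint in this example, so there is
    # nothing to do there.
    if not isinstance(database_engine, PostgresEngine):
        return

    # Postgres can statically prove NOT NULL from the validated CHECK
    # constraint, so this does not trigger a full table scan...
    cur.execute("ALTER TABLE mytable ALTER COLUMN new_column SET NOT NULL")
    # ...after which the CHECK constraint is redundant and can be dropped.
    cur.execute("ALTER TABLE mytable DROP CONSTRAINT new_column_not_null")
```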
#### Synapse version `N + 3`
```python
SCHEMA_VERSION = S + 3
SCHEMA_COMPAT_VERSION = S + 1 # we can't roll back to a time before new_column existed
```
**Changes:**
1. (Postgres) Update the table to populate values of `new_column` in case the background update had not completed. Additionally, `VALIDATE CONSTRAINT` to make the check fully valid.
```sql
-- you ideally want an index on `new_column` or e.g. `(new_column) WHERE new_column IS NULL` first, or perhaps you can find a way to skip this if the `NOT NULL` constraint has already been validated.
UPDATE mytable SET new_column = old_column * 100 WHERE new_column IS NULL;
-- this is a no-op if it already ran as part of the background update
ALTER TABLE mytable VALIDATE CONSTRAINT new_column_not_null;
```
2. (SQLite) Recreate the table by precisely following [the 12-step procedure for SQLite table schema changes](https://www.sqlite.org/lang_altertable.html#otheralter).
During this table rewrite, you should recreate `new_column` as `NOT NULL` and populate any outstanding `NULL` values at the same time.
Unfortunately, you can't drop `old_column` yet because it must be present for compatibility with the Postgres schema, as needed by `portdb`.
(Otherwise you could do this all in one go with SQLite!)
**Invariants:**
1. `old_column` is written to by Synapse (but no longer read by Synapse!).
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.
**Notes:**
1. We can't drop `old_column` yet, or even stop writing to it, because that would break a rollback to the previous version of Synapse.
2. Application code can now rely on `new_column` being populated. The remaining steps are only motivated by the wish to clean up old columns.
#### Synapse version `N + 4`
```python
SCHEMA_VERSION = S + 4
SCHEMA_COMPAT_VERSION = S + 3 # we can't roll back to a time before new_column was entirely non-NULL
```
**Invariants:**
1. `old_column` exists but is not written to or read from by Synapse.
2. `new_column` is read by Synapse and written to by Synapse. Moreover, all rows have a non-`NULL` value in this field, as guaranteed by a schema constraint.
**Notes:**
1. We can't drop `old_column` yet because that would break a rollback to the previous version of Synapse. \
**TODO:** It may be possible to relax this and drop the column straight away as long as the previous version of Synapse detected a rollback occurred and stopped attempting to write to the column. This could possibly be done by checking whether the database's schema compatibility version was `S + 3`.
#### Synapse version `N + 5`
```python
SCHEMA_VERSION = S + 5
SCHEMA_COMPAT_VERSION = S + 4 # we can't roll back to a time before old_column was no longer being touched
```
**Changes:**
1.
```sql
ALTER TABLE mytable DROP COLUMN old_column;
```

View File

@@ -260,17 +260,15 @@ doesn't require poetry. (It's what we use in CI too). However, you could try
## ...handle a Dependabot pull request?
Synapse uses Dependabot to keep the `poetry.lock` and `Cargo.lock` files
up-to-date with the latest releases of our dependencies. The changelog check is
omitted for Dependabot PRs; the release script will include them in the
changelog.
When reviewing a dependabot PR, ensure that:
Synapse uses Dependabot to keep the `poetry.lock` file up-to-date. When it
creates a pull request a GitHub Action will run to automatically create a changelog
file. Ensure that:
* the lockfile changes look reasonable;
* the upstream changelog file (linked in the description) doesn't include any
breaking changes;
* continuous integration passes.
* continuous integration passes (due to permissions, the GitHub Actions run on
the changelog commit will fail; look at the initial commit of the pull request);
In particular, any updates to the type hints (usually packages which start with `types-`)
should be safe to merge if linting passes.

View File

@@ -12,7 +12,7 @@ Note that this schedule might be modified depending on the availability of the
Synapse team, e.g. releases may be skipped to avoid holidays.
Release announcements can be found in the
[release category of the Matrix blog](https://matrix.org/category/releases).
[release category of the Matrix blog](https://matrix.org/blog/category/releases).
## Bugfix releases
@@ -34,4 +34,4 @@ be held to be released together.
In some cases, a pre-disclosure of a security release will be issued as a notice
to Synapse operators that there is an upcoming security release. These can be
found in the [security category of the Matrix blog](https://matrix.org/category/security).
found in the [security category of the Matrix blog](https://matrix.org/blog/category/security).

View File

@@ -6,7 +6,7 @@ This is a work-in-progress set of notes with two goals:
See also [MSC3902](https://github.com/matrix-org/matrix-spec-proposals/pull/3902).
The key idea is described by [MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706). This allows servers to
request a lightweight response to the federation `/send_join` endpoint.
This is called a **faster join**, also known as a **partial join**. In these
notes we'll usually use the word "partial" as it matches the database schema.
@@ -264,7 +264,7 @@ But don't want to send out sensitive data in other HS's events in this way.
Suppose we discover after resync that we shouldn't have sent out one of our events (not a prev_event) to a target HS. Not much we can do.
What about if we didn't send them an event but should have?
E.g. what if someone joined from a new HS shortly after you did? We wouldn't talk to them.
Could imagine sending out the "Missed" events after the resync but... painful to work out what they should have seen if they joined/left.
Instead, just send them the latest event (if they're still in the room after resync) and let them backfill.(?)
- Don't do this currently.
- If anyone who has received our messages sends a message to a HS we missed, they can backfill our messages

View File

@@ -1,164 +0,0 @@
## Streams
Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
https://github.com/matrix-org/synapse/blob/develop/synapse/storage/util/id_generators.py
).
Generally speaking, streams are a series of notifications that something in Synapse's database has changed that the application might need to respond to.
For example:
- The events stream reports new events (PDUs) that Synapse creates, or that Synapse accepts from another homeserver.
- The account data stream reports changes to users' [account data](https://spec.matrix.org/v1.7/client-server-api/#client-config).
- The to-device stream reports when a device has a new [to-device message](https://spec.matrix.org/v1.7/client-server-api/#send-to-device-messaging).
See [`synapse.replication.tcp.streams`](
https://github.com/matrix-org/synapse/blob/develop/synapse/replication/tcp/streams/__init__.py
) for the full list of streams.
It is very helpful to understand the streams mechanism when working on any part of Synapse that needs to respond to changes—especially if those changes are made by different workers.
To that end, let's describe streams formally, paraphrasing from the docstring of [`AbstractStreamIdGenerator`](
https://github.com/matrix-org/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
).
### Definition
A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
Only "writers" can add facts to a stream, and there may be multiple writers.
Each fact has an ID, called its "stream ID".
Readers should only process facts in ascending stream ID order.
Roughly speaking, each stream is backed by a database table.
It should have a `stream_id` (or similar) bigint column holding stream IDs, plus additional columns as necessary to describe the fact.
Typically, a fact is expressed with a single row in its backing table.[^2]
Within a stream, no two facts may have the same stream_id.
> _Aside_. Some additional notes on streams' backing tables.
>
> 1. Rich would like to [ditch the backing tables](https://github.com/matrix-org/synapse/issues/13456).
> 2. The backing tables may have other uses.
> For example, the events table backs the events stream, and is read when processing new events.
> But old rows are read from the table all the time, whenever Synapse needs to look up some facts about an event.
> 3. Rich suspects that sometimes the stream is backed by multiple tables, so the stream proper is the union of those tables.
Stream writers can "reserve" a stream ID, and then later mark it as having been completed.
Stream writers need to track the completion of each stream fact.
In the happy case, completion means a fact has been written to the stream table.
But unhappy cases (e.g. transaction rollback due to an error) also count as completion.
Once completed, the rows written with that stream ID are fixed, and no new rows
will be inserted with that ID.
### Current stream ID
For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
> A current stream ID _for a writer W_ is the largest stream ID such that
> all transactions added by W with equal or smaller ID have completed.
Similarly, there is a "linear" notion of current stream ID:
> A "linear" current stream ID is the largest stream ID such that
> all facts (added by any writer) with equal or smaller ID have completed.
Because different stream readers A and B learn about new facts at different times, A and B may disagree about current stream IDs.
Put differently: we should think of stream readers as being independent of each other, proceeding through a stream of facts at different rates.
The above definition does not give a unique current stream ID; in fact there can
be a range of current stream IDs. Synapse uses both the minimum and maximum IDs
for different purposes. Most often the maximum is used, as it is generally
beneficial for workers to advance their IDs as soon as possible. However, the
minimum is used in situations where e.g. another worker is going to wait until
the stream advances past a position.
**NB.** For both senses of "current", note that if a writer opens a transaction that never completes, the current stream ID will never advance beyond that writer's last written stream ID.
For single-writer streams, the per-writer current ID and the linear current ID are the same.
Both senses of current ID are monotonic, but they may "skip" or jump over IDs because facts complete out of order.
_Example_.
Consider a single-writer stream which is initially at ID 1.
| Action | Current stream ID | Notes |
|------------|-------------------|-------------------------------------------------|
| | 1 | |
| Reserve 2 | 1 | |
| Reserve 3 | 1 | |
| Complete 3 | 1 | current ID unchanged, waiting for 2 to complete |
| Complete 2 | 3 | current ID jumps from 1 -> 3 |
| Reserve 4 | 3 | |
| Reserve 5 | 3 | |
| Reserve 6 | 3 | |
| Complete 5 | 3 | |
| Complete 4 | 5 | current ID jumps 3->5, even though 6 is pending |
| Complete 6 | 6 | |
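
The bookkeeping behind this table fits in a few lines. A toy sketch follows; it is illustrative only, not Synapse's actual implementation:

```python
from typing import Set


class ToyWriterPosition:
    """Toy model of one writer's current stream ID."""

    def __init__(self, start: int) -> None:
        self.last_reserved = start
        self.pending: Set[int] = set()  # reserved but not yet completed

    def reserve(self) -> int:
        self.last_reserved += 1
        self.pending.add(self.last_reserved)
        return self.last_reserved

    def complete(self, stream_id: int) -> None:
        self.pending.discard(stream_id)

    @property
    def current(self) -> int:
        # The largest ID such that every fact with an equal or smaller ID
        # has completed: just below the oldest still-pending ID.
        return min(self.pending) - 1 if self.pending else self.last_reserved


pos = ToyWriterPosition(1)
two, three = pos.reserve(), pos.reserve()
pos.complete(three)
assert pos.current == 1  # still waiting for fact 2
pos.complete(two)
assert pos.current == 3  # jumps straight from 1 to 3, as in the table
```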
### Multi-writer streams
There are two ways to view a multi-writer stream.
1. Treat it as a collection of distinct single-writer streams, one
for each writer.
2. Treat it as a single stream.
The single stream (option 2) is conceptually simpler, and easier to represent (a single stream id).
However, it requires each reader to know about the entire set of writers, to ensure that readers don't erroneously advance their current stream position too early and miss a fact from an unknown writer.
In contrast, multiple parallel streams (option 1) are more complex, requiring more state to represent (map from writer to stream id).
The payoff for doing so is that readers can "peek" ahead to facts that completed on one writer no matter the state of the others, reducing latency.
Note that a multi-writer stream can be viewed in both ways.
For example, the events stream is treated as multiple single-writer streams (option 1) by the sync handler, so that events are sent to clients as soon as possible.
But the background process that works through events treats them as a single linear stream.
Another useful example is the cache invalidation stream.
The facts this stream holds are instructions to "you should now invalidate these cache entries".
We only ever treat this as multiple single-writer streams as there is no important ordering between cache invalidations.
(Invalidations are self-contained facts; and the invalidations commute/are idempotent).
### Writing to streams
Writers need to track:
- their current position (i.e. their own per-writer stream ID).
- the facts currently awaiting completion.
At startup,
- the current position of that writer can be found by querying the database (which suggests that facts need to be written to the database atomically, in a transaction); and
- there are no facts awaiting completion.
To reserve a stream ID, call [`nextval`](https://www.postgresql.org/docs/current/functions-sequence.html) on the appropriate postgres sequence.
To write a fact to the stream: insert the appropriate rows to the appropriate backing table.
To complete a fact, first remove it from your map of facts currently awaiting completion.
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
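
Putting those steps together, here is a hedged sketch of the writer side. The `next_id` and `emit_rdata` callables stand in for the Postgres sequence and the replication transport; none of the names correspond to real Synapse internals:

```python
from typing import Callable, Set


class ToyStreamWriter:
    """Illustrative writer: reserve, write, complete, advance, emit RDATA."""

    def __init__(
        self, next_id: Callable[[], int], emit_rdata: Callable[[int], None]
    ) -> None:
        self.next_id = next_id        # stands in for nextval() on the sequence
        self.emit_rdata = emit_rdata  # stands in for sending an RDATA message
        self.position = 0             # recovered from the database at startup
        self.last_reserved = 0
        self.awaiting: Set[int] = set()  # none awaiting completion at startup

    def start_fact(self) -> int:
        stream_id = self.next_id()
        self.last_reserved = stream_id
        self.awaiting.add(stream_id)
        # ... write the fact's rows to the backing table here, atomically,
        # in a transaction, so the position can be recovered at startup ...
        return stream_id

    def complete_fact(self, stream_id: int) -> None:
        self.awaiting.discard(stream_id)
        new_position = (
            min(self.awaiting) - 1 if self.awaiting else self.last_reserved
        )
        # Advance, emitting one RDATA per fact between old and new positions.
        for completed in range(self.position + 1, new_position + 1):
            self.emit_rdata(completed)
        self.position = new_position
```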
### Subscribing to streams
Readers need to track the current position of every writer.
At startup, they can find this by contacting each writer with a `REPLICATE` message,
requesting that all writers reply describing their current position in their streams.
Writers reply with a `POSITION` message.
To learn about new facts, readers should listen for `RDATA` messages and process them to respond to the new fact.
The `RDATA` itself is not a self-contained representation of the fact;
readers will have to query the stream tables for the full details.
Readers must also advance their record of the writer's current position for that stream.
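
A matching sketch of the reader side, again purely illustrative:

```python
from typing import Dict


class ToyStreamReader:
    """Illustrative reader: learn positions via POSITION, advance via RDATA."""

    def __init__(self) -> None:
        # Filled in from the POSITION replies to our REPLICATE request.
        self.positions: Dict[str, int] = {}

    def on_position(self, writer: str, stream_id: int) -> None:
        self.positions[writer] = stream_id

    def on_rdata(self, writer: str, stream_id: int) -> None:
        # The RDATA is not self-contained: query the stream's backing table
        # for the fact's full details, then react to it.
        ...
        self.positions[writer] = stream_id
```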
# Summary
In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
---
[^1]: we use the word _fact_ here for two reasons.
Firstly, the word "event" is already heavily overloaded (PDUs, EDUs, account data, ...) and we don't need to make that worse.
Secondly, "fact" emphasises that the things we append to a stream cannot change after the fact.
[^2]: A fact might be expressed with 0 rows, e.g. if we opened a transaction to persist an event, but failed and rolled the transaction back before marking the fact as completed.
In principle a fact might be expressed with 2 or more rows; if so, each of those rows should share the fact's stream ID.
[^3]: This communication used to happen directly with the writers [over TCP](../../tcp_replication.md);
nowadays it's done via Redis's Pubsub.

View File

@@ -86,7 +86,7 @@ So we have stopped processing the request (and will probably go on to
start processing the next), without clearing the logcontext.
To circumvent this problem, synapse code assumes that, wherever you have
an awaitable, you will want to `await` it. To that end, wherever
functions return awaitables, we adopt the following conventions:
**Rules for functions returning awaitables:**

View File

@@ -8,7 +8,8 @@ and allow server and room admins to configure how long messages should
be kept in a homeserver's database before being purged from it.
**Please note that, as this feature isn't part of the Matrix
specification yet, this implementation is to be considered as
experimental.**
experimental. There are known bugs which may cause database corruption.
Proceed with caution.**
A message retention policy is mainly defined by its `max_lifetime`
parameter, which defines how long a message can be kept around after

View File

@@ -1,32 +0,0 @@
# Add extra fields to client events unsigned section callbacks
_First introduced in Synapse v1.96.0_
This callback allows modules to add extra fields to the unsigned section of
events when they get sent down to clients.
These get called *every* time an event is to be sent to clients, so care should
be taken with respect to performance.
### API
To register the callback, use
`register_add_extra_fields_to_unsigned_client_event_callbacks` on the
`ModuleApi`.
The callback should be of the form
```python
async def add_field_to_unsigned(
    event: EventBase,
) -> JsonDict:
```
where the extra fields to add to the event's unsigned section are returned.
(Modules must not attempt to modify the `event` directly).
This cannot be used to alter the "core" fields in the unsigned section emitted
by Synapse itself.
If multiple such callbacks try to add the same field to an event's unsigned
section, the last-registered callback wins.
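
As a sketch, a module registering such a callback might look like the following. The keyword argument name passed to `register_add_extra_fields_to_unsigned_client_event_callbacks` is an assumption here and should be checked against the `ModuleApi` version you are targeting; the added field name is made up:

```python
from synapse.events import EventBase
from synapse.module_api import ModuleApi
from synapse.types import JsonDict


class ExampleModule:
    def __init__(self, config: dict, api: ModuleApi) -> None:
        api.register_add_extra_fields_to_unsigned_client_event_callbacks(
            # Keyword argument name assumed; check your ModuleApi version.
            add_field_to_unsigned_callback=self._add_field_to_unsigned,
        )

    async def _add_field_to_unsigned(self, event: EventBase) -> JsonDict:
        # Return extra fields to merge into the unsigned section; the event
        # itself must not be mutated.
        return {"org.example.sender_server": event.sender.split(":", 1)[1]}
```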

View File

@@ -46,9 +46,6 @@ instead.
If the authentication is unsuccessful, the module must return `None`.
Note that the user is not automatically registered; the `register_user(..)` method of
the [module API](writing_a_module.html) can be used to lazily create users.
If multiple modules register an auth checker for the same login type but with different
fields, Synapse will refuse to start.
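
For illustration, here is a sketch of an auth checker that lazily registers users on first login. The `auth_checkers` registration shape and the `check_user_exists`/`register_user` helpers exist on `ModuleApi`, but the login type, field name and token check below are hypothetical:

```python
from typing import Optional, Tuple

from synapse.module_api import ModuleApi
from synapse.types import JsonDict


class ExampleAuthChecker:
    def __init__(self, config: dict, api: ModuleApi) -> None:
        self._api = api
        api.register_password_auth_provider_callbacks(
            # (login type, tuple of expected fields) -> checker callback
            auth_checkers={("org.example.token_login", ("token",)): self.check_token},
        )

    async def check_token(
        self, username: str, login_type: str, login_dict: JsonDict
    ) -> Optional[Tuple[str, None]]:
        if login_dict["token"] != "letmein":  # hypothetical token check
            return None  # authentication unsuccessful

        user_id = self._api.get_qualified_user_id(username)
        # Lazily create the user on their first successful login.
        if not await self._api.check_user_exists(user_id):
            user_id = await self._api.register_user(localpart=username)
        return user_id, None
```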

View File

@@ -1,16 +1,8 @@
# Presence router callbacks
Presence router callbacks allow module developers to define additional users
which receive presence updates from local users. The additional users
can be local or remote.
For example, it could be used to direct all of `@alice:example.com` (a local user)'s
presence updates to `@bob:matrix.org` (a remote user), even though they don't share a
room. (Note that those presence updates might not make it to `@bob:matrix.org`'s client
unless a similar presence router is running on that homeserver.)
Presence router callbacks can be registered using the module API's
`register_presence_router_callbacks` method.
Presence router callbacks allow module developers to specify additional users (local or remote)
to receive certain presence updates from local users. Presence router callbacks can be
registered using the module API's `register_presence_router_callbacks` method.
## Callbacks

View File

@@ -348,42 +348,6 @@ callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
### `check_login_for_spam`
_First introduced in Synapse v1.87.0_
```python
async def check_login_for_spam(
    user_id: str,
    device_id: Optional[str],
    initial_display_name: Optional[str],
    request_info: Collection[Tuple[Optional[str], str]],
    auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```
Called when a user logs in.
The arguments passed to this callback are:
* `user_id`: The user ID the user is logging in with
* `device_id`: The device ID the user is re-logging into.
* `initial_display_name`: The device display name, if any.
* `request_info`: A collection of tuples, whose first item is a user agent and whose
second item is an IP address. These user agents and IP addresses are the ones that were
used during the login process.
* `auth_provider_id`: The identifier of the SSO authentication provider, if any.
If multiple modules implement this callback, they will be considered in order. If a
callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
be used. If this happens, Synapse will not call any of the subsequent implementations of
this callback.
*Note:* This will not be called when a user registers.
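
A minimal sketch of such a callback, rejecting logins from a hypothetical blocklisted IP address (the blocklist is made up; `NOT_SPAM` and `Codes` are importable as shown):

```python
from typing import Collection, Optional, Tuple, Union

from synapse.module_api import NOT_SPAM
from synapse.module_api.errors import Codes

BLOCKED_IPS = {"203.0.113.7"}  # hypothetical blocklist


async def check_login_for_spam(
    user_id: str,
    device_id: Optional[str],
    initial_display_name: Optional[str],
    request_info: Collection[Tuple[Optional[str], str]],
    auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
    for _user_agent, ip in request_info:
        if ip in BLOCKED_IPS:
            return Codes.FORBIDDEN  # treat this login as spam
    return NOT_SPAM
```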
## Example
The example below is a module that implements the spam checker callback

View File

@@ -569,7 +569,7 @@ You should receive a response similar to the following. Make sure to save it.
{"client_id":"someclientid_123","client_secret":"someclientsecret_123","id":"12345","name":"my_synapse_app","redirect_uri":"https://[synapse_public_baseurl]/_synapse/client/oidc/callback","website":null,"vapid_key":"somerandomvapidkey_123"}
```
As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_template` has to be set. Your Synapse configuration should include the following:
As the Synapse login mechanism needs an attribute to uniquely identify users, and Mastodon's endpoint does not return a `sub` property, an alternative `subject_claim` has to be set. Your Synapse configuration should include the following:
```yaml
oidc_providers:
@@ -585,9 +585,7 @@ oidc_providers:
scopes: ["read"]
user_mapping_provider:
config:
subject_template: "{{ user.id }}"
localpart_template: "{{ user.username }}"
display_name_template: "{{ user.display_name }}"
subject_claim: "id"
```
Note that the fields `client_id` and `client_secret` are taken from the CURL response above.

View File

@@ -51,11 +51,6 @@ docker run -d --name jaeger \
jaegertracing/all-in-one:1
```
By default, Synapse will publish traces to Jaeger on localhost.
If Jaeger is hosted elsewhere, point Synapse to the correct host by setting
`opentracing.jaeger_config.local_agent.reporting_host` [in the Synapse configuration](usage/configuration/config_documentation.md#opentracing-1)
or by setting the `JAEGER_AGENT_HOST` environment variable to the desired address.
Latest documentation is probably at
https://www.jaegertracing.io/docs/latest/getting-started.

Some files were not shown because too many files have changed in this diff.