
Compare commits


5 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Andrew Morgan | 1889bda695 | fix syntax | 2025-07-01 12:57:40 +01:00 |
| Andrew Morgan | 7d880b5e94 | bump to 3.0.0 | 2025-07-01 12:55:59 +01:00 |
| Andrew Morgan | 8e9105919a | Bump cibuildwheel to 2.24.0 | 2025-07-01 12:53:15 +01:00 |
| Andrew Morgan | 50349eb041 | test | 2025-07-01 12:50:14 +01:00 |
| Andrew Morgan | 053697ad79 | Use aarch64 | 2025-07-01 12:38:34 +01:00 |
163 changed files with 1561 additions and 3042 deletions

View File

@@ -5,7 +5,7 @@ name: Build docker images
on:
push:
tags: ["v*"]
branches: [master, main, develop]
branches: [ master, main, develop ]
workflow_dispatch:
permissions:
@@ -14,22 +14,24 @@ permissions:
id-token: write # needed for signing the images with GitHub OIDC Token
jobs:
build:
name: Build and push image for ${{ matrix.platform }}
runs-on: ${{ matrix.runs_on }}
strategy:
matrix:
include:
- platform: linux/amd64
runs_on: ubuntu-24.04
suffix: linux-amd64
- platform: linux/arm64
runs_on: ubuntu-24.04-arm
suffix: linux-arm64
runs-on: ubuntu-22.04
steps:
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
platforms: arm64
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Inspect builder
run: docker buildx inspect
- name: Install Cosign
uses: sigstore/cosign-installer@fb28c2b6339dcd94da6e4cbcbc5e888961f6f8c3 # v3.9.0
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -53,79 +55,13 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
push: true
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
tags: |
docker.io/matrixdotorg/synapse
ghcr.io/element-hq/synapse
file: "docker/Dockerfile"
platforms: ${{ matrix.platform }}
outputs: type=image,push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
digest="${{ steps.build.outputs.digest }}"
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ matrix.suffix }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
merge:
name: Push merged images to ${{ matrix.repository }}
runs-on: ubuntu-latest
strategy:
matrix:
repository:
- docker.io/matrixdotorg/synapse
- ghcr.io/element-hq/synapse
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
merge-multiple: true
- name: Log in to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
if: ${{ startsWith(matrix.repository, 'docker.io') }}
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to GHCR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Install Cosign
uses: sigstore/cosign-installer@398d4b0eeef1380460a10c8013a76f728fb906ac # v3.9.1
- name: Calculate docker image tag
id: set-tag
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ matrix.repository }}
images: |
docker.io/matrixdotorg/synapse
ghcr.io/element-hq/synapse
flavor: |
latest=false
tags: |
@@ -133,23 +69,31 @@ jobs:
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=pep440,pattern={{raw}}
type=sha
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
env:
REPOSITORY: ${{ matrix.repository }}
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "$REPOSITORY@sha256:%s " *)
- name: Build and push all platforms
id: build-and-push
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
push: true
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
tags: "${{ steps.set-tag.outputs.tags }}"
file: "docker/Dockerfile"
platforms: linux/amd64,linux/arm64
- name: Sign each manifest
# arm64 builds OOM without the git fetch setting. c.f.
# https://github.com/rust-lang/cargo/issues/10583
build-args: |
CARGO_NET_GIT_FETCH_WITH_CLI=true
- name: Sign the images with GitHub OIDC Token
env:
REPOSITORY: ${{ matrix.repository }}
DIGEST: ${{ steps.build-and-push.outputs.digest }}
TAGS: ${{ steps.set-tag.outputs.tags }}
run: |
DIGESTS=""
for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
images=""
for tag in ${TAGS}; do
images+="${tag}@${DIGEST} "
done
cosign sign --yes $DIGESTS
cosign sign --yes ${images}

View File

@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
on:
workflow_dispatch:
env:
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
RUST_VERSION: nightly-2025-06-24
jobs:
fixup:
name: Fix up
@@ -21,11 +16,13 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: ${{ env.RUST_VERSION }}
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
toolchain: nightly-2022-12-01
components: clippy, rustfmt
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -47,6 +44,6 @@ jobs:
- run: cargo fmt
continue-on-error: true
- uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1
- uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0
with:
commit_message: "Attempt to fix linting"

View File

@@ -21,9 +21,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -44,10 +41,8 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
@@ -60,7 +55,7 @@ jobs:
- run: poetry run pip list > before.txt
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --without dev
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove unhelpful options from mypy config
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -80,10 +75,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -155,10 +148,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`

View File

@@ -30,7 +30,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.x"
python-version: '3.x'
- id: set-distros
run: |
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -76,7 +76,7 @@ jobs:
- name: Set up python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.x"
python-version: '3.x'
- name: Build the packages
# see https://github.com/docker/build-push-action/issues/252
@@ -107,15 +107,12 @@ jobs:
path: debs/*
build-wheels:
name: Build wheels on ${{ matrix.os }}
name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
- ubuntu-24.04
- ubuntu-24.04-arm
- macos-13 # This uses x86-64
- macos-14 # This uses arm64
os: [ubuntu-22.04, macos-13]
arch: [x86_64, aarch64]
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
is_pr:
@@ -125,11 +122,12 @@ jobs:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-13"
- is_pr: true
os: "macos-14"
# Don't build aarch64 wheels on mac.
- os: "macos-13"
arch: aarch64
# Don't build aarch64 wheels on PR CI.
- is_pr: true
os: "ubuntu-24.04-arm"
arch: aarch64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -143,9 +141,20 @@ jobs:
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==3.0.0
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
platforms: arm64
- name: Build aarch64 wheels
if: matrix.arch == 'aarch64'
run: |
echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV
run: echo 'CIBW_BUILD=cp39-manylinux_${{ matrix.arch }}' >> $GITHUB_ENV
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
@@ -153,10 +162,13 @@ jobs:
# Skip testing for platforms which various libraries don't have wheels
# for, and so need extra build deps.
CIBW_TEST_SKIP: pp3*-* *i686* *musl*
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
CARGO_NET_GIT_FETCH_WITH_CLI: true
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: Wheel-${{ matrix.os }}
name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
path: ./wheelhouse/*.whl
build-sdist:
@@ -168,7 +180,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: "3.10"
python-version: '3.10'
- run: pip install build
@@ -180,6 +192,7 @@ jobs:
name: Sdist
path: dist/*.tar.gz
# if it's a tag, create a release and attach the artifacts to it
attach-assets:
name: "Attach assets to release"

View File

@@ -11,9 +11,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
# don't modify Rust code.
@@ -88,10 +85,8 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
python-version: "3.x"
@@ -154,10 +149,8 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -217,10 +210,8 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
poetry-version: "2.1.1"
@@ -236,11 +227,10 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
uses: dtolnay/rust-toolchain@0d72692bcfbf448b1e2afa01a67f71b455a9dcec # 1.86.0
with:
components: clippy
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo clippy -- -D warnings
@@ -255,11 +245,11 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: nightly-2025-04-23
components: clippy
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo clippy --all-features -- -D warnings
@@ -272,12 +262,12 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
# We use nightly so that it correctly groups together imports
toolchain: nightly-2025-04-23
components: rustfmt
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo fmt --check
@@ -372,10 +362,8 @@ jobs:
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -416,10 +404,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
@@ -533,10 +519,8 @@ jobs:
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Run SyTest
run: /bootstrap.sh synapse
@@ -679,10 +663,8 @@ jobs:
path: synapse
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
@@ -713,10 +695,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@c1678930c21fb233e4987c4ae12158f9125e5762 # 1.81.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo test
@@ -733,10 +713,10 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo bench --no-run

View File

@@ -20,9 +20,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -46,10 +43,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -74,10 +69,8 @@ jobs:
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -120,10 +113,8 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,

.gitignore vendored
View File

@@ -47,7 +47,6 @@ __pycache__/
/.idea/
/.ropeproject/
/.vscode/
/.zed/
# build products
!/.coveragerc

View File

@@ -1,109 +1,3 @@
# Synapse 1.134.0 (2025-07-15)
No significant changes since 1.134.0rc1.
# Synapse 1.134.0rc1 (2025-07-09)
### Features
- Support for [MSC4235](https://github.com/matrix-org/matrix-spec-proposals/pull/4235): `via` query param for hierarchy endpoint. Contributed by Krishan (@kfiven). ([\#18070](https://github.com/element-hq/synapse/issues/18070))
- Add `forget_forced_upon_leave` capability as per [MSC4267](https://github.com/matrix-org/matrix-spec-proposals/pull/4267). ([\#18196](https://github.com/element-hq/synapse/issues/18196))
- Add `federated_user_may_invite` spam checker callback which receives the entire invite event. Contributed by @tulir @ Beeper. ([\#18241](https://github.com/element-hq/synapse/issues/18241))
### Bugfixes
- Fix `KeyError` on background updates when using split main/state databases. ([\#18509](https://github.com/element-hq/synapse/issues/18509))
- Improve performance of device deletion by adding missing index. ([\#18582](https://github.com/element-hq/synapse/issues/18582))
- Fix `avatar_url` and `displayname` being sent on federation profile queries when they are not set. ([\#18593](https://github.com/element-hq/synapse/issues/18593))
- Respond with 401 & `M_USER_LOCKED` when a locked user calls `POST /login`, as per the spec. ([\#18594](https://github.com/element-hq/synapse/issues/18594))
- Ensure policy servers are not asked to scan policy server change events, allowing rooms to disable the use of a policy server while the policy server is down. ([\#18605](https://github.com/element-hq/synapse/issues/18605))
### Improved Documentation
- Fix documentation of the Delete Room Admin API's status field. ([\#18519](https://github.com/element-hq/synapse/issues/18519))
### Deprecations and Removals
- Stop adding the "origin" field to newly-created events (PDUs). ([\#18418](https://github.com/element-hq/synapse/issues/18418))
### Internal Changes
- Replace `PyICU` crate with equivalent `icu_segmenter` Rust crate. ([\#18553](https://github.com/element-hq/synapse/issues/18553), [\#18646](https://github.com/element-hq/synapse/issues/18646))
- Improve docstring on `simple_upsert_many`. ([\#18573](https://github.com/element-hq/synapse/issues/18573))
- Raise poetry-core version cap to 2.1.3. ([\#18575](https://github.com/element-hq/synapse/issues/18575))
- Raise setuptools_rust version cap to 1.11.1. ([\#18576](https://github.com/element-hq/synapse/issues/18576))
- Better handling of ratelimited requests. ([\#18595](https://github.com/element-hq/synapse/issues/18595), [\#18600](https://github.com/element-hq/synapse/issues/18600))
- Update to Rust 1.87.0 in CI, and bump the pinned commit of the `dtolnay/rust-toolchain` GitHub Action to `b3b07ba8b418998c39fb20f53e8b695cdcc8de1b`. ([\#18596](https://github.com/element-hq/synapse/issues/18596))
- Speed up bulk device deletion. ([\#18602](https://github.com/element-hq/synapse/issues/18602))
- Speed up the building of arm-based wheels in CI. ([\#18618](https://github.com/element-hq/synapse/issues/18618))
- Speed up the building of Docker images in CI. ([\#18620](https://github.com/element-hq/synapse/issues/18620))
- Add `.zed/` directory to `.gitignore`. ([\#18623](https://github.com/element-hq/synapse/issues/18623))
- Log the room ID we're purging state for. ([\#18625](https://github.com/element-hq/synapse/issues/18625))
### Updates to locked dependencies
* Bump Swatinem/rust-cache from 2.7.8 to 2.8.0. ([\#18612](https://github.com/element-hq/synapse/issues/18612))
* Bump attrs from 24.2.0 to 25.3.0. ([\#18649](https://github.com/element-hq/synapse/issues/18649))
* Bump authlib from 1.5.2 to 1.6.0. ([\#18642](https://github.com/element-hq/synapse/issues/18642))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18589](https://github.com/element-hq/synapse/issues/18589))
* Bump base64 from 0.21.7 to 0.22.1. ([\#18629](https://github.com/element-hq/synapse/issues/18629))
* Bump docker/build-push-action from 6.17.0 to 6.18.0. ([\#18497](https://github.com/element-hq/synapse/issues/18497))
* Bump docker/setup-buildx-action from 3.10.0 to 3.11.1. ([\#18587](https://github.com/element-hq/synapse/issues/18587))
* Bump hiredis from 3.1.0 to 3.2.1. ([\#18638](https://github.com/element-hq/synapse/issues/18638))
* Bump ijson from 3.3.0 to 3.4.0. ([\#18650](https://github.com/element-hq/synapse/issues/18650))
* Bump jsonschema from 4.23.0 to 4.24.0. ([\#18630](https://github.com/element-hq/synapse/issues/18630))
* Bump msgpack from 1.1.0 to 1.1.1. ([\#18651](https://github.com/element-hq/synapse/issues/18651))
* Bump mypy-zope from 1.0.11 to 1.0.12. ([\#18640](https://github.com/element-hq/synapse/issues/18640))
* Bump phonenumbers from 9.0.2 to 9.0.8. ([\#18652](https://github.com/element-hq/synapse/issues/18652))
* Bump pillow from 11.2.1 to 11.3.0. ([\#18624](https://github.com/element-hq/synapse/issues/18624))
* Bump prometheus-client from 0.21.0 to 0.22.1. ([\#18609](https://github.com/element-hq/synapse/issues/18609))
* Bump pyasn1-modules from 0.4.1 to 0.4.2. ([\#18495](https://github.com/element-hq/synapse/issues/18495))
* Bump pydantic from 2.11.4 to 2.11.7. ([\#18639](https://github.com/element-hq/synapse/issues/18639))
* Bump reqwest from 0.12.15 to 0.12.20. ([\#18590](https://github.com/element-hq/synapse/issues/18590))
* Bump reqwest from 0.12.20 to 0.12.22. ([\#18627](https://github.com/element-hq/synapse/issues/18627))
* Bump ruff from 0.11.11 to 0.12.1. ([\#18645](https://github.com/element-hq/synapse/issues/18645))
* Bump ruff from 0.12.1 to 0.12.2. ([\#18657](https://github.com/element-hq/synapse/issues/18657))
* Bump sentry-sdk from 2.22.0 to 2.32.0. ([\#18633](https://github.com/element-hq/synapse/issues/18633))
* Bump setuptools-rust from 1.10.2 to 1.11.1. ([\#18655](https://github.com/element-hq/synapse/issues/18655))
* Bump sigstore/cosign-installer from 3.8.2 to 3.9.0. ([\#18588](https://github.com/element-hq/synapse/issues/18588))
* Bump sigstore/cosign-installer from 3.9.0 to 3.9.1. ([\#18608](https://github.com/element-hq/synapse/issues/18608))
* Bump stefanzweifel/git-auto-commit-action from 5.2.0 to 6.0.1. ([\#18607](https://github.com/element-hq/synapse/issues/18607))
* Bump tokio from 1.45.1 to 1.46.0. ([\#18628](https://github.com/element-hq/synapse/issues/18628))
* Bump tokio from 1.46.0 to 1.46.1. ([\#18667](https://github.com/element-hq/synapse/issues/18667))
* Bump treq from 24.9.1 to 25.5.0. ([\#18610](https://github.com/element-hq/synapse/issues/18610))
* Bump types-bleach from 6.2.0.20241123 to 6.2.0.20250514. ([\#18634](https://github.com/element-hq/synapse/issues/18634))
* Bump types-jsonschema from 4.23.0.20250516 to 4.24.0.20250528. ([\#18611](https://github.com/element-hq/synapse/issues/18611))
* Bump types-opentracing from 2.4.10.6 to 2.4.10.20250622. ([\#18586](https://github.com/element-hq/synapse/issues/18586))
* Bump types-psycopg2 from 2.9.21.20250318 to 2.9.21.20250516. ([\#18658](https://github.com/element-hq/synapse/issues/18658))
* Bump types-pyyaml from 6.0.12.20241230 to 6.0.12.20250516. ([\#18643](https://github.com/element-hq/synapse/issues/18643))
* Bump types-setuptools from 75.2.0.20241019 to 80.9.0.20250529. ([\#18644](https://github.com/element-hq/synapse/issues/18644))
* Bump typing-extensions from 4.12.2 to 4.14.0. ([\#18654](https://github.com/element-hq/synapse/issues/18654))
* Bump typing-extensions from 4.14.0 to 4.14.1. ([\#18668](https://github.com/element-hq/synapse/issues/18668))
* Bump urllib3 from 2.2.2 to 2.5.0. ([\#18572](https://github.com/element-hq/synapse/issues/18572))
# Synapse 1.133.0 (2025-07-01)
Pre-built wheels are now built using the [manylinux_2_28](https://github.com/pypa/manylinux#manylinux_2_28-almalinux-8-based) base, which is expected to be compatible with distros using glibc 2.28 or later, including:
- Debian 10+
- Ubuntu 18.10+
- Fedora 29+
- CentOS/RHEL 8+
Previously, wheels were built using the [manylinux2014](https://github.com/pypa/manylinux#manylinux2014-centos-7-based-glibc-217) base, which was expected to be compatible with distros using glibc 2.17 or later.
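One quick way to check a host's glibc version against these floors is via the Python standard library (a minimal sketch; only meaningful on glibc-based Linux):
```python
# Print the C library flavour and version: manylinux_2_28 wheels need
# glibc >= 2.28, while the older manylinux2014 wheels needed glibc >= 2.17.
import platform

libc, version = platform.libc_ver()
print(libc, version)  # e.g. "glibc 2.31" on Ubuntu 20.04
```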
### Bugfixes
- Bump `cibuildwheel` to 3.0.0 to fix the `manylinux` wheel builds. ([\#18615](https://github.com/element-hq/synapse/issues/18615))
# Synapse 1.133.0rc1 (2025-06-24)
### Features

Cargo.lock generated
View File

@@ -65,6 +65,12 @@ dependencies = [
"windows-targets",
]
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@@ -144,15 +150,6 @@ version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
[[package]]
name = "core_maths"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77745e017f5edba1a9c1d854f6f3a52dac8a12dd5af5d2f54aecf61e43d80d30"
dependencies = [
"libm",
]
[[package]]
name = "cpufeatures"
version = "0.2.12"
@@ -374,7 +371,7 @@ version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"headers-core",
"http",
@@ -494,7 +491,7 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures-channel",
"futures-core",
@@ -519,59 +516,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = [
"displaydoc",
"yoke 0.7.5",
"yoke",
"zerofrom",
"zerovec 0.10.4",
"zerovec",
]
[[package]]
name = "icu_collections"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
dependencies = [
"displaydoc",
"potential_utf",
"yoke 0.8.0",
"zerofrom",
"zerovec 0.11.2",
]
[[package]]
name = "icu_locale"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ae5921528335e91da1b6c695dbf1ec37df5ac13faa3f91e5640be93aa2fbefd"
dependencies = [
"displaydoc",
"icu_collections 2.0.0",
"icu_locale_core",
"icu_locale_data",
"icu_provider 2.0.0",
"potential_utf",
"tinystr 0.8.1",
"zerovec 0.11.2",
]
[[package]]
name = "icu_locale_core"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
dependencies = [
"displaydoc",
"litemap 0.8.0",
"tinystr 0.8.1",
"writeable 0.6.1",
"zerovec 0.11.2",
]
[[package]]
name = "icu_locale_data"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fdef0c124749d06a743c69e938350816554eb63ac979166590e2b4ee4252765"
[[package]]
name = "icu_locid"
version = "1.5.0"
@@ -579,10 +528,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
dependencies = [
"displaydoc",
"litemap 0.7.5",
"tinystr 0.7.6",
"writeable 0.5.5",
"zerovec 0.10.4",
"litemap",
"tinystr",
"writeable",
"zerovec",
]
[[package]]
@@ -594,9 +543,9 @@ dependencies = [
"displaydoc",
"icu_locid",
"icu_locid_transform_data",
"icu_provider 1.5.0",
"tinystr 0.7.6",
"zerovec 0.10.4",
"icu_provider",
"tinystr",
"zerovec",
]
[[package]]
@@ -612,15 +561,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
dependencies = [
"displaydoc",
"icu_collections 1.5.0",
"icu_collections",
"icu_normalizer_data",
"icu_properties",
"icu_provider 1.5.0",
"icu_provider",
"smallvec",
"utf16_iter",
"utf8_iter",
"write16",
"zerovec 0.10.4",
"zerovec",
]
[[package]]
@@ -636,12 +585,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
dependencies = [
"displaydoc",
"icu_collections 1.5.0",
"icu_collections",
"icu_locid_transform",
"icu_properties_data",
"icu_provider 1.5.0",
"tinystr 0.7.6",
"zerovec 0.10.4",
"icu_provider",
"tinystr",
"zerovec",
]
[[package]]
@@ -660,28 +609,11 @@ dependencies = [
"icu_locid",
"icu_provider_macros",
"stable_deref_trait",
"tinystr 0.7.6",
"writeable 0.5.5",
"yoke 0.7.5",
"tinystr",
"writeable",
"yoke",
"zerofrom",
"zerovec 0.10.4",
]
[[package]]
name = "icu_provider"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
dependencies = [
"displaydoc",
"icu_locale_core",
"stable_deref_trait",
"tinystr 0.8.1",
"writeable 0.6.1",
"yoke 0.8.0",
"zerofrom",
"zerotrie",
"zerovec 0.11.2",
"zerovec",
]
[[package]]
@@ -695,30 +627,6 @@ dependencies = [
"syn",
]
[[package]]
name = "icu_segmenter"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3"
dependencies = [
"core_maths",
"displaydoc",
"icu_collections 2.0.0",
"icu_locale",
"icu_locale_core",
"icu_provider 2.0.0",
"icu_segmenter_data",
"potential_utf",
"utf8_iter",
"zerovec 0.11.2",
]
[[package]]
name = "icu_segmenter_data"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5360a2fbe97f617c4f8b944356dedb36d423f7da7f13c070995cf89e59f01220"
[[package]]
name = "idna"
version = "1.0.3"
@@ -756,17 +664,6 @@ version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5"
[[package]]
name = "io-uring"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
dependencies = [
"bitflags",
"cfg-if",
"libc",
]
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -811,24 +708,12 @@ version = "0.2.172"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
[[package]]
name = "libm"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
[[package]]
name = "litemap"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856"
[[package]]
name = "litemap"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
[[package]]
name = "log"
version = "0.4.27"
@@ -921,16 +806,6 @@ version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
[[package]]
name = "potential_utf"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
dependencies = [
"serde",
"zerovec 0.11.2",
]
[[package]]
name = "ppv-lite86"
version = "0.2.17"
@@ -1180,11 +1055,11 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "reqwest"
version = "0.12.22"
version = "0.12.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
checksum = "eabf4c97d9130e2bf606614eb937e86edac8292eaa6f422f995d7e8de1eb1813"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures-core",
"futures-util",
@@ -1458,7 +1333,7 @@ name = "synapse"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"base64 0.21.7",
"blake2",
"bytes",
"futures",
@@ -1466,7 +1341,6 @@ dependencies = [
"hex",
"http",
"http-body-util",
"icu_segmenter",
"lazy_static",
"log",
"mime",
@@ -1535,17 +1409,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
dependencies = [
"displaydoc",
"zerovec 0.10.4",
]
[[package]]
name = "tinystr"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
dependencies = [
"displaydoc",
"zerovec 0.11.2",
"zerovec",
]
[[package]]
@@ -1565,17 +1429,15 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.46.1"
version = "1.45.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
dependencies = [
"backtrace",
"bytes",
"io-uring",
"libc",
"mio",
"pin-project-lite",
"slab",
"socket2",
"windows-sys 0.52.0",
]
@@ -1967,12 +1829,6 @@ version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
[[package]]
name = "writeable"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
[[package]]
name = "yoke"
version = "0.7.5"
@@ -1981,19 +1837,7 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.7.5",
"zerofrom",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.8.0",
"yoke-derive",
"zerofrom",
]
@@ -2009,18 +1853,6 @@ dependencies = [
"synstructure",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.8.17"
@@ -2068,35 +1900,15 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
[[package]]
name = "zerotrie"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
dependencies = [
"displaydoc",
]
[[package]]
name = "zerovec"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = [
"yoke 0.7.5",
"yoke",
"zerofrom",
"zerovec-derive 0.10.3",
]
[[package]]
name = "zerovec"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428"
dependencies = [
"yoke 0.8.0",
"zerofrom",
"zerovec-derive 0.11.1",
"zerovec-derive",
]
[[package]]
@@ -2109,14 +1921,3 @@ dependencies = [
"quote",
"syn",
]
[[package]]
name = "zerovec-derive"
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View File

@@ -1 +0,0 @@
Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option.

View File

@@ -0,0 +1 @@
Support for [MSC4235](https://github.com/matrix-org/matrix-spec-proposals/pull/4235): via query param for hierarchy endpoint. Contributed by Krishan (@kfiven).

View File

@@ -1 +0,0 @@
Add plain-text handling for rich-text topics as per [MSC3765](https://github.com/matrix-org/matrix-spec-proposals/pull/3765).

View File

@@ -0,0 +1 @@
Add `forget_forced_upon_leave` capability as per [MSC4267](https://github.com/matrix-org/matrix-spec-proposals/pull/4267).

View File

@@ -1 +0,0 @@
If enabled by the user, server admins will see [soft failed](https://spec.matrix.org/v1.13/server-server-api/#soft-failure) events over the Client-Server API.

View File

@@ -0,0 +1 @@
Add `federated_user_may_invite` spam checker callback which receives the entire invite event. Contributed by @tulir @ Beeper.

View File

@@ -1 +0,0 @@
Add experimental support for [MSC4277](https://github.com/matrix-org/matrix-spec-proposals/pull/4277).

View File

@@ -0,0 +1 @@
Stop adding the "origin" field to newly-created events (PDUs).

View File

@@ -1 +0,0 @@
Fix CPU and database spinning when retrying sending events to servers whilst at the same time purging those events.

changelog.d/18509.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix `KeyError` on background updates when using split main/state databases.

View File

@@ -1 +0,0 @@
Add the ability to limit the amount of data uploaded by a user in a given time period.

View File

@@ -1 +0,0 @@
Document that some config options for the user directory are in violation of the Matrix spec.

View File

@@ -1 +0,0 @@
Allow user registrations to be done on workers.

View File

@@ -1 +0,0 @@
Remove unnecessary HTTP replication calls.

changelog.d/18573.misc Normal file
View File

@@ -0,0 +1 @@
Improve docstring on `simple_upsert_many`.

changelog.d/18582.bugfix Normal file
View File

@@ -0,0 +1 @@
Improve performance of device deletion by adding missing index.

changelog.d/18595.misc Normal file
View File

@@ -0,0 +1 @@
Better handling of ratelimited requests.

changelog.d/18600.misc Normal file
View File

@@ -0,0 +1 @@
Better handling of ratelimited requests.

View File

@@ -1 +0,0 @@
Refactor `Measure` block metrics to be homeserver-scoped.

changelog.d/18602.misc Normal file
View File

@@ -0,0 +1 @@
Speed up bulk device deletion.

changelog.d/18605.bugfix Normal file
View File

@@ -0,0 +1 @@
Ensure policy servers are not asked to scan policy server change events, allowing rooms to disable the use of a policy server while the policy server is down.

View File

@@ -1 +0,0 @@
Unbreak "Latest dependencies" workflow by using the `--without dev` poetry option instead of removed `--no-dev`.

View File

@@ -1 +0,0 @@
Raise minimum Python version to `3.9.12`.

View File

@@ -1 +0,0 @@
Update URL Preview code to work with `lxml` 6.0.0+.

View File

@@ -1 +0,0 @@
Use `markdown-it-py` instead of `commonmark` in the release script.

View File

@@ -1 +0,0 @@
Advertise support for Matrix v1.12.

View File

@@ -1 +0,0 @@
Fix typing errors with upgraded mypy version.

View File

@@ -1 +0,0 @@
Add doc comment explaining that config files are shallowly merged.

View File

@@ -1 +0,0 @@
Minor speed up of insertion into `stream_positions` table.

View File

@@ -1 +0,0 @@
Remove unused `allow_no_prev_events` option when creating an event.

View File

@@ -1 +0,0 @@
Add `recaptcha_private_key_path` and `recaptcha_public_key_path` config option.

debian/changelog vendored
View File

@@ -1,21 +1,3 @@
matrix-synapse-py3 (1.134.0) stable; urgency=medium
* New Synapse release 1.134.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Jul 2025 14:22:50 +0100
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
* New Synapse release 1.134.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Jul 2025 11:27:13 +0100
matrix-synapse-py3 (1.133.0) stable; urgency=medium
* New synapse release 1.133.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Jul 2025 13:13:24 +0000
matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
* New Synapse release 1.133.0rc1.

View File

@@ -74,7 +74,6 @@
- [Users](admin_api/user_admin_api.md)
- [Server Version](admin_api/version_api.md)
- [Federation](usage/administration/admin_api/federation.md)
- [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
- [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)

View File

@@ -1,25 +0,0 @@
# Client-Server API Extensions
Server administrators can set special account data to change how the Client-Server API behaves for
their clients. Setting the account data, or having it already set, as a non-admin has no effect.
All configuration options can be set through the `io.element.synapse.admin_client_config` global
account data on the admin's user account.
Example:
```
PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
{
"return_soft_failed_events": true
}
```
## See soft failed events
Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.
Default: `false`
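For example, the config above could be set with a short script (a sketch using only the Python standard library; the homeserver URL, admin user ID and access token are placeholders):
```python
# PUT the admin client config account data shown above onto the admin's
# own account. Replace the URL, user ID and token with real values.
import json
import urllib.request

req = urllib.request.Request(
    "https://homeserver.example/_matrix/client/v3/user/"
    "@admin:example.com/account_data/io.element.synapse.admin_client_config",
    data=json.dumps({"return_soft_failed_events": True}).encode("utf-8"),
    headers={
        "Authorization": "Bearer ACCESS_TOKEN",
        "Content-Type": "application/json",
    },
    method="PUT",
)
with urllib.request.urlopen(req) as resp:
    print(resp.status)  # 200 on success
```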

View File

@@ -806,7 +806,7 @@ A response body like the following is returned:
}, {
"delete_id": "delete_id2",
"room_id": "!roomid:example.com",
"status": "active",
"status": "purging",
"shutdown_room": {
"kicked_users": [
"@foobar:example.com"
@@ -843,7 +843,7 @@ A response body like the following is returned:
```json
{
"status": "active",
"status": "purging",
"delete_id": "bHkCNQpHqOaFhPtK",
"room_id": "!roomid:example.com",
"shutdown_room": {
@@ -876,8 +876,8 @@ The following fields are returned in the JSON response body:
- `delete_id` - The ID for this purge
- `room_id` - The ID of the room being deleted
- `status` - The status will be one of:
- `scheduled` - The deletion is waiting to be started
- `active` - The process is purging the room and event data from the database.
- `shutting_down` - The process is removing users from the room.
- `purging` - The process is purging the room and event data from the database.
- `complete` - The process has completed successfully.
- `failed` - The process was aborted and an error occurred.
- `error` - A string that shows an error message if `status` is `failed`.

View File

@@ -29,6 +29,8 @@ easiest way of installing the latest version is to use [rustup](https://rustup.r
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).

View File

@@ -164,7 +164,10 @@ $ poetry cache clear --all .
# including the wheel artifacts which is not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels.
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to pick up the correct dynamically-linked library locations; otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```

View File

@@ -286,7 +286,7 @@ Installing prerequisites on Ubuntu or Debian:
```sh
sudo apt install build-essential python3-dev libffi-dev \
python3-pip python3-setuptools sqlite3 \
libssl-dev virtualenv libjpeg-dev libxslt1-dev
libssl-dev virtualenv libjpeg-dev libxslt1-dev libicu-dev
```
##### ArchLinux
@@ -295,7 +295,7 @@ Installing prerequisites on ArchLinux:
```sh
sudo pacman -S base-devel python python-pip \
python-setuptools python-virtualenv sqlite3
python-setuptools python-virtualenv sqlite3 icu
```
##### CentOS/Fedora
@@ -305,7 +305,8 @@ Installing prerequisites on CentOS or Fedora Linux:
```sh
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
python3-virtualenv libffi-devel openssl-devel python3-devel
python3-virtualenv libffi-devel openssl-devel python3-devel \
libicu-devel
sudo dnf group install "Development Tools"
```
@@ -332,7 +333,7 @@ dnf install python3.12 python3.12-devel
```
Finally, install common prerequisites
```bash
dnf install libpq5 libpq5-devel lz4 pkgconf
dnf install libicu libicu-devel libpq5 libpq5-devel lz4 pkgconf
dnf group install "Development Tools"
```
###### Using venv module instead of virtualenv command
@@ -364,6 +365,20 @@ xcode-select --install
Some extra dependencies may be needed. You can use Homebrew (https://brew.sh) for them.
You may need to install icu, and make the icu binaries and libraries accessible.
Please follow [the official instructions of PyICU](https://pypi.org/project/PyICU/) to do so.
If you're struggling to get ICU discovered and you see:
```
RuntimeError:
Please install pkg-config on your system or set the ICU_VERSION environment
variable to the version of ICU you have installed.
```
despite it being installed and having your `PATH` updated, you can omit this dependency by
not specifying `--extras all` to `poetry`. If using PostgreSQL, you can install Synapse via
`poetry install --extras saml2 --extras oidc --extras postgres --extras opentracing --extras redis --extras sentry`.
ICU is not a hard dependency for getting a working installation.
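Conversely, to confirm that PyICU can load the native libraries once installed, a quick check (assuming the `icu` module that the PyICU package provides):
```python
# If this prints a version (e.g. "75.1") rather than raising an
# ImportError, PyICU has found the native ICU libraries.
import icu

print(icu.ICU_VERSION)
```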
On ARM-based Macs you may also need to install libjpeg and libpq:
```sh
brew install jpeg libpq
@@ -385,7 +400,8 @@ Installing prerequisites on openSUSE:
```sh
sudo zypper in -t pattern devel_basis
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
python-devel libffi-devel libopenssl-devel libjpeg62-devel
python-devel libffi-devel libopenssl-devel libjpeg62-devel \
libicu-devel
```
##### OpenBSD

View File

@@ -117,21 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
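For example, the status endpoint can be polled with a short script (a sketch using only the Python standard library; the endpoint path follows the linked admin API page, and the base URL and access token are placeholders):
```python
# Fetch the currently running background updates from the admin API.
import json
import urllib.request

req = urllib.request.Request(
    "https://homeserver.example/_synapse/admin/v1/background_updates/status",
    headers={"Authorization": "Bearer ACCESS_TOKEN"},
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # JSON body describing the running updates
```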
# Upgrading to v1.135.0
## `on_user_registration` module API callback may now run on any worker
Previously, the `on_user_registration` callback would only run on the main
process. Modules relying on this callback must assume that it may now be
called from any worker, not just the main process.
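As an illustration, a module using this callback might look like the following sketch (registration via the module API's account-validity callbacks; treat the details as illustrative rather than canonical):
```python
# A Synapse module whose on_user_registration callback may now run on any
# worker, so it must not depend on main-process-only, in-memory state.
from synapse.module_api import ModuleApi


class ExampleModule:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        api.register_account_validity_callbacks(
            on_user_registration=self.on_user_registration,
        )

    async def on_user_registration(self, user_id: str) -> None:
        # Persist anything you need in shared storage (e.g. the database),
        # not in worker-local memory, since any worker may execute this.
        ...
```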
# Upgrading to v1.134.0
## ICU bundled with Synapse
Synapse now uses the Rust `icu` library for improved user search. Installing the
native ICU library on your system is no longer required.
# Upgrading to v1.130.0
## Documented endpoint which can be delegated to a federation worker

View File

@@ -2086,23 +2086,6 @@ Example configuration:
max_upload_size: 60M
```
---
### `media_upload_limits`
*(array)* A list of media upload limits defining how much data a given user can upload in a given time period.
An empty list means no limits are applied.
Defaults to `[]`.
Example configuration:
```yaml
media_upload_limits:
- time_period: 1h
max_size: 100M
- time_period: 1w
max_size: 500M
```
---
### `max_image_pixels`
*(byte size)* Maximum number of pixels that will be thumbnailed. Defaults to `"32M"`.
@@ -2357,21 +2340,6 @@ Example configuration:
recaptcha_public_key: YOUR_PUBLIC_KEY
```
---
### `recaptcha_public_key_path`
*(string|null)* An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows the public key to be specified in an external file.
The file should be a plain text file, containing only the public key. Synapse reads the public key from the given file once at startup.
_Added in Synapse 1.135.0._
Defaults to `null`.
Example configuration:
```yaml
recaptcha_public_key_path: /path/to/key/file
```
---
### `recaptcha_private_key`
*(string|null)* This homeserver's ReCAPTCHA private key. Must be specified if [`enable_registration_captcha`](#enable_registration_captcha) is enabled. Defaults to `null`.
@@ -2381,21 +2349,6 @@ Example configuration:
recaptcha_private_key: YOUR_PRIVATE_KEY
```
---
### `recaptcha_private_key_path`
*(string|null)* An alternative to [`recaptcha_private_key`](#recaptcha_private_key): allows the private key to be specified in an external file.
The file should be a plain text file, containing only the private key. Synapse reads the private key from the given file once at startup.
_Added in Synapse 1.135.0._
Defaults to `null`.
Example configuration:
```yaml
recaptcha_private_key_path: /path/to/key/file
```
---
### `enable_registration_captcha`
*(boolean)* Set to `true` to require users to complete a CAPTCHA test when registering an account. Requires a valid ReCaptcha public/private key.
@@ -3808,11 +3761,7 @@ encryption_enabled_by_default_for_room_type: invite
This setting has the following sub-options:
* `enabled` (boolean): Defines whether users can search the user directory. If `false` then empty responses are returned to all queries.
*Warning: While the homeserver may determine which subset of users are searched, the Matrix specification requires homeservers to include (at minimum) users visible in public rooms and users sharing a room with the requester. Using `false` improves performance but violates this requirement.*
Defaults to `true`.
* `enabled` (boolean): Defines whether users can search the user directory. If false then empty responses are returned to all queries. Defaults to `true`.
* `search_all_users` (boolean): Defines whether to search all users visible to your homeserver at the time the search is performed. If set to `true`, the search will return all users known to the homeserver that match the search query. If `false`, search results will only contain users visible in public rooms and users sharing a room with the requester.

View File

@@ -77,11 +77,14 @@ The user provided search term is lowercased and normalized using [NFKC](https://
this treats the string as case-insensitive, canonicalizes different forms of the
same text, and maps some "roughly equivalent" characters together.
The search term is then split into segments using the [`icu_segmenter`
Rust crate](https://crates.io/crates/icu_segmenter). This crate ships with its
own dictionary and per-language Long Short-Term Memory (LSTM) machine-learning
models to segment words. Read more [in the crate's
documentation](https://docs.rs/icu/latest/icu/segmenter/struct.WordSegmenter.html#method.new_auto).
The search term is then split into words:
* If [ICU](https://en.wikipedia.org/wiki/International_Components_for_Unicode) is
available, then the system's [default locale](https://unicode-org.github.io/icu/userguide/locale/#default-locales)
will be used to break the search term into words. (See the
[installation instructions](setup/installation.md) for how to install ICU.)
* If unavailable, then runs of ASCII characters, numbers, underscores, and hyphens
are considered words.
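Combining the normalization described earlier with the ASCII fallback in the second bullet, a hypothetical sketch (not Synapse's actual implementation):
```python
# Lowercase and NFKC-normalize the search term, then treat runs of ASCII
# letters, digits, underscores and hyphens as words.
import re
import unicodedata


def split_words(term: str) -> list[str]:
    normalized = unicodedata.normalize("NFKC", term.lower())
    return re.findall(r"[a-zA-Z0-9_-]+", normalized)


print(split_words("Alice Smith-Jones"))  # ['alice', 'smith-jones']
```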
The queries for PostgreSQL and SQLite are detailed below, but their overall goal
is to find matching users, preferring users who are "real" (e.g. not bots,

View File

@@ -96,6 +96,7 @@
gnumake
# Native dependencies for running Synapse.
icu
libffi
libjpeg
libpqxx

poetry.lock (generated): diff suppressed because it is too large (1046 lines).


@@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.134.0"
version = "1.133.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -159,13 +159,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.poetry.dependencies]
# We aim to support all versions of Python currently supported upstream as per
# our dependency deprecation policy:
# https://element-hq.github.io/synapse/latest/deprecation_policy.html#policy
#
# 3.9.12 is currently the minimum version due to Twisted requiring it:
# https://github.com/twisted/twisted/commit/27674f64d8a553ad5e68913bfb1c936e6fdeb46a
python = "^3.9.12"
python = "^3.9.0"
# Mandatory Dependencies
# ----------------------
@@ -230,7 +224,7 @@ pydantic = ">=1.7.4, <3"
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
# This isn't really a dev-dependency, as `poetry install --no-dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"
@@ -260,6 +254,7 @@ hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
parameterized = { version = ">=0.7.4", optional = true }
idna = { version = ">=2.5", optional = true }
pyicu = { version = ">=2.10.2", optional = true }
[tool.poetry.extras]
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
@@ -282,6 +277,10 @@ redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
test = ["parameterized", "idna"]
# Allows for better search for international characters in the user directory. This
# requires libicu's development headers installed on the system (e.g. libicu-dev on
# Debian-based distributions).
user-search = ["pyicu"]
# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
@@ -313,6 +312,8 @@ all = [
"txredisapi", "hiredis",
# cache-memory
"pympler",
# improved user search
"pyicu",
# omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job
# - systemd: this is a system-based requirement
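As a usage note, the optional dependency group added above means the ICU-backed user search can be pulled in as an extra (assuming libicu's development headers are installed, per the comment):
```
pip install "matrix-synapse[user-search]"
```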
@@ -323,7 +324,7 @@ all = [
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.12.2"
ruff = "0.11.11"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
@@ -332,6 +333,7 @@ lxml-stubs = ">=0.4.0"
mypy = "*"
mypy-zope = "*"
types-bleach = ">=4.1.0"
types-commonmark = ">=0.9.2"
types-jsonschema = ">=3.2.0"
types-netaddr = ">=0.8.0.6"
types-opentracing = ">=2.4.2"
@@ -354,7 +356,7 @@ idna = ">=2.5"
click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
markdown-it-py = ">=3.0.0"
commonmark = ">=0.9.1"
pygithub = ">=1.55"
# The following are executed as commands by the release script.
twine = "*"
@@ -372,7 +374,7 @@ tomli = ">=1.2.3"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
build-backend = "poetry.core.masonry.api"
@@ -380,10 +382,13 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython and PyPy 3.8: EOLed
# - CPython 3.6, 3.7 and 3.8: EOLed
# - PyPy 3.7 and 3.8: we only support Python 3.9+
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
skip = "cp38* pp38* *-musllinux_i686"
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
# c.f. https://github.com/matrix-org/synapse/pull/14259
skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"


@@ -43,7 +43,6 @@ sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }
serde_json = "1.0.85"
ulid = "1.1.2"
icu_segmenter = "2.0.0"
reqwest = { version = "0.12.15", default-features = false, features = [
"http2",
"stream",


@@ -13,7 +13,6 @@ pub mod identifier;
pub mod matrix_const;
pub mod push;
pub mod rendezvous;
pub mod segmenter;
lazy_static! {
static ref LOGGING_HANDLE: ResetHandle = pyo3_log::init();
@@ -54,7 +53,6 @@ fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
events::register_module(py, m)?;
http_client::register_module(py, m)?;
rendezvous::register_module(py, m)?;
segmenter::register_module(py, m)?;
Ok(())
}


@@ -1,33 +0,0 @@
use icu_segmenter::options::WordBreakInvariantOptions;
use icu_segmenter::WordSegmenter;
use pyo3::prelude::*;
#[pyfunction]
pub fn parse_words(text: &str) -> PyResult<Vec<String>> {
let segmenter = WordSegmenter::new_auto(WordBreakInvariantOptions::default());
let mut parts = Vec::new();
let mut last = 0usize;
// `segment_str` gives us word boundaries as an iterator of byte indices.
// Use those to build a vector of words, and return.
for boundary in segmenter.segment_str(text) {
if boundary > last {
parts.push(text[last..boundary].to_string());
}
last = boundary;
}
Ok(parts)
}
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let child_module = PyModule::new(py, "segmenter")?;
child_module.add_function(wrap_pyfunction!(parse_words, m)?)?;
m.add_submodule(&child_module)?;
py.import("sys")?
.getattr("modules")?
.set_item("synapse.synapse_rust.segmenter", child_module)?;
Ok(())
}
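Once registered, the function is importable from Python under the dotted module path set above. A hypothetical call:
```python
from synapse.synapse_rust.segmenter import parse_words

segments = parse_words("Hello world")
# Whitespace and punctuation runs come back as their own segments,
# e.g. ["Hello", " ", "world"], since every boundary is kept.
```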


@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.134/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.133/synapse-config.schema.json
type: object
properties:
modules:
@@ -2335,30 +2335,6 @@ properties:
default: 50M
examples:
- 60M
media_upload_limits:
type: array
description: >-
A list of media upload limits defining how much data a given user can
upload in a given time period.
An empty list means no limits are applied.
default: []
items:
time_period:
type: "#/$defs/duration"
description: >-
The time period over which the limit applies. Required.
max_size:
type: "#/$defs/bytes"
description: >-
Amount of data that can be uploaded in the time period by the user.
Required.
examples:
- - time_period: 1h
max_size: 100M
- time_period: 1w
max_size: 500M
max_image_pixels:
$ref: "#/$defs/bytes"
description: Maximum number of pixels that will be thumbnailed.
@@ -2692,21 +2668,6 @@ properties:
default: null
examples:
- YOUR_PUBLIC_KEY
recaptcha_public_key_path:
type: ["string", "null"]
description: >-
An alternative to [`recaptcha_public_key`](#recaptcha_public_key): allows
the public key to be specified in an external file.
The file should be a plain text file, containing only the public key.
Synapse reads the public key from the given file once at startup.
_Added in Synapse 1.135.0._
default: null
examples:
- /path/to/key/file
recaptcha_private_key:
type: ["string", "null"]
description: >-
@@ -2715,21 +2676,6 @@ properties:
default: null
examples:
- YOUR_PRIVATE_KEY
recaptcha_private_key_path:
type: ["string", "null"]
description: >-
An alternative to [`recaptcha_private_key`](#recaptcha_private_key):
allows the private key to be specified in an external file.
The file should be a plain text file, containing only the private key.
Synapse reads the private key from the given file once at startup.
_Added in Synapse 1.135.0._
default: null
examples:
- /path/to/key/file
enable_registration_captcha:
type: boolean
description: >-
@@ -4719,15 +4665,8 @@ properties:
enabled:
type: boolean
description: >-
Defines whether users can search the user directory. If `false` then
Defines whether users can search the user directory. If false then
empty responses are returned to all queries.
*Warning: While the homeserver may determine which subset of users are
searched, the Matrix specification requires homeservers to include (at
minimum) users visible in public rooms and users sharing a room with
the requester. Using `false` improves performance but violates this
requirement.*
default: true
search_all_users:
type: boolean


@@ -36,11 +36,11 @@ from typing import Any, List, Match, Optional, Union
import attr
import click
import commonmark
import git
from click.exceptions import ClickException
from git import GitCommandError, Repo
from github import BadCredentialsException, Github
from markdown_it import MarkdownIt
from packaging import version
@@ -851,7 +851,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
# First we parse the changelog so that we can split it into sections based
# on the release headings.
tokens = MarkdownIt().parse(changes)
ast = commonmark.Parser().parse(changes)
@attr.s(auto_attribs=True)
class VersionSection:
@@ -862,22 +862,19 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
end_line: Optional[int] = None # Is None if it's the last entry
headings: List[VersionSection] = []
for i, token in enumerate(tokens):
# We look for level 1 headings (h1 tags).
if token.type != "heading_open" or token.tag != "h1":
for node, _ in ast.walker():
# We look for all text nodes that are in a level 1 heading.
if node.t != "text":
continue
# The next token should be an inline token containing the heading text
if i + 1 < len(tokens) and tokens[i + 1].type == "inline":
heading_text = tokens[i + 1].content
# The map property contains [line_begin, line_end] (0-based)
start_line = token.map[0] if token.map else 0
if node.parent.t != "heading" or node.parent.level != 1:
continue
# If we have a previous heading then we update its `end_line`.
if headings:
headings[-1].end_line = start_line
# If we have a previous heading then we update its `end_line`.
if headings:
headings[-1].end_line = node.parent.sourcepos[0][0] - 1
headings.append(VersionSection(heading_text, start_line))
headings.append(VersionSection(node.literal, node.parent.sourcepos[0][0] - 1))
changes_by_line = changes.split("\n")


@@ -29,7 +29,6 @@ from synapse.api.errors import (
InvalidClientTokenError,
MissingClientTokenError,
UnrecognizedRequestError,
UserLockedError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
@@ -163,7 +162,12 @@ class InternalAuth(BaseAuth):
if not allow_locked and await self.store.get_user_locked_status(
requester.user.to_string()
):
raise UserLockedError()
raise AuthError(
401,
"User account has been locked",
errcode=Codes.USER_LOCKED,
additional_fields={"soft_logout": True},
)
# Deny the request if the user account has expired.
# This check is only done for regular users, not appservice ones.


@@ -262,11 +262,6 @@ class EventContentFields:
TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"
# Used in m.room.topic events.
TOPIC: Final = "topic"
M_TOPIC: Final = "m.topic"
M_TEXT: Final = "m.text"
class EventUnsignedContentFields:
"""Fields found inside the 'unsigned' data on events"""
@@ -275,13 +270,6 @@ class EventUnsignedContentFields:
MEMBERSHIP: Final = "membership"
class MTextFields:
"""Fields found inside m.text content blocks."""
BODY: Final = "body"
MIMETYPE: Final = "mimetype"
class RoomTypes:
"""Understood values of the room_type field of m.room.create events."""
@@ -302,9 +290,6 @@ class AccountDataTypes:
MSC4155_INVITE_PERMISSION_CONFIG: Final = (
"org.matrix.msc4155.invite_permission_config"
)
# Synapse-specific behaviour. See "Client-Server API Extensions" documentation
# in Admin API for more information.
SYNAPSE_ADMIN_CLIENT_CONFIG: Final = "io.element.synapse.admin_client_config"
class HistoryVisibility:


@@ -306,20 +306,6 @@ class UserDeactivatedError(SynapseError):
)
class UserLockedError(SynapseError):
"""The error returned to the client when the user attempted to access an
authenticated endpoint, but the account has been locked.
"""
def __init__(self) -> None:
super().__init__(
code=HTTPStatus.UNAUTHORIZED,
msg="User account has been locked",
errcode=Codes.USER_LOCKED,
additional_fields={"soft_logout": True},
)
class FederationDeniedError(SynapseError):
"""An error raised when the server tries to federate with a server which
is not on its federation whitelist.


@@ -118,6 +118,7 @@ class GenericWorkerStore(
# FIXME(https://github.com/matrix-org/synapse/issues/3714): We need to add
# UserDirectoryStore as we write directly rather than going via the correct worker.
UserDirectoryStore,
StatsStore,
UIAuthWorkerStore,
EndToEndRoomKeyStore,
PresenceStore,
@@ -153,7 +154,6 @@ class GenericWorkerStore(
StreamWorkerStore,
EventsWorkerStore,
RegistrationWorkerStore,
StatsStore,
SearchStore,
TransactionWorkerStore,
LockStore,


@@ -555,9 +555,6 @@ class ApplicationServiceApi(SimpleHttpClient):
)
and service.is_interested_in_user(e.state_key)
),
# Appservices are considered 'trusted' by the admin and should have
# applicable metadata on their events.
include_admin_metadata=True,
),
)
for e in events


@@ -909,10 +909,7 @@ class RootConfig:
def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
"""Read the config files and shallowly merge them into a dict.
Successive configurations are shallowly merged into ones provided earlier,
i.e., entirely replacing top-level sections of the configuration.
"""Read the config files into a dict
Args:
config_files: A list of the config files to read
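The shallow-merge semantics described above amount to a plain dict update per file; a minimal sketch (hypothetical helper, not the actual function body):
```python
from typing import Any, Dict, Iterable

def shallow_merge(configs: Iterable[Dict[str, Any]]) -> Dict[str, Any]:
    merged: Dict[str, Any] = {}
    for config in configs:
        # A later file replaces whole top-level sections from earlier
        # files rather than merging recursively into them.
        merged.update(config)
    return merged
```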


@@ -23,17 +23,7 @@ from typing import Any
from synapse.types import JsonDict
from ._base import Config, ConfigError, read_file
CONFLICTING_RECAPTCHA_PRIVATE_KEY_OPTS_ERROR = """\
You have configured both `recaptcha_private_key` and
`recaptcha_private_key_path`. These are mutually incompatible.
"""
CONFLICTING_RECAPTCHA_PUBLIC_KEY_OPTS_ERROR = """\
You have configured both `recaptcha_public_key` and `recaptcha_public_key_path`.
These are mutually incompatible.
"""
from ._base import Config, ConfigError
class CaptchaConfig(Config):
@@ -48,13 +38,6 @@ class CaptchaConfig(Config):
"Config options that expect an in-line secret as value are disabled",
("recaptcha_private_key",),
)
recaptcha_private_key_path = config.get("recaptcha_private_key_path")
if recaptcha_private_key_path:
if recaptcha_private_key:
raise ConfigError(CONFLICTING_RECAPTCHA_PRIVATE_KEY_OPTS_ERROR)
recaptcha_private_key = read_file(
recaptcha_private_key_path, ("recaptcha_private_key_path",)
).strip()
if recaptcha_private_key is not None and not isinstance(
recaptcha_private_key, str
):
@@ -67,13 +50,6 @@ class CaptchaConfig(Config):
"Config options that expect an in-line secret as value are disabled",
("recaptcha_public_key",),
)
recaptcha_public_key_path = config.get("recaptcha_public_key_path")
if recaptcha_public_key_path:
if recaptcha_public_key:
raise ConfigError(CONFLICTING_RECAPTCHA_PUBLIC_KEY_OPTS_ERROR)
recaptcha_public_key = read_file(
recaptcha_public_key_path, ("recaptcha_public_key_path",)
).strip()
if recaptcha_public_key is not None and not isinstance(
recaptcha_public_key, str
):
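For reference, the checks above reject configurations that set both the inline and path variants of a key; a sketch of a homeserver config that would fail at startup:
```yaml
# Mutually incompatible: raises CONFLICTING_RECAPTCHA_PRIVATE_KEY_OPTS_ERROR
recaptcha_private_key: YOUR_PRIVATE_KEY
recaptcha_private_key_path: /path/to/key/file
```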


@@ -42,9 +42,6 @@ class CasConfig(Config):
self.cas_enabled = cas_config and cas_config.get("enabled", True)
if self.cas_enabled:
if not isinstance(cas_config, dict):
raise ConfigError("Must be a dictionary", ("cas_config",))
self.cas_server_url = cas_config["server_url"]
# TODO Update this to a _synapse URL.


@@ -561,12 +561,6 @@ class ExperimentalConfig(Config):
# MSC4076: Add `disable_badge_count`` to pusher configuration
self.msc4076_enabled: bool = experimental.get("msc4076_enabled", False)
# MSC4277: Harmonizing the reporting endpoints
#
# If enabled, ignore the score parameter and respond with HTTP 200 on
# reporting requests regardless of the subject's existence.
self.msc4277_enabled: bool = experimental.get("msc4277_enabled", False)
# MSC4235: Add `via` param to hierarchy endpoint
self.msc4235_enabled: bool = experimental.get("msc4235_enabled", False)


@@ -212,14 +212,11 @@ class KeyConfig(Config):
"Config options that expect an in-line secret as value are disabled",
("form_secret",),
)
if form_secret is not None and not isinstance(form_secret, str):
raise ConfigError("Config option must be a string", ("form_secret",))
form_secret_path = config.get("form_secret_path", None)
if form_secret_path:
if form_secret:
raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
self.form_secret: Optional[str] = read_file(
self.form_secret = read_file(
form_secret_path, ("form_secret_path",)
).strip()
else:


@@ -119,15 +119,6 @@ def parse_thumbnail_requirements(
}
@attr.s(auto_attribs=True, slots=True, frozen=True)
class MediaUploadLimit:
"""A limit on the amount of data a user can upload in a given time
period."""
max_bytes: int
time_period_ms: int
class ContentRepositoryConfig(Config):
section = "media"
@@ -283,13 +274,6 @@ class ContentRepositoryConfig(Config):
self.enable_authenticated_media = config.get("enable_authenticated_media", True)
self.media_upload_limits: List[MediaUploadLimit] = []
for limit_config in config.get("media_upload_limits", []):
time_period_ms = self.parse_duration(limit_config["time_period"])
max_bytes = self.parse_size(limit_config["max_size"])
self.media_upload_limits.append(MediaUploadLimit(max_bytes, time_period_ms))
def generate_config_section(self, data_dir_path: str, **kwargs: Any) -> str:
assert data_dir_path is not None
media_store = os.path.join(data_dir_path, "media_store")


@@ -27,6 +27,8 @@ from typing import Any, Dict, List, Optional, Union
import attr
from synapse._pydantic_compat import (
BaseModel,
Extra,
StrictBool,
StrictInt,
StrictStr,
@@ -45,7 +47,6 @@ from synapse.config.server import (
parse_listener_def,
)
from synapse.types import JsonDict
from synapse.util.pydantic_models import ParseModel
_DEPRECATED_WORKER_DUTY_OPTION_USED = """
The '%s' configuration option is deprecated and will be removed in a future
@@ -89,7 +90,30 @@ def _instance_to_list_converter(obj: Union[str, List[str]]) -> List[str]:
return obj
class InstanceTcpLocationConfig(ParseModel):
class ConfigModel(BaseModel):
"""A custom version of Pydantic's BaseModel which
- ignores unknown fields and
- does not allow fields to be overwritten after construction,
but otherwise uses Pydantic's default behaviour.
For now, ignore unknown fields. In the future, we could change this so that unknown
config values cause a ValidationError, provided the error messages are meaningful to
server operators.
Subclassing in this way is recommended by
https://pydantic-docs.helpmanual.io/usage/model_config/#change-behaviour-globally
"""
class Config:
# By default, ignore fields that we don't recognise.
extra = Extra.ignore
# By default, don't allow fields to be reassigned after parsing.
allow_mutation = False
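To illustrate the two behaviours in the docstring, a hypothetical subclass (pydantic v1 API, as wrapped by the compat module):
```python
class ExampleLocationConfig(ConfigModel):
    host: StrictStr

cfg = ExampleLocationConfig(host="replica1", unexpected="ignored")
# The unknown field is silently dropped rather than raising a ValidationError.
cfg.host = "replica2"  # raises TypeError: the model is immutable
```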
class InstanceTcpLocationConfig(ConfigModel):
"""The host and port to talk to an instance via HTTP replication."""
host: StrictStr
@@ -105,7 +129,7 @@ class InstanceTcpLocationConfig(ParseModel):
return f"{self.host}:{self.port}"
class InstanceUnixLocationConfig(ParseModel):
class InstanceUnixLocationConfig(ConfigModel):
"""The socket file to talk to an instance via HTTP replication."""
path: StrictStr
@@ -238,16 +262,10 @@ class WorkerConfig(Config):
if worker_replication_secret_path:
if worker_replication_secret:
raise ConfigError(CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR)
self.worker_replication_secret: Optional[str] = read_file(
self.worker_replication_secret = read_file(
worker_replication_secret_path, ("worker_replication_secret_path",)
).strip()
else:
if worker_replication_secret is not None and not isinstance(
worker_replication_secret, str
):
raise ConfigError(
"Config option must be a string", ("worker_replication_secret",)
)
self.worker_replication_secret = worker_replication_secret
self.worker_name = config.get("worker_name", self.worker_app)


@@ -421,21 +421,11 @@ class SerializeEventConfig:
# False, that state will be removed from the event before it is returned.
# Otherwise, it will be kept.
include_stripped_room_state: bool = False
# When True, sets unsigned fields to help clients identify events which
# only server admins can see through other configuration. For example,
# whether an event was soft failed by the server.
include_admin_metadata: bool = False
_DEFAULT_SERIALIZE_EVENT_CONFIG = SerializeEventConfig()
def make_config_for_admin(existing: SerializeEventConfig) -> SerializeEventConfig:
# Set the options which are only available to server admins,
# and copy the rest.
return attr.evolve(existing, include_admin_metadata=True)
def serialize_event(
e: Union[JsonDict, EventBase],
time_now_ms: int,
@@ -538,9 +528,6 @@ def serialize_event(
d["content"] = dict(d["content"])
d["content"]["redacts"] = e.redacts
if config.include_admin_metadata and e.internal_metadata.is_soft_failed():
d["unsigned"]["io.element.synapse.soft_failed"] = True
only_event_fields = config.only_event_fields
if only_event_fields:
if not isinstance(only_event_fields, list) or not all(
@@ -561,7 +548,6 @@ class EventClientSerializer:
def __init__(self, hs: "HomeServer") -> None:
self._store = hs.get_datastores().main
self._auth = hs.get_auth()
self._add_extra_fields_to_unsigned_client_event_callbacks: List[
ADD_EXTRA_FIELDS_TO_UNSIGNED_CLIENT_EVENT_CALLBACK
] = []
@@ -590,15 +576,6 @@ class EventClientSerializer:
if not isinstance(event, EventBase):
return event
# Force-enable server admin metadata because the only time an event with
# relevant metadata will be when the admin requested it via their admin
# client config account data. Also, it's "just" some `unsigned` fields, so
# shouldn't cause much in terms of problems to downstream consumers.
if config.requester is not None and await self._auth.is_server_admin(
config.requester
):
config = make_config_for_admin(config)
serialized_event = serialize_event(event, time_now, config=config)
new_unsigned = {}
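The net effect of the admin-metadata path above is one extra `unsigned` field on serialised events; a sketch of the shape a server admin would see for a soft-failed event:
```python
serialized_event = {
    "type": "m.room.message",
    "content": {"msgtype": "m.text", "body": "example"},
    "unsigned": {"io.element.synapse.soft_failed": True},
}
```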


@@ -85,6 +85,7 @@ from synapse.logging.opentracing import (
from synapse.metrics.background_process_metrics import wrap_as_background_process
from synapse.replication.http.federation import (
ReplicationFederationSendEduRestServlet,
ReplicationGetQueryRestServlet,
)
from synapse.storage.databases.main.lock import Lock
from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
@@ -1379,6 +1380,7 @@ class FederationHandlerRegistry:
# and use them. However we have guards before we use them to ensure that
# we don't route to ourselves, and in monolith mode that will always be
# the case.
self._get_query_client = ReplicationGetQueryRestServlet.make_client(hs)
self._send_edu = ReplicationFederationSendEduRestServlet.make_client(hs)
self.edu_handlers: Dict[str, Callable[[str, dict], Awaitable[None]]] = {}
@@ -1467,6 +1469,10 @@ class FederationHandlerRegistry:
if handler:
return await handler(args)
# Check if we can route it somewhere else that isn't us
if self._instance_name == "master":
return await self._get_query_client(query_type=query_type, args=args)
# Uh oh, no handler! Let's raise an exception so the request returns an
# error.
logger.warning("No handler registered for query type %s", query_type)


@@ -156,9 +156,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
def _clear_queue_before_pos(self, position_to_delete: int) -> None:
"""Clear all the queues from before a given position"""
with Measure(
self.clock, name="send_queue._clear", server_name=self.server_name
):
with Measure(self.clock, "send_queue._clear"):
# Delete things out of presence maps
keys = self.presence_destinations.keys()
i = self.presence_destinations.bisect_left(position_to_delete)


@@ -657,11 +657,7 @@ class FederationSender(AbstractFederationSender):
logger.debug(
"Handling %i events in room %s", len(events), events[0].room_id
)
with Measure(
self.clock,
name="handle_room_events",
server_name=self.server_name,
):
with Measure(self.clock, "handle_room_events"):
for event in events:
await handle_event(event)


@@ -129,8 +129,6 @@ class PerDestinationQueue:
# The stream_ordering of the most recent PDU that was discarded due to
# being in catch-up mode.
# Can be set to zero if no PDU has been discarded since the last time
# we queried for new PDUs during catch-up.
self._catchup_last_skipped: int = 0
# Cache of the last successfully-transmitted stream ordering for this
@@ -464,18 +462,8 @@ class PerDestinationQueue:
# of a race condition, so we check that no new events have been
# skipped due to us being in catch-up mode
if (
self._catchup_last_skipped != 0
and self._catchup_last_skipped > last_successful_stream_ordering
):
if self._catchup_last_skipped > last_successful_stream_ordering:
# another event has been skipped because we were in catch-up mode
# As an exception to this case: we can hit this branch if the
# room has been purged whilst we have been looping.
# In that case we avoid hot-looping by resetting the 'catch-up skipped
# PDU' flag.
# Then if there is still no progress to be made at the next iteration,
# we can exit catch-up mode.
self._catchup_last_skipped = 0
continue
# we are done catching up!


@@ -58,7 +58,7 @@ class TransactionManager:
"""
def __init__(self, hs: "synapse.server.HomeServer"):
self.server_name = hs.hostname # nb must be called this for @measure_func
self._server_name = hs.hostname
self.clock = hs.get_clock() # nb must be called this for @measure_func
self._store = hs.get_datastores().main
self._transaction_actions = TransactionActions(self._store)
@@ -116,7 +116,7 @@ class TransactionManager:
transaction = Transaction(
origin_server_ts=int(self.clock.time_msec()),
transaction_id=txn_id,
origin=self.server_name,
origin=self._server_name,
destination=destination,
pdus=[p.get_pdu_json() for p in pdus],
edus=[edu.get_dict() for edu in edus],


@@ -73,7 +73,6 @@ events_processed_counter = Counter("synapse_handlers_appservice_events_processed
class ApplicationServicesHandler:
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self.store = hs.get_datastores().main
self.is_mine_id = hs.is_mine_id
self.appservice_api = hs.get_application_service_api()
@@ -121,9 +120,7 @@ class ApplicationServicesHandler:
@wrap_as_background_process("notify_interested_services")
async def _notify_interested_services(self, max_token: RoomStreamToken) -> None:
with Measure(
self.clock, name="notify_interested_services", server_name=self.server_name
):
with Measure(self.clock, "notify_interested_services"):
self.is_processing = True
try:
upper_bound = -1
@@ -332,11 +329,7 @@ class ApplicationServicesHandler:
users: Collection[Union[str, UserID]],
) -> None:
logger.debug("Checking interested services for %s", stream_key)
with Measure(
self.clock,
name="notify_interested_services_ephemeral",
server_name=self.server_name,
):
with Measure(self.clock, "notify_interested_services_ephemeral"):
for service in services:
if stream_key == StreamKeyType.TYPING:
# Note that we don't persist the token (via set_appservice_stream_type_pos)


@@ -174,7 +174,6 @@ def login_id_phone_to_thirdparty(identifier: JsonDict) -> Dict[str, str]:
# Accept both "phone" and "number" as valid keys in m.id.phone
phone_number = identifier.get("phone", identifier["number"])
assert isinstance(phone_number, str)
# Convert user-provided phone number to a consistent representation
msisdn = phone_number_to_msisdn(identifier["country"], phone_number)
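For illustration, the conversion normalises national formats to an MSISDN (country code plus digits, no punctuation); the values here are hypothetical:
```python
msisdn = phone_number_to_msisdn("GB", "07700 900123")
# -> "447700900123": country code prepended, spaces and leading zero dropped
```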


@@ -378,8 +378,7 @@ class CasHandler:
# Arbitrarily use the first attribute found.
display_name = cas_response.attributes.get(
self._cas_displayname_attribute, # type: ignore[arg-type]
[None],
self._cas_displayname_attribute, [None]
)[0]
return UserAttributes(localpart=localpart, display_name=display_name)


@@ -54,7 +54,6 @@ logger = logging.getLogger(__name__)
class DelayedEventsHandler:
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self._store = hs.get_datastores().main
self._storage_controllers = hs.get_storage_controllers()
self._config = hs.config
@@ -160,9 +159,7 @@ class DelayedEventsHandler:
# Loop round handling deltas until we're up to date
while True:
with Measure(
self._clock, name="delayed_events_delta", server_name=self.server_name
):
with Measure(self._clock, "delayed_events_delta"):
room_max_stream_ordering = self._store.get_room_max_stream_ordering()
if self._event_pos == room_max_stream_ordering:
return


@@ -526,8 +526,6 @@ class DeviceHandler(DeviceWorkerHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.hostname # nb must be called this for @measure_func
self.clock = hs.get_clock() # nb must be called this for @measure_func
self.federation_sender = hs.get_federation_sender()
self._account_data_handler = hs.get_account_data_handler()
self._storage_controllers = hs.get_storage_controllers()
@@ -1217,8 +1215,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
def __init__(self, hs: "HomeServer", device_handler: DeviceHandler):
self.store = hs.get_datastores().main
self.federation = hs.get_federation_client()
self.server_name = hs.hostname # nb must be called this for @measure_func
self.clock = hs.get_clock() # nb must be called this for @measure_func
self.clock = hs.get_clock()
self.device_handler = device_handler
self._notifier = hs.get_notifier()


@@ -73,6 +73,10 @@ from synapse.logging.context import nested_logging_context
from synapse.logging.opentracing import SynapseTags, set_tag, tag_args, trace
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.module_api import NOT_SPAM
from synapse.replication.http.federation import (
ReplicationCleanRoomRestServlet,
ReplicationStoreRoomOnOutlierMembershipRestServlet,
)
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.storage.invite_rule import InviteRule
from synapse.types import JsonDict, StrCollection, get_domain_from_id
@@ -159,6 +163,19 @@ class FederationHandler:
self._notifier = hs.get_notifier()
self._worker_locks = hs.get_worker_locks_handler()
self._clean_room_for_join_client = ReplicationCleanRoomRestServlet.make_client(
hs
)
if hs.config.worker.worker_app:
self._maybe_store_room_on_outlier_membership = (
ReplicationStoreRoomOnOutlierMembershipRestServlet.make_client(hs)
)
else:
self._maybe_store_room_on_outlier_membership = (
self.store.maybe_store_room_on_outlier_membership
)
self._room_backfill = Linearizer("room_backfill")
self._third_party_event_rules = (
@@ -630,7 +647,7 @@ class FederationHandler:
# room.
# In short, the races either have an acceptable outcome or should be
# impossible.
await self.store.clean_room_for_join(room_id)
await self._clean_room_for_join(room_id)
try:
# Try the host we successfully got a response to /make_join/
@@ -840,7 +857,7 @@ class FederationHandler:
event.internal_metadata.out_of_band_membership = True
# Record the room ID and its version so that we have a record of the room
await self.store.maybe_store_room_on_outlier_membership(
await self._maybe_store_room_on_outlier_membership(
room_id=event.room_id, room_version=event_format_version
)
@@ -1098,7 +1115,7 @@ class FederationHandler:
# keep a record of the room version, if we don't yet know it.
# (this may get overwritten if we later get a different room version in a
# join dance).
await self.store.maybe_store_room_on_outlier_membership(
await self._maybe_store_room_on_outlier_membership(
room_id=event.room_id, room_version=room_version
)
@@ -1744,6 +1761,18 @@ class FederationHandler:
if "valid" not in response or not response["valid"]:
raise AuthError(403, "Third party certificate was invalid")
async def _clean_room_for_join(self, room_id: str) -> None:
"""Called to clean up any data in DB for a given room, ready for the
server to join the room.
Args:
room_id
"""
if self.config.worker.worker_app:
await self._clean_room_for_join_client(room_id)
else:
await self.store.clean_room_for_join(room_id)
async def get_room_complexity(
self, remote_room_hosts: List[str], room_id: str
) -> Optional[dict]:


@@ -476,16 +476,16 @@ _DUMMY_EVENT_ROOM_EXCLUSION_EXPIRY = 7 * 24 * 60 * 60 * 1000
class EventCreationHandler:
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.validator = EventValidator()
self.event_builder_factory = hs.get_event_builder_factory()
self.server_name = hs.hostname # nb must be called this for @measure_func
self.clock = hs.get_clock() # nb must be called this for @measure_func
self.auth_blocking = hs.get_auth_blocking()
self._event_auth_handler = hs.get_event_auth_handler()
self.store = hs.get_datastores().main
self._storage_controllers = hs.get_storage_controllers()
self.state = hs.get_state_handler()
self.clock = hs.get_clock()
self.validator = EventValidator()
self.profile_handler = hs.get_profile_handler()
self.event_builder_factory = hs.get_event_builder_factory()
self.server_name = hs.hostname
self.notifier = hs.get_notifier()
self.config = hs.config
self.require_membership_for_aliases = (
@@ -568,6 +568,7 @@ class EventCreationHandler:
requester: Requester,
event_dict: dict,
txn_id: Optional[str] = None,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
auth_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
@@ -593,6 +594,10 @@ class EventCreationHandler:
requester
event_dict: An entire event
txn_id
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids:
the forward extremities to use as the prev_events for the
new event.
@@ -712,6 +717,7 @@ class EventCreationHandler:
event, unpersisted_context = await self.create_new_client_event(
builder=builder,
requester=requester,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
auth_event_ids=auth_event_ids,
state_event_ids=state_event_ids,
@@ -939,6 +945,7 @@ class EventCreationHandler:
self,
requester: Requester,
event_dict: dict,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
ratelimit: bool = True,
@@ -955,6 +962,10 @@ class EventCreationHandler:
Args:
requester: The requester sending the event.
event_dict: An entire event.
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids:
The event IDs to use as the prev events.
Should normally be left as None to automatically request them
@@ -1040,6 +1051,7 @@ class EventCreationHandler:
return await self._create_and_send_nonmember_event_locked(
requester=requester,
event_dict=event_dict,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
ratelimit=ratelimit,
@@ -1053,6 +1065,7 @@ class EventCreationHandler:
self,
requester: Requester,
event_dict: dict,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
ratelimit: bool = True,
@@ -1084,6 +1097,7 @@ class EventCreationHandler:
requester,
event_dict,
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
outlier=outlier,
@@ -1166,6 +1180,7 @@ class EventCreationHandler:
self,
builder: EventBuilder,
requester: Optional[Requester] = None,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
auth_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
@@ -1185,6 +1200,10 @@ class EventCreationHandler:
Args:
builder:
requester:
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids:
the forward extremities to use as the prev_events for the
new event.
@@ -1222,6 +1241,7 @@ class EventCreationHandler:
if state_event_ids is not None:
# Do a quick check to make sure that prev_event_ids is present to
# make the type-checking around `builder.build` happy.
# prev_event_ids could be an empty array though.
assert prev_event_ids is not None
temp_event = await builder.build(
@@ -1249,14 +1269,24 @@ class EventCreationHandler:
else:
prev_event_ids = await self.store.get_prev_events_for_room(builder.room_id)
# We now ought to have some `prev_events` (unless it's a create event).
#
# Do a quick sanity check here, rather than waiting until we've created the
# event and then try to auth it (which fails with a somewhat confusing "No
# create event in auth events")
assert builder.type == EventTypes.Create or len(prev_event_ids) > 0, (
"Attempting to create an event with no prev_events"
)
if allow_no_prev_events:
# We allow events with no `prev_events` but it better have some `auth_events`
assert (
builder.type == EventTypes.Create
# Allow an event to have empty list of prev_event_ids
# only if it has auth_event_ids.
or auth_event_ids
), (
"Attempting to create a non-m.room.create event with no prev_events or auth_event_ids"
)
else:
# we now ought to have some prev_events (unless it's a create event).
assert builder.type == EventTypes.Create or prev_event_ids, (
"Attempting to create a non-m.room.create event with no prev_events"
)
if for_batch:
assert prev_event_ids is not None


@@ -747,7 +747,6 @@ class WorkerPresenceHandler(BasePresenceHandler):
class PresenceHandler(BasePresenceHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.server_name = hs.hostname
self.wheel_timer: WheelTimer[str] = WheelTimer()
self.notifier = hs.get_notifier()
@@ -942,9 +941,7 @@ class PresenceHandler(BasePresenceHandler):
now = self.clock.time_msec()
with Measure(
self.clock, name="presence_update_states", server_name=self.server_name
):
with Measure(self.clock, "presence_update_states"):
# NOTE: We purposefully don't await between now and when we've
# calculated what we want to do with the new states, to avoid races.
@@ -1408,7 +1405,7 @@ class PresenceHandler(BasePresenceHandler):
# Based on the state of each user's device calculate the new presence state.
presence = _combine_device_states(devices.values())
new_fields: JsonDict = {"state": presence}
new_fields = {"state": presence}
if presence == PresenceState.ONLINE or presence == PresenceState.BUSY:
new_fields["last_active_ts"] = now
@@ -1500,9 +1497,7 @@ class PresenceHandler(BasePresenceHandler):
async def _unsafe_process(self) -> None:
# Loop round handling deltas until we're up to date
while True:
with Measure(
self.clock, name="presence_delta", server_name=self.server_name
):
with Measure(self.clock, "presence_delta"):
room_max_stream_ordering = self.store.get_room_max_stream_ordering()
if self._event_pos == room_max_stream_ordering:
return
@@ -1764,7 +1759,6 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):
# Same with get_presence_router:
#
# AuthHandler -> Notifier -> PresenceEventSource -> ModuleApi -> AuthHandler
self.server_name = hs.hostname
self.get_presence_handler = hs.get_presence_handler
self.get_presence_router = hs.get_presence_router
self.clock = hs.get_clock()
@@ -1798,9 +1792,7 @@ class PresenceEventSource(EventSource[int, UserPresenceState]):
user_id = user.to_string()
stream_change_cache = self.store.presence_stream_cache
with Measure(
self.clock, name="presence.get_new_events", server_name=self.server_name
):
with Measure(self.clock, "presence.get_new_events"):
if from_key is not None:
from_key = int(from_key)


@@ -539,17 +539,11 @@ class ProfileHandler:
response: JsonDict = {}
try:
if just_field is None or just_field == ProfileFields.DISPLAYNAME:
displayname = await self.store.get_profile_displayname(user)
# do not set the displayname field if it is None,
# since then we send a null in the JSON response
if displayname is not None:
response["displayname"] = displayname
response["displayname"] = await self.store.get_profile_displayname(user)
if just_field is None or just_field == ProfileFields.AVATAR_URL:
avatar_url = await self.store.get_profile_avatar_url(user)
# do not set the avatar_url field if it is None,
# since then we send a null in the JSON response
if avatar_url is not None:
response["avatar_url"] = avatar_url
response["avatar_url"] = await self.store.get_profile_avatar_url(user)
if self.hs.config.experimental.msc4133_enabled:
if just_field is None:
response.update(await self.store.get_profile_fields(user))


@@ -49,6 +49,7 @@ from synapse.http.servlet import assert_params_in_dict
from synapse.replication.http.login import RegisterDeviceReplicationServlet
from synapse.replication.http.register import (
ReplicationPostRegisterActionsServlet,
ReplicationRegisterServlet,
)
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.types import GUEST_USER_ID_PATTERN, RoomAlias, UserID, create_requester
@@ -119,6 +120,7 @@ class RegistrationHandler:
self._spam_checker_module_callbacks = hs.get_module_api_callbacks().spam_checker
if hs.config.worker.worker_app:
self._register_client = ReplicationRegisterServlet.make_client(hs)
self._register_device_client = RegisterDeviceReplicationServlet.make_client(
hs
)
@@ -557,7 +559,7 @@ class RegistrationHandler:
if join_rules_event:
join_rule = join_rules_event.content.get("join_rule", None)
requires_invite = (
join_rule is not None and join_rule != JoinRules.PUBLIC
join_rule and join_rule != JoinRules.PUBLIC
)
# Send the invite, if necessary.
@@ -736,20 +738,37 @@ class RegistrationHandler:
shadow_banned: Whether to shadow-ban the user
approved: Whether to mark the user as approved by an administrator
"""
await self.store.register_user(
user_id=user_id,
password_hash=password_hash,
was_guest=was_guest,
make_guest=make_guest,
appservice_id=appservice_id,
create_profile_with_displayname=create_profile_with_displayname,
admin=admin,
user_type=user_type,
shadow_banned=shadow_banned,
approved=approved,
)
if self.hs.config.worker.worker_app:
await self._register_client(
user_id=user_id,
password_hash=password_hash,
was_guest=was_guest,
make_guest=make_guest,
appservice_id=appservice_id,
create_profile_with_displayname=create_profile_with_displayname,
admin=admin,
user_type=user_type,
address=address,
shadow_banned=shadow_banned,
approved=approved,
)
else:
await self.store.register_user(
user_id=user_id,
password_hash=password_hash,
was_guest=was_guest,
make_guest=make_guest,
appservice_id=appservice_id,
create_profile_with_displayname=create_profile_with_displayname,
admin=admin,
user_type=user_type,
shadow_banned=shadow_banned,
approved=approved,
)
await self._account_validity_handler.on_user_registration(user_id)
# Only call the account validity module(s) on the main process, to avoid
# repeating e.g. database writes on all of the workers.
await self._account_validity_handler.on_user_registration(user_id)
async def register_device(
self,


@@ -51,7 +51,6 @@ from synapse.api.constants import (
HistoryVisibility,
JoinRules,
Membership,
MTextFields,
RoomCreationPreset,
RoomEncryptionAlgorithms,
RoomTypes,
@@ -1304,13 +1303,7 @@ class RoomCreationHandler:
topic = room_config["topic"]
topic_event, topic_context = await create_event(
EventTypes.Topic,
{
EventContentFields.TOPIC: topic,
EventContentFields.M_TOPIC: {
# The mimetype property defaults to `text/plain` if omitted.
EventContentFields.M_TEXT: [{MTextFields.BODY: topic}]
},
},
{"topic": topic},
True,
)
events_to_send.append((topic_event, topic_context))
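For reference, the richer form built above serialises to event content along these lines (a sketch assuming `topic = "Weekly sync"`; the plain `topic` key is kept for older clients):
```python
topic_content = {
    "topic": "Weekly sync",
    "m.topic": {
        # "mimetype" defaults to "text/plain" when omitted
        "m.text": [{"body": "Weekly sync"}],
    },
}
```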


@@ -388,11 +388,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
async def _local_membership_update(
self,
*,
requester: Requester,
target: UserID,
room_id: str,
membership: str,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
depth: Optional[int] = None,
@@ -414,6 +414,11 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
desired membership event.
room_id:
membership:
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids: The event IDs to use as the prev events
state_event_ids:
The full state at a given event. This was previously used particularly
@@ -481,6 +486,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
"origin_server_ts": origin_server_ts,
},
txn_id=txn_id,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
depth=depth,
@@ -577,6 +583,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
new_room: bool = False,
require_consent: bool = True,
outlier: bool = False,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
depth: Optional[int] = None,
@@ -600,6 +607,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
outlier: Indicates whether the event is an `outlier`, i.e. if
it's from an arbitrary point and floating in the DAG as
opposed to being inline with the current DAG.
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids: The event IDs to use as the prev events
state_event_ids:
The full state at a given event. This was previously used particularly
@@ -669,6 +680,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
new_room=new_room,
require_consent=require_consent,
outlier=outlier,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
depth=depth,
@@ -691,6 +703,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
new_room: bool = False,
require_consent: bool = True,
outlier: bool = False,
allow_no_prev_events: bool = False,
prev_event_ids: Optional[List[str]] = None,
state_event_ids: Optional[List[str]] = None,
depth: Optional[int] = None,
@@ -716,6 +729,10 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
outlier: Indicates whether the event is an `outlier`, i.e. if
it's from an arbitrary point and floating in the DAG as
opposed to being inline with the current DAG.
allow_no_prev_events: Whether to allow this event to be created with an empty
list of prev_events. Normally this is prohibited just because most
events should have a prev_event and we should only use this in special
cases (previously useful for MSC2716).
prev_event_ids: The event IDs to use as the prev events
state_event_ids:
The full state at a given event. This was previously used particularly
@@ -916,6 +933,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
)
# InviteRule.IGNORE is handled at the sync layer.
# An empty prev_events list is allowed as long as the auth_event_ids are present
if prev_event_ids is not None:
return await self._local_membership_update(
requester=requester,
@@ -924,6 +942,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
membership=effective_membership_state,
txn_id=txn_id,
ratelimit=ratelimit,
allow_no_prev_events=allow_no_prev_events,
prev_event_ids=prev_event_ids,
state_event_ids=state_event_ids,
depth=depth,


@@ -197,7 +197,7 @@ class SendEmailHandler:
additional_headers: A map of additional headers to include.
"""
try:
from_string = self._from % {"app": app_name} # type: ignore[operator]
from_string = self._from % {"app": app_name}
except (KeyError, TypeError):
from_string = self._from


@@ -818,13 +818,13 @@ class SsoHandler:
server_name = avatar_url_parts[-2]
media_id = avatar_url_parts[-1]
if self._is_mine_server_name(server_name):
media = await self._media_repo.store.get_local_media(media_id)
media = await self._media_repo.store.get_local_media(media_id) # type: ignore[has-type]
if media is not None and upload_name == media.upload_name:
logger.info("skipping saving the user avatar")
return True
# store it in media repository
avatar_mxc_url = await self._media_repo.create_or_update_content(
avatar_mxc_url = await self._media_repo.create_content(
media_type=headers[b"Content-Type"][0].decode("utf-8"),
upload_name=upload_name,
content=picture,


@@ -36,7 +36,6 @@ from synapse.metrics import event_processing_positions
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.databases.main.state_deltas import StateDelta
from synapse.types import JsonDict
from synapse.util.events import get_plain_text_topic_from_event_content
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -300,9 +299,7 @@ class StatsHandler:
elif delta.event_type == EventTypes.Name:
room_state["name"] = event_content.get("name")
elif delta.event_type == EventTypes.Topic:
room_state["topic"] = get_plain_text_topic_from_event_content(
event_content
)
room_state["topic"] = event_content.get("topic")
elif delta.event_type == EventTypes.RoomAvatar:
room_state["avatar"] = event_content.get("url")
elif delta.event_type == EventTypes.CanonicalAlias:


@@ -329,7 +329,6 @@ class E2eeSyncResult:
class SyncHandler:
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self.hs_config = hs.config
self.store = hs.get_datastores().main
self.notifier = hs.get_notifier()
@@ -711,9 +710,7 @@ class SyncHandler:
sync_config = sync_result_builder.sync_config
with Measure(
self.clock, name="ephemeral_by_room", server_name=self.server_name
):
with Measure(self.clock, "ephemeral_by_room"):
typing_key = since_token.typing_key if since_token else 0
room_ids = sync_result_builder.joined_room_ids
@@ -786,9 +783,7 @@ class SyncHandler:
and current token to send down to clients.
newly_joined_room
"""
with Measure(
self.clock, name="load_filtered_recents", server_name=self.server_name
):
with Measure(self.clock, "load_filtered_recents"):
timeline_limit = sync_config.filter_collection.timeline_limit()
block_all_timeline = (
sync_config.filter_collection.blocks_all_room_timeline()
@@ -1179,9 +1174,7 @@ class SyncHandler:
# updates even if they occurred logically before the previous event.
# TODO(mjark) Check for new redactions in the state events.
with Measure(
self.clock, name="compute_state_delta", server_name=self.server_name
):
with Measure(self.clock, "compute_state_delta"):
# The memberships needed for events in the timeline.
# Only calculated when `lazy_load_members` is on.
members_to_fetch: Optional[Set[str]] = None
@@ -1798,9 +1791,7 @@ class SyncHandler:
# the DB.
return RoomNotifCounts.empty()
with Measure(
self.clock, name="unread_notifs_for_room_id", server_name=self.server_name
):
with Measure(self.clock, "unread_notifs_for_room_id"):
return await self.store.get_unread_event_push_actions_by_room_for_user(
room_id,
sync_config.user.to_string(),


@@ -503,7 +503,6 @@ class TypingWriterHandler(FollowerTypingHandler):
class TypingNotificationEventSource(EventSource[int, JsonMapping]):
def __init__(self, hs: "HomeServer"):
self.server_name = hs.hostname
self._main_store = hs.get_datastores().main
self.clock = hs.get_clock()
# We can't call get_typing_handler here because there's a cycle:
@@ -536,9 +535,7 @@ class TypingNotificationEventSource(EventSource[int, JsonMapping]):
appservice may be interested in.
* The latest known room serial.
"""
with Measure(
self.clock, name="typing.get_new_events_as", server_name=self.server_name
):
with Measure(self.clock, "typing.get_new_events_as"):
handler = self.get_typing_handler()
events = []
@@ -574,9 +571,7 @@ class TypingNotificationEventSource(EventSource[int, JsonMapping]):
Find typing notifications for given rooms (> `from_token` and <= `to_token`)
"""
with Measure(
self.clock, name="typing.get_new_events", server_name=self.server_name
):
with Measure(self.clock, "typing.get_new_events"):
from_key = int(from_key)
handler = self.get_typing_handler()


@@ -237,9 +237,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# Loop round handling deltas until we're up to date
while True:
with Measure(
self.clock, name="user_dir_delta", server_name=self.server_name
):
with Measure(self.clock, "user_dir_delta"):
room_max_stream_ordering = self.store.get_room_max_stream_ordering()
if self.pos == room_max_stream_ordering:
return


@@ -33,11 +33,10 @@ from twisted.internet.interfaces import (
IAddress,
IConnector,
IProtocol,
IProtocolFactory,
IReactorCore,
IStreamClientEndpoint,
)
from twisted.internet.protocol import ClientFactory, connectionDone
from twisted.internet.protocol import ClientFactory, Protocol, connectionDone
from twisted.python.failure import Failure
from twisted.web import http
@@ -117,7 +116,11 @@ class HTTPConnectProxyEndpoint:
def __repr__(self) -> str:
return "<HTTPConnectProxyEndpoint %s>" % (self._proxy_endpoint,)
def connect(self, protocolFactory: IProtocolFactory) -> "defer.Deferred[IProtocol]":
# Mypy encounters a false positive here: it complains that ClientFactory
# is incompatible with IProtocolFactory. But ClientFactory inherits from
# Factory, which implements IProtocolFactory. So I think this is a bug
# in mypy-zope.
def connect(self, protocolFactory: ClientFactory) -> "defer.Deferred[IProtocol]": # type: ignore[override]
f = HTTPProxiedClientFactory(
self._host, self._port, protocolFactory, self._proxy_creds
)
@@ -145,7 +148,7 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
self,
dst_host: bytes,
dst_port: int,
wrapped_factory: IProtocolFactory,
wrapped_factory: ClientFactory,
proxy_creds: Optional[ProxyCredentials],
):
self.dst_host = dst_host
@@ -155,10 +158,7 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
self.on_connection: "defer.Deferred[None]" = defer.Deferred()
def startedConnecting(self, connector: IConnector) -> None:
# We expect the wrapped factory to be a ClientFactory, but the generic
# interfaces only guarantee that it implements IProtocolFactory.
if isinstance(self.wrapped_factory, ClientFactory):
return self.wrapped_factory.startedConnecting(connector)
return self.wrapped_factory.startedConnecting(connector)
def buildProtocol(self, addr: IAddress) -> "HTTPConnectProtocol":
wrapped_protocol = self.wrapped_factory.buildProtocol(addr)
@@ -177,15 +177,13 @@ class HTTPProxiedClientFactory(protocol.ClientFactory):
logger.debug("Connection to proxy failed: %s", reason)
if not self.on_connection.called:
self.on_connection.errback(reason)
if isinstance(self.wrapped_factory, ClientFactory):
return self.wrapped_factory.clientConnectionFailed(connector, reason)
return self.wrapped_factory.clientConnectionFailed(connector, reason)
def clientConnectionLost(self, connector: IConnector, reason: Failure) -> None:
logger.debug("Connection to proxy lost: %s", reason)
if not self.on_connection.called:
self.on_connection.errback(reason)
if isinstance(self.wrapped_factory, ClientFactory):
return self.wrapped_factory.clientConnectionLost(connector, reason)
return self.wrapped_factory.clientConnectionLost(connector, reason)
class HTTPConnectProtocol(protocol.Protocol):
@@ -210,7 +208,7 @@ class HTTPConnectProtocol(protocol.Protocol):
self,
host: bytes,
port: int,
wrapped_protocol: IProtocol,
wrapped_protocol: Protocol,
connected_deferred: defer.Deferred,
proxy_creds: Optional[ProxyCredentials],
):
@@ -225,14 +223,11 @@ class HTTPConnectProtocol(protocol.Protocol):
)
self.http_setup_client.on_connected.addCallback(self.proxyConnected)
# Set once we start connecting to the wrapped protocol
self.wrapped_connection_started = False
def connectionMade(self) -> None:
self.http_setup_client.makeConnection(self.transport)
def connectionLost(self, reason: Failure = connectionDone) -> None:
if self.wrapped_connection_started:
if self.wrapped_protocol.connected:
self.wrapped_protocol.connectionLost(reason)
self.http_setup_client.connectionLost(reason)
@@ -241,8 +236,6 @@ class HTTPConnectProtocol(protocol.Protocol):
self.connected_deferred.errback(reason)
def proxyConnected(self, _: Union[None, "defer.Deferred[None]"]) -> None:
self.wrapped_connection_started = True
assert self.transport is not None
self.wrapped_protocol.makeConnection(self.transport)
self.connected_deferred.callback(self.wrapped_protocol)
@@ -254,7 +247,7 @@ class HTTPConnectProtocol(protocol.Protocol):
def dataReceived(self, data: bytes) -> None:
# if we've set up the HTTP protocol, we can send the data there
if self.wrapped_connection_started:
if self.wrapped_protocol.connected:
return self.wrapped_protocol.dataReceived(data)
# otherwise, we must still be setting up the connection: send the data to the
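For orientation, the handshake that `HTTPConnectProtocol` drives looks roughly like this when written with blocking sockets: send a CONNECT request (plus `Proxy-Authorization` if credentials are configured), wait for a 200 response, then treat the socket as a raw tunnel. The function below is a sketch with invented names, not code from this repository.

import base64
import socket
from typing import Optional, Tuple

def open_connect_tunnel(
    proxy: Tuple[str, int],
    dst_host: str,
    dst_port: int,
    creds: Optional[str] = None,  # "user:password"
) -> socket.socket:
    sock = socket.create_connection(proxy)
    request = (
        f"CONNECT {dst_host}:{dst_port} HTTP/1.1\r\n"
        f"Host: {dst_host}:{dst_port}\r\n"
    )
    if creds is not None:
        token = base64.b64encode(creds.encode("ascii")).decode("ascii")
        request += f"Proxy-Authorization: Basic {token}\r\n"
    request += "\r\n"
    sock.sendall(request.encode("ascii"))
    # Read the proxy's response headers (simplified: assumes no tunnel
    # bytes arrive in the same recv as the header block).
    response = b""
    while b"\r\n\r\n" not in response:
        chunk = sock.recv(4096)
        if not chunk:
            raise ConnectionError("proxy closed the connection mid-handshake")
        response += chunk
    status = response.split(b"\r\n", 1)[0].split()
    if len(status) < 2 or status[1] != b"200":
        raise ConnectionError(f"CONNECT rejected: {status!r}")
    # The socket is now a tunnel to dst_host:dst_port. This is the point
    # where proxyConnected() above hands the existing transport to the
    # wrapped protocol via makeConnection().
    return sock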

View File

@@ -92,7 +92,6 @@ class MatrixFederationAgent:
def __init__(
self,
server_name: str,
reactor: ISynapseReactor,
tls_client_options_factory: Optional[FederationPolicyForHTTPS],
user_agent: bytes,
@@ -101,11 +100,6 @@ class MatrixFederationAgent:
_srv_resolver: Optional[SrvResolver] = None,

_well_known_resolver: Optional[WellKnownResolver] = None,
):
"""
Args:
server_name: Our homeserver name (used to label metrics) (`hs.hostname`).
"""
# proxy_reactor is not the blocklisting reactor
proxy_reactor = reactor
@@ -133,7 +127,6 @@ class MatrixFederationAgent:
if _well_known_resolver is None:
_well_known_resolver = WellKnownResolver(
server_name,
reactor,
agent=BlocklistingAgentWrapper(
ProxyAgent(

View File

@@ -91,19 +91,12 @@ class WellKnownResolver:
def __init__(
self,
server_name: str,
reactor: IReactorTime,
agent: IAgent,
user_agent: bytes,
well_known_cache: Optional[TTLCache[bytes, Optional[bytes]]] = None,
had_well_known_cache: Optional[TTLCache[bytes, bool]] = None,
):
"""
Args:
server_name: Our homeserver name (used to label metrics) (`hs.hostname`).
"""
self.server_name = server_name
self._reactor = reactor
self._clock = Clock(reactor)
@@ -141,13 +134,7 @@ class WellKnownResolver:
# TODO: should we linearise so that we don't end up doing two .well-known
# requests for the same server in parallel?
try:
with Measure(
self._clock,
name="get_well_known",
# This should be our homeserver where the code is running (used to
# label metrics)
server_name=self.server_name,
):
with Measure(self._clock, "get_well_known"):
result: Optional[bytes]
cache_period: float
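For context on what the `get_well_known` block is timing: the resolver fetches `https://<server_name>/.well-known/matrix/server` and, per the Matrix server-server API, reads the `m.server` key to find the delegated destination. A minimal synchronous sketch, leaving out the caching, retry and blocklisting layers the real class adds:

import json
from typing import Optional
from urllib.request import urlopen

def resolve_well_known(server_name: str, timeout: float = 10.0) -> Optional[str]:
    url = f"https://{server_name}/.well-known/matrix/server"
    try:
        with urlopen(url, timeout=timeout) as resp:
            result = json.loads(resp.read())
        # e.g. {"m.server": "matrix.example.com:443"}
        return result.get("m.server")
    except Exception:
        # Any failure means "no delegation": callers fall back to SRV
        # records or to connecting to server_name directly.
        return None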

View File

@@ -417,7 +417,6 @@ class MatrixFederationHttpClient:
if hs.get_instance_name() in outbound_federation_restricted_to:
# Talk to federation directly
federation_agent: IAgent = MatrixFederationAgent(
self.server_name,
self.reactor,
tls_client_options_factory,
user_agent.encode("ascii"),
@@ -698,11 +697,7 @@ class MatrixFederationHttpClient:
outgoing_requests_counter.labels(request.method).inc()
try:
with Measure(
self.clock,
name="outbound_request",
server_name=self.server_name,
):
with Measure(self.clock, "outbound_request"):
# we don't want all the fancy cookie and redirect handling
# that treq.request gives: just use the raw Agent.

View File

@@ -177,13 +177,6 @@ class MediaRepository:
else:
self.url_previewer = None
# We get the media upload limits and sort them in descending order of
# time period, so that we can apply some optimizations.
self.media_upload_limits = hs.config.media.media_upload_limits
self.media_upload_limits.sort(
key=lambda limit: limit.time_period_ms, reverse=True
)
def _start_update_recently_accessed(self) -> Deferred:
return run_as_background_process(
"update_recently_accessed_media", self._update_recently_accessed
@@ -292,16 +285,63 @@ class MediaRepository:
raise NotFoundError("Media ID has expired")
@trace
async def create_or_update_content(
async def update_content(
self,
media_id: str,
media_type: str,
upload_name: Optional[str],
content: IO,
content_length: int,
auth_user: UserID,
) -> None:
"""Update the content of the given media ID.
Args:
media_id: The media ID to replace.
media_type: The content type of the file.
upload_name: The name of the file, if provided.
content: A file like object that is the content to store
content_length: The length of the content
auth_user: The user_id of the uploader
"""
file_info = FileInfo(server_name=None, file_id=media_id)
sha256reader = SHA256TransparentIOReader(content)
# The wrapper implements the full IO interface by passing all other calls through
fname = await self.media_storage.store_file(sha256reader.wrap(), file_info)
sha256 = sha256reader.hexdigest()
should_quarantine = await self.store.get_is_hash_quarantined(sha256)
logger.info("Stored local media in file %r", fname)
if should_quarantine:
logger.warning(
"Media has been automatically quarantined as it matched existing quarantined media"
)
await self.store.update_local_media(
media_id=media_id,
media_type=media_type,
upload_name=upload_name,
media_length=content_length,
user_id=auth_user,
sha256=sha256,
quarantined_by="system" if should_quarantine else None,
)
try:
await self._generate_thumbnails(None, media_id, media_id, media_type)
except Exception as e:
logger.info("Failed to generate thumbnails: %s", e)
@trace
async def create_content(
self,
media_type: str,
upload_name: Optional[str],
content: IO,
content_length: int,
auth_user: UserID,
media_id: Optional[str] = None,
) -> MXCUri:
"""Create or update the content of the given media ID.
"""Store uploaded content for a local user and return the mxc URL
Args:
media_type: The content type of the file.
@@ -309,20 +349,16 @@ class MediaRepository:
content: A file like object that is the content to store
content_length: The length of the content
auth_user: The user_id of the uploader
media_id: The media ID to update if provided, otherwise creates
new media ID.
Returns:
The mxc url of the stored content
"""
is_new_media = media_id is None
if media_id is None:
media_id = random_string(24)
media_id = random_string(24)
file_info = FileInfo(server_name=None, file_id=media_id)
sha256reader = SHA256TransparentIOReader(content)
# The wrapper implements the full IO interface by passing all other calls through
sha256reader = SHA256TransparentIOReader(content)
fname = await self.media_storage.store_file(sha256reader.wrap(), file_info)
sha256 = sha256reader.hexdigest()
should_quarantine = await self.store.get_is_hash_quarantined(sha256)
@@ -334,56 +370,16 @@ class MediaRepository:
"Media has been automatically quarantined as it matched existing quarantined media"
)
# Check that the user has not exceeded any of the media upload limits.
# This is the total size of media uploaded by the user in the last
# `time_period_ms` milliseconds, or None if we haven't checked yet.
uploaded_media_size: Optional[int] = None
# Note: the media upload limits are sorted so larger time periods are
# first.
for limit in self.media_upload_limits:
# We only need to check the amount of media uploaded by the user in
# this latest (smaller) time period if the amount of media uploaded
# in a previous (larger) time period is above the limit.
#
# This optimization means that in the common case where the user
# hasn't uploaded much media, we only need to query the database
# once.
if (
uploaded_media_size is None
or uploaded_media_size + content_length > limit.max_bytes
):
uploaded_media_size = await self.store.get_media_uploaded_size_for_user(
user_id=auth_user.to_string(), time_period_ms=limit.time_period_ms
)
if uploaded_media_size + content_length > limit.max_bytes:
raise SynapseError(
400, "Media upload limit exceeded", Codes.RESOURCE_LIMIT_EXCEEDED
)
if is_new_media:
await self.store.store_local_media(
media_id=media_id,
media_type=media_type,
time_now_ms=self.clock.time_msec(),
upload_name=upload_name,
media_length=content_length,
user_id=auth_user,
sha256=sha256,
quarantined_by="system" if should_quarantine else None,
)
else:
await self.store.update_local_media(
media_id=media_id,
media_type=media_type,
upload_name=upload_name,
media_length=content_length,
user_id=auth_user,
sha256=sha256,
quarantined_by="system" if should_quarantine else None,
)
await self.store.store_local_media(
media_id=media_id,
media_type=media_type,
time_now_ms=self.clock.time_msec(),
upload_name=upload_name,
media_length=content_length,
user_id=auth_user,
sha256=sha256,
quarantined_by="system" if should_quarantine else None,
)
try:
await self._generate_thumbnails(None, media_id, media_id, media_type)
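The removed limit check depends on `media_upload_limits` being sorted largest time period first (see the constructor hunk earlier in this file). Restated standalone, with `usage_in_period` standing in for `get_media_uploaded_size_for_user` and a plain exception in place of `SynapseError`:

from typing import Callable, Optional, Sequence, Tuple

def check_upload_limits(
    limits: Sequence[Tuple[int, int]],  # (time_period_ms, max_bytes), descending period
    content_length: int,
    usage_in_period: Callable[[int], int],
) -> None:
    uploaded: Optional[int] = None
    for time_period_ms, max_bytes in limits:
        # Usage over a shorter window is at most the usage over a longer
        # one, so if the previous (longer-window) total already fits
        # under this limit, the shorter window must fit too and the
        # database query can be skipped.
        if uploaded is None or uploaded + content_length > max_bytes:
            uploaded = usage_in_period(time_period_ms)
            if uploaded + content_length > max_bytes:
                raise RuntimeError("Media upload limit exceeded")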

View File

@@ -133,7 +133,7 @@ def decode_body(
content_type: The Content-Type header.
Returns:
The parsed HTML body, or None if an error occurred during processing.
The parsed HTML body, or None if an error occurred during processing.
"""
# If there's no body, nothing useful is going to be found.
if not body:
@@ -158,31 +158,9 @@ def decode_body(
# Create an HTML parser.
parser = etree.HTMLParser(recover=True, encoding=encoding)
# Attempt to parse the body. With `lxml` 6.0.0+, this will be an empty HTML
# tree if the body was successfully parsed, but no tree was found. In
# previous `lxml` versions, `etree.fromstring` would return `None` in that
# case.
html_tree = etree.fromstring(body, parser)
# Account for the above referenced case where `html_tree` is an HTML tree
# with an empty body. If so, return None.
if html_tree is not None and html_tree.tag == "html":
# If the tree has only a single <body> element and it's empty, then
# return None.
body_el = html_tree.find("body")
if body_el is not None and len(html_tree) == 1:
# Extract the content of the body tag as text.
body_text = "".join(cast(Iterable[str], body_el.itertext()))
# Strip any undecodable Unicode characters and whitespace.
body_text = body_text.strip("\ufffd").strip()
# If there's no text left, and there were no child tags,
# then we consider the <body> tag empty.
if not body_text and len(body_el) == 0:
return None
return html_tree
# Attempt to parse the body. Returns None if the body was successfully
# parsed, but no tree was found.
return etree.fromstring(body, parser)
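The deleted branch here worked around a behaviour change in lxml 6.0.0; a standalone reconstruction of the guard, should it ever be needed again:

from typing import Optional

from lxml import etree

def parse_html_body(body: bytes, encoding: str = "utf-8") -> Optional["etree._Element"]:
    parser = etree.HTMLParser(recover=True, encoding=encoding)
    tree = etree.fromstring(body, parser)
    if tree is None:
        # lxml < 6.0.0: no recoverable tree at all.
        return None
    if tree.tag == "html" and len(tree) == 1:
        body_el = tree.find("body")
        if body_el is not None and len(body_el) == 0:
            # Strip undecodable characters and whitespace; if nothing is
            # left, the tree is effectively empty, matching the old None.
            text = "".join(body_el.itertext()).strip("\ufffd").strip()
            if not text:
                return None
    return tree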
def _get_meta_tags(

Some files were not shown because too many files have changed in this diff.