Compare commits

2 Commits: develop ... erikj/dele

| Author | SHA1 | Date |
|---|---|---|
|  | 80b73196c3 |  |
|  | c10b78469f |  |
@@ -7,4 +7,4 @@ if command -v yum &> /dev/null; then
 fi

 # Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal
+curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
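Review note on the hunk above: swapping `stable` for `1.82.0` pins the toolchain the installer sets up, so the compiler no longer drifts as upstream publishes new stable releases. A quick local check of what the installer configured (a sketch; `rustup show active-toolchain` is standard rustup):

    # Put cargo/rustc on PATH for the current shell, then confirm the default toolchain.
    source "$HOME/.cargo/env"
    rustup show active-toolchain   # expect something like: 1.82.0-x86_64-unknown-linux-gnu (default)
    rustc --version                # expect: rustc 1.82.0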
1  .gitattributes  vendored
@@ -1 +0,0 @@
-.github/workflows/* merge=ours
155  .github/workflows/docker.yml  vendored  Normal file
@@ -0,0 +1,155 @@
# GitHub actions workflow which builds and publishes the docker images.

name: Build docker images

on:
  push:
    tags: ["v*"]
    branches: [master, main, develop]
  workflow_dispatch:

permissions:
  contents: read
  packages: write
  id-token: write # needed for signing the images with GitHub OIDC Token

jobs:
  build:
    name: Build and push image for ${{ matrix.platform }}
    runs-on: ${{ matrix.runs_on }}
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runs_on: ubuntu-24.04
            suffix: linux-amd64
          - platform: linux/arm64
            runs_on: ubuntu-24.04-arm
            suffix: linux-arm64
    steps:
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Extract version from pyproject.toml
        # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
        # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
        shell: bash
        run: |
          echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV

      - name: Log in to DockerHub
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          push: true
          labels: |
            gitsha1=${{ github.sha }}
            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
          tags: |
            docker.io/matrixdotorg/synapse
            ghcr.io/element-hq/synapse
          file: "docker/Dockerfile"
          platforms: ${{ matrix.platform }}
          outputs: type=image,push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v5
        with:
          name: digests-${{ matrix.suffix }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    name: Push merged images to ${{ matrix.repository }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        repository:
          - docker.io/matrixdotorg/synapse
          - ghcr.io/element-hq/synapse

    needs:
      - build
    steps:
      - name: Download digests
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true

      - name: Log in to DockerHub
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'docker.io') }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Install Cosign
        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

      - name: Calculate docker image tag
        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: ${{ matrix.repository }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
            type=pep440,pattern={{raw}}
            type=sha

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        env:
          REPOSITORY: ${{ matrix.repository }}
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf "$REPOSITORY@sha256:%s " *)

      - name: Sign each manifest
        env:
          REPOSITORY: ${{ matrix.repository }}
        run: |
          DIGESTS=""
          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
          done
          cosign sign --yes $DIGESTS
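Note on the two jobs above: this is the push-by-digest pattern for multi-arch images. Each platform build pushes an untagged image and exports its digest as an artifact; the merge job then combines the digests into a single tagged manifest list per repository and signs it. A minimal by-hand sketch of the merge step, using hypothetical digest values in place of the artifact files:

    # Hypothetical digests from the amd64 and arm64 builds.
    REPOSITORY=docker.io/matrixdotorg/synapse
    AMD64_DIGEST=sha256:aaaa1111...
    ARM64_DIGEST=sha256:bbbb2222...

    # Stitch both per-platform images into one tagged manifest list.
    docker buildx imagetools create \
      -t "$REPOSITORY:develop" \
      "$REPOSITORY@$AMD64_DIGEST" "$REPOSITORY@$ARM64_DIGEST"

    # Inspect: one tag resolving to entries for both platforms.
    docker buildx imagetools inspect "$REPOSITORY:develop"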
71  .github/workflows/docs-pr.yaml  vendored  Normal file
@@ -0,0 +1,71 @@
name: Prepare documentation PR preview

on:
  pull_request:
    paths:
      - docs/**
      - book.toml
      - .github/workflows/docs-pr.yaml
      - scripts-dev/schema_versions.py

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      - name: Upload Artifact
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: book
          path: book
          # We'll only use this in a workflow_run, then we're done with it
          retention-days: 1

  link-check:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup htmltest
        run: |
          wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
          echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb  htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
          tar zxf htmltest_0.17.0_linux_amd64.tar.gz

      - name: Test links with htmltest
        # Build the book with `./` as the site URL (to make checks on 404.html possible)
        # Then run htmltest (without checking external links since that involves the network and is slow).
        run: |
          MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
          ./htmltest book --skip-external
99  .github/workflows/docs.yaml  vendored  Normal file
@@ -0,0 +1,99 @@
name: Deploy the documentation

on:
  push:
    branches:
      # For bleeding-edge documentation
      - develop
      # For documentation specific to a release
      - 'release-v*'
      # stable docs
      - master

  workflow_dispatch:

jobs:
  pre:
    name: Calculate variables for GitHub Pages deployment
    runs-on: ubuntu-latest
    steps:
      # Figure out the target directory.
      #
      # The target directory depends on the name of the branch
      #
      - name: Get the target directory name
        id: vars
        run: |
          # first strip the 'refs/heads/' prefix with some shell foo
          branch="${GITHUB_REF#refs/heads/}"

          case $branch in
              release-*)
                  # strip 'release-' from the name for release branches.
                  branch="${branch#release-}"
                  ;;
              master)
                  # deploy to "latest" for the master branch.
                  branch="latest"
                  ;;
          esac

          # finally, set the 'branch-version' var.
          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
    outputs:
      branch-version: ${{ steps.vars.outputs.branch-version }}

  ################################################################################
  pages-docs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    needs:
      - pre
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Set version of docs
        run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js

      - name: Setup python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      - name: Prepare and publish schema files
        run: |
          sudo apt-get update && sudo apt-get install -y yq
          mkdir -p book/schema
          # Remove developer notice before publishing.
          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
          # Copy schema files that are independent from current Synapse version.
          cp -r -t book/schema schema/v*/
          # Convert config schema from YAML source file to JSON.
          yq < schema/synapse-config.schema.yaml \
            > book/schema/synapse-config.schema.json

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
          destination_dir: ./${{ needs.pre.outputs.branch-version }}
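Note on the `pre` job above: the branch-to-directory mapping is plain shell and can be sanity-checked locally by setting GITHUB_REF by hand (branch names below are examples):

    # Reproduce the target-directory calculation from the 'pre' job.
    for GITHUB_REF in refs/heads/develop refs/heads/release-v1.120 refs/heads/master; do
      branch="${GITHUB_REF#refs/heads/}"
      case $branch in
        release-*) branch="${branch#release-}" ;;   # release-v1.120 -> v1.120
        master)    branch="latest" ;;               # stable docs deploy to ./latest
      esac
      echo "$GITHUB_REF -> ./$branch"
    done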
52  .github/workflows/fix_lint.yaml  vendored  Normal file
@@ -0,0 +1,52 @@
# A helper workflow to automatically fixup any linting errors on a PR. Must be
# triggered manually.

name: Attempt to automatically fix linting errors

on:
  workflow_dispatch:

env:
  # We use nightly so that `fmt` correctly groups together imports, and
  # clippy correctly fixes up the benchmarks.
  RUST_VERSION: nightly-2025-06-24

jobs:
  fixup:
    name: Fix up
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
          components: clippy, rustfmt
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          install-project: "false"
          poetry-version: "2.1.1"

      - name: Run ruff check
        continue-on-error: true
        run: poetry run ruff check --fix .

      - name: Run ruff format
        continue-on-error: true
        run: poetry run ruff format --quiet .

      - run: cargo clippy --all-features --fix -- -D warnings
        continue-on-error: true

      - run: cargo fmt
        continue-on-error: true

      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
        with:
          commit_message: "Attempt to fix linting"
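Note: the fixup workflow only automates commands that run equally well locally. The equivalent sequence, same tools and flags as the steps above minus the auto-commit, assuming a nightly toolchain is installed as the workflow's RUST_VERSION implies:

    poetry run ruff check --fix .   # auto-fix Python lint errors
    poetry run ruff format .        # reformat Python code
    cargo +nightly clippy --all-features --fix -- -D warnings   # auto-fix Rust lints, incl. benches
    cargo +nightly fmt              # reformat Rust code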
243  .github/workflows/latest_deps.yml  vendored  Normal file
@@ -0,0 +1,243 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
  schedule:
    - cron: 0 7 * * *
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  RUST_VERSION: 1.87.0

jobs:
  check_repo:
    # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
    # only useful to the Synapse core team.
    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
    # of the workflow will be skipped as well.
    runs-on: ubuntu-latest
    outputs:
      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
    steps:
      - id: check_condition
        run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"

  mypy:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          poetry-version: "2.1.1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
      # `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --without dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
      - name: Remove unhelpful options from mypy config
        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy

  trial:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - database: "sqlite"
          - database: "postgres"
            postgres-version: "14"

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true


  sytest:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
      volumes:
        - ${{ github.workspace }}:/src
    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: bookworm

          - sytest-tag: bookworm
            postgres: postgres
            workers: workers
            redis: redis
    env:
      POSTGRES: ${{ matrix.postgres && 1 }}
      WORKERS: ${{ matrix.workers && 1 }}
      REDIS: ${{ matrix.redis && 1 }}
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
        working-directory: /src
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*


  complement:
    needs: check_repo
    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Check out synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      - run: |
          set -o pipefail
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # Open an issue if the build fails, so we know about it.
  # Only do this if we're not experimenting with this action in a PR.
  open-issue:
    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
    needs:
      # TODO: should mypy be included here? It feels more brittle than the others.
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
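Note on the sytest job above: `${{ matrix.postgres && 1 }}` uses GitHub expression short-circuiting to turn an optional matrix key into an env var that is either `1` or empty. Scripts inside the container can then gate on simple truthiness; a sketch of how such a flag is typically consumed (illustrative, not sytest's actual code):

    # POSTGRES is "1" when the matrix entry sets postgres, empty otherwise.
    if [ -n "$POSTGRES" ]; then
      echo "running against postgres"
    else
      echo "running against sqlite"
    fi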
24  .github/workflows/poetry_lockfile.yaml  vendored  Normal file
@@ -0,0 +1,24 @@
on:
  push:
    branches: ["develop", "release-*"]
    paths:
      - poetry.lock
  pull_request:
    paths:
      - poetry.lock

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  check-sdists:
    name: "Check locked dependencies have sdists"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: '3.x'
      - run: pip install tomli
      - run: ./scripts-dev/check_locked_deps_have_sdists.py
74  .github/workflows/push_complement_image.yml  vendored  Normal file
@@ -0,0 +1,74 @@
# This task does not run complement tests, see tests.yaml instead.
# This task does not build docker images for synapse for use on docker hub, see docker.yaml instead

name: Store complement-synapse image in ghcr.io
on:
  push:
    branches: [ "master" ]
  schedule:
    - cron: '0 5 * * *'
  workflow_dispatch:
    inputs:
      branch:
        required: true
        default: 'develop'
        type: choice
        options:
          - develop
          - master

# Only run this action once per pull request/branch; restart if a new commit arrives.
# C.f. https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
# and https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build:
    name: Build and push complement image
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout specific branch (debug build)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'workflow_dispatch'
        with:
          ref: ${{ inputs.branch }}
      - name: Checkout clean copy of develop (scheduled build)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'schedule'
        with:
          ref: develop
      - name: Checkout clean copy of master (on-push)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'push'
        with:
          ref: master
      - name: Login to registry
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Work out labels for complement image
        id: meta
        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: ghcr.io/${{ github.repository }}/complement-synapse
          tags: |
            type=schedule,pattern=nightly,enable=${{ github.event_name == 'schedule'}}
            type=raw,value=develop,enable=${{ github.event_name == 'schedule' || inputs.branch == 'develop' }}
            type=raw,value=latest,enable=${{ github.event_name == 'push' || inputs.branch == 'master' }}
            type=sha,format=long
      - name: Run scripts-dev/complement.sh to generate complement-synapse:latest image.
        run: scripts-dev/complement.sh --build-only
      - name: Tag and push generated image
        run: |
          for TAG in ${{ join(fromJson(steps.meta.outputs.json).tags, ' ') }}; do
            echo "tag and push $TAG"
            docker tag complement-synapse $TAG
            docker push $TAG
          done
208  .github/workflows/release-artifacts.yml  vendored  Normal file
@@ -0,0 +1,208 @@
# GitHub actions workflow which builds the release artifacts.

name: Build release artifacts

on:
  # we build on PRs and develop to (hopefully) get early warning
  # of things breaking (but only build one set of debs). PRs skip
  # building wheels on macOS & ARM.
  pull_request:
  push:
    branches: ["develop", "release-*"]

    # we do the full build on tags.
    tags: ["v*"]
  merge_group:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  get-distros:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
              dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "distros=$dists" >> "$GITHUB_OUTPUT"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}

  # now build the packages with a matrix build.
  build-debs:
    needs: get-distros
    name: "Build .deb packages"
    runs-on: ubuntu-latest
    strategy:
      matrix:
        distro: ${{ fromJson(needs.get-distros.outputs.distros) }}

    steps:
      - name: Checkout
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
          ./src/scripts-dev/build_debian_packages.py \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
            --docker-build-arg=--load \
            "${{ matrix.distro }}"
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Artifact name
        id: artifact-name
        # We can't have colons in the upload name of the artifact, so we convert
        # e.g. `debian:sid` to `sid`.
        env:
          DISTRO: ${{ matrix.distro }}
        run: |
          echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
          path: debs/*

  build-wheels:
    name: Build wheels on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os:
          - ubuntu-24.04
          - ubuntu-24.04-arm
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
        is_pr:
          - ${{ startsWith(github.ref, 'refs/pull/') }}

        exclude:
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
            os: "ubuntu-24.04-arm"

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==3.2.1

      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        env:
          # The platforms that we build for are determined by the
          # `tool.cibuildwheel.skip` option in `pyproject.toml`.

          # We skip testing wheels for the following platforms in CI:
          #
          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
          # musl: (TODO: investigate).
          CIBW_TEST_SKIP: pp3*-* *musl*

      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: Wheel-${{ matrix.os }}
          path: ./wheelhouse/*.whl

  build-sdist:
    name: Build sdist
    runs-on: ubuntu-latest
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.10"

      - run: pip install build

      - name: Build sdist
        run: python -m build --sdist

      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: Sdist
          path: dist/*.tar.gz

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
    name: "Attach assets to release"
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-wheels
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
      - name: Build a tarball for the debs
        # We need to merge all the debs uploads into one folder, then compress
        # that.
        run: |
          mkdir debs
          mv debs*/* debs/
          tar -cvJf debs.tar.xz debs
      - name: Attach to release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release upload "${{ github.ref_name }}" \
            Sdist/* \
            Wheel*/* \
            debs.tar.xz \
            --repo ${{ github.repository }}
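Note on the artifact-name step above: `${DISTRO#*:}` is POSIX parameter expansion that deletes the shortest leading match of `*:`, which is how `debian:sid` becomes `sid` (the second value below is just an example):

    DISTRO=debian:sid
    echo "${DISTRO#*:}"    # -> sid
    DISTRO=ubuntu:noble
    echo "${DISTRO#*:}"    # -> noble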
57  .github/workflows/schema.yaml  vendored  Normal file
@@ -0,0 +1,57 @@
name: Schema

on:
  pull_request:
    paths:
      - schema/**
      - docs/usage/configuration/config_documentation.md
  push:
    branches: ["develop", "release-*"]
  workflow_dispatch:

jobs:
  validate-schema:
    name: Ensure Synapse config schema is valid
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - name: Install check-jsonschema
        run: pip install check-jsonschema==0.33.0

      - name: Validate meta schema
        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
      - name: Validate schema
        run: |-
          # Please bump on introduction of a new meta schema.
          LATEST_META_SCHEMA_VERSION=v1
          check-jsonschema \
            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
            schema/synapse-config.schema.yaml
      - name: Validate default config
        # Populates the empty instance with default values and checks against the schema.
        run: |-
          echo "{}" | check-jsonschema \
            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -

  check-doc-generation:
    name: Ensure generated documentation is up-to-date
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - name: Install PyYAML
        run: pip install PyYAML==6.0.2

      - name: Regenerate config documentation
        run: |
          scripts-dev/gen_config_documentation.py \
            schema/synapse-config.schema.yaml \
            > docs/usage/configuration/config_documentation.md
      - name: Error in case of any differences
        # Errors if there are now any modified files (untracked files are ignored).
        run: 'git diff --exit-code'
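Note on the default-config check above: `--fill-defaults` makes check-jsonschema populate defaults from the schema before validating, so piping `{}` through it effectively validates Synapse's default configuration. The same invocation should also work for hand-written fragments (the key below is an illustrative example, not taken from this diff):

    # Validate a minimal config fragment against the same schema.
    echo '{"server_name": "example.com"}' | check-jsonschema \
      --fill-defaults --schemafile=schema/synapse-config.schema.yaml -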
790
.github/workflows/tests.yml
vendored
Normal file
790
.github/workflows/tests.yml
vendored
Normal file
@@ -0,0 +1,790 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
pull_request:
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
RUST_VERSION: 1.87.0
|
||||
|
||||
jobs:
|
||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
||||
# don't modify Rust code.
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
||||
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
|
||||
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
|
||||
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
||||
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
|
||||
steps:
|
||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
id: filter
|
||||
# We only check on PRs
|
||||
if: startsWith(github.ref, 'refs/pull/')
|
||||
with:
|
||||
filters: |
|
||||
rust:
|
||||
- 'rust/**'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- '.rustfmt.toml'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
trial:
|
||||
- 'synapse/**'
|
||||
- 'tests/**'
|
||||
- 'rust/**'
|
||||
- '.ci/scripts/calculate_jobs.py'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
integration:
|
||||
- 'synapse/**'
|
||||
- 'rust/**'
|
||||
- 'docker/**'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'docker/**'
|
||||
- '.ci/**'
|
||||
- 'scripts-dev/complement.sh'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
linting:
|
||||
- 'synapse/**'
|
||||
- 'docker/**'
|
||||
- 'tests/**'
|
||||
- 'scripts-dev/**'
|
||||
- 'contrib/**'
|
||||
- 'synmark/**'
|
||||
- 'stubs/**'
|
||||
- '.ci/**'
|
||||
- 'mypy.ini'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
linting_readme:
|
||||
- 'README.rst'
|
||||
|
||||
check-sampleconfig:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "2.1.1"
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||
- run: poetry run scripts-dev/config-lint.sh
|
||||
|
||||
check-schema-delta:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
check-lockfile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: .ci/scripts/check_lockfile.py
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
poetry-version: "2.1.1"
|
||||
install-project: "false"
|
||||
|
||||
- name: Run ruff check
|
||||
run: poetry run ruff check --output-format=github .
|
||||
|
||||
- name: Run ruff format
|
||||
run: poetry run ruff format --check .
|
||||
|
||||
lint-mypy:
|
||||
runs-on: ubuntu-latest
|
||||
name: Typechecking
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
# We want to make use of type hints in optional dependencies too.
|
||||
extras: all
|
||||
# We have seen odd mypy failures that were resolved when we started
|
||||
# installing the project again:
|
||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
||||
# To make CI green, err towards caution and install the project.
|
||||
install-project: "true"
|
||||
poetry-version: "2.1.1"
|
||||
|
||||
# Cribbed from
|
||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
||||
- name: Restore/persist mypy's cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: |
|
||||
.mypy_cache
|
||||
key: mypy-cache-${{ github.context.sha }}
|
||||
restore-keys: mypy-cache-
|
||||
|
||||
- name: Run mypy
|
||||
run: poetry run mypy
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- name: Check line endings
|
||||
run: scripts-dev/check_line_terminators.sh
|
||||
|
||||
lint-newsfile:
|
||||
# Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
|
||||
if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||
- run: scripts-dev/check-newsfragment.sh
|
||||
env:
|
||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||
|
||||
lint-clippy:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
components: clippy
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- run: cargo clippy -- -D warnings
|
||||
|
||||
# We also lint against a nightly rustc so that we can lint the benchmark
|
||||
# suite, which requires a nightly compiler.
|
||||
lint-clippy-nightly:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: nightly-2025-04-23
|
||||
components: clippy
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- run: cargo clippy --all-features -- -D warnings
|
||||
|
||||
lint-rust:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
# Install like a normal project from source with all optional dependencies
|
||||
extras: all
|
||||
install-project: "true"
|
||||
poetry-version: "2.1.1"
|
||||
|
||||
- name: Ensure `Cargo.lock` is up to date (no stray changes after install)
|
||||
# The `::error::` syntax is using GitHub Actions' error annotations, see
|
||||
# https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
|
||||
run: |
|
||||
if git diff --quiet Cargo.lock; then
|
||||
echo "Cargo.lock is up to date"
|
||||
else
|
||||
echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
|
||||
git diff --exit-code Cargo.lock
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# This job is split from `lint-rust` because it requires a nightly Rust toolchain
|
||||
# for some of the unstable options we use in `.rustfmt.toml`.
|
||||
lint-rustfmt:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
# We use nightly so that we can use some unstable options that we use in
|
||||
# `.rustfmt.toml`.
|
||||
toolchain: nightly-2025-04-23
|
||||
components: rustfmt
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- run: cargo fmt --check
|
||||
|
||||
# This is to detect issues with the rst file, which can otherwise cause issues
|
||||
# when uploading packages to PyPi.
|
||||
lint-readme:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install rstcheck"
|
||||
- run: "rstcheck --report-level=WARNING README.rst"
|
||||
|
||||
# Dummy step to gate other tests on without repeating the whole list
|
||||
linting-done:
|
||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||
needs:
|
||||
- lint
|
||||
- lint-mypy
|
||||
- lint-crlf
|
||||
- lint-newsfile
|
||||
- check-sampleconfig
|
||||
- check-schema-delta
|
||||
- check-lockfile
|
||||
- lint-clippy
|
||||
- lint-clippy-nightly
|
||||
- lint-rust
|
||||
- lint-rustfmt
|
||||
- lint-readme
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
||||
with:
|
||||
needs: ${{ toJSON(needs) }}
|
||||
|
||||
# Various bits are skipped if there was no applicable changes.
|
||||
skippable: |
|
||||
check-sampleconfig
|
||||
check-schema-delta
|
||||
lint
|
||||
lint-mypy
|
||||
lint-newsfile
|
||||
lint-clippy
|
||||
lint-clippy-nightly
|
||||
lint-rust
|
||||
lint-rustfmt
|
||||
lint-readme
|
||||
|
||||
|
||||
calculate-test-jobs:
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- id: get-matrix
|
||||
run: .ci/scripts/calculate_jobs.py
|
||||
outputs:
|
||||
trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
|
||||
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
||||
|
||||
trial:
|
||||
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
|
||||
needs:
|
||||
- calculate-test-jobs
|
||||
- changes
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||
if: ${{ matrix.job.postgres-version }}
|
||||
# 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
|
||||
# 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
--tmpfs /var/lib/postgres:rw,size=6144m \
|
||||
--mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.job.postgres-version }}
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
python-version: ${{ matrix.job.python-version }}
|
||||
poetry-version: "2.1.1"
|
||||
extras: ${{ matrix.job.extras }}
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.job.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: poetry run trial --jobs=6 tests
|
||||
env:
|
||||
SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: /var/run/postgresql
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
trial-olddeps:
|
||||
# Note: sqlite only; no postgres
|
||||
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
|
||||
needs:
|
||||
- linting-done
|
||||
- changes
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
# There aren't wheels for some of the older deps, so we need to install
|
||||
# their build dependencies
|
||||
- run: |
|
||||
sudo apt-get -qq update
|
||||
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Prepare old deps
|
||||
# Note: we install using `uv` here, not poetry or pip to allow us to test with the
|
||||
# minimum version of all dependencies, both those explicitly specified and those
|
||||
# implicitly brought in by the explicit dependencies.
|
||||
run: |
|
||||
pip install uv
|
||||
uv pip install --system --resolution=lowest .[all,test]
|
||||
|
||||
# We nuke the local copy, as we've installed synapse into the virtualenv
|
||||
# (rather than use an editable install, which we no longer support). If we
|
||||
# don't do this then python can't find the native lib.
|
||||
- run: rm -rf synapse/
|
||||
|
||||
# Sanity check we can import/run Synapse
|
||||
- run: python -m synapse.app.homeserver --help
|
||||
|
||||
- run: python -m twisted.trial -j6 tests
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'.
    # Note: SQLite only; no Postgres. Completely untested since the poetry move.
    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["pypy-3.10"]
        extras: ["all"]

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: ${{ matrix.python-version }}
          poetry-version: "2.1.1"
          extras: ${{ matrix.extras }}
      - run: poetry run trial --jobs=2 tests
      - name: Dump logs
        # Logs are most useful when the command fails; always include them.
        if: ${{ always() }}
        # Note: dumps to workflow logs instead of using actions/upload-artifact.
        # This keeps logs colocated with failing jobs.
        # It also ignores find's exit code; this is a best-effort affair.
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true
  sytest:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
    needs:
      - calculate-test-jobs
      - changes
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        # If this is a pull request to a release branch, use that branch as the default branch for sytest; otherwise use develop.
        # This works because the release script always creates a branch on the sytest repo with the same name as the release branch.
        SYTEST_DEFAULT_BRANCH: ${{ startsWith(github.base_ref, 'release-') && github.base_ref || 'develop' }}
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1 }}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*
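As an aside, the `SYTEST_DEFAULT_BRANCH` expression above uses the GitHub Actions `cond && a || b` idiom, which behaves like Python's old `and`/`or` ternary trick. A minimal Python analogue (illustrative branch names only):

```python
# `cond && a || b` picks `a` when cond is truthy, else `b` -- the same as
# Python's `cond and a or b`. Safe here because a branch name is never falsy.
base_ref = "release-v1.100"  # hypothetical PR base branch
default_branch = base_ref.startswith("release-") and base_ref or "develop"
assert default_branch == "release-v1.100"
```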
  export-data:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
    needs: [linting-done, portdb, changes]
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}

    services:
      postgres:
        image: postgres
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          poetry-version: "2.1.1"
          extras: "postgres"
      - run: .ci/scripts/test_export_data_command.sh
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres

  portdb:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - python-version: "3.10"
            postgres-version: "14"

          - python-version: "3.14"
            postgres-version: "17"

    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
        # behind new releases, so we have to use the PostgreSQL apt repository.
        # Steps taken from https://www.postgresql.org/download/linux/ubuntu/
        run: |
          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
          sudo apt-get update
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: ${{ matrix.python-version }}
          poetry-version: "2.1.1"
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh
        id: run_tester_script
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres
      - name: "Upload schema differences"
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
        with:
          name: Schema dumps
          path: |
            unported.sql
            ported.sql
            schema_diff
  complement:
    if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Checkout synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      # Use `-p 1` concurrency, as GHA boxes are underpowered and don't like
      # running tons of Synapses at once.
      - run: |
          set -o pipefail
          COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -p 1 -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        env:
          POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
          WORKERS: ${{ (matrix.arrangement == 'workers') && 1 || '' }}
        name: Run Complement Tests
  cargo-test:
    if: ${{ needs.changes.outputs.rust == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo test
  # We want to ensure that the cargo benchmarks still compile, which requires a
  # nightly compiler.
  cargo-bench:
    if: ${{ needs.changes.outputs.rust == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: nightly-2022-12-01
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo bench --no-run
  # A job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
      - cargo-test
      - cargo-bench
      - linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
        with:
          needs: ${{ toJSON(needs) }}

          # Various bits are skipped if there were no applicable changes.
          # The newsfile lint may be skipped on non-PR builds.
          skippable: |
            trial
            trial-olddeps
            sytest
            portdb
            export-data
            complement
            lint-newsfile
            cargo-test
            cargo-bench
14
.github/workflows/triage-incoming.yml
vendored
Normal file
@@ -0,0 +1,14 @@
name: Move new issues into the issue triage board

on:
  issues:
    types: [ opened ]

jobs:
  triage:
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
    secrets:
      github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
31
.github/workflows/triage_labelled.yml
vendored
Normal file
@@ -0,0 +1,31 @@
name: Move labelled issues to correct projects

on:
  issues:
    types: [ labeled ]

jobs:
  move_needs_info:
    runs-on: ubuntu-latest
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
    permissions:
      contents: read
    env:
      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
      PROJECT_OWNER: matrix-org
      # Backend issue triage board.
      # https://github.com/orgs/matrix-org/projects/67/views/1
      PROJECT_NUMBER: 67
      ISSUE_URL: ${{ github.event.issue.html_url }}
      # This field is case-sensitive.
      TARGET_STATUS: Needs info
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Only clone the script file we care about, instead of the whole repo.
          sparse-checkout: .ci/scripts/triage_labelled_issue.sh

      - name: Ensure issue exists on the board, then set Status
        run: .ci/scripts/triage_labelled_issue.sh
226
.github/workflows/twisted_trunk.yml
vendored
Normal file
@@ -0,0 +1,226 @@
name: Twisted Trunk

on:
  schedule:
    - cron: 0 8 * * *

  workflow_dispatch:
    # NB: inputs are only present when this workflow is dispatched manually.
    # (The default below is the default field value in the form used to trigger
    # a manual dispatch.) Otherwise the inputs will evaluate to null.
    inputs:
      twisted_ref:
        description: Commit, branch or tag to checkout from upstream Twisted.
        required: false
        default: 'trunk'
        type: string


concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  RUST_VERSION: 1.87.0

jobs:
  check_repo:
    # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
    # only useful to the Synapse core team.
    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
    # of the workflow will be skipped as well.
    if: github.repository == 'element-hq/synapse'
    runs-on: ubuntu-latest
    outputs:
      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
    steps:
      - id: check_condition
        run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"
  mypy:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          extras: "all"
          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
          poetry install --no-interaction --extras "all test"
      - name: Remove unhelpful options from mypy config
        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          extras: "all test"
          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - run: poetry run trial --jobs 2 tests

      - name: Dump logs
        # Logs are most useful when the command fails; always include them.
        if: ${{ always() }}
        # Note: dumps to workflow logs instead of using actions/upload-artifact.
        # This keeps logs colocated with failing jobs.
        # It also ignores find's exit code; this is a best-effort affair.
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true
  sytest:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
      # We're using bookworm because that's what Debian oldstable is at the time of writing.
      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
      image: matrixdotorg/sytest-synapse:bookworm
      volumes:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        # but the sytest-synapse container expects it to be in /venv/.
        # We symlink it before running poetry so that poetry actually
        # ends up installing to `/venv`.
        run: |
          ln -s -T /venv /src/.venv
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
        env:
          # Use offline mode to avoid reinstalling the pinned version of
          # twisted.
          OFFLINE: 1
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*
  complement:
    needs: check_repo
    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Checkout synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      # This step is specific to the 'Twisted trunk' test run:
      - name: Patch dependencies
        run: |
          set -x
          DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
          pipx install poetry==2.1.1

          poetry remove -n twisted
          poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry lock
        working-directory: synapse

      - run: |
          set -o pipefail
          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests
  # Open an issue if the build fails, so we know about it.
  open-issue:
    if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
    needs:
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/twisted_trunk_build_failed_issue_template.md
@@ -1 +0,0 @@
Fix a bug where Mastodon posts (and possibly other embeds) have the wrong description for URL previews.
@@ -1 +0,0 @@
Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes.
@@ -1 +0,0 @@
Add an admin API for retrieving a paginated list of quarantined media.
@@ -1 +0,0 @@
Document the importance of `public_baseurl` when configuring OpenID Connect authentication.
@@ -1 +0,0 @@
Fix bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2.
@@ -1 +0,0 @@
Server admins can bypass the quarantine media check when downloading media by setting the `admin_unsafely_bypass_quarantine` query parameter to `true` on Client-Server API media download requests.
@@ -1 +0,0 @@
Implemented pagination for the [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) mutual rooms endpoint. Contributed by @tulir @ Beeper.
@@ -1 +0,0 @@
Admin API: add worker support to `GET /_synapse/admin/v2/users/<user_id>`.
@@ -1 +0,0 @@
Log the original bind exception when encountering `Failed to listen on 0.0.0.0, continuing because listening on [::]`.
@@ -1 +0,0 @@
Improve proxy support for the `federation_client.py` dev script. Contributed by Denis Kasak (@dkasak).
@@ -1 +0,0 @@
Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that macOS support has been dropped.
1
changelog.d/19306.misc
Normal file
@@ -0,0 +1 @@
Prune stale entries from `sliding_sync_connection_required_state` table.
@@ -73,33 +73,6 @@ Response:
}
```

## Listing all quarantined media

This API returns a list of all quarantined media on the server. It is paginated, and can be scoped to either local or
remote media. Note that the pagination values are scoped to the request parameters: changing the other parameters while
keeping the same pagination values will produce unexpected results.

Request:

```http
GET /_synapse/admin/v1/media/quarantined?from=0&limit=100&kind=local
```

`from` and `limit` are optional parameters, and default to `0` and `100` respectively. They are the row index and number
of rows to return - they are not timestamps.

`kind` *MUST* be either `local` or `remote`.

The API returns a JSON body containing MXC URIs for the quarantined media, like the following:

```json
{
    "media": [
        "mxc://localhost/xwvutsrqponmlkjihgfedcba",
        "mxc://localhost/abcdefghijklmnopqrstuvwx"
    ]
}
```
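For illustration, a minimal sketch (not part of the docs; the homeserver URL and token are hypothetical placeholders) of paging through this endpoint from Python:

```python
# Minimal sketch: page through the quarantined-media admin API.
import requests

BASE = "https://synapse.example.com"  # hypothetical homeserver
ACCESS_TOKEN = "..."  # an admin user's access token

media: list[str] = []
start, limit = 0, 100
while True:
    resp = requests.get(
        f"{BASE}/_synapse/admin/v1/media/quarantined",
        params={"from": start, "limit": limit, "kind": "local"},
        headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    )
    resp.raise_for_status()
    batch = resp.json()["media"]
    media.extend(batch)
    if len(batch) < limit:  # a short page means no more rows
        break
    start += limit
```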
# Quarantine media

Quarantining media means that it is marked as inaccessible by users. It applies
@@ -115,20 +88,6 @@ is quarantined, Synapse will:
- Quarantine any existing cached remote media.
- Quarantine any future remote media.

## Downloading quarantined media

Normally, when media is quarantined, it will return a 404 error when downloaded.
Admins can bypass this by adding `?admin_unsafely_bypass_quarantine=true`
to the [normal download URL](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1mediadownloadservernamemediaid).

Bypassing the quarantine check is not recommended. Media is typically quarantined
to prevent harmful content from being served to users, which includes admins. Only
set the bypass parameter if you intentionally want to access potentially harmful
content.

Non-admin users cannot bypass quarantine checks, even when specifying the above
query parameter.
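The bypass is just a query parameter on the regular download request; a minimal sketch (hypothetical homeserver, media coordinates and token):

```python
# Minimal sketch: an admin intentionally bypassing the quarantine check.
import requests

resp = requests.get(
    "https://synapse.example.com/_matrix/client/v1/media"
    "/download/example.com/abcdefghijklmnopqrstuvwx",
    params={"admin_unsafely_bypass_quarantine": "true"},
    headers={"Authorization": "Bearer ..."},  # must be a server admin's token
)
# Non-admin requesters still get a 404 here, per the note above.
```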

## Quarantining media by ID

This API quarantines a single piece of local or remote media.
@@ -505,55 +505,6 @@ with a body of:
}
```

## List room memberships of a user

Gets a list of room memberships for a specific `user_id`. This
endpoint differs from
[`GET /_synapse/admin/v1/users/<user_id>/joined_rooms`](#list-joined-rooms-of-a-user)
in that it returns rooms with memberships other than "join".

The API is:

```
GET /_synapse/admin/v1/users/<user_id>/memberships
```

A response body like the following is returned:

```json
{
    "memberships": {
        "!DuGcnbhHGaSZQoNQR:matrix.org": "join",
        "!ZtSaPCawyWtxfWiIy:matrix.org": "leave"
    }
}
```

which is a map of room membership states for the given user. This endpoint can
be used with both local and remote users, with the caveat that the homeserver will
only be aware of the memberships for rooms that one of its local users has joined.

Remote user memberships may also be out of date if all local users have since left
a room; the homeserver will thus no longer receive membership updates about it.

The list includes rooms that the user has since left; other membership states (knock,
invite, etc.) are also possible.

Note that rooms will only disappear from this list if they are
[purged](./rooms.md#delete-room-api) from the homeserver.

**Parameters**

The following parameters should be set in the URL:

- `user_id` - fully qualified: for example, `@user:server.com`.

**Response**

The following fields are returned in the JSON response body:

- `memberships` - A map of `room_id` (string) to `membership` state (string).
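Since the response is a plain map, post-processing is straightforward; a small sketch using the example response above:

```python
# Group the memberships map from the example response by membership state.
from collections import defaultdict

memberships = {
    "!DuGcnbhHGaSZQoNQR:matrix.org": "join",
    "!ZtSaPCawyWtxfWiIy:matrix.org": "leave",
}

by_state: dict[str, list[str]] = defaultdict(list)
for room_id, state in memberships.items():
    by_state[state].append(room_id)

print(by_state["leave"])  # rooms the user has left
```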

## List joined rooms of a user

Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
@@ -50,11 +50,6 @@ setting in your configuration file.
See the [configuration manual](usage/configuration/config_documentation.md#oidc_providers) for some sample settings, as well as
the text below for example configurations for specific providers.

For setups using [`.well-known` delegation](delegate.md), make sure
[`public_baseurl`](usage/configuration/config_documentation.md#public_baseurl) is set
appropriately. If unset, Synapse defaults to `https://<server_name>/`, which is used in
the OIDC callback URL.

## OIDC Back-Channel Logout

Synapse supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) notifications.
@@ -255,8 +255,6 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
    ^/_matrix/client/(r0|v3|unstable)/capabilities$
    ^/_matrix/client/(r0|v3|unstable)/notifications$

    # Admin API requests
    ^/_synapse/admin/v1/rooms/[^/]+$

    # Encryption requests
@@ -302,9 +300,6 @@ Additionally, the following REST endpoints can be handled for GET requests:
    # Presence requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/

    # Admin API requests
    ^/_synapse/admin/v2/users/[^/]+$

Pagination requests can also be handled, but all requests for a given
room must be routed to the same instance. Additionally, care must be taken to
ensure that the purge history admin API is not used while pagination requests
2
poetry.lock
generated
@@ -3542,4 +3542,4 @@ url-preview = ["lxml"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10.0,<4.0.0"
content-hash = "1caa5072f6304122c89377420f993a54f54587f3618ccc8094ec31642264592c"
content-hash = "abbbdff591a306b56cc8890dbb2f477ac5f1a2d328baa6409e01084abc655bbf"
@@ -109,12 +109,7 @@ dependencies = [
    "pyrsistent>=0.18.0", # via jsonschema
    "requests>=2.16.0", # 2.16.0+ no longer vendors urllib3, avoiding Python 3.10+ incompatibility
    "urllib3>=1.26.5", # via treq; 1.26.5 fixes Python 3.10+ collections.abc compatibility
    # 5.2 is the current version in Debian oldstable. If we don't care to support that, then 5.4 is
    # the minimum version from Ubuntu 22.04 and RHEL 9 (as of 2025-12).
    # When bumping this version to 6.2 or above, refer to https://github.com/element-hq/synapse/pull/19274
    # for details of Synapse improvements that may be unlocked. Particularly around the use of `|`
    # syntax with zope interface types.
    "zope-interface>=5.2", # via twisted
    "zope-interface>=6.2", # via twisted
]

[project.optional-dependencies]
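The `|` syntax mentioned in that comment is the PEP 604 union spelling that the rest of this diff migrates to; a minimal hedged sketch of the change, assuming (per the pyproject comment, not verified here) that zope-interface >= 6.2 is what makes the new spelling safe with zope `Interface` subclasses such as Twisted's:

```python
from __future__ import annotations  # keeps annotations unevaluated on any version

from typing import Optional

from twisted.internet.interfaces import IDelayedCall

# Pre-migration spelling, removed throughout this diff:
timed_call_old: Optional[IDelayedCall] = None
# Post-migration PEP 604 spelling, added throughout this diff. Evaluating this
# union eagerly at runtime is what the zope-interface >= 6.2 bump is assumed
# to unlock (an assumption based on the comment above).
timed_call_new: IDelayedCall | None = None
```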
@@ -388,10 +383,15 @@ select = [
    "G",
    # pyupgrade
    "UP006",
    "UP007",
    "UP045",
]
extend-safe-fixes = [
    # pyupgrade rules compatible with Python >= 3.9
    "UP006",
    "UP007",
    # pyupgrade rules compatible with Python >= 3.10
    "UP045",
    # Allow ruff to automatically fix trailing spaces within a multi-line string/comment.
    "W293"
]
@@ -471,6 +471,9 @@ skip = "cp3??t-* *i686* *macosx*"
enable = "pypy"

# We need a rust compiler.
#
# We temporarily pin Rust to 1.82.0 to work around
# https://github.com/element-hq/synapse/issues/17988
before-all = "sh .ci/before_build_wheel.sh"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
@@ -145,7 +145,7 @@ def request(
    print("Requesting %s" % dest, file=sys.stderr)

    s = requests.Session()
    s.mount("matrix-federation://", MatrixConnectionAdapter(verify_tls=verify_tls))
    s.mount("matrix-federation://", MatrixConnectionAdapter())

    headers: dict[str, str] = {
        "Authorization": authorization_headers[0],
@@ -267,17 +267,6 @@ def read_args_from_config(args: argparse.Namespace) -> None:


class MatrixConnectionAdapter(HTTPAdapter):
    """
    A Matrix federation-aware HTTP Adapter.
    """

    verify_tls: bool
    """whether to verify the remote server's TLS certificate."""

    def __init__(self, verify_tls: bool = True) -> None:
        self.verify_tls = verify_tls
        super().__init__()

    def send(
        self,
        request: PreparedRequest,
@@ -291,7 +280,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        assert isinstance(request.url, str)
        parsed = urlparse.urlsplit(request.url)
        server_name = parsed.netloc
        well_known = self._get_well_known(parsed.netloc, verify_tls=self.verify_tls)
        well_known = self._get_well_known(parsed.netloc)

        if well_known:
            server_name = well_known
@@ -329,21 +318,6 @@ class MatrixConnectionAdapter(HTTPAdapter):
        print(
            f"Connecting to {host}:{port} with SNI {ssl_server_name}", file=sys.stderr
        )

        if proxies:
            scheme = parsed.scheme
            if isinstance(scheme, bytes):
                scheme = scheme.decode("utf-8")

            proxy_for_scheme = proxies.get(scheme)
            if proxy_for_scheme:
                return self.proxy_manager_for(proxy_for_scheme).connection_from_host(
                    host,
                    port=port,
                    scheme="https",
                    pool_kwargs={"server_hostname": ssl_server_name},
                )

        return self.poolmanager.connection_from_host(
            host,
            port=port,
@@ -394,7 +368,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        return server_name, 8448, server_name

    @staticmethod
    def _get_well_known(server_name: str, verify_tls: bool = True) -> str | None:
    def _get_well_known(server_name: str) -> str | None:
        if ":" in server_name:
            # explicit port, or ipv6 literal. Either way, no .well-known
            return None
@@ -405,7 +379,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        print(f"fetching {uri}", file=sys.stderr)

        try:
            resp = requests.get(uri, verify=verify_tls)
            resp = requests.get(uri)
            if resp.status_code != 200:
                print("%s gave %i" % (uri, resp.status_code), file=sys.stderr)
                return None
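The `":" in server_name` guard above covers both cases named in its comment; a quick standalone illustration (example server names only):

```python
# Both an explicit port and an IPv6 literal contain ":", so the adapter
# skips the .well-known lookup for them.
for name in ("matrix.org", "matrix.org:8448", "[::1]:8448"):
    skip = ":" in name
    print(f"{name!r}: {'skip' if skip else 'fetch'} .well-known")
```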
@@ -54,9 +54,7 @@ def check_bind_error(
    """
    if address == "0.0.0.0" and "::" in bind_addresses:
        logger.warning(
            "Failed to listen on 0.0.0.0, continuing because listening on [::]. Original exception: %s: %s",
            type(e).__name__,
            str(e),
            "Failed to listen on 0.0.0.0, continuing because listening on [::]"
        )
    else:
        raise e
@@ -36,7 +36,6 @@ from typing import (
    Awaitable,
    Callable,
    NoReturn,
    Optional,
    cast,
)
from wsgiref.simple_server import WSGIServer
@@ -456,7 +455,7 @@ def listen_http(
    root_resource: Resource,
    version_string: str,
    max_request_body_size: int,
    context_factory: Optional[IOpenSSLContextFactory],
    context_factory: IOpenSSLContextFactory | None,
    reactor: ISynapseReactor = reactor,
) -> list[Port]:
    """
@@ -24,7 +24,7 @@ import logging
import os
import sys
import tempfile
from typing import Mapping, Optional, Sequence
from typing import Mapping, Sequence

from twisted.internet import defer, task

@@ -291,7 +291,7 @@ def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Nam

def create_homeserver(
    config: HomeServerConfig,
    reactor: Optional[ISynapseReactor] = None,
    reactor: ISynapseReactor | None = None,
) -> AdminCmdServer:
    """
    Create a homeserver instance for the Synapse admin command process.
@@ -21,7 +21,6 @@
#
import logging
import sys
from typing import Optional

from twisted.web.resource import Resource

@@ -336,7 +335,7 @@ def load_config(argv_options: list[str]) -> HomeServerConfig:

def create_homeserver(
    config: HomeServerConfig,
    reactor: Optional[ISynapseReactor] = None,
    reactor: ISynapseReactor | None = None,
) -> GenericWorkerServer:
    """
    Create a homeserver instance for the Synapse worker process.
@@ -22,7 +22,7 @@
import logging
import os
import sys
from typing import Iterable, Optional
from typing import Iterable

from twisted.internet.tcp import Port
from twisted.web.resource import EncodingResourceWrapper, Resource
@@ -350,7 +350,7 @@ def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig:

def create_homeserver(
    config: HomeServerConfig,
    reactor: Optional[ISynapseReactor] = None,
    reactor: ISynapseReactor | None = None,
) -> SynapseHomeServer:
    """
    Create a homeserver instance for the Synapse main process.
@@ -13,7 +13,7 @@
#

import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from twisted.internet.interfaces import IDelayedCall

@@ -74,7 +74,7 @@ class DelayedEventsHandler:
            cfg=self._config.ratelimiting.rc_delayed_event_mgmt,
        )

        self._next_delayed_event_call: Optional[IDelayedCall] = None
        self._next_delayed_event_call: IDelayedCall | None = None

        # The current position in the current_state_delta stream
        self._event_pos: int | None = None
@@ -22,7 +22,7 @@
import logging
import random
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence
from typing import TYPE_CHECKING, Any, Mapping, Sequence

from canonicaljson import encode_canonical_json

@@ -111,7 +111,7 @@ class MessageHandler:

        # The scheduled call to self._expire_event. None if no call is currently
        # scheduled.
        self._scheduled_expiry: Optional[IDelayedCall] = None
        self._scheduled_expiry: IDelayedCall | None = None

        if not hs.config.worker.worker_app:
            self.hs.run_as_background_process(
@@ -21,7 +21,7 @@

import logging
from http import HTTPStatus
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from twisted.internet.interfaces import IDelayedCall

@@ -125,7 +125,7 @@ class UserDirectoryHandler(StateDeltasHandler):
        # Guard to ensure we only have one process for refreshing remote profiles
        self._is_refreshing_remote_profiles = False
        # Handle to cancel the `call_later` of `kick_off_remote_profile_refresh_process`
        self._refresh_remote_profiles_call_later: Optional[IDelayedCall] = None
        self._refresh_remote_profiles_call_later: IDelayedCall | None = None

        # Guard to ensure we only have one process for refreshing remote profiles
        # for the given servers.
@@ -28,7 +28,6 @@ from typing import (
    BinaryIO,
    Callable,
    Mapping,
    Optional,
    Protocol,
)

@@ -314,7 +313,7 @@ class BlocklistingAgentWrapper(Agent):
        method: bytes,
        uri: bytes,
        headers: Headers | None = None,
        bodyProducer: Optional[IBodyProducer] = None,
        bodyProducer: IBodyProducer | None = None,
    ) -> defer.Deferred:
        h = urllib.parse.urlparse(uri.decode("ascii"))

@@ -1034,7 +1033,7 @@ class BodyExceededMaxSize(Exception):
class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
    """A protocol which immediately errors upon receiving data."""

    transport: Optional[ITCPTransport] = None
    transport: ITCPTransport | None = None

    def __init__(self, deferred: defer.Deferred):
        self.deferred = deferred
@@ -1076,7 +1075,7 @@ class _MultipartParserProtocol(protocol.Protocol):
    Protocol to read and parse a MSC3916 multipart/mixed response
    """

    transport: Optional[ITCPTransport] = None
    transport: ITCPTransport | None = None

    def __init__(
        self,
@@ -1189,7 +1188,7 @@ class _MultipartParserProtocol(protocol.Protocol):
class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
    """A protocol which reads body to a stream, erroring if the body exceeds a maximum size."""

    transport: Optional[ITCPTransport] = None
    transport: ITCPTransport | None = None

    def __init__(
        self, stream: ByteWriteable, deferred: defer.Deferred, max_size: int | None
@@ -19,7 +19,7 @@
#
import logging
import urllib.parse
from typing import Any, Generator, Optional
from typing import Any, Generator
from urllib.request import (  # type: ignore[attr-defined]
    proxy_bypass_environment,
)
@@ -173,7 +173,7 @@ class MatrixFederationAgent:
        method: bytes,
        uri: bytes,
        headers: Headers | None = None,
        bodyProducer: Optional[IBodyProducer] = None,
        bodyProducer: IBodyProducer | None = None,
    ) -> Generator[defer.Deferred, Any, IResponse]:
        """
        Args:
@@ -33,7 +33,6 @@ from typing import (
    Callable,
    Generic,
    Literal,
    Optional,
    TextIO,
    TypeVar,
    cast,
@@ -692,7 +691,7 @@ class MatrixFederationHttpClient:
                destination_bytes, method_bytes, url_to_sign_bytes, json
            )
            data = encode_canonical_json(json)
            producer: Optional[IBodyProducer] = QuieterFileBodyProducer(
            producer: IBodyProducer | None = QuieterFileBodyProducer(
                BytesIO(data), cooperator=self._cooperator
            )
        else:
@@ -22,7 +22,7 @@
import json
import logging
import urllib.parse
from typing import TYPE_CHECKING, Any, Optional, cast
from typing import TYPE_CHECKING, Any, cast

from twisted.internet import protocol
from twisted.internet.interfaces import ITCPTransport
@@ -237,7 +237,7 @@ class _ProxyResponseBody(protocol.Protocol):
    request.
    """

    transport: Optional[ITCPTransport] = None
    transport: ITCPTransport | None = None

    def __init__(self, request: "SynapseRequest") -> None:
        self._request = request
@@ -21,7 +21,7 @@
import logging
import random
import re
from typing import Any, Collection, Optional, Sequence, cast
from typing import Any, Collection, Sequence, cast
from urllib.parse import urlparse
from urllib.request import (  # type: ignore[attr-defined]
    proxy_bypass_environment,
@@ -119,8 +119,8 @@ class ProxyAgent(_AgentBase):
        self,
        *,
        reactor: IReactorCore,
        proxy_reactor: Optional[IReactorCore] = None,
        contextFactory: Optional[IPolicyForHTTPS] = None,
        proxy_reactor: IReactorCore | None = None,
        contextFactory: IPolicyForHTTPS | None = None,
        connectTimeout: float | None = None,
        bindAddress: bytes | None = None,
        pool: HTTPConnectionPool | None = None,
@@ -175,7 +175,7 @@ class ProxyAgent(_AgentBase):
        self._policy_for_https = contextFactory
        self._reactor = cast(IReactorTime, reactor)

        self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
        self._federation_proxy_endpoint: IStreamClientEndpoint | None = None
        self._federation_proxy_credentials: ProxyCredentials | None = None
        if federation_proxy_locations:
            assert federation_proxy_credentials is not None, (
@@ -221,7 +221,7 @@ class ProxyAgent(_AgentBase):
        method: bytes,
        uri: bytes,
        headers: Headers | None = None,
        bodyProducer: Optional[IBodyProducer] = None,
        bodyProducer: IBodyProducer | None = None,
    ) -> "defer.Deferred[IResponse]":
        """
        Issue a request to the server indicated by the given uri.
@@ -365,11 +365,11 @@ class ProxyAgent(_AgentBase):
def http_proxy_endpoint(
    proxy: bytes | None,
    reactor: IReactorCore,
    tls_options_factory: Optional[IPolicyForHTTPS],
    tls_options_factory: IPolicyForHTTPS | None,
    timeout: float = 30,
    bindAddress: bytes | str | tuple[bytes | str, int] | None = None,
    attemptDelay: float | None = None,
) -> tuple[Optional[IStreamClientEndpoint], ProxyCredentials | None]:
) -> tuple[IStreamClientEndpoint | None, ProxyCredentials | None]:
    """Parses an http proxy setting and returns an endpoint for the proxy

    Args:
@@ -20,7 +20,6 @@
#

import logging
from typing import Optional

from zope.interface import implementer

@@ -150,7 +149,7 @@ class ReplicationAgent(_AgentBase):
        method: bytes,
        uri: bytes,
        headers: Headers | None = None,
        bodyProducer: Optional[IBodyProducer] = None,
        bodyProducer: IBodyProducer | None = None,
    ) -> "defer.Deferred[IResponse]":
        """
        Issue a request to the server indicated by the given uri.
@@ -25,7 +25,7 @@ import traceback
from collections import deque
from ipaddress import IPv4Address, IPv6Address, ip_address
from math import floor
from typing import Callable, Optional
from typing import Callable

import attr
from zope.interface import implementer
@@ -113,7 +113,7 @@ class RemoteHandler(logging.Handler):
        port: int,
        maximum_buffer: int = 1000,
        level: int = logging.NOTSET,
        _reactor: Optional[IReactorTime] = None,
        _reactor: IReactorTime | None = None,
    ):
        super().__init__(level=level)
        self.host = host
@@ -89,7 +89,7 @@ class TerseJsonFormatter(JsonFormatter):
            "log": record.getMessage(),
            "namespace": record.name,
            "level": record.levelname,
            "time": record.created,
            "time": round(record.created, 2),
        }

        return self._format(record, event)
@@ -3,7 +3,7 @@ import time
from logging import Handler, LogRecord
from logging.handlers import MemoryHandler
from threading import Thread
from typing import Optional, cast
from typing import cast

from twisted.internet.interfaces import IReactorCore

@@ -26,7 +26,7 @@ class PeriodicallyFlushingMemoryHandler(MemoryHandler):
        target: Handler | None = None,
        flushOnClose: bool = True,
        period: float = 5.0,
        reactor: Optional[IReactorCore] = None,
        reactor: IReactorCore | None = None,
    ) -> None:
        """
        period: the period between automatic flushes
@@ -30,7 +30,6 @@ from typing import (
    Awaitable,
    BinaryIO,
    Generator,
    Optional,
)

import attr
@@ -706,7 +705,7 @@ class ThreadedFileSender:

        self.file: BinaryIO | None = None
        self.deferred: "Deferred[None]" = Deferred()
        self.consumer: Optional[IConsumer] = None
        self.consumer: interfaces.IConsumer | None = None

        # Signals if the thread should keep reading/sending data. Set means
        # continue, clear means pause.
@@ -439,11 +439,7 @@ class MediaRepository:
        return await self.store.get_cached_remote_media(origin, media_id)

    async def get_local_media_info(
        self,
        request: SynapseRequest,
        media_id: str,
        max_timeout_ms: int,
        bypass_quarantine: bool = False,
        self, request: SynapseRequest, media_id: str, max_timeout_ms: int
    ) -> LocalMedia | None:
        """Gets the info dictionary for given local media ID. If the media has
        not been uploaded yet, this function will wait up to ``max_timeout_ms``
@@ -455,7 +451,6 @@ class MediaRepository:
                the file_id for local content.)
            max_timeout_ms: the maximum number of milliseconds to wait for the
                media to be uploaded.
            bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Either the info dictionary for the given local media ID or
@@ -471,7 +466,7 @@ class MediaRepository:
            respond_404(request)
            return None

        if media_info.quarantined_by and not bypass_quarantine:
        if media_info.quarantined_by:
            logger.info("Media %s is quarantined", media_id)
            respond_404(request)
            return None
@@ -505,7 +500,6 @@ class MediaRepository:
        max_timeout_ms: int,
        allow_authenticated: bool = True,
        federation: bool = False,
        bypass_quarantine: bool = False,
    ) -> None:
        """Responds to requests for local media, if exists, or returns 404.

@@ -519,14 +513,11 @@ class MediaRepository:
                media to be uploaded.
            allow_authenticated: whether media marked as authenticated may be served to this request
            federation: whether the local media being fetched is for a federation request
            bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Resolves once a response has successfully been written to request
        """
        media_info = await self.get_local_media_info(
            request, media_id, max_timeout_ms, bypass_quarantine=bypass_quarantine
        )
        media_info = await self.get_local_media_info(request, media_id, max_timeout_ms)
        if not media_info:
            return

@@ -570,7 +561,6 @@ class MediaRepository:
        ip_address: str,
        use_federation_endpoint: bool,
        allow_authenticated: bool = True,
        bypass_quarantine: bool = False,
    ) -> None:
        """Respond to requests for remote media.

@@ -587,7 +577,6 @@ class MediaRepository:
                federation `/download` endpoint
            allow_authenticated: whether media marked as authenticated may be served to this
                request
            bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Resolves once a response has successfully been written to request
@@ -620,7 +609,6 @@ class MediaRepository:
            ip_address,
            use_federation_endpoint,
            allow_authenticated,
            bypass_quarantine=bypass_quarantine,
        )

        # Check if the media is cached on the client, if so return 304. We need
@@ -709,7 +697,6 @@ class MediaRepository:
        ip_address: str,
        use_federation_endpoint: bool,
        allow_authenticated: bool,
        bypass_quarantine: bool = False,
    ) -> tuple[Responder | None, RemoteMedia]:
        """Looks for media in local cache, if not there then attempt to
        download from remote server.
@@ -725,7 +712,6 @@ class MediaRepository:
            ip_address: the IP address of the requester
            use_federation_endpoint: whether to request the remote media over the new federation
                /download endpoint
            bypass_quarantine: whether to bypass quarantine checks

        Returns:
            A tuple of responder and the media info of the file.
@@ -746,7 +732,7 @@ class MediaRepository:
        file_id = media_info.filesystem_id
        file_info = FileInfo(server_name, file_id)

        if media_info.quarantined_by and not bypass_quarantine:
        if media_info.quarantined_by:
            logger.info("Media is quarantined")
            raise NotFoundError()

@@ -928,7 +914,6 @@ class MediaRepository:
            filesystem_id=file_id,
            last_access_ts=time_now_ms,
            quarantined_by=None,
            quarantined_ts=None,
            authenticated=authenticated,
            sha256=sha256writer.hexdigest(),
        )
@@ -1062,7 +1047,6 @@ class MediaRepository:
            filesystem_id=file_id,
            last_access_ts=time_now_ms,
            quarantined_by=None,
            quarantined_ts=None,
            authenticated=authenticated,
            sha256=sha256writer.hexdigest(),
        )
@@ -331,16 +331,10 @@ class UrlPreviewer:
                # response failed or is incomplete.
                og_from_html = parse_html_to_open_graph(tree)

                # Compile an Open Graph response by combining the oEmbed response
                # and the information from the HTML, with information in the HTML
                # preferred.
                #
                # The ordering here is intentional: certain websites (especially
                # SPA JavaScript-based ones) including Mastodon and YouTube provide
                # almost complete OpenGraph descriptions but only stubs for oEmbed,
                # with further oEmbed information being populated with JavaScript,
                # which Synapse won't execute.
                og = og_from_oembed | og_from_html
                # Compile the Open Graph response by using the scraped
                # information from the HTML and overlaying any information
                # from the oEmbed response.
                og = {**og_from_html, **og_from_oembed}

                await self._precache_image_url(user, media_info, og)
            else:
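The two merge spellings above have opposite precedence, which is the whole point of the change; a standalone sketch (illustrative values only) of why `og_from_oembed | og_from_html` prefers the HTML data:

```python
# With both dict-merge spellings, the right-hand/last operand wins on
# conflicting keys.
og_from_oembed = {"og:description": "oEmbed stub"}
og_from_html = {"og:description": "full description scraped from the HTML"}

assert (og_from_oembed | og_from_html)["og:description"] == (
    "full description scraped from the HTML"
)  # dict union: HTML (right operand) preferred
assert {**og_from_html, **og_from_oembed}["og:description"] == (
    "oEmbed stub"
)  # unpacking: oEmbed (last unpacked) preferred
```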
@@ -20,7 +20,7 @@
#

import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from twisted.internet.error import AlreadyCalled, AlreadyCancelled
from twisted.internet.interfaces import IDelayedCall
@@ -71,7 +71,7 @@ class EmailPusher(Pusher):
        self.server_name = hs.hostname
        self.store = self.hs.get_datastores().main
        self.email = pusher_config.pushkey
        self.timed_call: Optional[IDelayedCall] = None
        self.timed_call: IDelayedCall | None = None
        self.throttle_params: dict[str, ThrottleParams] = {}
        self._inited = False

@@ -21,7 +21,7 @@
import logging
import random
import urllib.parse
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from prometheus_client import Counter

@@ -120,7 +120,7 @@ class HttpPusher(Pusher):
        self.data = pusher_config.data
        self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
        self.failing_since = pusher_config.failing_since
        self.timed_call: Optional[IDelayedCall] = None
        self.timed_call: IDelayedCall | None = None
        self._is_processing = False
        self._group_unread_count_by_room = (
            hs.config.push.push_group_unread_count_by_room
@@ -114,12 +114,10 @@ from synapse.rest.admin.users import (
    UserByThreePid,
    UserInvitesCount,
    UserJoinedRoomCount,
    UserJoinedRoomsRestServlet,
    UserMembershipsRestServlet,
    UserMembershipRestServlet,
    UserRegisterServlet,
    UserReplaceMasterCrossSigningKeyRestServlet,
    UserRestServletV2,
    UserRestServletV2Get,
    UsersRestServletV2,
    UsersRestServletV3,
    UserTokenRestServlet,
@@ -282,8 +280,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    # matrix_authentication_service integration uses the dedicated MAS API.
    if hs.config.experimental.msc3861.enabled:
        register_servlets_for_msc3861_delegation(hs, http_server)
    else:
        UserRestServletV2Get(hs).register(http_server)

        return

@@ -301,8 +297,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    VersionServlet(hs).register(http_server)
    if not auth_delegated:
        UserAdminServlet(hs).register(http_server)
    UserJoinedRoomsRestServlet(hs).register(http_server)
    UserMembershipsRestServlet(hs).register(http_server)
    UserMembershipRestServlet(hs).register(http_server)
    if not auth_delegated:
        UserTokenRestServlet(hs).register(http_server)
        UserRestServletV2(hs).register(http_server)
@@ -293,38 +293,6 @@ class ListMediaInRoom(RestServlet):
        return HTTPStatus.OK, {"local": local_mxcs, "remote": remote_mxcs}


class ListQuarantinedMedia(RestServlet):
    """Lists all quarantined media on the server."""

    PATTERNS = admin_patterns("/media/quarantined$")

    def __init__(self, hs: "HomeServer"):
        self.store = hs.get_datastores().main
        self.auth = hs.get_auth()

    async def on_GET(
        self,
        request: SynapseRequest,
    ) -> tuple[int, JsonDict]:
        await assert_requester_is_admin(self.auth, request)

        start = parse_integer(request, "from", default=0)
        limit = parse_integer(request, "limit", default=100)
        local_or_remote = parse_string(request, "kind", required=True)

        if local_or_remote not in ["local", "remote"]:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Query parameter `kind` must be either 'local' or 'remote'.",
            )

        mxcs = await self.store.get_quarantined_media_mxcs(
            start, limit, local_or_remote == "local"
        )

        return HTTPStatus.OK, {"media": mxcs}


class PurgeMediaCacheRestServlet(RestServlet):
    PATTERNS = admin_patterns("/purge_media_cache$")

@@ -564,7 +532,6 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer)
    ProtectMediaByID(hs).register(http_server)
    UnprotectMediaByID(hs).register(http_server)
    ListMediaInRoom(hs).register(http_server)
    ListQuarantinedMedia(hs).register(http_server)
    # XXX DeleteMediaByDateSize must be registered before DeleteMediaByID as
    # their URL routes overlap.
    DeleteMediaByDateSize(hs).register(http_server)
@@ -210,7 +210,7 @@ class UsersRestServletV3(UsersRestServletV2):
        return parse_boolean(request, "deactivated")


class UserRestServletV2Get(RestServlet):
class UserRestServletV2(RestServlet):
    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)$", "v2")

    """Get request to list user details.
@@ -220,6 +220,22 @@ class UserRestServletV2Get(RestServlet):

    returns:
        200 OK with user details if success otherwise an error.

    Put request to allow an administrator to add or modify a user.
    This needs user to have administrator access in Synapse.
    We use PUT instead of POST since we already know the id of the user
    object to create. POST could be used to create guests.

    PUT /_synapse/admin/v2/users/<user_id>
    {
        "password": "secret",
        "displayname": "User"
    }

    returns:
        201 OK with new user object if user was created or
        200 OK with modified user object if user was modified
        otherwise an error.
    """

    def __init__(self, hs: "HomeServer"):
@@ -251,28 +267,6 @@ class UserRestServletV2Get(RestServlet):

        return HTTPStatus.OK, user_info_dict


class UserRestServletV2(UserRestServletV2Get):
    """
    Put request to allow an administrator to add or modify a user.
    This needs user to have administrator access in Synapse.
    We use PUT instead of POST since we already know the id of the user
    object to create. POST could be used to create guests.

    Note: This inherits from `UserRestServletV2Get`, so also supports the `GET` route.

    PUT /_synapse/admin/v2/users/<user_id>
    {
        "password": "secret",
        "displayname": "User"
    }

    returns:
        201 OK with new user object if user was created or
        200 OK with modified user object if user was modified
        otherwise an error.
    """

    async def on_PUT(
        self, request: SynapseRequest, user_id: str
    ) -> tuple[int, JsonMapping]:
@@ -1037,7 +1031,7 @@ class UserAdminServlet(RestServlet):
        return HTTPStatus.OK, {}


class UserJoinedRoomsRestServlet(RestServlet):
class UserMembershipRestServlet(RestServlet):
    """
    Get list of joined room IDs for a user.
    """
@@ -1060,28 +1054,6 @@ class UserJoinedRoomsRestServlet(RestServlet):
        return HTTPStatus.OK, rooms_response


class UserMembershipsRestServlet(RestServlet):
    """
    Get list of room memberships for a user.
    """

    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/memberships$")

    def __init__(self, hs: "HomeServer"):
        self.is_mine = hs.is_mine
        self.auth = hs.get_auth()
        self.store = hs.get_datastores().main

    async def on_GET(
        self, request: SynapseRequest, user_id: str
    ) -> tuple[int, JsonDict]:
        await assert_requester_is_admin(self.auth, request)

        memberships = await self.store.get_memberships_for_user(user_id)

        return HTTPStatus.OK, {"memberships": memberships}


class PushersRestServlet(RestServlet):
    """
    Gets information about all pushers for a specific `user_id`.
@@ -23,7 +23,6 @@
import logging
import re

from synapse.api.errors import Codes, cs_error
from synapse.http.server import (
    HttpServer,
    respond_with_json,
@@ -236,23 +235,7 @@ class DownloadResource(RestServlet):
        # Validate the server name, raising if invalid
        parse_and_validate_server_name(server_name)

        requester = await self.auth.get_user_by_req(request, allow_guest=True)
        is_admin = await self.auth.is_server_admin(requester)
        bypass_quarantine = False
        if parse_string(request, "admin_unsafely_bypass_quarantine") == "true":
            if is_admin:
                logger.info("Admin bypassing quarantine for media download")
                bypass_quarantine = True
            else:
                respond_with_json(
                    request,
                    400,
                    cs_error(
                        "Must be a server admin to bypass quarantine",
                        code=Codes.UNKNOWN,
                    ),
                    send_cors=True,
                )
        await self.auth.get_user_by_req(request, allow_guest=True)

        set_cors_headers(request)
        set_corp_headers(request)
@@ -276,11 +259,7 @@ class DownloadResource(RestServlet):

        if self._is_mine_server_name(server_name):
            await self.media_repo.get_local_media(
                request,
                media_id,
                file_name,
                max_timeout_ms,
                bypass_quarantine=bypass_quarantine,
                request, media_id, file_name, max_timeout_ms
            )
        else:
            ip_address = request.getClientAddress().host
@@ -292,7 +271,6 @@ class DownloadResource(RestServlet):
                max_timeout_ms,
                ip_address,
                True,
                bypass_quarantine=bypass_quarantine,
            )

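A short usage sketch of the bypass parameter handled above, not part of this diff; the homeserver URL and token are hypothetical. Only a server admin's token may set the flag, otherwise the servlet responds 400 as shown in the hunk.

import requests

BASE_URL = "https://synapse.example.com"  # hypothetical homeserver
ADMIN_TOKEN = "syt_..."  # hypothetical admin access token

def download_quarantined(server_name: str, media_id: str) -> bytes:
    # Admins opt in explicitly; the parameter name advertises the risk.
    resp = requests.get(
        f"{BASE_URL}/_matrix/client/v1/media/download/{server_name}/{media_id}",
        params={"admin_unsafely_bypass_quarantine": "true"},
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    )
    resp.raise_for_status()
    return resp.content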
@@ -19,12 +19,9 @@
#
#
import logging
from bisect import bisect
from http import HTTPStatus
from typing import TYPE_CHECKING

from unpaddedbase64 import decode_base64, encode_base64

from synapse.api.errors import Codes, SynapseError
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet, parse_strings_from_args
@@ -38,34 +35,10 @@ if TYPE_CHECKING:

logger = logging.getLogger(__name__)

MUTUAL_ROOMS_BATCH_LIMIT = 100


def _parse_mutual_rooms_batch_token_args(args: dict[bytes, list[bytes]]) -> str | None:
    from_batches = parse_strings_from_args(args, "from")
    if not from_batches:
        return None
    if len(from_batches) > 1:
        raise SynapseError(
            HTTPStatus.BAD_REQUEST,
            "Duplicate from query parameter",
            errcode=Codes.INVALID_PARAM,
        )
    if from_batches[0]:
        try:
            return decode_base64(from_batches[0]).decode("utf-8")
        except Exception:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Malformed from token",
                errcode=Codes.INVALID_PARAM,
            )
    return None


class UserMutualRoomsServlet(RestServlet):
    """
    GET /uk.half-shot.msc2666/user/mutual_rooms?user_id={user_id}&from={token} HTTP/1.1
    GET /uk.half-shot.msc2666/user/mutual_rooms?user_id={user_id} HTTP/1.1
    """

    PATTERNS = client_patterns(
@@ -83,7 +56,6 @@ class UserMutualRoomsServlet(RestServlet):
        args: dict[bytes, list[bytes]] = request.args  # type: ignore

        user_ids = parse_strings_from_args(args, "user_id", required=True)
        from_batch = _parse_mutual_rooms_batch_token_args(args)

        if len(user_ids) > 1:
            raise SynapseError(
@@ -92,52 +64,29 @@ class UserMutualRoomsServlet(RestServlet):
                errcode=Codes.INVALID_PARAM,
            )

        # We don't do batching, so a batch token is illegal by default
        if b"batch_token" in args:
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                "Unknown batch_token",
                errcode=Codes.INVALID_PARAM,
            )

        user_id = user_ids[0]

        requester = await self.auth.get_user_by_req(request)
        if user_id == requester.user.to_string():
            raise SynapseError(
                HTTPStatus.BAD_REQUEST,
                HTTPStatus.UNPROCESSABLE_ENTITY,
                "You cannot request a list of shared rooms with yourself",
                errcode=Codes.UNKNOWN,
                errcode=Codes.INVALID_PARAM,
            )

        # Sort here instead of the database function, so that we don't expose
        # clients to any unrelated changes to the sorting algorithm.
        rooms = sorted(
            await self.store.get_mutual_rooms_between_users(
                frozenset((requester.user.to_string(), user_id))
            )
        rooms = await self.store.get_mutual_rooms_between_users(
            frozenset((requester.user.to_string(), user_id))
        )

        if from_batch:
            # A from_batch token was provided, so cut off any rooms where the ID is
            # lower than or equal to the token. This method doesn't care whether the
            # provided token room still exists, nor whether it's even a real room ID.
            #
            # However, if rooms with a lower ID are added after the token was issued,
            # they will not be included until the client makes a new request without a
            # from token. This is considered acceptable, as clients generally won't
            # persist these results for long periods.
            rooms = rooms[bisect(rooms, from_batch) :]

        if len(rooms) <= MUTUAL_ROOMS_BATCH_LIMIT:
            # We've reached the end of the list, don't return a batch token
            return 200, {"joined": rooms}

        rooms = rooms[:MUTUAL_ROOMS_BATCH_LIMIT]
        # We use urlsafe unpadded base64 encoding for the batch token in order to
        # handle funny room IDs in old pre-v12 rooms properly. We also truncate it
        # to stay within the 255-character limit of opaque tokens.
        next_batch = encode_base64(rooms[-1].encode("utf-8"), urlsafe=True)[:255]
        # Due to the truncation, it is technically possible to have conflicting next
        # batches by creating hundreds of rooms with the same 191 character prefix
        # in the room ID. In the event that some silly user does that, don't let
        # them paginate further.
        if next_batch == from_batch:
            return 200, {"joined": rooms}

        return 200, {"joined": list(rooms), "next_batch": next_batch}
        return 200, {"joined": list(rooms)}


def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:

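The pagination scheme above fits in a few lines; here is a self-contained sketch of it, not part of this diff, with a reduced batch limit and plain strings standing in for room IDs. It relies on the same unpaddedbase64 helpers the servlet imports.

from bisect import bisect

from unpaddedbase64 import decode_base64, encode_base64

BATCH_LIMIT = 3  # the servlet uses MUTUAL_ROOMS_BATCH_LIMIT = 100

def page(rooms: list[str], from_token: str | None) -> tuple[list[str], str | None]:
    rooms = sorted(rooms)
    if from_token:
        # Drop every room ID lower than or equal to the decoded token.
        last_seen = decode_base64(from_token).decode("utf-8")
        rooms = rooms[bisect(rooms, last_seen):]
    if len(rooms) <= BATCH_LIMIT:
        return rooms, None  # final page, no token
    rooms = rooms[:BATCH_LIMIT]
    # Urlsafe unpadded base64 of the last returned room ID, truncated to 255.
    next_batch = encode_base64(rooms[-1].encode("utf-8"), urlsafe=True)[:255]
    return rooms, next_batch

rooms = [f"!room{i}:example.com" for i in range(7)]
token = None
while True:
    batch, token = page(rooms, token)
    print(batch)  # three pages: 3 rooms, 3 rooms, then the final 1
    if token is None:
        break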
@@ -34,7 +34,6 @@ from typing import (
    Any,
    Awaitable,
    Callable,
    Optional,
    TypeVar,
    cast,
)
@@ -321,7 +320,7 @@ class HomeServer(metaclass=abc.ABCMeta):
        self,
        hostname: str,
        config: HomeServerConfig,
        reactor: Optional[ISynapseReactor] = None,
        reactor: ISynapseReactor | None = None,
    ):
        """
        Args:
@@ -354,7 +353,7 @@ class HomeServer(metaclass=abc.ABCMeta):
        self._module_web_resources_consumed = False

        # This attribute is set by the free function `refresh_certificate`.
        self.tls_server_context_factory: Optional[IOpenSSLContextFactory] = None
        self.tls_server_context_factory: IOpenSSLContextFactory | None = None

        self._is_shutdown = False
        self._async_shutdown_handlers: list[ShutdownInfo] = []

@@ -61,7 +61,6 @@ class LocalMedia:
    url_cache: str | None
    last_access_ts: int
    quarantined_by: str | None
    quarantined_ts: int | None
    safe_from_quarantine: bool
    user_id: str | None
    authenticated: bool | None
@@ -79,7 +78,6 @@ class RemoteMedia:
    created_ts: int
    last_access_ts: int
    quarantined_by: str | None
    quarantined_ts: int | None
    authenticated: bool | None
    sha256: str | None

@@ -245,7 +243,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                "user_id",
                "authenticated",
                "sha256",
                "quarantined_ts",
            ),
            allow_none=True,
            desc="get_local_media",
@@ -265,7 +262,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
            user_id=row[8],
            authenticated=row[9],
            sha256=row[10],
            quarantined_ts=row[11],
        )

    async def get_local_media_by_user_paginate(
@@ -323,8 +319,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                    safe_from_quarantine,
                    user_id,
                    authenticated,
                    sha256,
                    quarantined_ts
                    sha256
                FROM local_media_repository
                WHERE user_id = ?
                ORDER BY {order_by_column} {order}, media_id ASC
@@ -350,7 +345,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                    user_id=row[9],
                    authenticated=row[10],
                    sha256=row[11],
                    quarantined_ts=row[12],
                )
                for row in txn
            ]
@@ -701,7 +695,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                "quarantined_by",
                "authenticated",
                "sha256",
                "quarantined_ts",
            ),
            allow_none=True,
            desc="get_cached_remote_media",
@@ -720,7 +713,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
            quarantined_by=row[6],
            authenticated=row[7],
            sha256=row[8],
            quarantined_ts=row[9],
        )

    async def store_cached_remote_media(

@@ -945,50 +945,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            max_lifetime=max_lifetime,
        )

    async def get_quarantined_media_mxcs(
        self, index_start: int, index_limit: int, local: bool
    ) -> list[str]:
        """Retrieves all the quarantined media MXC URIs starting from the given position,
        ordered from oldest quarantined timestamp, then alphabetically by media ID
        (including origin).

        Note that on established servers the "quarantined timestamp" may be zero for
        media that was quarantined before the quarantine timestamp field was introduced.

        Args:
            index_start: The position to start from.
            index_limit: The maximum number of results to return.
            local: When true, only local media will be returned. When false, only remote media will be returned.

        Returns:
            The quarantined media as a list of media IDs.
        """

        def _get_quarantined_media_mxcs_txn(
            txn: LoggingTransaction,
        ) -> list[str]:
            # We order by quarantined timestamp *and* media ID (including origin, when
            # known) to ensure the ordering is stable for established servers.
            if local:
                sql = "SELECT '' as media_origin, media_id FROM local_media_repository WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_id ASC LIMIT ? OFFSET ?"
            else:
                sql = "SELECT media_origin, media_id FROM remote_media_cache WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_origin, media_id ASC LIMIT ? OFFSET ?"
            txn.execute(sql, (index_limit, index_start))

            mxcs = []

            for media_origin, media_id in txn:
                if local:
                    media_origin = self.hs.hostname
                mxcs.append(f"mxc://{media_origin}/{media_id}")

            return mxcs

        return await self.db_pool.runInteraction(
            "get_quarantined_media_mxcs",
            _get_quarantined_media_mxcs_txn,
        )

    async def get_media_mxcs_in_room(self, room_id: str) -> tuple[list[str], list[str]]:
        """Retrieves all the local and remote media MXC URIs in a given room

@@ -996,7 +952,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            room_id

        Returns:
            The local and remote media as lists of the media IDs.
            The local and remote media as a lists of the media IDs.
        """

        def _get_media_mxcs_in_room_txn(
@@ -1191,10 +1147,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            The total number of media items quarantined
        """
        total_media_quarantined = 0
        now_ts: int | None = self.clock.time_msec()

        if quarantined_by is None:
            now_ts = None

        # Effectively a legacy path, update any media that was explicitly named.
        if media_ids:
@@ -1203,13 +1155,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE local_media_repository
                SET quarantined_by = ?, quarantined_ts = ?
                SET quarantined_by = ?
                WHERE {sql_many_clause_sql}"""

            if quarantined_by is not None:
                sql += " AND safe_from_quarantine = FALSE"

            txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            txn.execute(sql, [quarantined_by] + sql_many_clause_args)
            # Note that a rowcount of -1 can be used to indicate no rows were affected.
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

@@ -1220,13 +1172,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE local_media_repository
                SET quarantined_by = ?, quarantined_ts = ?
                SET quarantined_by = ?
                WHERE {sql_many_clause_sql}"""

            if quarantined_by is not None:
                sql += " AND safe_from_quarantine = FALSE"

            txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            txn.execute(sql, [quarantined_by] + sql_many_clause_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

        return total_media_quarantined
@@ -1250,10 +1202,6 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            The total number of media items quarantined
        """
        total_media_quarantined = 0
        now_ts: int | None = self.clock.time_msec()

        if quarantined_by is None:
            now_ts = None

        if media:
            sql_in_list_clause, sql_args = make_tuple_in_list_sql_clause(
@@ -1263,10 +1211,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE remote_media_cache
                SET quarantined_by = ?, quarantined_ts = ?
                SET quarantined_by = ?
                WHERE {sql_in_list_clause}"""

            txn.execute(sql, [quarantined_by, now_ts] + sql_args)
            txn.execute(sql, [quarantined_by] + sql_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

            total_media_quarantined = 0
@@ -1276,9 +1224,9 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE remote_media_cache
                SET quarantined_by = ?, quarantined_ts = ?
                SET quarantined_by = ?
                WHERE {sql_many_clause_sql}"""
            txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            txn.execute(sql, [quarantined_by] + sql_many_clause_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

        return total_media_quarantined

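A runnable sqlite3 sketch of the UPDATE semantics in the hunks above, not part of this diff, with the schema cut down to the relevant columns: quarantining stamps quarantined_ts and skips media marked safe_from_quarantine, while un-quarantining (quarantined_by = NULL) also clears the timestamp and ignores the safety flag.

import sqlite3
import time

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE local_media_repository ("
    " media_id TEXT, quarantined_by TEXT,"
    " quarantined_ts BIGINT, safe_from_quarantine BOOLEAN DEFAULT FALSE)"
)
conn.executemany(
    "INSERT INTO local_media_repository (media_id, safe_from_quarantine) VALUES (?, ?)",
    [("abc", False), ("def", True)],
)

def quarantine(quarantined_by: str | None, media_ids: list[str]) -> int:
    # Clearing the quarantine also clears the timestamp.
    now_ts = int(time.time() * 1000) if quarantined_by is not None else None
    sql = (
        "UPDATE local_media_repository"
        " SET quarantined_by = ?, quarantined_ts = ?"
        " WHERE media_id IN (%s)" % ",".join("?" * len(media_ids))
    )
    if quarantined_by is not None:
        sql += " AND safe_from_quarantine = FALSE"
    cur = conn.execute(sql, [quarantined_by, now_ts] + media_ids)
    return cur.rowcount if cur.rowcount > 0 else 0

print(quarantine("@admin:example.com", ["abc", "def"]))  # 1: 'def' is safe
print(quarantine(None, ["abc", "def"]))                  # 2: clearing ignores the flag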
@@ -747,27 +747,6 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):

        return frozenset(room_ids)

    async def get_memberships_for_user(self, user_id: str) -> dict[str, str]:
        """Returns a dict of room_id to membership state for a given user.

        If a remote user, only returns rooms this server is currently
        participating in.
        """

        rows = cast(
            list[tuple[str, str]],
            await self.db_pool.simple_select_list(
                "current_state_events",
                keyvalues={
                    "type": EventTypes.Member,
                    "state_key": user_id,
                },
                retcols=["room_id", "membership"],
                desc="get_memberships_for_user",
            ),
        )
        return dict(rows)

    @cached(max_entries=500000, iterable=True)
    async def get_rooms_for_user(self, user_id: str) -> frozenset[str]:
        """Returns a set of room_ids the user is currently joined to.

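The simple_select_list call above amounts to a single lookup against the current_state_events table. A small sqlite3 sketch, not part of this diff, of the equivalent query ('m.room.member' is the value of EventTypes.Member):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE current_state_events"
    " (room_id TEXT, type TEXT, state_key TEXT, membership TEXT)"
)
conn.executemany(
    "INSERT INTO current_state_events VALUES (?, 'm.room.member', ?, ?)",
    [
        ("!a:example.com", "@user:example.com", "join"),
        ("!b:example.com", "@user:example.com", "ban"),
    ],
)

def get_memberships_for_user(user_id: str) -> dict[str, str]:
    rows = conn.execute(
        "SELECT room_id, membership FROM current_state_events"
        " WHERE type = 'm.room.member' AND state_key = ?",
        (user_id,),
    ).fetchall()
    return dict(rows)

print(get_memberships_for_user("@user:example.com"))
# {'!a:example.com': 'join', '!b:example.com': 'ban'}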
@@ -450,6 +450,9 @@ class SlidingSyncStore(SQLBaseStore):

        # Now that we have seen the client has received and used the connection
        # position, we can delete all the other connection positions.
        #
        # Note: the rest of the code here assumes this is the only remaining
        # connection position.
        sql = """
            DELETE FROM sliding_sync_connection_positions
            WHERE connection_key = ? AND connection_position != ?
@@ -515,6 +518,41 @@ class SlidingSyncStore(SQLBaseStore):
                required_state_map=required_state_map[required_state_id],
            )

        # Clean up any required state IDs that are no longer used by any
        # connection position on this connection.
        #
        # We store the required state config per-connection per-room. Since this
        # can be a lot of data, we deduplicate the required state JSON and store
        # it separately, with multiple rooms referencing the same required state
        # ID. Over time as the required state configs change, some required
        # state IDs may no longer be referenced by any room config, so we need
        # to clean them up.
        #
        # We do this by noting that we have pulled out *all* rows from
        # `sliding_sync_connection_required_state` for this connection above. We
        # have also pulled out all referenced required state IDs for *this*
        # connection position, which is the only connection position that
        # remains (we deleted the others above).
        #
        # Thus we can compute the unused required state IDs by looking for any
        # required state IDs that are not referenced by the remaining connection
        # position.
        used_required_state_ids = {
            required_state_id for _, _, required_state_id in room_config_rows
        }

        unused_required_state_ids = required_state_map.keys() - used_required_state_ids
        if unused_required_state_ids:
            self.db_pool.simple_delete_many_batch_txn(
                txn,
                table="sliding_sync_connection_required_state",
                keys=("connection_key", "required_state_id"),
                values=[
                    (connection_key, required_state_id)
                    for required_state_id in unused_required_state_ids
                ],
            )

        # Now look up the per-room stream data.
        rooms: dict[str, HaveSentRoom[str]] = {}
        receipts: dict[str, HaveSentRoom[str]] = {}

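The garbage-collection step above reduces to a set difference once both row sets are in hand. A tiny sketch, not part of this diff, with made-up IDs and row tuples shaped like the query results:

# All deduplicated required-state rows held for the connection.
required_state_map = {1: "json-a", 2: "json-b", 3: "json-c"}

# (room_id, timeline_limit, required_state_id) rows for the one surviving position.
room_config_rows = [
    ("!a:example.com", 10, 1),
    ("!b:example.com", 10, 1),
    ("!c:example.com", 10, 3),
]

used = {required_state_id for _, _, required_state_id in room_config_rows}
unused = required_state_map.keys() - used
print(unused)  # {2}: safe to delete from sliding_sync_connection_required_state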
@@ -1,27 +0,0 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2025 Element Creations, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.

-- Add a timestamp for when a piece of media was quarantined.
--
-- This should be NOT NULL, but we need to consider existing rows. In future we
-- may want to either backfill this or delete all rows with a NULL value (and
-- then make it NOT NULL).
ALTER TABLE local_media_repository ADD COLUMN quarantined_ts BIGINT;
ALTER TABLE remote_media_cache ADD COLUMN quarantined_ts BIGINT;

UPDATE local_media_repository SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;
UPDATE remote_media_cache SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;

-- Note: We *probably* should have an index on quarantined_ts, but we're going
-- to try to defer that to a future migration after seeing the performance impact.
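A runnable sqlite3 sketch, not part of this diff, of what the deleted migration did (Synapse actually runs this through its schema machinery on SQLite and Postgres): add the nullable column, then backfill a zero timestamp for media that was already quarantined.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE local_media_repository (media_id TEXT, quarantined_by TEXT)")
conn.execute("INSERT INTO local_media_repository VALUES ('abc', '@admin:example.com')")

# The migration itself: new nullable column, then backfill for existing rows.
conn.execute("ALTER TABLE local_media_repository ADD COLUMN quarantined_ts BIGINT")
conn.execute(
    "UPDATE local_media_repository SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL"
)
print(conn.execute("SELECT media_id, quarantined_ts FROM local_media_repository").fetchall())
# [('abc', 0)]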
@@ -19,7 +19,7 @@
#

import queue
from typing import Any, BinaryIO, Optional, Union, cast
from typing import Any, BinaryIO, cast

from twisted.internet import threads
from twisted.internet.defer import Deferred
@@ -50,7 +50,7 @@ class BackgroundFileConsumer:
        self._reactor: ISynapseReactor = reactor

        # Producer we're registered with
        self._producer: Optional[Union[IPushProducer, IPullProducer]] = None
        self._producer: IPushProducer | IPullProducer | None = None

        # True if PushProducer, false if PullProducer
        self.streaming = False
@@ -72,7 +72,7 @@ class BackgroundFileConsumer:
        self._write_exception: Exception | None = None

    def registerProducer(
        self, producer: Union[IPushProducer, IPullProducer], streaming: bool
        self, producer: IPushProducer | IPullProducer, streaming: bool
    ) -> None:
        """Part of IConsumer interface

@@ -71,43 +71,14 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
        return resources

    def _ensure_quarantined(
        self,
        user_tok: str,
        server_and_media_id: str,
        include_bypass_param: bool = False,
        self, admin_user_tok: str, server_and_media_id: str
    ) -> None:
        """Ensure a piece of media is quarantined when trying to access it.

        The include_bypass_param flag enables the presence of the
        admin_unsafely_bypass_quarantine query parameter, but still expects that the
        request will fail to download the media.
        """
        if include_bypass_param:
            query_string = "?admin_unsafely_bypass_quarantine=true"
            channel = self.make_request(
                "GET",
                f"/_matrix/client/v1/media/download/{server_and_media_id}{query_string}",
                shorthand=False,
                access_token=user_tok,
            )

            # Non-admins can't bypass, so this should fail regardless of whether the
            # media is actually quarantined.
            self.assertEqual(
                400,
                channel.code,
                msg=(
                    "Expected to receive a 400 when bypassing quarantined media: %s"
                    % server_and_media_id
                ),
            )

        # Repeat the request, this time without the bypass parameter.
        """Ensure a piece of media is quarantined when trying to access it."""
        channel = self.make_request(
            "GET",
            f"/_matrix/client/v1/media/download/{server_and_media_id}",
            shorthand=False,
            access_token=user_tok,
            access_token=admin_user_tok,
        )

        # Should be quarantined
@@ -120,62 +91,6 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
            ),
        )

    def test_admin_can_bypass_quarantine(self) -> None:
        self.register_user("admin", "pass", admin=True)
        admin_user_tok = self.login("admin", "pass")

        # Upload some media
        response = self.helper.upload_media(SMALL_PNG, tok=admin_user_tok)

        # Extract media ID from the response
        server_name_and_media_id = response["content_uri"][6:]  # Cut off 'mxc://'
        server_name, media_id = server_name_and_media_id.split("/")

        # Attempt to access the media
        channel = self.make_request(
            "GET",
            f"/_matrix/client/v1/media/download/{server_name_and_media_id}",
            shorthand=False,
            access_token=admin_user_tok,
        )

        # Should be successful
        self.assertEqual(200, channel.code)

        # Quarantine the media
        url = "/_synapse/admin/v1/media/quarantine/%s/%s" % (
            urllib.parse.quote(server_name),
            urllib.parse.quote(media_id),
        )
        channel = self.make_request(
            "POST",
            url,
            access_token=admin_user_tok,
        )
        self.pump(1.0)
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # Now access it *without* the bypass parameter - this should fail (as expected).
        self._ensure_quarantined(
            admin_user_tok, server_name_and_media_id, include_bypass_param=False
        )

        # Now access it *with* the bypass parameter - this should work
        channel = self.make_request(
            "GET",
            f"/_matrix/client/v1/media/download/{server_name_and_media_id}?admin_unsafely_bypass_quarantine=true",
            shorthand=False,
            access_token=admin_user_tok,
        )
        self.assertEqual(
            200,
            channel.code,
            msg=(
                "Expected to receive a 200 on accessing (with bypass) quarantined media: %s"
                % server_name_and_media_id
            ),
        )

    @parameterized.expand(
        [
            # Attempt quarantine media APIs as non-admin
@@ -239,14 +154,8 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
        self.pump(1.0)
        self.assertEqual(200, channel.code, msg=channel.json_body)

        # Attempt to access the media (and ensure non-admins can't download it, even
        # with a bypass parameter). Admins cannot download it without the bypass param.
        self._ensure_quarantined(
            non_admin_user_tok, server_name_and_media_id, include_bypass_param=True
        )
        self._ensure_quarantined(
            admin_user_tok, server_name_and_media_id, include_bypass_param=False
        )
        # Attempt to access the media
        self._ensure_quarantined(admin_user_tok, server_name_and_media_id)

    @parameterized.expand(
        [
@@ -305,21 +214,9 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
        server_and_media_id_1 = mxc_1[6:]
        server_and_media_id_2 = mxc_2[6:]

        # Test that we cannot download any of the media anymore, especially with the
        # bypass parameter set. Admins cannot download the media without supplying the
        # bypass parameter, so we check that too.
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_1, include_bypass_param=True
        )
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_2, include_bypass_param=True
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_1, include_bypass_param=False
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_2, include_bypass_param=False
        )
        # Test that we cannot download any of the media anymore
        self._ensure_quarantined(admin_user_tok, server_and_media_id_1)
        self._ensure_quarantined(admin_user_tok, server_and_media_id_2)

    def test_quarantine_all_media_by_user(self) -> None:
        self.register_user("user_admin", "pass", admin=True)
@@ -366,27 +263,10 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
            channel.json_body, {"num_quarantined": 3}, "Expected 3 quarantined items"
        )

        # Attempt to access each piece of media, ensuring that it can't be downloaded
        # even with a bypass parameter. Admins should not be able to download the media
        # either when not supplying the bypass parameter, so we check that too.
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_1, include_bypass_param=True
        )
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_2, include_bypass_param=True
        )
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_3, include_bypass_param=True
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_1, include_bypass_param=False
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_2, include_bypass_param=False
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_3, include_bypass_param=False
        )
        # Attempt to access each piece of media
        self._ensure_quarantined(admin_user_tok, server_and_media_id_1)
        self._ensure_quarantined(admin_user_tok, server_and_media_id_2)
        self._ensure_quarantined(admin_user_tok, server_and_media_id_3)

    def test_cannot_quarantine_safe_media(self) -> None:
        self.register_user("user_admin", "pass", admin=True)
@@ -427,14 +307,8 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
        )

        # Attempt to access each piece of media, the first should fail, the
        # second should succeed. We check both the non-admin user with a bypass
        # parameter, and the admin user without.
        self._ensure_quarantined(
            non_admin_user_tok, server_and_media_id_1, include_bypass_param=True
        )
        self._ensure_quarantined(
            admin_user_tok, server_and_media_id_1, include_bypass_param=False
        )
        # second should succeed.
        self._ensure_quarantined(admin_user_tok, server_and_media_id_1)

        # Attempt to access each piece of media
        channel = self.make_request(

@@ -756,112 +756,6 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
        self.assertFalse(os.path.exists(local_path))


class ListQuarantinedMediaTestCase(_AdminMediaTests):
    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.store = hs.get_datastores().main
        self.server_name = hs.hostname

    @parameterized.expand(["local", "remote"])
    def test_no_auth(self, kind: str) -> None:
        """
        Try to list quarantined media without authentication.
        """

        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=%s" % (kind,),
        )

        self.assertEqual(401, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.MISSING_TOKEN, channel.json_body["errcode"])

    @parameterized.expand(["local", "remote"])
    def test_requester_is_not_admin(self, kind: str) -> None:
        """
        If the user is not a server admin, an error is returned.
        """
        self.other_user = self.register_user("user", "pass")
        self.other_user_token = self.login("user", "pass")

        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=%s" % (kind,),
            access_token=self.other_user_token,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_list_quarantined_media(self) -> None:
        """
        Ensure we actually get results for each page. We can't really test that
        remote media is quarantined, but we can test that local media is.
        """
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")

        def _upload() -> str:
            return self.helper.upload_media(
                SMALL_PNG, tok=self.admin_user_tok, expect_code=200
            )["content_uri"][6:].split("/")[1]  # Cut off 'mxc://' and domain

        self.media_id_1 = _upload()
        self.media_id_2 = _upload()
        self.media_id_3 = _upload()

        def _quarantine(media_id: str) -> None:
            channel = self.make_request(
                "POST",
                "/_synapse/admin/v1/media/quarantine/%s/%s"
                % (
                    self.server_name,
                    media_id,
                ),
                access_token=self.admin_user_tok,
            )
            self.assertEqual(200, channel.code, msg=channel.json_body)

        _quarantine(self.media_id_1)
        _quarantine(self.media_id_2)
        _quarantine(self.media_id_3)

        # Page 1
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=local&from=0&limit=1",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(1, len(channel.json_body["media"]))

        # Page 2
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=local&from=1&limit=1",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(1, len(channel.json_body["media"]))

        # Page 3
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=local&from=2&limit=1",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(1, len(channel.json_body["media"]))

        # Page 4 (no media)
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/media/quarantined?kind=local&from=3&limit=1",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(0, len(channel.json_body["media"]))


class QuarantineMediaByIDTestCase(_AdminMediaTests):
    def upload_media_and_return_media_id(self, data: bytes) -> str:
        # Upload some media into the room

@@ -2976,120 +2976,6 @@ class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(private_room_id, channel.json_body["joined_rooms"][0])

    def test_joined_rooms(self) -> None:
        """
        Test joined_rooms admin endpoint.
        """

        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/join/{self.public_room_id}",
            content={"user_id": self.second_user_id},
            access_token=self.second_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.public_room_id, channel.json_body["room_id"])

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/users/{self.second_user_id}/joined_rooms",
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(self.public_room_id, channel.json_body["joined_rooms"][0])

    def test_memberships(self) -> None:
        """
        Test user memberships admin endpoint.
        """

        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/join/{self.public_room_id}",
            content={"user_id": self.second_user_id},
            access_token=self.second_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        other_room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )

        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/join/{other_room_id}",
            content={"user_id": self.second_user_id},
            access_token=self.second_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/users/{self.second_user_id}/memberships",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(
            {
                "memberships": {
                    self.public_room_id: Membership.JOIN,
                    other_room_id: Membership.JOIN,
                }
            },
            channel.json_body,
        )

        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/rooms/{other_room_id}/leave",
            content={"user_id": self.second_user_id},
            access_token=self.second_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        invited_room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/rooms/{invited_room_id}/invite",
            content={"user_id": self.second_user_id},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        banned_room_id = self.helper.create_room_as(
            self.admin_user, tok=self.admin_user_tok
        )
        channel = self.make_request(
            "POST",
            f"/_matrix/client/v3/rooms/{banned_room_id}/ban",
            content={"user_id": self.second_user_id},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)

        channel = self.make_request(
            "GET",
            f"/_synapse/admin/v1/users/{self.second_user_id}/memberships",
            access_token=self.admin_user_tok,
        )

        self.assertEqual(200, channel.code, msg=channel.json_body)
        self.assertEqual(
            {
                "memberships": {
                    self.public_room_id: Membership.JOIN,
                    other_room_id: Membership.LEAVE,
                    invited_room_id: Membership.INVITE,
                    banned_room_id: Membership.BAN,
                }
            },
            channel.json_body,
        )

    def test_context_as_non_admin(self) -> None:
        """
        Test that, without being admin, one cannot use the context admin API

@@ -55,16 +55,12 @@ class UserMutualRoomsTest(unittest.HomeserverTestCase):

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.store = hs.get_datastores().main
        mutual_rooms.MUTUAL_ROOMS_BATCH_LIMIT = 10

    def _get_mutual_rooms(
        self, token: str, other_user: str, since_token: str | None = None
    ) -> FakeChannel:
    def _get_mutual_rooms(self, token: str, other_user: str) -> FakeChannel:
        return self.make_request(
            "GET",
            "/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms"
            f"?user_id={quote(other_user)}"
            + (f"&from={quote(since_token)}" if since_token else ""),
            f"?user_id={quote(other_user)}",
            access_token=token,
        )

@@ -145,52 +141,6 @@ class UserMutualRoomsTest(unittest.HomeserverTestCase):
        for room_id_id in channel.json_body["joined"]:
            self.assertIn(room_id_id, [room_id_one, room_id_two])

    def _create_rooms_for_pagination_test(
        self, count: int
    ) -> tuple[str, str, list[str]]:
        u1 = self.register_user("user1", "pass")
        u1_token = self.login(u1, "pass")
        u2 = self.register_user("user2", "pass")
        u2_token = self.login(u2, "pass")
        room_ids = []
        for i in range(count):
            room_id = self.helper.create_room_as(u1, is_public=i % 2 == 0, tok=u1_token)
            self.helper.invite(room_id, src=u1, targ=u2, tok=u1_token)
            self.helper.join(room_id, user=u2, tok=u2_token)
            room_ids.append(room_id)
        room_ids.sort()
        return u1_token, u2, room_ids

    def test_shared_room_list_pagination_two_pages(self) -> None:
        u1_token, u2, room_ids = self._create_rooms_for_pagination_test(15)

        channel = self._get_mutual_rooms(u1_token, u2)
        self.assertEqual(200, channel.code, channel.result)
        self.assertEqual(channel.json_body["joined"], room_ids[0:10])
        self.assertIn("next_batch", channel.json_body)

        channel = self._get_mutual_rooms(u1_token, u2, channel.json_body["next_batch"])
        self.assertEqual(200, channel.code, channel.result)
        self.assertEqual(channel.json_body["joined"], room_ids[10:20])
        self.assertNotIn("next_batch", channel.json_body)

    def test_shared_room_list_pagination_one_page(self) -> None:
        u1_token, u2, room_ids = self._create_rooms_for_pagination_test(10)

        channel = self._get_mutual_rooms(u1_token, u2)
        self.assertEqual(200, channel.code, channel.result)
        self.assertEqual(channel.json_body["joined"], room_ids)
        self.assertNotIn("next_batch", channel.json_body)

    def test_shared_room_list_pagination_invalid_token(self) -> None:
        u1_token, u2, room_ids = self._create_rooms_for_pagination_test(10)

        channel = self._get_mutual_rooms(u1_token, u2, "!<>##faketoken")
        self.assertEqual(400, channel.code, channel.result)
        self.assertEqual(
            "M_INVALID_PARAM", channel.json_body["errcode"], channel.result
        )

    def test_shared_room_list_after_leave(self) -> None:
        """
        A room should no longer be considered shared if the other
@@ -222,14 +172,3 @@ class UserMutualRoomsTest(unittest.HomeserverTestCase):
        channel = self._get_mutual_rooms(u2_token, u1)
        self.assertEqual(200, channel.code, channel.result)
        self.assertEqual(len(channel.json_body["joined"]), 0)

    def test_shared_room_list_nonexistent_user(self) -> None:
        u1 = self.register_user("user1", "pass")
        u1_token = self.login(u1, "pass")

        # Check shared rooms from user1's perspective.
        # We should see no rooms in common with a nonexistent user.
        channel = self._get_mutual_rooms(u1_token, "@meow:example.com")
        self.assertEqual(200, channel.code, channel.result)
        self.assertEqual(len(channel.json_body["joined"]), 0)
        self.assertNotIn("next_batch", channel.json_body)

@@ -147,7 +147,7 @@ class FakeChannel:
    _reactor: MemoryReactorClock
    result: dict = attr.Factory(dict)
    _ip: str = "127.0.0.1"
    _producer: Optional[Union[IPullProducer, IPushProducer]] = None
    _producer: IPullProducer | IPushProducer | None = None
    resource_usage: ContextResourceUsage | None = None
    _request: Request | None = None

@@ -248,7 +248,7 @@ class FakeChannel:
        # TODO This should ensure that the IProducer is an IPushProducer or
        # IPullProducer, unfortunately twisted.protocols.basic.FileSender does
        # implement those, but doesn't declare it.
        self._producer = cast(Union[IPushProducer, IPullProducer], producer)
        self._producer = cast(IPushProducer | IPullProducer, producer)
        self.producerStreaming = streaming

    def _produce() -> None:
@@ -852,7 +852,7 @@ class FakeTransport:
    """Test reactor
    """

    _protocol: Optional[IProtocol] = None
    _protocol: IProtocol | None = None
    """The Protocol which is producing data for this transport. Optional, but if set
    will get called back for connectionLost() notifications etc.
    """
@@ -871,7 +871,7 @@ class FakeTransport:
    disconnected = False
    connected = True
    buffer: bytes = b""
    producer: Optional[IPushProducer] = None
    producer: IPushProducer | None = None
    autoflush: bool = True

    def getPeer(self) -> IPv4Address | IPv6Address:
@@ -1073,7 +1073,7 @@ def setup_test_homeserver(
    cleanup_func: Callable[[Callable[[], Optional["Deferred[None]"]]], None],
    server_name: str = "test",
    config: HomeServerConfig | None = None,
    reactor: Optional[ISynapseReactor] = None,
    reactor: ISynapseReactor | None = None,
    homeserver_to_use: type[HomeServer] = TestHomeServer,
    db_txn_limit: int | None = None,
    **extra_homeserver_attributes: Any,

|
||||
Iterable,
|
||||
Mapping,
|
||||
NoReturn,
|
||||
Optional,
|
||||
Protocol,
|
||||
TypeVar,
|
||||
)
|
||||
@@ -637,7 +636,7 @@ class HomeserverTestCase(TestCase):
|
||||
self,
|
||||
server_name: str | None = None,
|
||||
config: JsonDict | None = None,
|
||||
reactor: Optional[ISynapseReactor] = None,
|
||||
reactor: ISynapseReactor | None = None,
|
||||
clock: Clock | None = None,
|
||||
**extra_homeserver_attributes: Any,
|
||||
) -> HomeServer:
|
||||
|
||||
@@ -198,9 +198,7 @@ def default_config(
|
||||
"rc_invites": {
|
||||
"per_room": {"per_second": 10000, "burst_count": 10000},
|
||||
"per_user": {"per_second": 10000, "burst_count": 10000},
|
||||
"per_issuer": {"per_second": 10000, "burst_count": 10000},
|
||||
},
|
||||
"rc_room_creation": {"per_second": 10000, "burst_count": 10000},
|
||||
"rc_3pid_validation": {"per_second": 10000, "burst_count": 10000},
|
||||
"rc_presence": {"per_user": {"per_second": 10000, "burst_count": 10000}},
|
||||
"saml2_enabled": False,
|
||||
|
||||