Compare commits

7 Commits, comparing devon/medi… with quenting/d…

| Author | SHA1 | Date |
|---|---|---|
| | 0535f631f5 | |
| | f2188cf931 | |
| | c4746a321d | |
| | 47e4c6eb79 | |
| | b9fab43943 | |
| | 799bd77170 | |
| | ab3f4dc5b5 | |
```diff
@@ -1,10 +0,0 @@
-#!/bin/sh
-set -xeu
-
-# On 32-bit Linux platforms, we need libatomic1 to use rustup
-if command -v yum &> /dev/null; then
-    yum install -y libatomic
-fi
-
-# Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
```

```diff
@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
 
 try:
     lock_version = lockfile["metadata"]["lock-version"]
-    assert lock_version == "2.1"
+    assert lock_version == "2.0"
 except Exception:
     print(
         """\
-Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
-and re-run `poetry lock`. See the Poetry cheat sheet at
+Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
+and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
 https://element-hq.github.io/synapse/develop/development/dependencies.html
 """
     )
```
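For context on the hunk above: `poetry.lock` is TOML, and its `metadata.lock-version` field records the lockfile schema version (Poetry 2.x writes `2.1`, Poetry 1.x writes `2.0`, matching the Poetry version change elsewhere in this compare). A minimal standalone sketch of such a guard, assuming Python 3.11+ for the stdlib `tomllib`; the real script's module layout isn't shown in this view:

```python
import sys
import tomllib  # stdlib TOML parser (Python 3.11+); older scripts use the tomli package

EXPECTED = "2.1"  # lockfile schema written by Poetry 2.x; Poetry 1.x writes "2.0"

# poetry.lock is TOML; its metadata table records the lockfile schema version.
with open("poetry.lock", "rb") as f:
    lockfile = tomllib.load(f)

lock_version = lockfile["metadata"]["lock-version"]
if lock_version != EXPECTED:
    sys.exit(
        f"Lockfile is version {lock_version}, expected {EXPECTED}. "
        "You probably need to upgrade poetry and re-run `poetry lock`."
    )
```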
.github/workflows/docker.yml: 30 changed lines (vendored)

```diff
@@ -4,8 +4,9 @@ name: Build docker images
 
 on:
   push:
-    tags: ["v*"]
-    branches: [ master, main, develop ]
+    # TEMP
+    # tags: ["v*"]
+    # branches: [ master, main, develop ]
   workflow_dispatch:
 
 permissions:
@@ -18,22 +19,22 @@ jobs:
     steps:
       - name: Set up QEMU
         id: qemu
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@v3
         with:
           platforms: arm64
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3
 
       - name: Inspect builder
         run: docker buildx inspect
 
       - name: Install Cosign
-        uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
+        uses: sigstore/cosign-installer@v3.8.0
 
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -43,13 +44,17 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
+        # TEMP
+        if: ${{ false }}
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
+        # TEMP
+        if: ${{ false }}
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -57,7 +62,7 @@ jobs:
 
       - name: Calculate docker image tag
         id: set-tag
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@master
         with:
           images: |
             docker.io/matrixdotorg/synapse
@@ -72,9 +77,10 @@ jobs:
 
       - name: Build and push all platforms
         id: build-and-push
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@v6
         with:
-          push: true
+          # TEMP
+          push: false
           labels: |
             gitsha1=${{ github.sha }}
             org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
@@ -88,6 +94,8 @@ jobs:
             CARGO_NET_GIT_FETCH_WITH_CLI=true
 
       - name: Sign the images with GitHub OIDC Token
+        # TEMP
+        if: ${{ false }}
         env:
           DIGEST: ${{ steps.build-and-push.outputs.digest }}
           TAGS: ${{ steps.set-tag.outputs.tags }}
```
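The dominant pattern in this compare, starting with the workflow above: references to third-party actions pinned to a full commit SHA (with the human-readable tag kept as a trailing comment) are swapped for floating tags. A short sketch of the two conventions, using a checkout step taken from the diff itself:

```yaml
steps:
  # Pinned: resolves to exactly this commit, even if the v4.2.2 tag is later moved.
  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

  # Floating: v4 is a mutable tag that the action's maintainers can repoint.
  - uses: actions/checkout@v4
```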
.github/workflows/docs-pr-netlify.yaml: 4 changed lines (vendored)

```diff
@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
+        uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
           path: book
 
       - name: 📤 Deploy to Netlify
-        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
+        uses: matrix-org/netlify-pr-preview@v3
         with:
           path: book
           owner: ${{ github.event.workflow_run.head_repository.owner.login }}
```
.github/workflows/docs-pr.yaml: 8 changed lines (vendored)

```diff
@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
 
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: book
           path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
```
.github/workflows/docs.yaml: 4 changed lines (vendored)

```diff
@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
 
```
.github/workflows/fix_lint.yaml: 11 changed lines (vendored)

```diff
@@ -13,22 +13,21 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@master
         with:
          # We use nightly so that `fmt` correctly groups together imports, and
          # clippy correctly fixes up the benchmarks.
          toolchain: nightly-2022-12-01
          components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           install-project: "false"
-          poetry-version: "2.1.1"
 
       - name: Run ruff check
         continue-on-error: true
@@ -44,6 +43,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0
+      - uses: stefanzweifel/git-auto-commit-action@v5
         with:
           commit_message: "Attempt to fix linting"
```
.github/workflows/latest_deps.yml: 34 changed lines (vendored)

```diff
@@ -39,17 +39,17 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
@@ -72,11 +72,11 @@ jobs:
         postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -86,7 +86,7 @@ jobs:
           -e POSTGRES_PASSWORD=postgres \
           -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
           postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -145,11 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -164,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -192,15 +192,15 @@ jobs:
         database: Postgres
 
     steps:
-      - name: Check out synapse codebase
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -225,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/poetry_lockfile.yaml: 4 changed lines (vendored)

```diff
@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
       - run: pip install tomli
```
.github/workflows/push_complement_image.yml: 10 changed lines (vendored)

```diff
@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        uses: docker/metadata-action@v5
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
```
.github/workflows/release-artifacts.yml: 34 changed lines (vendored)

```diff
@@ -27,8 +27,8 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
       - id: set-distros
@@ -55,18 +55,18 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
         with:
           path: src
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,7 +74,7 @@ jobs:
             ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+        uses: actions/setup-python@v5
         with:
           python-version: '3.x'
 
@@ -101,7 +101,7 @@ jobs:
           echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
 
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         with:
           name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
           path: debs/*
@@ -130,20 +130,20 @@ jobs:
             arch: aarch64
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@v5
         with:
           # setup-python@v4 doesn't impose a default python version. Need to use 3.x
           # here, because `python` on osx points to Python 2.7.
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==2.23.0
+        run: python -m pip install cibuildwheel==2.19.1
 
       - name: Set up QEMU to emulate aarch64
         if: matrix.arch == 'aarch64'
-        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
+        uses: docker/setup-qemu-action@v3
         with:
           platforms: arm64
 
@@ -165,7 +165,7 @@ jobs:
           CARGO_NET_GIT_FETCH_WITH_CLI: true
           CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@v4
         with:
           name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
           path: ./wheelhouse/*.whl
@@ -176,8 +176,8 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.10'
 
@@ -186,7 +186,7 @@ jobs:
       - name: Build sdist
         run: python -m build --sdist
 
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - uses: actions/upload-artifact@v4
         with:
           name: Sdist
           path: dist/*.tar.gz
@@ -203,7 +203,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        uses: actions/download-artifact@v4
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
         # that.
@@ -213,7 +213,7 @@ jobs:
           tar -cvJf debs.tar.xz debs
       - name: Attach to release
         # Pinned to work around https://github.com/softprops/action-gh-release/issues/445
-        uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
+        uses: softprops/action-gh-release@v0.1.15
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         with:
```
.github/workflows/tests.yml: 150 changed lines (vendored)

```diff
@@ -23,7 +23,7 @@ jobs:
       linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
       linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
     steps:
-      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+      - uses: dorny/paths-filter@v3
         id: filter
         # We only check on PRs
         if: startsWith(github.ref, 'refs/pull/')
@@ -83,14 +83,14 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/generate_sample_config.sh --check
       - run: poetry run scripts-dev/config-lint.sh
@@ -101,8 +101,8 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -111,8 +111,8 @@ jobs:
   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -124,12 +124,11 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "2.1.1"
           install-project: "false"
 
       - name: Run ruff check
@@ -146,14 +145,14 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           # We want to make use of type hints in optional dependencies too.
           extras: all
@@ -162,12 +161,11 @@ jobs:
           # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
           # To make CI green, err towards caution and install the project.
           install-project: "true"
-          poetry-version: "2.1.1"
 
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
+        uses: actions/cache@v4
         with:
           path: |
             .mypy_cache
@@ -180,7 +178,7 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh
 
@@ -188,11 +186,11 @@ jobs:
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -206,15 +204,15 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/check_pydantic_models.py
 
@@ -224,13 +222,13 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
+        uses: dtolnay/rust-toolchain@1.66.0
         with:
           components: clippy
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy -- -D warnings
 
@@ -242,14 +240,14 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
           components: clippy
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy --all-features -- -D warnings
 
@@ -259,15 +257,15 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@master
         with:
           # We use nightly so that it correctly groups together imports
           toolchain: nightly-2022-12-01
           components: rustfmt
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo fmt --check
 
@@ -278,8 +276,8 @@ jobs:
     needs: changes
     if: ${{ needs.changes.outputs.linting_readme == 'true' }}
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - run: "pip install rstcheck"
@@ -303,7 +301,7 @@ jobs:
       - lint-readme
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v3
         with:
           needs: ${{ toJSON(needs) }}
 
@@ -326,8 +324,8 @@ jobs:
     needs: linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
       - id: get-matrix
@@ -347,7 +345,7 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
@@ -362,13 +360,13 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.job.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: ${{ matrix.job.extras }}
       - name: Await PostgreSQL
         if: ${{ matrix.job.postgres-version }}
@@ -401,11 +399,11 @@ jobs:
       - changes
     runs-on: ubuntu-22.04
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -414,7 +412,7 @@ jobs:
           sudo apt-get -qq install build-essential libffi-dev python3-dev \
             libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
-      - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.9'
 
@@ -464,13 +462,13 @@ jobs:
         extras: ["all"]
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       # Install libs necessary for PyPy to build binary wheels for dependencies
       - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: ${{ matrix.extras }}
       - run: poetry run trial --jobs=2 tests
       - name: Dump logs
@@ -514,13 +512,13 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - name: Prepare test blacklist
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -529,7 +527,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -559,11 +557,11 @@ jobs:
           --health-retries 5
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "postgres"
       - run: .ci/scripts/test_export_data_command.sh
         env:
@@ -603,7 +601,7 @@ jobs:
           --health-retries 5
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - name: Add PostgreSQL apt repository
         # We need a version of pg_dump that can handle the version of
         # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -614,10 +612,10 @@ jobs:
           wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
           sudo apt-get update
       - run: sudo apt-get -qq install xmlsec1 postgresql-client
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "postgres"
       - run: .ci/scripts/test_synapse_port_db.sh
         id: run_tester_script
@@ -627,7 +625,7 @@ jobs:
           PGPASSWORD: postgres
           PGDATABASE: postgres
       - name: "Upload schema differences"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
         with:
           name: Schema dumps
@@ -657,19 +655,19 @@ jobs:
         database: Postgres
 
     steps:
-      - name: Checkout synapse codebase
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -692,11 +690,11 @@ jobs:
       - changes
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo test
 
@@ -710,13 +708,13 @@ jobs:
       - changes
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
+        uses: dtolnay/rust-toolchain@master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo bench --no-run
 
@@ -735,7 +733,7 @@ jobs:
       - linting-done
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v3
         with:
           needs: ${{ toJSON(needs) }}
```
.github/workflows/triage-incoming.yml: 2 changed lines (vendored)

```diff
@@ -6,7 +6,7 @@ on:
 
 jobs:
   triage:
-    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
+    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
     with:
       project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
       content_id: ${{ github.event.issue.node_id }}
```
.github/workflows/triage_labelled.yml: 2 changed lines (vendored)

```diff
@@ -11,7 +11,7 @@ jobs:
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
     steps:
-      - uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits)
+      - uses: actions/add-to-project@main
        id: add_project
        with:
          project-url: "https://github.com/orgs/matrix-org/projects/67"
```
.github/workflows/twisted_trunk.yml: 36 changed lines (vendored)

```diff
@@ -40,17 +40,16 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
           extras: "all"
-          poetry-version: "2.1.1"
       - run: |
           poetry remove twisted
           poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
@@ -65,18 +64,17 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
           extras: "all test"
-          poetry-version: "2.1.1"
       - run: |
           poetry remove twisted
           poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
@@ -110,11 +108,11 @@ jobs:
       - ${{ github.workspace }}:/src
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -138,7 +136,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -166,14 +164,14 @@ jobs:
 
     steps:
       - name: Run actions/checkout@v4 for synapse
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -183,11 +181,11 @@ jobs:
         run: |
           set -x
           DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
-          pipx install poetry==2.1.1
+          pipx install poetry==1.3.2
 
           poetry remove -n twisted
           poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
-          poetry lock
+          poetry lock --no-update
         working-directory: synapse
 
       - run: |
@@ -208,7 +206,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
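The `poetry lock` vs `poetry lock --no-update` swap in the hunk above tracks the Poetry downgrade running through this compare: in Poetry 1.x, `poetry lock` refreshed dependencies to their latest allowed versions unless `--no-update` was passed, whereas Poetry 2.x locks without updating by default and removed the flag. A hedged sketch of the equivalent invocations:

```sh
# Poetry 1.x: regenerate the lockfile without bumping locked versions
poetry lock --no-update

# Poetry 2.x: locking no longer updates by default, and --no-update is gone
poetry lock
```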
CHANGES.md: 180 changed lines

```diff
@@ -1,183 +1,3 @@
-# Synapse 1.128.0 (2025-04-08)
-
-No significant changes since 1.128.0rc1.
-
-
-
-
-# Synapse 1.128.0rc1 (2025-04-01)
-
-### Features
-
-- Add an access token introspection cache to make Matrix Authentication Service integration ([MSC3861](https://github.com/matrix-org/matrix-doc/pull/3861)) more efficient. ([\#18231](https://github.com/element-hq/synapse/issues/18231))
-- Add background job to clear unreferenced state groups. ([\#18254](https://github.com/element-hq/synapse/issues/18254))
-- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. ([\#18277](https://github.com/element-hq/synapse/issues/18277), [\#18302](https://github.com/element-hq/synapse/issues/18302), [\#18296](https://github.com/element-hq/synapse/issues/18296))
-
-### Bugfixes
-
-- Add index to sliding sync ([MSC4186](https://github.com/matrix-org/matrix-doc/pull/4186)) membership snapshot table, to fix a performance issue. ([\#18074](https://github.com/element-hq/synapse/issues/18074))
-
-### Updates to the Docker image
-
-- Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with `:{arch}`. ([\#18271](https://github.com/element-hq/synapse/issues/18271))
-- Always specify base image debian versions with a build argument. ([\#18272](https://github.com/element-hq/synapse/issues/18272))
-- Allow passing arguments to `start_for_complement.sh` (to be sent to `configure_workers_and_start.py`). ([\#18273](https://github.com/element-hq/synapse/issues/18273))
-- Make some improvements to the `prefix-log` script in the workers image. ([\#18274](https://github.com/element-hq/synapse/issues/18274))
-- Use `uv pip` to install `supervisor` in the worker image. ([\#18275](https://github.com/element-hq/synapse/issues/18275))
-- Avoid needing to download & use `rsync` in a build layer. ([\#18287](https://github.com/element-hq/synapse/issues/18287))
-
-### Improved Documentation
-
-- Fix how to obtain access token and change naming from riot to element ([\#18225](https://github.com/element-hq/synapse/issues/18225))
-- Correct a small typo in the SSO mapping providers documentation. ([\#18276](https://github.com/element-hq/synapse/issues/18276))
-- Add docs for how to clear out the Poetry wheel cache. ([\#18283](https://github.com/element-hq/synapse/issues/18283))
-
-### Internal Changes
-
-- Add a column `participant` to `room_memberships` table. ([\#18068](https://github.com/element-hq/synapse/issues/18068))
-- Update Poetry to 2.1.1, including updating the lock file version. ([\#18251](https://github.com/element-hq/synapse/issues/18251))
-- Pin GitHub Actions dependencies by commit hash. ([\#18255](https://github.com/element-hq/synapse/issues/18255))
-- Add DB delta to remove the old state group deletion job. ([\#18284](https://github.com/element-hq/synapse/issues/18284))
-
-### Updates to locked dependencies
-
-* Bump actions/add-to-project from f5473ace9aeee8b97717b281e26980aa5097023f to 280af8ae1f83a494cfad2cb10f02f6d13529caa9. ([\#18303](https://github.com/element-hq/synapse/issues/18303))
-* Bump actions/cache from 4.2.2 to 4.2.3. ([\#18266](https://github.com/element-hq/synapse/issues/18266))
-* Bump actions/download-artifact from 4.2.0 to 4.2.1. ([\#18268](https://github.com/element-hq/synapse/issues/18268))
-* Bump actions/setup-python from 5.4.0 to 5.5.0. ([\#18298](https://github.com/element-hq/synapse/issues/18298))
-* Bump actions/upload-artifact from 4.6.1 to 4.6.2. ([\#18304](https://github.com/element-hq/synapse/issues/18304))
-* Bump authlib from 1.4.1 to 1.5.1. ([\#18306](https://github.com/element-hq/synapse/issues/18306))
-* Bump dawidd6/action-download-artifact from 8 to 9. ([\#18204](https://github.com/element-hq/synapse/issues/18204))
-* Bump jinja2 from 3.1.5 to 3.1.6. ([\#18223](https://github.com/element-hq/synapse/issues/18223))
-* Bump log from 0.4.26 to 0.4.27. ([\#18267](https://github.com/element-hq/synapse/issues/18267))
-* Bump phonenumbers from 8.13.50 to 9.0.2. ([\#18299](https://github.com/element-hq/synapse/issues/18299))
-* Bump pygithub from 2.5.0 to 2.6.1. ([\#18243](https://github.com/element-hq/synapse/issues/18243))
-* Bump pyo3-log from 0.12.1 to 0.12.2. ([\#18269](https://github.com/element-hq/synapse/issues/18269))
-
-# Synapse 1.127.1 (2025-03-26)
-
-## Security
-- Fix [CVE-2025-30355](https://www.cve.org/CVERecord?id=CVE-2025-30355) / [GHSA-v56r-hwv5-mxg6](https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6). **High severity vulnerability affecting federation. The vulnerability has been exploited in the wild.**
-
-
-# Synapse 1.127.0 (2025-03-25)
-
-No significant changes since 1.127.0rc1.
-
-
-
-# Synapse 1.127.0rc1 (2025-03-18)
-
-### Features
-
-- Update [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140) implementation to no longer cancel a user's own delayed state events with an event type & state key that match a more recent state event sent by that user. ([\#17810](https://github.com/element-hq/synapse/issues/17810))
-
-### Improved Documentation
-
-- Fixed a minor typo in the Synapse documentation. Contributed by @karuto12. ([\#18224](https://github.com/element-hq/synapse/issues/18224))
-
-### Internal Changes
-
-- Remove undocumented `SYNAPSE_USE_FROZEN_DICTS` environment variable. ([\#18123](https://github.com/element-hq/synapse/issues/18123))
-- Fix detection of workflow failures in the release script. ([\#18211](https://github.com/element-hq/synapse/issues/18211))
-- Add caching support to media endpoints. ([\#18235](https://github.com/element-hq/synapse/issues/18235))
-
-### Updates to locked dependencies
-
-* Bump anyhow from 1.0.96 to 1.0.97. ([\#18201](https://github.com/element-hq/synapse/issues/18201))
-* Bump bcrypt from 4.2.1 to 4.3.0. ([\#18207](https://github.com/element-hq/synapse/issues/18207))
-* Bump bytes from 1.10.0 to 1.10.1. ([\#18227](https://github.com/element-hq/synapse/issues/18227))
-* Bump http from 1.2.0 to 1.3.1. ([\#18245](https://github.com/element-hq/synapse/issues/18245))
-* Bump sentry-sdk from 2.19.2 to 2.22.0. ([\#18205](https://github.com/element-hq/synapse/issues/18205))
-* Bump serde from 1.0.218 to 1.0.219. ([\#18228](https://github.com/element-hq/synapse/issues/18228))
-* Bump serde_json from 1.0.139 to 1.0.140. ([\#18202](https://github.com/element-hq/synapse/issues/18202))
-* Bump ulid from 1.2.0 to 1.2.1. ([\#18246](https://github.com/element-hq/synapse/issues/18246))
-
-# Synapse 1.126.0 (2025-03-11)
-
-Administrators using the Debian/Ubuntu packages from `packages.matrix.org`, please check
-[the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/release-v1.126/docs/upgrade.md#change-of-signing-key-expiry-date-for-the-debianubuntu-package-repository)
-as we have recently updated the expiry date on the repository's GPG signing key. The old version of the key will expire on `2025-03-15`.
-
-No significant changes since 1.126.0rc3.
-
-
-
-# Synapse 1.126.0rc3 (2025-03-07)
-
-### Bugfixes
-
-- Revert the background job to clear unreferenced state groups (that was introduced in v1.126.0rc1), due to [a suspected issue](https://github.com/element-hq/synapse/issues/18217) that causes increased disk usage. ([\#18222](https://github.com/element-hq/synapse/issues/18222))
-
-
-
-# Synapse 1.126.0rc2 (2025-03-05)
-
-### Internal Changes
-
-- Fix wheel building configuration in CI by installing libatomic1. ([\#18212](https://github.com/element-hq/synapse/issues/18212), [\#18213](https://github.com/element-hq/synapse/issues/18213))
-
-# Synapse 1.126.0rc1 (2025-03-04)
-
-Synapse 1.126.0rc1 was not fully released due to an error in CI.
-
-### Features
-
-- Define ratelimit configuration for delayed event management. ([\#18019](https://github.com/element-hq/synapse/issues/18019))
-- Add `form_secret_path` config option. ([\#18090](https://github.com/element-hq/synapse/issues/18090))
-- Add the `--no-secrets-in-config` command line option. ([\#18092](https://github.com/element-hq/synapse/issues/18092))
-- Add background job to clear unreferenced state groups. ([\#18154](https://github.com/element-hq/synapse/issues/18154))
-- Add support for specifying/overriding `id_token_signing_alg_values_supported` for an OpenID identity provider. ([\#18177](https://github.com/element-hq/synapse/issues/18177))
-- Add `worker_replication_secret_path` config option. ([\#18191](https://github.com/element-hq/synapse/issues/18191))
-- Add support for specifying/overriding `redirect_uri` in the authorization and token requests against an OpenID identity provider. ([\#18197](https://github.com/element-hq/synapse/issues/18197))
-
-### Bugfixes
-
-- Make sure we advertise registration as disabled when [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) is enabled. ([\#17661](https://github.com/element-hq/synapse/issues/17661))
-- Prevent suspended users from sending encrypted messages. ([\#18157](https://github.com/element-hq/synapse/issues/18157))
-- Cleanup deleted state group references. ([\#18165](https://github.com/element-hq/synapse/issues/18165))
-- Fix [MSC4108 QR-code login](https://github.com/matrix-org/matrix-spec-proposals/pull/4108) not working with some reverse-proxy setups. ([\#18178](https://github.com/element-hq/synapse/issues/18178))
-- Support device IDs that can't be represented in a scope when delegating auth to Matrix Authentication Service 0.15.0+. ([\#18174](https://github.com/element-hq/synapse/issues/18174))
-
-### Updates to the Docker image
-
-- Speed up the building of the Docker image. ([\#18038](https://github.com/element-hq/synapse/issues/18038))
-
-### Improved Documentation
-
-- Move incorrectly placed version indicator in User Event Redaction Admin API docs. ([\#18152](https://github.com/element-hq/synapse/issues/18152))
-- Document suspension Admin API. ([\#18162](https://github.com/element-hq/synapse/issues/18162))
-
-### Deprecations and Removals
-
-- Disable room list publication by default. ([\#18175](https://github.com/element-hq/synapse/issues/18175))
-
-### Updates to locked dependencies
-
-* Bump anyhow from 1.0.95 to 1.0.96. ([\#18187](https://github.com/element-hq/synapse/issues/18187))
-* Bump authlib from 1.4.0 to 1.4.1. ([\#18190](https://github.com/element-hq/synapse/issues/18190))
-* Bump click from 8.1.7 to 8.1.8. ([\#18189](https://github.com/element-hq/synapse/issues/18189))
-* Bump log from 0.4.25 to 0.4.26. ([\#18184](https://github.com/element-hq/synapse/issues/18184))
-* Bump pyo3-log from 0.12.0 to 0.12.1. ([\#18046](https://github.com/element-hq/synapse/issues/18046))
-* Bump serde from 1.0.217 to 1.0.218. ([\#18183](https://github.com/element-hq/synapse/issues/18183))
-* Bump serde_json from 1.0.138 to 1.0.139. ([\#18186](https://github.com/element-hq/synapse/issues/18186))
-* Bump sigstore/cosign-installer from 3.8.0 to 3.8.1. ([\#18185](https://github.com/element-hq/synapse/issues/18185))
-* Bump types-psycopg2 from 2.9.21.20241019 to 2.9.21.20250121. ([\#18188](https://github.com/element-hq/synapse/issues/18188))
-
-# Synapse 1.125.0 (2025-02-25)
-
-No significant changes since 1.125.0rc1.
-
 # Synapse 1.125.0rc1 (2025-02-18)
 
 ### Features
```
Cargo.lock (56 changed lines, generated)
@@ -13,9 +13,9 @@ dependencies = [

[[package]]
name = "anyhow"
version = "1.0.98"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"

[[package]]
name = "arc-swap"
@@ -67,9 +67,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"

[[package]]
name = "bytes"
version = "1.10.1"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9"

[[package]]
name = "cfg-if"
@@ -173,9 +173,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"

[[package]]
name = "http"
version = "1.3.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
 "bytes",
 "fnv",
@@ -223,9 +223,9 @@ checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"

[[package]]
name = "log"
version = "0.4.27"
version = "0.4.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f"

[[package]]
name = "memchr"
@@ -277,9 +277,9 @@ dependencies = [

[[package]]
name = "pyo3"
version = "0.23.5"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
checksum = "57fe09249128b3173d092de9523eaa75136bf7ba85e0d69eca241c7939c933cc"
dependencies = [
 "anyhow",
 "cfg-if",
@@ -296,9 +296,9 @@ dependencies = [

[[package]]
name = "pyo3-build-config"
version = "0.23.5"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
checksum = "1cd3927b5a78757a0d71aa9dff669f903b1eb64b54142a9bd9f757f8fde65fd7"
dependencies = [
 "once_cell",
 "target-lexicon",
@@ -306,9 +306,9 @@ dependencies = [

[[package]]
name = "pyo3-ffi"
version = "0.23.5"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
checksum = "dab6bb2102bd8f991e7749f130a70d05dd557613e39ed2deeee8e9ca0c4d548d"
dependencies = [
 "libc",
 "pyo3-build-config",
@@ -316,9 +316,9 @@ dependencies = [

[[package]]
name = "pyo3-log"
version = "0.12.3"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7079e412e909af5d6be7c04a7f29f6a2837a080410e1c529c9dee2c367383db4"
checksum = "3eb421dc86d38d08e04b927b02424db480be71b777fa3a56f32e2f2a3a1a3b08"
dependencies = [
 "arc-swap",
 "log",
@@ -327,9 +327,9 @@ dependencies = [

[[package]]
name = "pyo3-macros"
version = "0.23.5"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
checksum = "91871864b353fd5ffcb3f91f2f703a22a9797c91b9ab497b1acac7b07ae509c7"
dependencies = [
 "proc-macro2",
 "pyo3-macros-backend",
@@ -339,9 +339,9 @@ dependencies = [

[[package]]
name = "pyo3-macros-backend"
version = "0.23.5"
version = "0.23.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
checksum = "43abc3b80bc20f3facd86cd3c60beed58c3e2aa26213f3cda368de39c60a27e4"
dependencies = [
 "heck",
 "proc-macro2",
@@ -437,18 +437,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"

[[package]]
name = "serde"
version = "1.0.219"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
dependencies = [
 "serde_derive",
]

[[package]]
name = "serde_derive"
version = "1.0.219"
version = "1.0.217"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
dependencies = [
 "proc-macro2",
 "quote",
@@ -457,9 +457,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.140"
version = "1.0.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
dependencies = [
 "itoa",
 "memchr",
@@ -544,9 +544,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"

[[package]]
name = "ulid"
version = "1.2.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
checksum = "ab82fc73182c29b02e2926a6df32f2241dbadb5cfc111fd595515b3598f46bb3"
dependencies = [
 "rand",
 "web-time",
changelog.d/17661.bugfix (new file, 1 line)
@@ -0,0 +1 @@
Make sure we advertise registration as disabled when MSC3861 is enabled.

changelog.d/18038.docker (new file, 1 line)
@@ -0,0 +1 @@
Speed up the building of the Docker image.

changelog.d/18039.docker (new file, 1 line)
@@ -0,0 +1 @@
Use a [`distroless`](https://github.com/GoogleContainerTools/distroless) base runtime image.

@@ -1 +0,0 @@
Disable statement timeout during room purge.

changelog.d/18165.bugfix (new file, 1 line)
@@ -0,0 +1 @@
Cleanup deleted state group references.

@@ -1 +0,0 @@
Add `passthrough_authorization_parameters` in OIDC configuration to allow to pass parameters to the authorization grant URL.

@@ -1 +0,0 @@
Add documentation for configuring [Pocket ID](https://github.com/pocket-id/pocket-id) as an OIDC provider.

@@ -1 +0,0 @@
In configure_workers_and_start.py, use the same absolute path of Python in the interpreter shebang, and invoke child Python processes with `sys.executable`.

@@ -1 +0,0 @@
Optimize the build of the workers image.

@@ -1 +0,0 @@
In start_for_complement.sh, replace some external program calls with shell builtins.

@@ -1 +0,0 @@
Optimize the build of the complement-synapse image.

@@ -1 +0,0 @@
When generating container scripts from templates, don't add a leading newline so that their shebangs may be handled correctly.

@@ -1 +0,0 @@
Fix typo in docs about the `push` config option. Contributed by @HarHarLinks.

@@ -1 +0,0 @@
Fix `force_tracing_for_users` config when using delegated auth.

@@ -1 +0,0 @@
Fix the token introspection cache logging access tokens when MAS integration is in use.

@@ -1 +0,0 @@
Add cache to storage functions used to auth requests when using delegated auth.

@@ -1 +0,0 @@
Stop caching introspection failures when delegating auth to MAS.

@@ -1 +0,0 @@
Fix `ExternalIDReuse` exception after migrating to MAS on workers with a high traffic.

@@ -1 +0,0 @@
Fix minor performance regression caused by tracking of room participation. Regressed in v1.128.0.

@@ -1 +0,0 @@
Add support for handling `GET /devices/` on workers.

@@ -1 +0,0 @@
Allow `/rooms/` admin API to be run on workers.

@@ -1 +0,0 @@
Fix longstanding bug where Synapse would immediately retry a failing push endpoint when a new event is received, ignoring any backoff timers.

@@ -1 +0,0 @@
Minor performance improvements to the notifier.

@@ -1 +0,0 @@
Slight performance increase when using the ratelimiter.

@@ -1 +0,0 @@
Allow client & media admin apis to coexist.
debian/build_virtualenv (2 changed lines, vendored)
@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==2.1.1 poetry-plugin-export==1.9.0
pip install poetry==1.3.2
poetry export \
    --extras all \
    --extras test \
debian/changelog (61 changed lines, vendored)
@@ -1,64 +1,3 @@
matrix-synapse-py3 (1.128.0) stable; urgency=medium

  * New Synapse release 1.128.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Apr 2025 14:09:54 +0100

matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium

  * Update Poetry to 2.1.1.
  * New synapse release 1.128.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Apr 2025 14:35:33 +0000

matrix-synapse-py3 (1.127.1) stable; urgency=medium

  * New Synapse release 1.127.1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 26 Mar 2025 21:07:31 +0000

matrix-synapse-py3 (1.127.0) stable; urgency=medium

  * New Synapse release 1.127.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Mar 2025 12:04:15 +0000

matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium

  * New Synapse release 1.127.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Mar 2025 13:30:05 +0000

matrix-synapse-py3 (1.126.0) stable; urgency=medium

  * New Synapse release 1.126.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Mar 2025 13:11:29 +0000

matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium

  * New Synapse release 1.126.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Mar 2025 15:45:05 +0000

matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium

  * New Synapse release 1.126.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Mar 2025 14:29:12 +0000

matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium

  * New Synapse release 1.126.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Mar 2025 13:11:51 +0000

matrix-synapse-py3 (1.125.0) stable; urgency=medium

  * New Synapse release 1.125.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Feb 2025 08:10:07 -0700

matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium

  * New synapse release 1.125.0rc1.
@@ -142,9 +142,6 @@ for port in 8080 8081 8082; do
  per_user:
    per_second: 1000
    burst_count: 1000
rc_delayed_event_mgmt:
  per_second: 1000
  burst_count: 1000
RC
)
echo "${ratelimiting}" >> "$port.config"
@@ -21,8 +21,9 @@
# in `poetry export` in the past.

ARG DEBIAN_VERSION=bookworm
ARG DEBIAN_VERSION_NUMERIC=12
ARG PYTHON_VERSION=3.12
ARG POETRY_VERSION=2.1.1
ARG POETRY_VERSION=1.8.3

###
### Stage 0: generate requirements.txt
@@ -56,7 +57,7 @@ ENV UV_LINK_MODE=copy
ARG POETRY_VERSION
RUN --mount=type=cache,target=/root/.cache/uv \
  if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
    uvx --with poetry-plugin-export==1.9.0 \
    uvx --with poetry-plugin-export==1.8.0 \
      poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
  else \
    touch /synapse/requirements.txt; \
@@ -116,9 +117,9 @@ RUN \
  fi

###
### Stage 2: runtime dependencies download for ARM64 and AMD64
## Stage 2: runtime dependencies download for ARM64 and AMD64
###
FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:${DEBIAN_VERSION} AS runtime-deps

# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
@@ -127,6 +128,15 @@ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloa
RUN dpkg --add-architecture arm64
RUN dpkg --add-architecture amd64

ARG PYTHON_VERSION
RUN uv python install \
  cpython-${PYTHON_VERSION}-linux-aarch64-gnu \
  cpython-${PYTHON_VERSION}-linux-x86_64_v2-gnu

RUN mkdir -p /install-amd64/usr/lib /install-arm64/usr/lib
RUN mv $(uv python dir)/cpython-*-linux-aarch64-gnu/ /install-arm64/usr/local
RUN mv $(uv python dir)/cpython-*-linux-x86_64_v2-gnu/ /install-amd64/usr/local

# Fetch the runtime dependencies debs for both architectures
# We do that by building a recursive list of packages we need to download with `apt-cache depends`
# and then downloading them with `apt-get download`.
@@ -135,19 +145,20 @@ RUN \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && \
  apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
    curl \
    gosu \
    zlib1g \
    libjpeg62-turbo \
    libpq5 \
    libwebp7 \
    xmlsec1 \
    libjemalloc2 \
    libicu \
    openssl \
    | grep '^\w' > /tmp/pkg-list && \
  for arch in arm64 amd64; do \
    mkdir -p /tmp/debs-${arch} && \
    cd /tmp/debs-${arch} && \
    apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
    apt-get download $(sed "s/$/:${arch}/" /tmp/pkg-list); \
  done

# Extract the debs for each architecture
@@ -167,7 +178,7 @@ RUN \
### Stage 3: runtime
###

FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
FROM gcr.io/distroless/base-nossl-debian${DEBIAN_VERSION_NUMERIC}:debug

ARG TARGETARCH

@@ -176,21 +187,15 @@ LABEL org.opencontainers.image.documentation='https://github.com/element-hq/syna
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'

# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
COPY --from=runtime-deps /install-${TARGETARCH} /
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

EXPOSE 8008/tcp 8009/tcp 8448/tcp

SHELL ["/busybox/sh", "-c"]
ENTRYPOINT ["/start.py"]

HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD curl -fSs http://localhost:8008/health || exit 1
    CMD wget --quiet --tries=1 --spider http://localhost:8008/health || exit 1
@@ -3,75 +3,75 @@
ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG DEBIAN_VERSION_NUMERIC=12
ARG PYTHON_VERSION=3.12

# first of all, we create a base image with dependencies which we can copy into the
# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.

FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
# This silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
ENV UV_LINK_MODE=copy

RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && \
  DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
    nginx-light
RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && \
  apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
    redis-server \
    nginx-light \
    mawk \
    | grep '^\w' > /tmp/pkg-list && \
  mkdir -p /tmp/debs && \
  cat /tmp/pkg-list && \
  cd /tmp/debs && \
  xargs apt-get download </tmp/pkg-list

RUN \
  # remove default page
  rm /etc/nginx/sites-enabled/default && \
  # have nginx log to stderr/out
  ln -sf /dev/stdout /var/log/nginx/access.log && \
  ln -sf /dev/stderr /var/log/nginx/error.log
# Extract the debs for each architecture
RUN \
  mkdir -p /install/var/lib/dpkg/status.d/ && \
  for deb in /tmp/debs/*.deb; do \
    package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
    echo "Extracting: ${package_name}"; \
    dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install/var/lib/dpkg/status.d/${package_name}; \
    dpkg --extract $deb /install; \
  done;

# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
RUN --mount=type=cache,target=/root/.cache/uv \
  uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2

RUN mkdir -p /uv/etc/supervisor/conf.d

# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, so that we get
# the expected version of libc).
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
RUN --mount=type=cache,target=/root/.cache/uv \
  uv pip install --prefix="/install/usr/local" supervisor~=4.2

# now build the final image, based on the regular Synapse docker image
FROM $FROM

# Copy over dependencies
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
COPY --from=deps_base /etc/nginx /etc/nginx
COPY --from=deps_base /var/log/nginx /var/log/nginx
# chown to allow non-root user to write to http-*-temp-path dirs
COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
# Copy over redis, nginx and supervisor
COPY --from=deps_base /install /
RUN mkdir -p /etc/supervisor/conf.d
RUN addgroup -S -g 33 www-data
RUN adduser -S -u 33 -G www-data -h /var/www -s /usr/sbin/nologin -H www-data
RUN chown www-data /var/lib/nginx

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log

# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

# Expose nginx listener port
EXPOSE 8080/tcp
# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/

# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]
# Expose nginx listener port
EXPOSE 8080/tcp

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD ["/healthcheck.sh"]
# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD /bin/sh /healthcheck.sh
@@ -9,23 +9,22 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm

FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base

FROM $FROM
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
# This trick only works because we use a postgres image based on the same debian
# version as Synapse's docker image (so the versions of the shared libraries
# match).
RUN echo "nogroup:x:65534:" >> /etc/group
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib /usr/lib
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
# initdb expects /bin/sh to be available
RUN ln -s /busybox/sh /bin/sh
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data

@@ -58,4 +57,4 @@ ENTRYPOINT ["/start_for_complement.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD ["/healthcheck.sh"]
    CMD /bin/sh /healthcheck.sh
@@ -1,16 +1,16 @@
#!/bin/bash
#!/bin/sh
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement

set -e

echo "Complement Synapse launcher"
echo "  Args: $*"
echo "  Args: $@"
echo "  Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"

function log {
    d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
    echo "$d $*"
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
    echo "$d $@"
}

# Set the server name of the homeserver
@@ -103,11 +103,12 @@ fi
# Note that both the key and certificate are in PEM format (not DER).

# First generate a configuration file to set up a Subject Alternative Name.
echo "\
cat > /conf/server.tls.conf <<EOF
.include /etc/ssl/openssl.cnf

[SAN]
subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
subjectAltName=DNS:${SERVER_NAME}
EOF

# Generate an RSA key
openssl genrsa -out /conf/server.tls.key 2048
@@ -122,12 +123,12 @@ openssl x509 -req -in /conf/server.tls.csr \
  -out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN

# Assert that we have a Subject Alternative Name in the certificate.
# (the test will exit with 1 here if there isn't a SAN in the certificate.)
[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
# (grep will exit with 1 here if there isn't a SAN in the certificate.)
openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:

export SYNAPSE_TLS_CERT=/conf/server.tls.crt
export SYNAPSE_TLS_KEY=/conf/server.tls.key

# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py "$@"
exec /configure_workers_and_start.py
@@ -94,10 +94,6 @@ rc_presence:
  per_second: 9999
  burst_count: 9999

rc_delayed_event_mgmt:
  per_second: 9999
  burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

allow_device_name_lookup_over_federation: true
@@ -143,9 +139,4 @@ caches:
sync_response_cache_duration: 0


# Complement assumes that it can publish to the room list by default.
room_list_publication_rules:
  - action: allow


{% include "shared-orig.yaml.j2" %}
@@ -1,6 +1,6 @@
#!/bin/sh
# This healthcheck script is designed to return OK when every
# host involved returns OK
{%- for healthcheck_url in healthcheck_urls %}
curl -fSs {{ healthcheck_url }} || exit 1
wget --quiet --tries=1 --spider {{ healthcheck_url }} || exit 1
{%- endfor %}
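
For illustration, a rendered script for a deployment with two workers might look like this (hypothetical URLs):

```sh
#!/bin/sh
wget --quiet --tries=1 --spider http://localhost:8080/health || exit 1
wget --quiet --tries=1 --spider http://localhost:8081/health || exit 1
```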
@@ -376,11 +376,9 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
    #
    # We use append mode in case the files have already been written to by something else
    # (for instance, as part of the instructions in a dockerfile).
    exists = os.path.isfile(dst)
    with open(dst, "a") as outfile:
        # In case the existing file doesn't end with a newline
        if exists:
            outfile.write("\n")
        outfile.write("\n")

        outfile.write(rendered)

@@ -606,7 +604,7 @@ def generate_base_homeserver_config() -> None:
    # start.py already does this for us, so just call that.
    # note that this script is copied into the image in the official, monolith dockerfile
    os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
    subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
    subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)


def parse_worker_types(
@@ -1000,7 +998,6 @@ def generate_worker_files(
        "/healthcheck.sh",
        healthcheck_urls=healthcheck_urls,
    )
    os.chmod("/healthcheck.sh", 0o755)

    # Ensure the logging directory exists
    log_dir = data_dir + "/logs"
@@ -1,4 +1,4 @@
#!/bin/bash
#!/bin/sh
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
@@ -10,9 +10,6 @@
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
# confusion due to the interleaving of the different processes.
prefixer() {
  mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
}
exec 1> >(prefixer)
exec 2> >(prefixer >&2)
exec 1> >(mawk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0; fflush() }' >&1)
exec 2> >(mawk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0; fflush() }' >&2)
exec "$@"
@@ -46,14 +46,6 @@ to any local media, and any locally-cached copies of remote media.

The media file itself (and any thumbnails) is not deleted from the server.

Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media
is quarantined, Synapse will:

- Quarantine any media with a matching hash that has already been uploaded.
- Quarantine any future media.
- Quarantine any existing cached remote media.
- Quarantine any future remote media.

## Quarantining media by ID

This API quarantines a single piece of local or remote media.
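
For example, a request against the quarantine-by-ID admin endpoint might look like this (hypothetical server name, media ID and token):

```sh
curl -X POST \
    -H 'Authorization: Bearer <admin-access-token>' \
    'https://synapse.example.com/_synapse/admin/v1/media/quarantine/example.com/abcdefg12345'
```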
@@ -414,32 +414,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)

## Suspend/Unsuspend Account

This API allows an admin to suspend/unsuspend an account. While an account is suspended, the user is
prohibited from sending invites, joining or knocking on rooms, sending messages, changing profile data, and redacting messages other than their own.

The API is:

```
PUT /_synapse/admin/v1/suspend/<user_id>
```

with a body of:

```json
{
    "suspend": true
}
```

To unsuspend a user, use the same endpoint with a body of:
```json
{
    "suspend": false
}
```
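
For example, suspending a user with `curl` might look like this (hypothetical homeserver name, user ID and token):

```sh
curl -X PUT \
    -H 'Authorization: Bearer <admin-access-token>' \
    -d '{"suspend": true}' \
    'https://synapse.example.com/_synapse/admin/v1/suspend/@alice:example.com'
```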

## Reset password

**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
@@ -1494,13 +1468,13 @@ The following JSON body parameter must be provided:
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
  the user is a member of will be redacted

_Added in Synapse 1.116.0._

The following JSON body parameters are optional:

- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search through when looking for ones that can be redacted (events are redacted newest to oldest) in each room; defaults to 1000 if not provided

_Added in Synapse 1.116.0._
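
Putting these parameters together, a request body might look like this (illustrative values):

```json
{
    "rooms": ["!roomid:example.com"],
    "reason": "spam",
    "limit": 500
}
```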

## Check the status of a redaction process

@@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency infor
* Whether the update requires a previous update to be complete.
* A rough ordering for which to complete updates.

A new background update needs to be added to the `background_updates` table:
A new background updates needs to be added to the `background_updates` table:

```sql
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
@@ -150,28 +150,6 @@ $ poetry shell
$ poetry install --extras all
```

If you want to go even further and remove the Poetry caches:

```shell
# Find your Poetry cache directory
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
$ poetry config cache-dir

# Remove packages from all cached repositories
$ poetry cache clear --all .

# Go completely nuclear and clear out everything Poetry cache related
# including the wheel artifacts which is not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to incorporate the correct dynamically linked library locations, otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```


## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.
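For example, to run Synapse's test suite inside the virtualenv:

```shell
poetry run trial tests
```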

@@ -209,7 +187,7 @@ useful.
## ...add a new dependency?

Either:
- manually update `pyproject.toml`; then `poetry lock`; or else
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
  `--extras` and `--optional` flags in particular.

@@ -224,12 +202,12 @@ poetry remove packagename
```

ought to do the trick. Alternatively, manually update `pyproject.toml` and
`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock`
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.

## ...update the version range for an existing dependency?

Best done by manually editing `pyproject.toml`, then `poetry lock`.
Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.

## ...update a dependency in the locked environment?
@@ -255,7 +233,7 @@ poetry add packagename==1.2.3

# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
poetry lock
poetry lock --no-update
```

Either way, include the updated `poetry.lock` file in your commit.
@@ -23,7 +23,6 @@ such as [Github][github-idp].
[auth0]: https://auth0.com/
[authentik]: https://goauthentik.io/
[lemonldap]: https://lemonldap-ng.org/
[pocket-id]: https://pocket-id.org/
[okta]: https://www.okta.com/
[dex-idp]: https://github.com/dexidp/dex
[keycloak-idp]: https://www.keycloak.org/docs/latest/server_admin/#sso-protocols
@@ -625,32 +624,6 @@ oidc_providers:

Note that the fields `client_id` and `client_secret` are taken from the CURL response above.

### Pocket ID

[Pocket ID][pocket-id] is a simple OIDC provider that allows users to authenticate with their passkeys.
1. Go to `OIDC Clients`
2. Click on `Add OIDC Client`
3. Add a name, for example `Synapse`
4. Add `https://auth.example.org/_synapse/client/oidc/callback` to `Callback URLs`  # Replace `auth.example.org` with your domain
5. Click on `Save`
6. Note down your `Client ID` and `Client secret`, these will be used later

Synapse config:

```yaml
oidc_providers:
  - idp_id: pocket_id
    idp_name: Pocket ID
    issuer: "https://auth.example.org/" # Replace with your domain
    client_id: "your-client-id" # Replace with the "Client ID" you noted down before
    client_secret: "your-client-secret" # Replace with the "Client secret" you noted down before
    scopes: ["openid", "profile"]
    user_mapping_provider:
      config:
        localpart_template: "{{ user.preferred_username }}"
        display_name_template: "{{ user.name }}"
```

### Shibboleth with OIDC Plugin

[Shibboleth](https://www.shibboleth.net/) is an open-standard IdP solution widely used by universities.
@@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
to turn that into a displayname when creating a Matrix user for this individual.
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
variations. As each Synapse configuration may want something different, this is
where SSO mapping providers come into play.
where SAML mapping providers come into play.

SSO mapping providers are currently supported for OpenID and SAML SSO
configurations. Please see the details below for how to implement your own.
@@ -117,44 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.126.0

## Room list publication rules change

The default [`room_list_publication_rules`] setting was changed to disallow
anyone (except server admins) from publishing to the room list by default.

This is in line with Synapse's policy of locking down, by default, features
that can be abused without moderation.

To keep the previous behavior of allowing publication by default, add the
following to the config:

```yaml
room_list_publication_rules:
  - "action": "allow"
```

[`room_list_publication_rules`]: usage/configuration/config_documentation.md#room_list_publication_rules

## Change of signing key expiry date for the Debian/Ubuntu package repository

Administrators using the Debian/Ubuntu packages from `packages.matrix.org` should
be aware that we have recently updated the expiry date on the repository's GPG
signing key, and this change must be imported into your keyring.

If you have the `matrix-org-archive-keyring` package installed and it updates before the current key expires, this should
happen automatically.

Otherwise, if you see an error similar to `The following signatures were invalid: EXPKEYSIG F473DD4473365DE1`, you
will need to get a fresh copy of the keys. You can do so with:

```sh
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
```

The old version of the key will expire on `2025-03-15`.

# Upgrading to v1.122.0

## Dropping support for PostgreSQL 11 and 12
@@ -160,7 +160,7 @@ Using the following curl command:
```console
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
```
`<access-token>` - can be obtained in Element by opening All settings, clicking Help & About; at the bottom is:
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
Access Token:\<click to reveal\>

`<room-alias>` - the room alias, e.g. `#my_room:matrix.org`; this possibly needs to be URL encoded also, for example `%23my_room%3Amatrix.org`
@@ -1947,29 +1947,6 @@ rc_presence:
  burst_count: 1
```
---
### `rc_delayed_event_mgmt`

Ratelimiting settings for delayed event management.

This is a ratelimiting option that ratelimits
attempts to restart, cancel, or view delayed events
based on the sending client's account and device ID.
It defaults to: `per_second: 1`, `burst_count: 5`.

Attempts to create or send delayed events are ratelimited not by this setting, but by `rc_message`.

Setting this to a high value allows clients to make delayed event management requests often
(such as repeatedly restarting a delayed event with a short timeout,
or restarting several different delayed events all at once)
without the risk of being ratelimited.

Example configuration:
```yaml
rc_delayed_event_mgmt:
  per_second: 2
  burst_count: 20
```
---
### `federation_rr_transactions_per_room_per_second`

Sets outgoing federation transaction frequency for sending read-receipts,
@@ -3238,22 +3215,6 @@ Example configuration:
```yaml
form_secret: <PRIVATE STRING>
```
---
### `form_secret_path`

An alternative to [`form_secret`](#form_secret):
allows the secret to be specified in an external file.

The file should be a plain text file, containing only the secret.
Synapse reads the secret from the given file once at startup.

Example configuration:
```yaml
form_secret_path: /path/to/secrets/file
```

_Added in Synapse 1.126.0._

---
## Signing Keys
Config options relating to signing keys
@@ -3618,24 +3579,6 @@ Options for each entry include:
  to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
  to force enable PKCE or `never` to force disable PKCE.

* `id_token_signing_alg_values_supported`: List of the JWS signing algorithms (`alg`
  values) that are supported for signing the `id_token`.

  This is *not* required if `discovery` is disabled. We default to supporting `RS256` in
  the downstream usage if no algorithms are configured here or in the discovery
  document.

  According to the spec, the algorithm `"RS256"` MUST be included. The strictly rigid
  approach would be to reject this provider as non-compliant if it's not included, but we
  simply allow whatever is configured and see what happens (you're the one who configured
  the value and is cooperating with the identity provider).

  The `alg` value `"none"` MAY be supported but can only be used if the Authorization
  Endpoint does not include `id_token` in the `response_type` (ex.
  `/authorize?response_type=code` where `none` can apply,
  `/authorize?response_type=code%20id_token` where `none` can't apply) (such as when
  using the Authorization Code Flow).

* `scopes`: list of scopes to request. This should normally include the "openid"
  scope. Defaults to `["openid"]`.

@@ -3662,19 +3605,9 @@ Options for each entry include:
  not included in `scopes`. Set to `userinfo_endpoint` to always use the
  userinfo endpoint.

* `redirect_uri`: An optional string, that if set will override the `redirect_uri`
  parameter sent in the requests to the authorization and token endpoints.
  Useful if you want to redirect the client to another endpoint as part of the
  OIDC login. Be aware that the client must then call Synapse's OIDC callback
  URL (`<public_baseurl>/_synapse/client/oidc/callback`) manually afterwards.
  Must be a valid URL including scheme and path.

* `additional_authorization_parameters`: String to string dictionary that will be passed as
  additional parameters to the authorization grant URL.

* `passthrough_authorization_parameters`: List of parameters that will be passed through from the redirect endpoint
  to the authorization grant URL.

* `allow_existing_users`: set to true to allow a user logging in via OIDC to
  match a pre-existing account instead of failing. This could be used if
  switching from password logins to OIDC. Defaults to false.
@@ -3801,7 +3734,6 @@ oidc_providers:
    jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
    additional_authorization_parameters:
      acr_values: 2fa
    passthrough_authorization_parameters: ["login_hint"]
    skip_verification: true
    enable_registration: true
    user_mapping_provider:
@@ -4018,7 +3950,7 @@ This option has a number of sub-options. They are as follows:
* `include_content`: Clients requesting push notifications can either have the body of
  the message sent in the notification poke along with other details
  like the sender, or just the event ID and room ID (`event_id_only`).
  If clients choose to have the body sent, this option controls whether the
  If clients choose the to have the body sent, this option controls whether the
  notification request includes the content of the event (other details
  like the sender are still included). If `event_id_only` is enabled, it
  has no effect.
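
  For example, to have push payloads omit message content (a minimal illustration of this sub-option):

  ```yaml
  push:
    include_content: false
  ```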

@@ -4295,8 +4227,8 @@ unwanted entries from being published in the public room list.

The format of this option is the same as that for
[`alias_creation_rules`](#alias_creation_rules): an optional list of 0 or more
rules. By default, no list is provided, meaning that no one may publish to the
room list (except server admins).
rules. By default, no list is provided, meaning that all rooms may be
published to the room list.

Otherwise, requests to publish a room are matched against each rule in order.
The first rule that matches decides if the request is allowed or denied. If no
@@ -4322,10 +4254,6 @@ Note that the patterns match against fully qualified IDs, e.g. against
of `alice`, `room` and `abcedgghijk`.

_Changed in Synapse 1.126.0: The default was changed to deny publishing to the
room list._

Example configuration:

```yaml
@@ -4538,22 +4466,6 @@ Example configuration:
```yaml
worker_replication_secret: "secret_secret"
```
---
### `worker_replication_secret_path`

An alternative to [`worker_replication_secret`](#worker_replication_secret):
allows the secret to be specified in an external file.

The file should be a plain text file, containing only the secret.
Synapse reads the secret from the given file once at startup.

Example configuration:
```yaml
worker_replication_secret_path: /path/to/secrets/file
```

_Added in Synapse 1.126.0._

---
### `start_pushers`

@@ -249,14 +249,13 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
    ^/_matrix/client/(r0|v3|unstable)/capabilities$
    ^/_matrix/client/(r0|v3|unstable)/notifications$
    ^/_synapse/admin/v1/rooms/

    # Encryption requests
    ^/_matrix/client/(r0|v3|unstable)/keys/query$
    ^/_matrix/client/(r0|v3|unstable)/keys/changes$
    ^/_matrix/client/(r0|v3|unstable)/keys/claim$
    ^/_matrix/client/(r0|v3|unstable)/room_keys/
    ^/_matrix/client/(r0|v3|unstable)/keys/upload$
    ^/_matrix/client/(r0|v3|unstable)/keys/upload/

    # Registration/login requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -281,7 +280,6 @@ Additionally, the following REST endpoints can be handled for GET requests:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
    ^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
    ^/_matrix/client/(api/v1|r0|v3|unstable)/devices/

    # Account data requests
    ^/_matrix/client/(r0|v3|unstable)/.*/tags
poetry.lock (359 changed lines, generated)
File diff suppressed because it is too large
@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.128.0"
version = "1.125.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -390,7 +390,7 @@ skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aa
#
# We temporarily pin Rust to 1.82.0 to work around
# https://github.com/element-hq/synapse/issues/17988
before-all = "sh .ci/before_build_wheel.sh"
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }

# For some reason if we don't manually clean the build directory we
@@ -30,7 +30,7 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.23.5", features = [
pyo3 = { version = "0.23.2", features = [
    "macros",
    "anyhow",
    "abi3",
@@ -47,7 +47,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
    headers.typed_insert(AccessControlAllowOrigin::ANY);
    headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
    headers.typed_insert(Pragma::no_cache());
    headers.typed_insert(CacheControl::new().with_no_store().with_no_transform());
    headers.typed_insert(CacheControl::new().with_no_store());
    headers.typed_insert(session.etag());
    headers.typed_insert(session.expires());
    headers.typed_insert(session.last_modified());
@@ -192,12 +192,10 @@ impl RendezvousHandler {
            "url": uri,
        })
        .to_string();
        let length = response.len() as _;

        let mut response = Response::new(response.as_bytes());
        *response.status_mut() = StatusCode::CREATED;
        response.headers_mut().typed_insert(ContentType::json());
        response.headers_mut().typed_insert(ContentLength(length));
        prepare_headers(response.headers_mut(), &session);
        http_response_to_twisted(twisted_request, response)?;

@@ -301,7 +299,6 @@ impl RendezvousHandler {
        // proxy/cache setup which strips the ETag header if there is no Content-Type set.
        // Specifically, we noticed this behaviour when placing Synapse behind Cloudflare.
        response.headers_mut().typed_insert(ContentType::text());
        response.headers_mut().typed_insert(ContentLength(0));

        http_response_to_twisted(twisted_request, response)?;

@@ -319,7 +316,6 @@ impl RendezvousHandler {
        response
            .headers_mut()
            .typed_insert(AccessControlAllowOrigin::ANY);
        response.headers_mut().typed_insert(ContentLength(0));
        http_response_to_twisted(twisted_request, response)?;

        Ok(())
@@ -592,7 +592,7 @@ def _wait_for_actions(gh_token: Optional[str]) -> None:
        if all(
            workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
        ):
            success = all(
            success = (
                workflow["status"] == "completed" for workflow in resp["workflow_runs"]
            )
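            # Wrapping the generator in all(...) matters: a bare parenthesised
            # generator expression is always truthy, so the statuses would never
            # actually be checked.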
            if success:
@@ -128,7 +128,6 @@ BOOLEAN_COLUMNS = {
    "pushers": ["enabled"],
    "redactions": ["have_censored"],
    "remote_media_cache": ["authenticated"],
    "room_memberships": ["participant"],
    "room_stats_state": ["is_federatable"],
    "rooms": ["is_public", "has_auth_chain_index"],
    "sliding_sync_joined_rooms": ["is_encrypted"],
@@ -192,11 +191,6 @@ APPEND_ONLY_TABLES = [


IGNORED_TABLES = {
    # Porting the auto generated sequence in this table is non-trivial.
    # None of the entries in this list are mandatory for Synapse to keep working.
    # If state group disk space is an issue after the port, the
    # `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
    "state_groups_pending_deletion",
    # We don't port these tables, as they're a faff and we can regenerate
    # them anyway.
    "user_directory",
@@ -222,15 +216,6 @@ IGNORED_TABLES = {
}


# These background updates will not be applied upon creation of the postgres database.
IGNORED_BACKGROUND_UPDATES = {
    # Reapplying this background update to the postgres database is unnecessary after
    # already having waited for the SQLite database to complete all running background
    # updates.
    "mark_unreferenced_state_groups_for_deletion_bg_update",
}


# Error returned by the run function. Used at the top-level part of the script to
# handle errors and return codes.
end_error: Optional[str] = None
@@ -702,20 +687,6 @@ class Porter:
        # 0 means off. 1 means full. 2 means incremental.
        return autovacuum_setting != 0

    async def remove_ignored_background_updates_from_database(self) -> None:
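        # Deletes the rows for the background updates listed in
        # IGNORED_BACKGROUND_UPDATES, so they are not re-run on the Postgres side.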
        def _remove_delete_unreferenced_state_groups_bg_updates(
            txn: LoggingTransaction,
        ) -> None:
            txn.execute(
                "DELETE FROM background_updates WHERE update_name = ANY(?)",
                (list(IGNORED_BACKGROUND_UPDATES),),
            )

        await self.postgres_store.db_pool.runInteraction(
            "remove_delete_unreferenced_state_groups_bg_updates",
            _remove_delete_unreferenced_state_groups_bg_updates,
        )

    async def run(self) -> None:
        """Ports the SQLite database to a PostgreSQL database.

@@ -761,8 +732,6 @@ class Porter:
            self.hs_config.database.get_single_database()
        )

        await self.remove_ignored_background_updates_from_database()

        await self.run_background_updates_on_postgres()

        self.progress.set_state("Creating port tables")
@@ -19,7 +19,6 @@
|
||||
#
|
||||
#
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
@@ -45,11 +44,9 @@ from synapse.api.errors import (
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging.context import make_deferred_yieldable
|
||||
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
|
||||
from synapse.types import Requester, UserID, create_requester
|
||||
from synapse.util import json_decoder
|
||||
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
|
||||
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.rest.admin.experimental_features import ExperimentalFeature
|
||||
@@ -79,61 +76,6 @@ def scope_to_list(scope: str) -> List[str]:
|
||||
return scope.strip().split(" ")
|
||||
|
||||
|
||||
@dataclass
|
||||
class IntrospectionResult:
|
||||
_inner: IntrospectionToken
|
||||
|
||||
# when we retrieved this token,
|
||||
# in milliseconds since the Unix epoch
|
||||
retrieved_at_ms: int
|
||||
|
||||
def is_active(self, now_ms: int) -> bool:
|
||||
if not self._inner.get("active"):
|
||||
return False
|
||||
|
||||
expires_in = self._inner.get("expires_in")
|
||||
if expires_in is None:
|
||||
return True
|
||||
if not isinstance(expires_in, int):
|
||||
raise InvalidClientTokenError("token `expires_in` is not an int")
|
||||
|
||||
absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
|
||||
return now_ms < absolute_expiry_ms
|
||||
|
||||
def get_scope_list(self) -> List[str]:
|
||||
value = self._inner.get("scope")
|
||||
if not isinstance(value, str):
|
||||
return []
|
||||
return scope_to_list(value)
|
||||
|
||||
def get_sub(self) -> Optional[str]:
|
||||
value = self._inner.get("sub")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_username(self) -> Optional[str]:
|
||||
value = self._inner.get("username")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_name(self) -> Optional[str]:
|
||||
value = self._inner.get("name")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_device_id(self) -> Optional[str]:
|
||||
value = self._inner.get("device_id")
|
||||
if value is not None and not isinstance(value, str):
|
||||
raise AuthError(
|
||||
500,
|
||||
"Invalid device ID in introspection result",
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
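Note: the `is_active` check being removed here anchors the relative `expires_in` (seconds) to the time the introspection response was fetched, not to "now". A minimal standalone sketch of the same arithmetic (names here are hypothetical, not part of Synapse):

```python
from typing import Optional

# Sketch of the expiry arithmetic in IntrospectionResult.is_active: a token
# introspected at retrieved_at_ms with expires_in seconds remaining is valid
# strictly before retrieved_at_ms + expires_in * 1000.
def is_token_active(
    active: bool, expires_in: Optional[int], retrieved_at_ms: int, now_ms: int
) -> bool:
    if not active:
        return False
    if expires_in is None:
        # No expiry reported: rely on the `active` flag alone.
        return True
    return now_ms < retrieved_at_ms + expires_in * 1000

# A token introspected at t=1,000,000 ms with expires_in=120 s lapses at t=1,120,000 ms.
assert is_token_active(True, 120, 1_000_000, 1_119_999)
assert not is_token_active(True, 120, 1_000_000, 1_120_000)
```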
class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
"""An implementation of the private_key_jwt client auth method that includes a kid header.

@@ -178,34 +120,6 @@ class MSC3861DelegatedAuth(BaseAuth):
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

# # Token Introspection Cache
# This remembers what users/devices are represented by which access tokens,
# in order to reduce overall system load:
# - on Synapse (as requests are relatively expensive)
# - on the network
# - on MAS
#
# Since there is no invalidation mechanism currently,
# the entries expire after 2 minutes.
# This does mean tokens can be treated as valid by Synapse
# for longer than reality.
#
# Ideally, tokens should logically be invalidated in the following circumstances:
# - If a session logout happens.
# In this case, MAS will delete the device within Synapse
# anyway and this is good enough as an invalidation.
# - If the client refreshes their token in MAS.
# In this case, the device still exists and it's not the end of the world for
# the old access token to continue working for a short time.
self._introspection_cache: ResponseCache[str] = ResponseCache(
self._clock,
"token_introspection",
timeout_ms=120_000,
# don't log because the keys are access tokens
enable_logging=False,
)

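The comment block above spells out the trade-off: a 2-minute TTL bounds how stale a cached introspection result can get, at the cost of honouring revoked tokens for up to that window. A rough sketch of an equivalent TTL cache, assuming a monotonic clock (illustrative only, not Synapse's ResponseCache):

```python
import time
from typing import Any, Dict, Tuple

class TtlCache:
    """Illustrative TTL cache: an entry is served until it is timeout_ms old,
    so a revoked token can look valid for at most that long."""

    def __init__(self, timeout_ms: int) -> None:
        self._timeout_ms = timeout_ms
        self._entries: Dict[str, Tuple[float, Any]] = {}

    def _now_ms(self) -> float:
        return time.monotonic() * 1000

    def get(self, key: str) -> Any:
        entry = self._entries.get(key)
        if entry is None:
            return None
        stored_at_ms, value = entry
        if self._now_ms() - stored_at_ms > self._timeout_ms:
            del self._entries[key]  # expired: force a fresh lookup
            return None
        return value

    def set(self, key: str, value: Any) -> None:
        self._entries[key] = (self._now_ms(), value)
```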
self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
self._load_metadata
@@ -279,9 +193,7 @@ class MSC3861DelegatedAuth(BaseAuth):
metadata = await self._issuer_metadata.get()
return metadata.get("introspection_endpoint")

async def _introspect_token(
self, token: str, cache_context: ResponseCacheContext[str]
) -> IntrospectionResult:
async def _introspect_token(self, token: str) -> IntrospectionToken:
"""
Send a token to the introspection endpoint and returns the introspection response

@@ -297,16 +209,11 @@ class MSC3861DelegatedAuth(BaseAuth):
Returns:
The introspection response
"""
# By default, we shouldn't cache the result unless we know it's valid
cache_context.should_cache = False
introspection_endpoint = await self._introspection_endpoint()
raw_headers: Dict[str, str] = {
"Content-Type": "application/x-www-form-urlencoded",
"User-Agent": str(self._http_client.user_agent, "utf-8"),
"Accept": "application/json",
# Tell MAS that we support reading the device ID as an explicit
# value, not encoded in the scope. This is supported by MAS 0.15+
"X-MAS-Supports-Device-Id": "1",
}

args = {"token": token, "token_type_hint": "access_token"}
@@ -356,11 +263,7 @@ class MSC3861DelegatedAuth(BaseAuth):
"The introspection endpoint returned an invalid JSON response."
)

# We had a valid response, so we can cache it
cache_context.should_cache = True
return IntrospectionResult(
IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec()
)
return IntrospectionToken(**resp)

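Note the `should_cache` pattern on the removed side of this hunk: caching starts disabled and is only enabled once the endpoint returns valid JSON, so network errors and malformed responses are never cached. Sketched in isolation with hypothetical names:

```python
async def introspect_cached(cache, token, introspect):
    """Hypothetical helper mirroring the should_cache pattern above: only a
    successfully parsed response is stored, so transient failures are retried
    on the next request instead of being cached for the full TTL."""
    cached = cache.get(token)
    if cached is not None:
        return cached
    result = await introspect(token)  # raises on HTTP/JSON errors: nothing cached
    cache.set(token, result)
    return result
```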
async def is_server_admin(self, requester: Requester) -> bool:
return "urn:synapse:admin:*" in requester.scope
@@ -371,55 +274,6 @@ class MSC3861DelegatedAuth(BaseAuth):
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
"""Get a registered user's ID.

Args:
request: An HTTP request with an access_token query parameter.
allow_guest: If False, will raise an AuthError if the user making the
request is a guest.
allow_expired: If True, allow the request through even if the account
is expired, or session token lifetime has ended. Note that
/login will deliver access tokens regardless of expiration.

Returns:
Resolves to the requester
Raises:
InvalidClientCredentialsError if no user by that token exists or the token
is invalid.
AuthError if access is denied for the user in the access token
"""
parent_span = active_span()
with start_active_span("get_user_by_req"):
requester = await self._wrapped_get_user_by_req(
request, allow_guest, allow_expired, allow_locked
)

if parent_span:
if requester.authenticated_entity in self._force_tracing_for_users:
# request tracing is enabled for this user, so we need to force it
# tracing on for the parent span (which will be the servlet span).
#
# It's too late for the get_user_by_req span to inherit the setting,
# so we also force it on for that.
force_tracing()
force_tracing(parent_span)
parent_span.set_tag(
"authenticated_entity", requester.authenticated_entity
)
parent_span.set_tag("user_id", requester.user.to_string())
if requester.device_id is not None:
parent_span.set_tag("device_id", requester.device_id)
if requester.app_service is not None:
parent_span.set_tag("appservice_id", requester.app_service.id)
return requester

async def _wrapped_get_user_by_req(
self,
request: SynapseRequest,
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
access_token = self.get_access_token_from_request(request)

@@ -487,9 +341,7 @@ class MSC3861DelegatedAuth(BaseAuth):
)

try:
introspection_result = await self._introspection_cache.wrap(
token, self._introspect_token, token, cache_context=True
)
introspection_result = await self._introspect_token(token)
except Exception:
logger.exception("Failed to introspect token")
raise SynapseError(503, "Unable to introspect the access token")
@@ -498,11 +350,11 @@ class MSC3861DelegatedAuth(BaseAuth):

# TODO: introspection verification should be more extensive, especially:
# - verify the audience
if not introspection_result.is_active(self._clock.time_msec()):
if not introspection_result.get("active"):
raise InvalidClientTokenError("Token is not active")

# Let's look at the scope
scope: List[str] = introspection_result.get_scope_list()
scope: List[str] = scope_to_list(introspection_result.get("scope", ""))

# Determine type of user based on presence of particular scopes
has_user_scope = SCOPE_MATRIX_API in scope
@@ -512,7 +364,7 @@ class MSC3861DelegatedAuth(BaseAuth):
raise InvalidClientTokenError("No scope in token granting user rights")

# Match via the sub claim
sub: Optional[str] = introspection_result.get_sub()
sub: Optional[str] = introspection_result.get("sub")
if sub is None:
raise InvalidClientTokenError(
"Invalid sub claim in the introspection result"
@@ -526,7 +378,7 @@ class MSC3861DelegatedAuth(BaseAuth):
# or the external_id was never recorded

# TODO: claim mapping should be configurable
username: Optional[str] = introspection_result.get_username()
username: Optional[str] = introspection_result.get("username")
if username is None or not isinstance(username, str):
raise AuthError(
500,
@@ -544,7 +396,7 @@ class MSC3861DelegatedAuth(BaseAuth):

# TODO: claim mapping should be configurable
# If present, use the name claim as the displayname
name: Optional[str] = introspection_result.get_name()
name: Optional[str] = introspection_result.get("name")

await self.store.register_user(
user_id=user_id.to_string(), create_profile_with_displayname=name
@@ -557,34 +409,29 @@ class MSC3861DelegatedAuth(BaseAuth):
else:
user_id = UserID.from_string(user_id_str)

# MAS 0.15+ will give us the device ID as an explicit value for compatibility sessions
# If present, we get it from here, if not we get it in thee scope
device_id = introspection_result.get_device_id()
if device_id is None:
# Find device_ids in scope
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids = [
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
for tok in scope
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]
# Find device_ids in scope
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids = [
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
for tok in scope
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]

if len(device_ids) > 1:
raise AuthError(
500,
"Multiple device IDs in scope",
)

device_id = device_ids[0] if device_ids else None
if len(device_ids) > 1:
raise AuthError(
500,
"Multiple device IDs in scope",
)

device_id = device_ids[0] if device_ids else None
if device_id is not None:
# Sanity check the device_id
if len(device_id) > 255 or len(device_id) < 1:
raise AuthError(
500,
"Invalid device ID in introspection result",
"Invalid device ID in scope",
)

# Create the device on the fly if it does not exist

@@ -29,13 +29,8 @@ from typing import Final
# the max size of a (canonical-json-encoded) event
MAX_PDU_SIZE = 65536

# Max/min size of ints in canonical JSON
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT

# the "depth" field on events is limited to the same as what
# canonicaljson accepts
MAX_DEPTH = CANONICALJSON_MAX_INT
# the "depth" field on events is limited to 2**63 - 1
MAX_DEPTH = 2**63 - 1

# the maximum length for a room alias is 255 characters
MAX_ALIAS_LENGTH = 255

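For context on the `(2**53) - 1` bound above: it is the largest integer that survives an IEEE 754 double round-trip, which is what canonical JSON implementations that parse numbers as floats can represent exactly. A quick check:

```python
# (2**53) - 1 is exactly representable as a double; one step past 2**53 rounds.
CANONICALJSON_MAX_INT = (2**53) - 1

assert int(float(CANONICALJSON_MAX_INT)) == CANONICALJSON_MAX_INT
assert int(float(2**53 + 1)) != 2**53 + 1  # rounds down to 2**53
```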
@@ -20,7 +20,8 @@
#
#

from typing import Dict, Hashable, Optional, Tuple
from collections import OrderedDict
from typing import Hashable, Optional, Tuple

from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RatelimitSettings
@@ -79,14 +80,12 @@ class Ratelimiter:
self.store = store
self._limiter_name = cfg.key

# A dictionary representing the token buckets tracked by this rate
# An ordered dictionary representing the token buckets tracked by this rate
# limiter. Each entry maps a key of arbitrary type to a tuple representing:
# * The number of tokens currently in the bucket,
# * The time point when the bucket was last completely empty, and
# * The rate_hz (leak rate) of this particular bucket.
self.actions: Dict[Hashable, Tuple[float, float, float]] = {}

self.clock.looping_call(self._prune_message_counts, 60 * 1000)
self.actions: OrderedDict[Hashable, Tuple[float, float, float]] = OrderedDict()

def _get_key(
self, requester: Optional[Requester], key: Optional[Hashable]
@@ -170,6 +169,9 @@ class Ratelimiter:
rate_hz = rate_hz if rate_hz is not None else self.rate_hz
burst_count = burst_count if burst_count is not None else self.burst_count

# Remove any expired entries
self._prune_message_counts(time_now_s)

# Check if there is an existing count entry for this key
action_count, time_start, _ = self._get_action_counts(key, time_now_s)

@@ -244,12 +246,13 @@ class Ratelimiter:
action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s)
self.actions[key] = (action_count + n_actions, time_start, rate_hz)

def _prune_message_counts(self) -> None:
def _prune_message_counts(self, time_now_s: float) -> None:
"""Remove message count entries that have not exceeded their defined
rate_hz limit
"""
time_now_s = self.clock.time()

Args:
time_now_s: The current time
"""
# We create a copy of the key list here as the dictionary is modified during
# the loop
for key in list(self.actions.keys()):

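The refactor above threads `time_now_s` into `_prune_message_counts` instead of reading the clock inside it, and replaces the periodic `looping_call` with pruning on each check. A reduced sketch of leaky-bucket pruning under that design (illustrative, not the Ratelimiter API):

```python
from typing import Dict, Hashable, Tuple

# key -> (tokens_in_bucket, time_bucket_last_empty_s, rate_hz), matching the
# tuple documented above; everything else is an illustrative reduction.
actions: Dict[Hashable, Tuple[float, float, float]] = {}

def prune(time_now_s: float) -> None:
    """Drop buckets that have fully leaked: once elapsed * rate_hz catches up
    with the stored token count, the entry no longer constrains anything."""
    for key in list(actions):  # copy the keys: we mutate while iterating
        tokens, time_start, rate_hz = actions[key]
        if tokens - (time_now_s - time_start) * rate_hz <= 0:
            del actions[key]

actions["user"] = (5.0, 0.0, 1.0)  # 5 tokens, leaking 1 token/s since t=0
prune(time_now_s=4.0)
assert "user" in actions      # still a token outstanding
prune(time_now_s=5.0)
assert "user" not in actions  # fully drained, pruned
```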
@@ -21,7 +21,7 @@
#
import logging
import sys
from typing import Dict, List, cast
from typing import Dict, List

from twisted.web.resource import Resource

@@ -51,8 +51,8 @@ from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource, admin
from synapse.rest.admin import AdminRestResource, register_servlets_for_media_repo
from synapse.rest import ClientRestResource
from synapse.rest.admin import register_servlets_for_media_repo
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
@@ -190,11 +190,7 @@ class GenericWorkerServer(HomeServer):

resources.update(build_synapse_client_resource_tree(self))
resources["/.well-known"] = well_known_resource(self)
admin_res = resources.get("/_synapse/admin")
if admin_res is not None:
admin.register_servlets(self, cast(JsonResource, admin_res))
else:
resources["/_synapse/admin"] = AdminRestResource(self)

elif name == "federation":
resources[FEDERATION_PREFIX] = TransportLayerServer(self)
elif name == "media":
@@ -203,21 +199,15 @@ class GenericWorkerServer(HomeServer):

# We need to serve the admin servlets for media on the
# worker.
admin_res = resources.get("/_synapse/admin")
if admin_res is not None:
register_servlets_for_media_repo(
self, cast(JsonResource, admin_res)
)
else:
admin_resource = JsonResource(self, canonical_json=False)
register_servlets_for_media_repo(self, admin_resource)
resources["/_synapse/admin"] = admin_resource
admin_resource = JsonResource(self, canonical_json=False)
register_servlets_for_media_repo(self, admin_resource)

resources.update(
{
MEDIA_R0_PREFIX: media_repo,
MEDIA_V3_PREFIX: media_repo,
LEGACY_MEDIA_PREFIX: media_repo,
"/_synapse/admin": admin_resource,
}
)


@@ -589,14 +589,6 @@ class RootConfig:
" Defaults to the directory containing the last config file",
)

config_parser.add_argument(
"--no-secrets-in-config",
dest="secrets_in_config",
action="store_false",
default=True,
help="Reject config options that expect an in-line secret as value.",
)

cls.invoke_all_static("add_arguments", config_parser)

@classmethod
@@ -634,10 +626,7 @@ class RootConfig:

config_dict = read_config_files(config_files)
obj.parse_config_dict(
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=config_args.secrets_in_config,
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)

obj.invoke_all("read_arguments", config_args)
@@ -664,13 +653,6 @@ class RootConfig:
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
parser.add_argument(
"--no-secrets-in-config",
dest="secrets_in_config",
action="store_false",
default=True,
help="Reject config options that expect an in-line secret as value.",
)

# we nest the mutually-exclusive group inside another group so that the help
# text shows them in their own group.
@@ -839,21 +821,14 @@ class RootConfig:
return None

obj.parse_config_dict(
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=config_args.secrets_in_config,
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)

return obj

def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = True,
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
) -> None:
"""Read the information from the config dict into this Config object.

@@ -871,7 +846,6 @@ class RootConfig:
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=allow_secrets_in_config,
)

def generate_missing_files(

@@ -132,11 +132,7 @@ class RootConfig:
@classmethod
def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ...
def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = ...,
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
) -> None: ...
def generate_config(
self,

@@ -29,15 +29,8 @@ from ._base import Config, ConfigError
class CaptchaConfig(Config):
section = "captcha"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
recaptcha_private_key = config.get("recaptcha_private_key")
if recaptcha_private_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("recaptcha_private_key",),
)
if recaptcha_private_key is not None and not isinstance(
recaptcha_private_key, str
):
@@ -45,11 +38,6 @@ class CaptchaConfig(Config):
self.recaptcha_private_key = recaptcha_private_key

recaptcha_public_key = config.get("recaptcha_public_key")
if recaptcha_public_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("recaptcha_public_key",),
)
if recaptcha_public_key is not None and not isinstance(
recaptcha_public_key, str
):

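Every `read_config` in this diff repeats the same guard: when the operator passed `--no-secrets-in-config`, any option carrying an in-line secret is rejected with a `ConfigError` naming the offending key path. Condensed to a sketch (the `ConfigError` here is a stand-in, and the helper name is hypothetical):

```python
class ConfigError(Exception):  # stand-in for synapse.config._base.ConfigError
    pass

def reject_inline_secret(value, key_path, allow_secrets_in_config):
    """Hypothetical condensation of the recurring guard: an in-line secret is
    rejected when secrets in config have been disabled."""
    if value and not allow_secrets_in_config:
        raise ConfigError(
            "Config options that expect an in-line secret as value are disabled",
            key_path,
        )

# An absent secret passes regardless of the flag.
reject_inline_secret(None, ("recaptcha_private_key",), allow_secrets_in_config=False)
```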
@@ -250,9 +250,7 @@ class MSC3861:
)
return self._admin_token

def check_config_conflicts(
self, root: RootConfig, allow_secrets_in_config: bool
) -> None:
def check_config_conflicts(self, root: RootConfig) -> None:
"""Checks for any configuration conflicts with other parts of Synapse.

Raises:
@@ -262,24 +260,6 @@ class MSC3861:
if not self.enabled:
return

if self._client_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "client_secret"),
)

if self.jwk and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "jwk"),
)

if self._admin_token and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "admin_token"),
)

if (
root.auth.password_enabled_for_reauth
or root.auth.password_enabled_for_login
@@ -370,9 +350,7 @@ class ExperimentalConfig(Config):

section = "experimental"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
experimental = config.get("experimental_features") or {}

# MSC3026 (busy presence state)
@@ -516,9 +494,7 @@ class ExperimentalConfig(Config):
) from exc

# Check that none of the other config options conflict with MSC3861 when enabled
self.msc3861.check_config_conflicts(
self.root, allow_secrets_in_config=allow_secrets_in_config
)
self.msc3861.check_config_conflicts(self.root)

self.msc4028_push_encrypted_events = experimental.get(
"msc4028_push_encrypted_events", False

@@ -96,11 +96,6 @@ Conflicting options 'macaroon_secret_key' and 'macaroon_secret_key_path' are
both defined in config file.
"""

CONFLICTING_FORM_SECRET_OPTS_ERROR = """\
Conflicting options 'form_secret' and 'form_secret_path' are both defined in
config file.
"""

logger = logging.getLogger(__name__)


@@ -117,11 +112,7 @@ class KeyConfig(Config):
section = "key"

def read_config(
self,
config: JsonDict,
config_dir_path: str,
allow_secrets_in_config: bool,
**kwargs: Any,
self, config: JsonDict, config_dir_path: str, **kwargs: Any
) -> None:
# the signing key can be specified inline or in a separate file
if "signing_key" in config:
@@ -181,11 +172,6 @@ class KeyConfig(Config):
)

macaroon_secret_key = config.get("macaroon_secret_key")
if macaroon_secret_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("macaroon_secret_key",),
)
macaroon_secret_key_path = config.get("macaroon_secret_key_path")
if macaroon_secret_key_path:
if macaroon_secret_key:
@@ -206,19 +192,7 @@ class KeyConfig(Config):

# a secret which is used to calculate HMACs for form values, to stop
# falsification of values
form_secret = config.get("form_secret", None)
if form_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("form_secret",),
)
form_secret_path = config.get("form_secret_path", None)
if form_secret_path:
if form_secret:
raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
self.form_secret = read_file(form_secret_path, "form_secret_path").strip()
else:
self.form_secret = form_secret
self.form_secret = config.get("form_secret", None)

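`form_secret` follows the same two-source scheme as the other secrets in this diff: an in-line value and a `*_path` variant are mutually exclusive, and the file-based variant is read and stripped of trailing whitespace. A condensed sketch, with a generic `read_file` passed in rather than Synapse's own helper:

```python
def resolve_secret(inline, path, read_file):
    """Hypothetical sketch of the two-source scheme: an in-line option and its
    *_path twin are mutually exclusive; file contents are stripped."""
    if path:
        if inline:
            raise ValueError("both the in-line and _path variants are set")
        return read_file(path).strip()
    return inline

assert resolve_secret("s3kr1t", None, read_file=lambda p: "") == "s3kr1t"
assert resolve_secret(None, "/etc/secret", read_file=lambda p: "s3kr1t\n") == "s3kr1t"
```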
def generate_config_section(
self,

@@ -125,10 +125,6 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"enum": ["client_secret_basic", "client_secret_post", "none"],
},
"pkce_method": {"type": "string", "enum": ["auto", "always", "never"]},
"id_token_signing_alg_values_supported": {
"type": "array",
"items": {"type": "string"},
},
"scopes": {"type": "array", "items": {"type": "string"}},
"authorization_endpoint": {"type": "string"},
"token_endpoint": {"type": "string"},
@@ -141,9 +137,6 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"type": "string",
"enum": ["auto", "userinfo_endpoint"],
},
"redirect_uri": {
"type": ["string", "null"],
},
"allow_existing_users": {"type": "boolean"},
"user_mapping_provider": {"type": ["object", "null"]},
"attribute_requirements": {
@@ -333,9 +326,6 @@ def _parse_oidc_config_dict(
client_secret_jwt_key=client_secret_jwt_key,
client_auth_method=client_auth_method,
pkce_method=oidc_config.get("pkce_method", "auto"),
id_token_signing_alg_values_supported=oidc_config.get(
"id_token_signing_alg_values_supported"
),
scopes=oidc_config.get("scopes", ["openid"]),
authorization_endpoint=oidc_config.get("authorization_endpoint"),
token_endpoint=oidc_config.get("token_endpoint"),
@@ -347,7 +337,6 @@
),
skip_verification=oidc_config.get("skip_verification", False),
user_profile_method=oidc_config.get("user_profile_method", "auto"),
redirect_uri=oidc_config.get("redirect_uri"),
allow_existing_users=oidc_config.get("allow_existing_users", False),
user_mapping_provider_class=user_mapping_provider_class,
user_mapping_provider_config=user_mapping_provider_config,
@@ -356,9 +345,6 @@ def _parse_oidc_config_dict(
additional_authorization_parameters=oidc_config.get(
"additional_authorization_parameters", {}
),
passthrough_authorization_parameters=oidc_config.get(
"passthrough_authorization_parameters", []
),
)


@@ -416,34 +402,6 @@ class OidcProviderConfig:
# Valid values are 'auto', 'always', and 'never'.
pkce_method: str

id_token_signing_alg_values_supported: Optional[List[str]]
"""
List of the JWS signing algorithms (`alg` values) that are supported for signing the
`id_token`.

This is *not* required if `discovery` is disabled. We default to supporting `RS256`
in the downstream usage if no algorithms are configured here or in the discovery
document.

According to the spec, the algorithm `"RS256"` MUST be included. The absolute rigid
approach would be to reject this provider as non-compliant if it's not included but
we can just allow whatever and see what happens (they're the ones that configured
the value and cooperating with the identity provider). It wouldn't be wise to add it
ourselves because absence of `RS256` might indicate that the provider actually
doesn't support it, despite the spec requirement. Adding it silently could lead to
failed authentication attempts or strange mismatch attacks.

The `alg` value `"none"` MAY be supported but can only be used if the Authorization
Endpoint does not include `id_token` in the `response_type` (ex.
`/authorize?response_type=code` where `none` can apply,
`/authorize?response_type=code%20id_token` where `none` can't apply) (such as when
using the Authorization Code Flow).

Spec:
- https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
- https://openid.net/specs/openid-connect-core-1_0.html#AuthorizationExamples
"""

# list of scopes to request
scopes: Collection[str]

@@ -474,18 +432,6 @@ class OidcProviderConfig:
# values are: "auto" or "userinfo_endpoint".
user_profile_method: str

redirect_uri: Optional[str]
"""
An optional replacement for Synapse's hardcoded `redirect_uri` URL
(`<public_baseurl>/_synapse/client/oidc/callback`). This can be used to send
the client to a different URL after it receives a response from the
`authorization_endpoint`.

If this is set, the client is expected to call Synapse's OIDC callback URL
reproduced above itself with the necessary parameters and session cookie, in
order to complete OIDC login.
"""

# whether to allow a user logging in via OIDC to match a pre-existing account
# instead of failing
allow_existing_users: bool
@@ -504,6 +450,3 @@ class OidcProviderConfig:

# Additional parameters that will be passed to the authorization grant URL
additional_authorization_parameters: Mapping[str, str]

# Allow query parameters to the redirect endpoint that will be passed to the authorization grant URL
passthrough_authorization_parameters: Collection[str]

@@ -234,9 +234,3 @@ class RatelimitConfig(Config):
"rc_presence.per_user",
defaults={"per_second": 0.1, "burst_count": 1},
)

self.rc_delayed_event_mgmt = RatelimitSettings.parse(
config,
"rc_delayed_event_mgmt",
defaults={"per_second": 1, "burst_count": 5},
)

@@ -34,9 +34,7 @@ These are mutually incompatible.
class RedisConfig(Config):
section = "redis"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
redis_config = config.get("redis") or {}
self.redis_enabled = redis_config.get("enabled", False)

@@ -50,11 +48,6 @@ class RedisConfig(Config):
self.redis_path = redis_config.get("path", None)
self.redis_dbid = redis_config.get("dbid", None)
self.redis_password = redis_config.get("password")
if self.redis_password and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("redis", "password"),
)
redis_password_path = redis_config.get("password_path")
if redis_password_path:
if self.redis_password:

@@ -43,9 +43,7 @@ You have configured both `registration_shared_secret` and
class RegistrationConfig(Config):
section = "registration"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.enable_registration = strtobool(
str(config.get("enable_registration", False))
)
@@ -70,11 +68,6 @@ class RegistrationConfig(Config):

# read the shared secret, either inline or from an external file
self.registration_shared_secret = config.get("registration_shared_secret")
if self.registration_shared_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("registration_shared_secret",),
)
registration_shared_secret_path = config.get("registration_shared_secret_path")
if registration_shared_secret_path:
if self.registration_shared_secret:

@@ -54,7 +54,9 @@ class RoomDirectoryConfig(Config):
for rule in room_list_publication_rules
]
else:
self._room_list_publication_rules = []
self._room_list_publication_rules = [
_RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
]

def is_alias_creation_allowed(self, user_id: str, room_id: str, alias: str) -> bool:
"""Checks if the given user is allowed to create the given alias

@@ -34,16 +34,9 @@ These are mutually incompatible.
class VoipConfig(Config):
section = "voip"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config.get("turn_shared_secret")
if self.turn_shared_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("turn_shared_secret",),
)
turn_shared_secret_path = config.get("turn_shared_secret_path")
if turn_shared_secret_path:
if self.turn_shared_secret:

@@ -38,7 +38,6 @@ from synapse.config._base import (
ConfigError,
RoutableShardedWorkerHandlingConfig,
ShardedWorkerHandlingConfig,
read_file,
)
from synapse.config._util import parse_and_validate_mapping
from synapse.config.server import (
@@ -66,11 +65,6 @@ configuration under `main` inside the `instance_map`. See workers documentation
`https://element-hq.github.io/synapse/latest/workers.html#worker-configuration`
"""

CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR = """\
Conflicting options 'worker_replication_secret' and
'worker_replication_secret_path' are both defined in config file.
"""

# This allows for a handy knob when it's time to change from 'master' to
# something with less 'history'
MAIN_PROCESS_INSTANCE_NAME = "master"
@@ -224,9 +218,7 @@ class WorkerConfig(Config):

section = "worker"

def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.worker_app = config.get("worker_app")

# Canonicalise worker_app so that master always has None
@@ -250,23 +242,7 @@ class WorkerConfig(Config):
raise ConfigError(DIRECT_TCP_ERROR, ("worker_replication_port",))

# The shared secret used for authentication when connecting to the main synapse.
worker_replication_secret = config.get("worker_replication_secret", None)
if worker_replication_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("worker_replication_secret",),
)
worker_replication_secret_path = config.get(
"worker_replication_secret_path", None
)
if worker_replication_secret_path:
if worker_replication_secret:
raise ConfigError(CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR)
self.worker_replication_secret = read_file(
worker_replication_secret_path, "worker_replication_secret_path"
).strip()
else:
self.worker_replication_secret = worker_replication_secret
self.worker_replication_secret = config.get("worker_replication_secret", None)

self.worker_name = config.get("worker_name", self.worker_app)
self.instance_name = self.worker_name or MAIN_PROCESS_INSTANCE_NAME

@@ -22,6 +22,7 @@

import abc
import collections.abc
import os
from typing import (
TYPE_CHECKING,
Any,
@@ -47,21 +48,21 @@ from synapse.synapse_rust.events import EventInternalMetadata
from synapse.types import JsonDict, StrCollection
from synapse.util.caches import intern_dict
from synapse.util.frozenutils import freeze
from synapse.util.stringutils import strtobool

if TYPE_CHECKING:
from synapse.events.builder import EventBuilder

# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
# bugs where we accidentally share e.g. signature dicts. However, converting a
# dict to frozen_dicts is expensive.
#
# NOTE: This is overridden by the configuration by the Synapse worker apps, but
# for the sake of tests, it is set here while it cannot be configured on the
# homeserver object itself.

USE_FROZEN_DICTS = False
"""
Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
bugs where we accidentally share e.g. signature dicts. However, converting a
dict to frozen_dicts is expensive.
USE_FROZEN_DICTS = strtobool(os.environ.get("SYNAPSE_USE_FROZEN_DICTS", "0"))

NOTE: This is overridden by the configuration by the Synapse worker apps, but
for the sake of tests, it is set here because it cannot be configured on the
homeserver object itself.
"""

T = TypeVar("T")


@@ -40,8 +40,6 @@ import attr
from canonicaljson import encode_canonical_json

from synapse.api.constants import (
CANONICALJSON_MAX_INT,
CANONICALJSON_MIN_INT,
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
@@ -63,6 +61,9 @@ SPLIT_FIELD_REGEX = re.compile(r"\\*\.")
# Find escaped characters, e.g. those with a \ in front of them.
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")

CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT


# Module API callback that allows adding fields to the unsigned section of
# events that are sent to clients.

@@ -86,7 +86,9 @@ class EventValidator:

# Depending on the room version, ensure the data is spec compliant JSON.
if event.room_version.strict_canonicaljson:
validate_canonicaljson(event.get_pdu_json())
# Note that only the client controlled portion of the event is
# checked, since we trust the portions of the event we created.
validate_canonicaljson(event.content)

if event.type == EventTypes.Aliases:
if "aliases" in event.content:

@@ -20,7 +20,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence
from typing import TYPE_CHECKING, Awaitable, Callable, Optional

from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership
from synapse.api.errors import Codes, SynapseError
@@ -29,7 +29,6 @@ from synapse.crypto.event_signing import check_event_content_hash
from synapse.crypto.keyring import Keyring
from synapse.events import EventBase, make_event_from_dict
from synapse.events.utils import prune_event, validate_canonicaljson
from synapse.federation.units import filter_pdus_for_valid_depth
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.opentracing import log_kv, trace
from synapse.types import JsonDict, get_domain_from_id
@@ -268,15 +267,6 @@ def _is_invite_via_3pid(event: EventBase) -> bool:
)


def parse_events_from_pdu_json(
pdus_json: Sequence[JsonDict], room_version: RoomVersion
) -> List[EventBase]:
return [
event_from_pdu_json(pdu_json, room_version)
for pdu_json in filter_pdus_for_valid_depth(pdus_json)
]


def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase:
"""Construct an EventBase from an event json received over federation


@@ -68,7 +68,6 @@ from synapse.federation.federation_base import (
FederationBase,
InvalidEventSignatureError,
event_from_pdu_json,
parse_events_from_pdu_json,
)
from synapse.federation.transport.client import SendJoinResponse
from synapse.http.client import is_unknown_endpoint
@@ -350,7 +349,7 @@ class FederationClient(FederationBase):

room_version = await self.store.get_room_version(room_id)

pdus = parse_events_from_pdu_json(transaction_data_pdus, room_version)
pdus = [event_from_pdu_json(p, room_version) for p in transaction_data_pdus]

# Check signatures and hash of pdus, removing any from the list that fail checks
pdus[:] = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
@@ -394,7 +393,9 @@ class FederationClient(FederationBase):
transaction_data,
)

pdu_list = parse_events_from_pdu_json(transaction_data["pdus"], room_version)
pdu_list: List[EventBase] = [
event_from_pdu_json(p, room_version) for p in transaction_data["pdus"]
]

if pdu_list and pdu_list[0]:
pdu = pdu_list[0]
@@ -808,7 +809,7 @@ class FederationClient(FederationBase):

room_version = await self.store.get_room_version(room_id)

auth_chain = parse_events_from_pdu_json(res["auth_chain"], room_version)
auth_chain = [event_from_pdu_json(p, room_version) for p in res["auth_chain"]]

signed_auth = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, auth_chain, room_version=room_version
@@ -1528,7 +1529,9 @@ class FederationClient(FederationBase):

room_version = await self.store.get_room_version(room_id)

events = parse_events_from_pdu_json(content.get("events", []), room_version)
events = [
event_from_pdu_json(e, room_version) for e in content.get("events", [])
]

signed_events = await self._check_sigs_and_hash_for_pulled_events_and_fetch(
destination, events, room_version=room_version

@@ -66,7 +66,7 @@ from synapse.federation.federation_base import (
event_from_pdu_json,
)
from synapse.federation.persistence import TransactionActions
from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus
from synapse.federation.units import Edu, Transaction
from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME
from synapse.http.servlet import assert_params_in_dict
from synapse.logging.context import (
@@ -469,12 +469,7 @@ class FederationServer(FederationBase):
logger.info("Ignoring PDU: %s", e)
continue

try:
event = event_from_pdu_json(p, room_version)
except SynapseError as e:
logger.info("Ignoring PDU for failing to deserialize: %s", e)
continue

event = event_from_pdu_json(p, room_version)
pdus_by_room.setdefault(room_id, []).append(event)

if event.origin_server_ts > newest_pdu_ts:
@@ -641,8 +636,8 @@ class FederationServer(FederationBase):
)

return {
"pdus": serialize_and_filter_pdus(pdus),
"auth_chain": serialize_and_filter_pdus(auth_chain),
"pdus": [pdu.get_pdu_json() for pdu in pdus],
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
}

async def on_pdu_request(
@@ -766,8 +761,8 @@ class FederationServer(FederationBase):
event_json = event.get_pdu_json(time_now)
resp = {
"event": event_json,
"state": serialize_and_filter_pdus(state_events, time_now),
"auth_chain": serialize_and_filter_pdus(auth_chain_events, time_now),
"state": [p.get_pdu_json(time_now) for p in state_events],
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events],
"members_omitted": caller_supports_partial_state,
}

@@ -1010,7 +1005,7 @@ class FederationServer(FederationBase):

time_now = self._clock.time_msec()
auth_pdus = await self.handler.on_event_auth(event_id)
res = {"auth_chain": serialize_and_filter_pdus(auth_pdus, time_now)}
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
return 200, res

async def on_query_client_keys(
@@ -1095,7 +1090,7 @@ class FederationServer(FederationBase):

time_now = self._clock.time_msec()

return {"events": serialize_and_filter_pdus(missing_events, time_now)}
return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]}

async def on_openid_userinfo(self, token: str) -> Optional[str]:
ts_now_ms = self._clock.time_msec()

@@ -24,12 +24,10 @@ server protocol.
"""

import logging
from typing import List, Optional, Sequence
from typing import List, Optional

import attr

from synapse.api.constants import CANONICALJSON_MAX_INT, CANONICALJSON_MIN_INT
from synapse.events import EventBase
from synapse.types import JsonDict

logger = logging.getLogger(__name__)
@@ -106,28 +104,8 @@ class Transaction:
result = {
"origin": self.origin,
"origin_server_ts": self.origin_server_ts,
"pdus": filter_pdus_for_valid_depth(self.pdus),
"pdus": self.pdus,
}
if self.edus:
result["edus"] = self.edus
return result


def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]:
filtered_pdus = []
for pdu in pdus:
# Drop PDUs that have a depth that is outside of the range allowed
# by canonical json.
if (
"depth" in pdu
and CANONICALJSON_MIN_INT <= pdu["depth"] <= CANONICALJSON_MAX_INT
):
filtered_pdus.append(pdu)

return filtered_pdus


def serialize_and_filter_pdus(
pdus: Sequence[EventBase], time_now: Optional[int] = None
) -> List[JsonDict]:
return filter_pdus_for_valid_depth([pdu.get_pdu_json(time_now) for pdu in pdus])

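`serialize_and_filter_pdus` composes the two steps shown above: serialise each event, then drop any whose `depth` falls outside the canonical-JSON integer range. Its effect can be seen with plain dicts:

```python
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT

def filter_pdus_for_valid_depth(pdus):
    # Keep only PDUs whose "depth" exists and sits inside canonical JSON bounds.
    return [
        pdu
        for pdu in pdus
        if "depth" in pdu
        and CANONICALJSON_MIN_INT <= pdu["depth"] <= CANONICALJSON_MAX_INT
    ]

pdus = [{"depth": 5}, {"depth": 2**53}, {"event_id": "$x"}]
assert filter_pdus_for_valid_depth(pdus) == [{"depth": 5}]
```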
@@ -19,7 +19,6 @@ from twisted.internet.interfaces import IDelayedCall

from synapse.api.constants import EventTypes
from synapse.api.errors import ShadowBanError
from synapse.api.ratelimiting import Ratelimiter
from synapse.config.workers import MAIN_PROCESS_INSTANCE_NAME
from synapse.logging.opentracing import set_tag
from synapse.metrics import event_processing_positions
@@ -58,19 +57,10 @@ class DelayedEventsHandler:
self._storage_controllers = hs.get_storage_controllers()
self._config = hs.config
self._clock = hs.get_clock()
self._request_ratelimiter = hs.get_request_ratelimiter()
self._event_creation_handler = hs.get_event_creation_handler()
self._room_member_handler = hs.get_room_member_handler()

self._request_ratelimiter = hs.get_request_ratelimiter()

# Ratelimiter for management of existing delayed events,
# keyed by the sending user ID & device ID.
self._delayed_event_mgmt_ratelimiter = Ratelimiter(
store=self._store,
clock=self._clock,
cfg=self._config.ratelimiting.rc_delayed_event_mgmt,
)

self._next_delayed_event_call: Optional[IDelayedCall] = None

# The current position in the current_state_delta stream
@@ -191,36 +181,18 @@ class DelayedEventsHandler:

async def _handle_state_deltas(self, deltas: List[StateDelta]) -> None:
"""
Process current state deltas to cancel other users' pending delayed events
Process current state deltas to cancel pending delayed events
that target the same state.
"""
for delta in deltas:
if delta.event_id is None:
logger.debug(
"Not handling delta for deleted state: %r %r",
delta.event_type,
delta.state_key,
)
continue

logger.debug(
"Handling: %r %r, %s", delta.event_type, delta.state_key, delta.event_id
)

event = await self._store.get_event(
delta.event_id, check_room_id=delta.room_id
)
sender = UserID.from_string(event.sender)

next_send_ts = await self._store.cancel_delayed_state_events(
room_id=delta.room_id,
event_type=delta.event_type,
state_key=delta.state_key,
not_from_localpart=(
sender.localpart
if sender.domain == self._config.server.server_name
else ""
),
)

if self._next_send_ts_changed(next_send_ts):
@@ -255,9 +227,6 @@ class DelayedEventsHandler:
Raises:
SynapseError: if the delayed event fails validation checks.
"""
# Use standard request limiter for scheduling new delayed events.
# TODO: Instead apply ratelimiting based on the scheduled send time.
# See https://github.com/element-hq/synapse/issues/18021
await self._request_ratelimiter.ratelimit(requester)

self._event_creation_handler.validator.validate_builder(
@@ -316,10 +285,7 @@ class DelayedEventsHandler:
NotFoundError: if no matching delayed event could be found.
"""
assert self._is_master
await self._delayed_event_mgmt_ratelimiter.ratelimit(
requester,
(requester.user.to_string(), requester.device_id),
)
await self._request_ratelimiter.ratelimit(requester)
await self._initialized_from_db

next_send_ts = await self._store.cancel_delayed_event(
@@ -342,10 +308,7 @@ class DelayedEventsHandler:
NotFoundError: if no matching delayed event could be found.
"""
assert self._is_master
await self._delayed_event_mgmt_ratelimiter.ratelimit(
requester,
(requester.user.to_string(), requester.device_id),
)
await self._request_ratelimiter.ratelimit(requester)
await self._initialized_from_db

next_send_ts = await self._store.restart_delayed_event(
@@ -369,8 +332,6 @@ class DelayedEventsHandler:
NotFoundError: if no matching delayed event could be found.
"""
assert self._is_master
# Use standard request limiter for sending delayed events on-demand,
# as an on-demand send is similar to sending a regular event.
await self._request_ratelimiter.ratelimit(requester)
await self._initialized_from_db

@@ -454,10 +415,7 @@ class DelayedEventsHandler:

async def get_all_for_user(self, requester: Requester) -> List[JsonDict]:
"""Return all pending delayed events requested by the given user."""
await self._delayed_event_mgmt_ratelimiter.ratelimit(
requester,
(requester.user.to_string(), requester.device_id),
)
await self._request_ratelimiter.ratelimit(requester)
return await self._store.get_all_delayed_events_for_user(
requester.user.localpart
)

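The management ratelimiter being removed in these hunks was keyed on a `(user_id, device_id)` tuple rather than the plain requester, so two sessions of the same account were limited independently. A toy limiter (hypothetical, not Synapse's Ratelimiter) shows why the key shape matters:

```python
from collections import defaultdict

class CountingLimiter:
    """Toy limiter: each key gets `burst` actions. Keying on
    (user_id, device_id) gives every device its own bucket; keying on
    user_id alone would pool all of a user's devices together."""

    def __init__(self, burst: int) -> None:
        self.burst = burst
        self.counts: defaultdict = defaultdict(int)

    def allow(self, key) -> bool:
        self.counts[key] += 1
        return self.counts[key] <= self.burst

limiter = CountingLimiter(burst=1)
assert limiter.allow(("@alice:example.org", "DEVICE_A"))
assert limiter.allow(("@alice:example.org", "DEVICE_B"))      # separate bucket
assert not limiter.allow(("@alice:example.org", "DEVICE_A"))  # bucket exhausted
```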
@@ -163,8 +163,6 @@ class DeviceWorkerHandler:
raise errors.NotFoundError()

ips = await self.store.get_last_client_ip_by_device(user_id, device_id)

device = dict(device)
_update_device_from_client_ips(device, ips)

set_tag("device", str(device))

@@ -644,33 +644,11 @@ class EventCreationHandler:
"""
await self.auth_blocking.check_auth_blocking(requester=requester)

requester_suspended = await self.store.get_user_suspended_status(
requester.user.to_string()
)
if requester_suspended:
# We want to allow suspended users to perform "corrective" actions
# asked of them by server admins, such as redact their messages and
# leave rooms.
if event_dict["type"] in ["m.room.redaction", "m.room.member"]:
if event_dict["type"] == "m.room.redaction":
event = await self.store.get_event(
event_dict["content"]["redacts"], allow_none=True
)
if event:
if event.sender != requester.user.to_string():
raise SynapseError(
403,
"You can only redact your own events while account is suspended.",
Codes.USER_ACCOUNT_SUSPENDED,
)
if event_dict["type"] == "m.room.member":
if event_dict["content"]["membership"] != "leave":
raise SynapseError(
403,
"Changing membership while account is suspended is not allowed.",
Codes.USER_ACCOUNT_SUSPENDED,
)
else:
if event_dict["type"] == EventTypes.Message:
requester_suspended = await self.store.get_user_suspended_status(
requester.user.to_string()
)
if requester_suspended:
raise SynapseError(
403,
"Sending messages while account is suspended is not allowed.",
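The removed side of this hunk widens the suspension check from "block `m.room.message`" to an allow-list: a suspended account may still redact its own events and leave rooms, and everything else is refused. The decision logic, condensed with hypothetical names:

```python
def may_send_while_suspended(event_type, content, redacts_own_event):
    """Sketch of the corrective-actions allow-list above: suspended accounts
    may only redact their own events or leave rooms."""
    if event_type == "m.room.redaction":
        # Only redactions of the user's own events are allowed.
        return redacts_own_event
    if event_type == "m.room.member":
        # Only leaving is allowed; joins/invites/knocks are not.
        return content.get("membership") == "leave"
    return False

assert may_send_while_suspended("m.room.member", {"membership": "leave"}, False)
assert not may_send_while_suspended("m.room.member", {"membership": "join"}, False)
assert not may_send_while_suspended("m.room.message", {}, False)
```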
@@ -1462,12 +1440,6 @@ class EventCreationHandler:
|
||||
)
|
||||
return prev_event
|
||||
|
||||
if not event.is_state() and event.type in [
|
||||
EventTypes.Message,
|
||||
EventTypes.Encrypted,
|
||||
]:
|
||||
await self.store.set_room_participation(event.user_id, event.room_id)
|
||||
|
||||
if event.internal_metadata.is_out_of_band_membership():
|
||||
# the only sort of out-of-band-membership events we expect to see here are
|
||||
# invite rejections and rescinded knocks that we have generated ourselves.
|
||||
|
||||
@@ -382,12 +382,7 @@ class OidcProvider:
|
||||
self._macaroon_generaton = macaroon_generator
|
||||
|
||||
self._config = provider
|
||||
|
||||
self._callback_url: str
|
||||
if provider.redirect_uri is not None:
|
||||
self._callback_url = provider.redirect_uri
|
||||
else:
|
||||
self._callback_url = hs.config.oidc.oidc_callback_url
|
||||
self._callback_url: str = hs.config.oidc.oidc_callback_url
|
||||
|
||||
# Calculate the prefix for OIDC callback paths based on the public_baseurl.
|
||||
# We'll insert this into the Path= parameter of any session cookies we set.
|
||||
@@ -467,10 +462,6 @@ class OidcProvider:
|
||||
|
||||
self._sso_handler.register_identity_provider(self)
|
||||
|
||||
self.passthrough_authorization_parameters = (
|
||||
provider.passthrough_authorization_parameters
|
||||
)
|
||||
|
||||
def _validate_metadata(self, m: OpenIDProviderMetadata) -> None:
|
||||
"""Verifies the provider metadata.
|
||||
|
||||
@@ -649,11 +640,6 @@ class OidcProvider:
|
||||
elif self._config.pkce_method == "never":
|
||||
metadata.pop("code_challenge_methods_supported", None)
|
||||
|
||||
if self._config.id_token_signing_alg_values_supported:
|
||||
metadata["id_token_signing_alg_values_supported"] = (
|
||||
self._config.id_token_signing_alg_values_supported
|
||||
)
|
||||
|
||||
self._validate_metadata(metadata)
|
||||
|
||||
return metadata
|
||||
@@ -1009,6 +995,7 @@ class OidcProvider:
|
||||
when everything is done (or None for UI Auth)
|
||||
ui_auth_session_id: The session ID of the ongoing UI Auth (or
|
||||
None if this is a login).
|
||||
|
||||
Returns:
|
||||
The redirect URL to the authorization endpoint.
|
||||
|
||||
@@ -1081,13 +1068,6 @@ class OidcProvider:
|
||||
)
|
||||
)
|
||||
|
||||
# add passthrough additional authorization parameters
|
||||
passthrough_authorization_parameters = self.passthrough_authorization_parameters
|
||||
for parameter in passthrough_authorization_parameters:
|
||||
parameter_value = parse_string(request, parameter)
|
||||
if parameter_value:
|
||||
additional_authorization_parameters.update({parameter: parameter_value})
|
||||
|
||||
authorization_endpoint = metadata.get("authorization_endpoint")
|
||||
return prepare_grant_uri(
|
||||
authorization_endpoint,
|
||||
|
||||
@@ -118,9 +118,6 @@ DEFAULT_MAX_TIMEOUT_MS = 20_000
|
||||
# Maximum allowed timeout_ms for download and thumbnail requests
|
||||
MAXIMUM_ALLOWED_MAX_TIMEOUT_MS = 60_000
|
||||
|
||||
# The ETag header value to use for immutable media. This can be anything.
|
||||
_IMMUTABLE_ETAG = "1"
|
||||
|
||||
|
||||
def respond_404(request: SynapseRequest) -> None:
|
||||
assert request.path is not None
|
||||
@@ -227,7 +224,12 @@ def add_file_headers(

     request.setHeader(b"Content-Disposition", disposition.encode("ascii"))

-    _add_cache_headers(request)
+    # cache for at least a day.
+    # XXX: we might want to turn this off for data we don't want to
+    # recommend caching as it's sensitive or private - or at least
+    # select private. don't bother setting Expires as all our
+    # clients are smart enough to be happy with Cache-Control
+    request.setHeader(b"Cache-Control", b"public,max-age=86400,s-maxage=86400")

     if file_size is not None:
         request.setHeader(b"Content-Length", b"%d" % (file_size,))
@@ -238,26 +240,6 @@ def add_file_headers(
     request.setHeader(b"X-Robots-Tag", "noindex, nofollow, noarchive, noimageindex")

-
-def _add_cache_headers(request: Request) -> None:
-    """Adds the appropriate cache headers to the response"""
-
-    # Cache on the client for at least a day.
-    #
-    # We set this to "public,s-maxage=0,proxy-revalidate" to allow CDNs to cache
-    # the media, so long as they "revalidate" the media on every request. By
-    # revalidate, we mean send the request to Synapse with a `If-None-Match`
-    # header, to which Synapse can either respond with a 304 if the user is
-    # authenticated/authorized, or a 401/403 if they're not.
-    request.setHeader(
-        b"Cache-Control", b"public,max-age=86400,s-maxage=0,proxy-revalidate"
-    )
-
-    # Set an ETag header to allow requesters to use it in requests to check if
-    # the cache is still valid. Since media is immutable (though may be
-    # deleted), we just set this to a constant.
-    request.setHeader(b"ETag", _IMMUTABLE_ETAG)
-

 # separators as defined in RFC2616. SP and HT are handled separately.
 # see _can_encode_filename_as_token.
 _FILENAME_SEPARATOR_CHARS = {
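The deleted helper encodes a specific CDN strategy: `max-age=86400` lets the browser reuse media for a day, while `s-maxage=0,proxy-revalidate` forces shared caches to revalidate every hit so Synapse can re-check authorization, and the constant ETag makes that revalidation trivially cheap. The recipe, restated as a standalone sketch against the Twisted-style `setHeader` API used above:

# Sketch: the header recipe from the deleted _add_cache_headers helper.
def add_revalidating_cache_headers(request) -> None:
    # Browsers may cache for a day; CDNs must revalidate on every request.
    request.setHeader(
        b"Cache-Control", b"public,max-age=86400,s-maxage=0,proxy-revalidate"
    )
    # Media is immutable, so a constant ETag suffices for revalidation.
    request.setHeader(b"ETag", b"1")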
@@ -354,15 +336,13 @@ async def respond_with_multipart_responder(

     from synapse.media.media_storage import MultipartFileConsumer

-    _add_cache_headers(request)
-
     # note that currently the json_object is just {}, this will change when linked media
     # is implemented
     multipart_consumer = MultipartFileConsumer(
         clock,
         request,
         media_type,
-        {},  # Note: if we change this we need to change the returned ETag.
+        {},
         disposition,
         media_length,
     )
@@ -439,46 +419,6 @@ async def respond_with_responder(
         finish_request(request)

-
-def respond_with_304(request: SynapseRequest) -> None:
-    request.setResponseCode(304)
-
-    # could alternatively use request.notifyFinish() and flip a flag when
-    # the Deferred fires, but since the flag is RIGHT THERE it seems like
-    # a waste.
-    if request._disconnected:
-        logger.warning(
-            "Not sending response to request %s, already disconnected.", request
-        )
-        return None
-
-    _add_cache_headers(request)
-
-    request.finish()
-
-
-def check_for_cached_entry_and_respond(request: SynapseRequest) -> bool:
-    """Check if the request has a conditional header that allows us to return a
-    304 Not Modified response, and if it does, return a 304 response.
-
-    This handles clients and intermediary proxies caching media.
-    This method assumes that the user has already been
-    authorised to request the media.
-
-    Returns True if we have responded."""
-
-    # We've checked the user has access to the media, so we now check if it
-    # is a "conditional request" and we can just return a `304 Not Modified`
-    # response. Since media is immutable (though may be deleted), we just
-    # check this is the expected constant.
-    etag = request.getHeader("If-None-Match")
-    if etag == _IMMUTABLE_ETAG:
-        # Return a `304 Not Modified`.
-        respond_with_304(request)
-        return True
-
-    return False
-

 class Responder(ABC):
     """Represents a response that can be streamed to the requester.

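With a constant ETag, revalidation in the deleted helpers reduces to a string comparison: any authorized conditional request carrying `If-None-Match: 1` can be answered with an empty 304. A condensed sketch of the two deleted functions (disconnect handling omitted):

_IMMUTABLE_ETAG = "1"

# Condensed sketch of respond_with_304 / check_for_cached_entry_and_respond.
# Must only be called after the requester has been authorized.
def check_cached_and_respond(request) -> bool:
    if request.getHeader("If-None-Match") == _IMMUTABLE_ETAG:
        request.setResponseCode(304)
        request.finish()
        return True
    return False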
@@ -52,18 +52,13 @@ from synapse.media._base import (
     FileInfo,
     Responder,
     ThumbnailInfo,
-    check_for_cached_entry_and_respond,
     get_filename_from_headers,
     respond_404,
     respond_with_multipart_responder,
     respond_with_responder,
 )
 from synapse.media.filepath import MediaFilePaths
-from synapse.media.media_storage import (
-    MediaStorage,
-    SHA256TransparentIOReader,
-    SHA256TransparentIOWriter,
-)
+from synapse.media.media_storage import MediaStorage
 from synapse.media.storage_provider import StorageProviderWrapper
 from synapse.media.thumbnailer import Thumbnailer, ThumbnailError
 from synapse.media.url_previewer import UrlPreviewer
@@ -305,26 +300,15 @@ class MediaRepository:
             auth_user: The user_id of the uploader
         """
         file_info = FileInfo(server_name=None, file_id=media_id)
-        sha256reader = SHA256TransparentIOReader(content)
-        # This implements all of IO as it has a passthrough
-        fname = await self.media_storage.store_file(sha256reader.wrap(), file_info)
-        sha256 = sha256reader.hexdigest()
-        should_quarantine = await self.store.get_is_hash_quarantined(sha256)
+        fname = await self.media_storage.store_file(content, file_info)
         logger.info("Stored local media in file %r", fname)

-        if should_quarantine:
-            logger.warn(
-                "Media has been automatically quarantined as it matched existing quarantined media"
-            )
-
         await self.store.update_local_media(
             media_id=media_id,
             media_type=media_type,
             upload_name=upload_name,
             media_length=content_length,
             user_id=auth_user,
-            sha256=sha256,
-            quarantined_by="system" if should_quarantine else None,
         )

         try:
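The removed upload path hashes the stream while writing it out, then quarantines the fresh copy if that hash is already known to be quarantined, so re-uploads of known-bad media never become visible. A sketch of the decision, reusing the reader and store methods named in the diff:

# Sketch: quarantine-on-upload, as in the removed code above.
async def store_and_check_quarantine(store, media_storage, content, file_info):
    sha256reader = SHA256TransparentIOReader(content)
    # The hash accumulates transparently while the file is being stored.
    fname = await media_storage.store_file(sha256reader.wrap(), file_info)
    sha256 = sha256reader.hexdigest()
    should_quarantine = await store.get_is_hash_quarantined(sha256)
    return fname, sha256, ("system" if should_quarantine else None)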
@@ -357,19 +341,11 @@ class MediaRepository:
         media_id = random_string(24)

         file_info = FileInfo(server_name=None, file_id=media_id)
-        # This implements all of IO as it has a passthrough
-        sha256reader = SHA256TransparentIOReader(content)
-        fname = await self.media_storage.store_file(sha256reader.wrap(), file_info)
-        sha256 = sha256reader.hexdigest()
-        should_quarantine = await self.store.get_is_hash_quarantined(sha256)
-
+        fname = await self.media_storage.store_file(content, file_info)

         logger.info("Stored local media in file %r", fname)

-        if should_quarantine:
-            logger.warn(
-                "Media has been automatically quarantined as it matched existing quarantined media"
-            )
-
         await self.store.store_local_media(
             media_id=media_id,
             media_type=media_type,
@@ -377,9 +353,6 @@ class MediaRepository:
             upload_name=upload_name,
             media_length=content_length,
             user_id=auth_user,
-            sha256=sha256,
-            # TODO: Better name?
-            quarantined_by="system" if should_quarantine else None,
         )

         try:
@@ -486,11 +459,6 @@ class MediaRepository:

         self.mark_recently_accessed(None, media_id)

-        # Once we've checked auth we can return early if the media is cached on
-        # the client
-        if check_for_cached_entry_and_respond(request):
-            return
-
         media_type = media_info.media_type
         if not media_type:
             media_type = "application/octet-stream"
@@ -570,17 +538,6 @@ class MediaRepository:
                 allow_authenticated,
             )

-            # Check if the media is cached on the client, if so return 304. We need
-            # to do this after we have fetched remote media, as we need it to do the
-            # auth.
-            if check_for_cached_entry_and_respond(request):
-                # We always need to use the responder.
-                if responder:
-                    with responder:
-                        pass
-
-                return
-
             # We deliberately stream the file outside the lock
             if responder and media_info:
                 upload_name = name if name else media_info.upload_name
@@ -782,13 +739,11 @@ class MediaRepository:
         file_info = FileInfo(server_name=server_name, file_id=file_id)

         async with self.media_storage.store_into_file(file_info) as (f, fname):
-            sha256writer = SHA256TransparentIOWriter(f)
             try:
                 length, headers = await self.client.download_media(
                     server_name,
                     media_id,
-                    # This implements all of BinaryIO as it has a passthrough
-                    output_stream=sha256writer.wrap(),
+                    output_stream=f,
                     max_size=self.max_upload_size,
                     max_timeout_ms=max_timeout_ms,
                     download_ratelimiter=download_ratelimiter,
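On the download side the same trick ran in reverse: the destination file was wrapped in a `SHA256TransparentIOWriter`, so the hash accumulated as bytes arrived from the remote server, with no second pass over the file. A schematic sketch, with the argument list trimmed to what the diff shows:

# Sketch: hash remote media while streaming it to disk, as removed above.
async def download_and_hash(
    client, media_storage, file_info, server_name, media_id, max_size
):
    async with media_storage.store_into_file(file_info) as (f, fname):
        sha256writer = SHA256TransparentIOWriter(f)
        length, headers = await client.download_media(
            server_name,
            media_id,
            output_stream=sha256writer.wrap(),  # hashed as bytes arrive
            max_size=max_size,
        )
    # Single pass: the digest is ready as soon as the download finishes.
    return fname, length, sha256writer.hexdigest()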
@@ -853,7 +808,6 @@ class MediaRepository:
                 upload_name=upload_name,
                 media_length=length,
                 filesystem_id=file_id,
-                sha256=sha256writer.hexdigest(),
             )

             logger.info("Stored remote media in file %r", fname)
@@ -874,7 +828,6 @@ class MediaRepository:
             last_access_ts=time_now_ms,
             quarantined_by=None,
             authenticated=authenticated,
-            sha256=sha256writer.hexdigest(),
         )

     async def _federation_download_remote_file(
@@ -909,13 +862,11 @@ class MediaRepository:
         file_info = FileInfo(server_name=server_name, file_id=file_id)

         async with self.media_storage.store_into_file(file_info) as (f, fname):
-            sha256writer = SHA256TransparentIOWriter(f)
             try:
                 res = await self.client.federation_download_media(
                     server_name,
                     media_id,
-                    # This implements all of BinaryIO as it has a passthrough
-                    output_stream=sha256writer.wrap(),
+                    output_stream=f,
                     max_size=self.max_upload_size,
                     max_timeout_ms=max_timeout_ms,
                     download_ratelimiter=download_ratelimiter,
@@ -986,7 +937,6 @@ class MediaRepository:
                 upload_name=upload_name,
                 media_length=length,
                 filesystem_id=file_id,
-                sha256=sha256writer.hexdigest(),
             )

             logger.debug("Stored remote media in file %r", fname)
@@ -1007,7 +957,6 @@ class MediaRepository:
             last_access_ts=time_now_ms,
             quarantined_by=None,
             authenticated=authenticated,
-            sha256=sha256writer.hexdigest(),
         )

     def _get_thumbnail_requirements(

@@ -19,7 +19,6 @@
 #
 #
 import contextlib
-import hashlib
 import json
 import logging
 import os
@@ -71,88 +70,6 @@ logger = logging.getLogger(__name__)
 CRLF = b"\r\n"


-class SHA256TransparentIOWriter:
-    """Will generate a SHA256 hash from a source stream transparently.
-
-    Args:
-        source: Source stream.
-    """
-
-    def __init__(self, source: BinaryIO):
-        self._hash = hashlib.sha256()
-        self._source = source
-
-    def write(self, buffer: Union[bytes, bytearray]) -> int:
-        """Wrapper for source.write()
-
-        Args:
-            buffer
-
-        Returns:
-            the value of source.write()
-        """
-        res = self._source.write(buffer)
-        self._hash.update(buffer)
-        return res
-
-    def hexdigest(self) -> str:
-        """The digest of the written or read value.
-
-        Returns:
-            The digest in hex format.
-        """
-        return self._hash.hexdigest()
-
-    def wrap(self) -> BinaryIO:
-        # This class implements a subset of the IO interface and passes through everything else via __getattr__
-        return cast(BinaryIO, self)
-
-    # Passthrough any other calls
-    def __getattr__(self, attr_name: str) -> Any:
-        return getattr(self._source, attr_name)
-
-
-class SHA256TransparentIOReader:
-    """Will generate a SHA256 hash from a source stream transparently.
-
-    Args:
-        source: Source IO stream.
-    """
-
-    def __init__(self, source: IO):
-        self._hash = hashlib.sha256()
-        self._source = source
-
-    def read(self, n: int = -1) -> bytes:
-        """Wrapper for source.read()
-
-        Args:
-            n
-
-        Returns:
-            the value of source.read()
-        """
-        bytes = self._source.read(n)
-        self._hash.update(bytes)
-        return bytes
-
-    def hexdigest(self) -> str:
-        """The digest of the written or read value.
-
-        Returns:
-            The digest in hex format.
-        """
-        return self._hash.hexdigest()
-
-    def wrap(self) -> IO:
-        # This class implements a subset of the IO interface and passes through everything else via __getattr__
-        return cast(IO, self)
-
-    # Passthrough any other calls
-    def __getattr__(self, attr_name: str) -> Any:
-        return getattr(self._source, attr_name)
-
-
 class MediaStorage:
     """Responsible for storing/fetching files from local sources.

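Both wrappers follow the same pattern: intercept the one method that moves bytes, feed it to `hashlib`, and let `__getattr__` forward everything else (`close`, `seek`, `tell`, ...) to the wrapped stream, which is what lets `wrap()` get away with a plain `cast`. A small self-contained usage example for the classes removed above, with `io.BytesIO` standing in for a real file or network stream:

import io

dest = io.BytesIO()
writer = SHA256TransparentIOWriter(dest)
writer.wrap().write(b"hello world")  # written to dest and hashed

source = io.BytesIO(b"hello world")
reader = SHA256TransparentIOReader(source)
reader.wrap().read()  # read from source and hashed

# Both saw the same bytes, so they report the same SHA-256 digest.
assert writer.hexdigest() == reader.hexdigest()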
@@ -190,6 +107,7 @@ class MediaStorage:
         Returns:
             the file path written to in the primary media store
         """
+
         async with self.store_into_file(file_info) as (f, fname):
             # Write to the main media repository
             await self.write_to_file(source, f)

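`store_into_file` is an async context manager that yields a writable file object together with the path it will occupy in the primary media store; `store_file` above is a thin wrapper that copies a source stream into that context. A sketch of the calling convention visible throughout this diff (error handling omitted):

# Sketch: the store_into_file calling convention used in this diff.
async def store_stream(media_storage, source, file_info) -> str:
    async with media_storage.store_into_file(file_info) as (f, fname):
        await media_storage.write_to_file(source, f)  # copy source -> f
    return fname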
@@ -34,7 +34,6 @@ from synapse.logging.opentracing import trace
 from synapse.media._base import (
     FileInfo,
     ThumbnailInfo,
-    check_for_cached_entry_and_respond,
     respond_404,
     respond_with_file,
     respond_with_multipart_responder,
@@ -295,11 +294,6 @@ class ThumbnailProvider:
         if media_info.authenticated:
             raise NotFoundError()

-        # Once we've checked auth we can return early if the media is cached on
-        # the client
-        if check_for_cached_entry_and_respond(request):
-            return
-
         thumbnail_infos = await self.store.get_local_media_thumbnails(media_id)
         await self._select_and_respond_with_thumbnail(
             request,
@@ -340,11 +334,6 @@ class ThumbnailProvider:
         if media_info.authenticated:
             raise NotFoundError()

-        # Once we've checked auth we can return early if the media is cached on
-        # the client
-        if check_for_cached_entry_and_respond(request):
-            return
-
         thumbnail_infos = await self.store.get_local_media_thumbnails(media_id)
         for info in thumbnail_infos:
             t_w = info.width == desired_width
@@ -442,10 +431,6 @@ class ThumbnailProvider:
             respond_404(request)
             return

-        # Check if the media is cached on the client, if so return 304.
-        if check_for_cached_entry_and_respond(request):
-            return
-
         thumbnail_infos = await self.store.get_remote_media_thumbnails(
             server_name, media_id
         )
@@ -525,10 +510,6 @@ class ThumbnailProvider:
         if media_info.authenticated:
             raise NotFoundError()

-        # Check if the media is cached on the client, if so return 304.
-        if check_for_cached_entry_and_respond(request):
-            return
-
         thumbnail_infos = await self.store.get_remote_media_thumbnails(
             server_name, media_id
         )

Some files were not shown because too many files have changed in this diff.