From 51deadec41979cd94e16b5d1e42a47fedc3da98c Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 19 Mar 2025 14:16:04 +0000 Subject: [PATCH 01/70] Pin our GitHub Actions dependencies (#18255) After the [recent supply chain attack](https://www.wiz.io/blog/new-github-action-supply-chain-attack-reviewdog-action-setup) in `tj-actions/changed-files` and actions based on it, it's become clear that relying on git tags to pin our dependencies is not enough (as tags can simply be replaced). Therefore we need to switch to hashes. Dependabot should continue to update these dependencies for us. Best reviewed commit-by-commit. Though if CI passes, we're *probably* fine. --- .github/workflows/docker.yml | 16 +-- .github/workflows/docs-pr-netlify.yaml | 2 +- .github/workflows/docs-pr.yaml | 8 +- .github/workflows/docs.yaml | 4 +- .github/workflows/fix_lint.yaml | 10 +- .github/workflows/latest_deps.yml | 32 ++--- .github/workflows/poetry_lockfile.yaml | 4 +- .github/workflows/push_complement_image.yml | 10 +- .github/workflows/release-artifacts.yml | 32 ++--- .github/workflows/tests.yml | 136 ++++++++++---------- .github/workflows/triage-incoming.yml | 2 +- .github/workflows/triage_labelled.yml | 2 +- .github/workflows/twisted_trunk.yml | 30 ++--- changelog.d/18255.misc | 1 + 14 files changed, 145 insertions(+), 144 deletions(-) create mode 100644 changelog.d/18255.misc diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index e8319364dd..052dcf800b 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -18,22 +18,22 @@ jobs: steps: - name: Set up QEMU id: qemu - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 with: platforms: arm64 - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 - name: Inspect 
builder run: docker buildx inspect - name: Install Cosign - uses: sigstore/cosign-installer@v3.8.1 + uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a # v3.8.1 - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Extract version from pyproject.toml # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see @@ -43,13 +43,13 @@ jobs: echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV - name: Log in to DockerHub - uses: docker/login-action@v3 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Log in to GHCR - uses: docker/login-action@v3 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -57,7 +57,7 @@ jobs: - name: Calculate docker image tag id: set-tag - uses: docker/metadata-action@master + uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 with: images: | docker.io/matrixdotorg/synapse @@ -72,7 +72,7 @@ jobs: - name: Build and push all platforms id: build-and-push - uses: docker/build-push-action@v6 + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: push: true labels: | diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index 0fbf6e02b7..d1b789c8d0 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -22,7 +22,7 @@ jobs: path: book - name: 📤 Deploy to Netlify - uses: matrix-org/netlify-pr-preview@v3 + uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3 with: path: book owner: ${{ github.event.workflow_run.head_repository.owner.login }} diff --git 
a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 07dc301b1a..1b416407d8 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -13,7 +13,7 @@ jobs: name: GitHub Pages runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # Fetch all history so that the schema_versions script works. fetch-depth: 0 @@ -24,7 +24,7 @@ jobs: mdbook-version: '0.4.17' - name: Setup python - uses: actions/setup-python@v5 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" @@ -39,7 +39,7 @@ jobs: cp book/welcome_and_overview.html book/index.html - name: Upload Artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: book path: book @@ -50,7 +50,7 @@ jobs: name: Check links in documentation runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Setup mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0 diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 434dcbb6c7..121c29571a 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -50,7 +50,7 @@ jobs: needs: - pre steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: # Fetch all history so that the schema_versions script works. 
fetch-depth: 0 @@ -64,7 +64,7 @@ jobs: run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js - name: Setup python - uses: actions/setup-python@v5 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index d6aed83774..3dcda1bab9 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -13,19 +13,19 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@master + uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1) with: # We use nightly so that `fmt` correctly groups together imports, and # clippy correctly fixes up the benchmarks. toolchain: nightly-2022-12-01 components: clippy, rustfmt - - uses: Swatinem/rust-cache@v2 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@v1 + uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: install-project: "false" @@ -43,6 +43,6 @@ jobs: - run: cargo fmt continue-on-error: true - - uses: stefanzweifel/git-auto-commit-action@v5 + - uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5.1.0 with: commit_message: "Attempt to fix linting" diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 3884b6d402..740c83758f 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -39,14 +39,14 @@ jobs: if: needs.check_repo.outputs.should_run_workflow == 'true' runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - 
uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 # The dev dependencies aren't exposed in the wheel metadata (at least with current # poetry-core versions), so we install with poetry. - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: "3.x" poetry-version: "1.3.2" @@ -72,11 +72,11 @@ jobs: postgres-version: "14" steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: sudo apt-get -qq install xmlsec1 - name: Set up PostgreSQL ${{ matrix.postgres-version }} @@ -86,7 +86,7 @@ jobs: -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \ postgres:${{ matrix.postgres-version }} - - uses: actions/setup-python@v5 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - run: pip install .[all,test] @@ -145,11 +145,11 @@ jobs: BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Ensure sytest runs `pip install` # Delete the lockfile so sytest will `pip 
install` rather than `poetry install` @@ -164,7 +164,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) @@ -192,15 +192,15 @@ jobs: database: Postgres steps: - - name: Run actions/checkout@v4 for synapse - uses: actions/checkout@v4 + - name: Check out synapse codebase + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: path: synapse - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@v5 + - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -225,7 +225,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml index 496e536b93..b3251d379e 100644 --- a/.github/workflows/poetry_lockfile.yaml +++ b/.github/workflows/poetry_lockfile.yaml @@ -16,8 +16,8 @@ jobs: name: "Check locked dependencies have sdists" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.x' - run: pip install tomli diff --git a/.github/workflows/push_complement_image.yml b/.github/workflows/push_complement_image.yml index 6fbd2ed015..7c8eb446cd 100644 --- 
a/.github/workflows/push_complement_image.yml +++ b/.github/workflows/push_complement_image.yml @@ -33,29 +33,29 @@ jobs: packages: write steps: - name: Checkout specific branch (debug build) - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: github.event_name == 'workflow_dispatch' with: ref: ${{ inputs.branch }} - name: Checkout clean copy of develop (scheduled build) - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: github.event_name == 'schedule' with: ref: develop - name: Checkout clean copy of master (on-push) - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 if: github.event_name == 'push' with: ref: master - name: Login to registry - uses: docker/login-action@v3 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Work out labels for complement image id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0 with: images: ghcr.io/${{ github.repository }}/complement-synapse tags: | diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 3311f09b2d..9985084a73 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -27,8 +27,8 @@ jobs: name: "Calculate list of debian distros" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.x' - id: set-distros @@ -55,18 +55,18 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 
with: path: src - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0 with: install: true - name: Set up docker layer caching - uses: actions/cache@v4 + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} @@ -74,7 +74,7 @@ jobs: ${{ runner.os }}-buildx- - name: Set up python - uses: actions/setup-python@v5 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.x' @@ -101,7 +101,7 @@ jobs: echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT" - name: Upload debs as artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }} path: debs/* @@ -130,9 +130,9 @@ jobs: arch: aarch64 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: # setup-python@v4 doesn't impose a default python version. Need to use 3.x # here, because `python` on osx points to Python 2.7. 
@@ -143,7 +143,7 @@ jobs: - name: Set up QEMU to emulate aarch64 if: matrix.arch == 'aarch64' - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 with: platforms: arm64 @@ -165,7 +165,7 @@ jobs: CARGO_NET_GIT_FETCH_WITH_CLI: true CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: Wheel-${{ matrix.os }}-${{ matrix.arch }} path: ./wheelhouse/*.whl @@ -176,8 +176,8 @@ jobs: if: ${{ !startsWith(github.ref, 'refs/pull/') }} steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.10' @@ -186,7 +186,7 @@ jobs: - name: Build sdist run: python -m build --sdist - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: Sdist path: dist/*.tar.gz @@ -203,7 +203,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all workflow run artifacts - uses: actions/download-artifact@v4 + uses: actions/download-artifact@b14cf4c92620c250e1c074ab0a5800e37df86765 # v4.2.0 - name: Build a tarball for the debs # We need to merge all the debs uploads into one folder, then compress # that. 
@@ -213,7 +213,7 @@ jobs: tar -cvJf debs.tar.xz debs - name: Attach to release # Pinned to work around https://github.com/softprops/action-gh-release/issues/445 - uses: softprops/action-gh-release@v0.1.15 + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 084b08b249..a9fb982b11 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -23,7 +23,7 @@ jobs: linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }} linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }} steps: - - uses: dorny/paths-filter@v3 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 id: filter # We only check on PRs if: startsWith(github.ref, 'refs/pull/') @@ -83,11 +83,11 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 - - uses: matrix-org/setup-python-poetry@v1 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: "3.x" poetry-version: "1.3.2" @@ -101,8 +101,8 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'" @@ -111,8 +111,8 @@ jobs: check-lockfile: runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - run: .ci/scripts/check_lockfile.py @@ -124,10 +124,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@v1 + uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: install-project: "false" @@ -145,14 +145,14 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@v1 + uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: # We want to make use of type hints in optional dependencies too. 
extras: all @@ -165,7 +165,7 @@ jobs: # Cribbed from # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17 - name: Restore/persist mypy's cache - uses: actions/cache@v4 + uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 with: path: | .mypy_cache @@ -178,7 +178,7 @@ jobs: lint-crlf: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Check line endings run: scripts-dev/check_line_terminators.sh @@ -186,11 +186,11 @@ jobs: if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 - - uses: actions/setup-python@v5 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - run: "pip install 'towncrier>=18.6.0rc1'" @@ -204,13 +204,13 @@ jobs: if: ${{ needs.changes.outputs.linting == 'true' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: ref: ${{ github.event.pull_request.head.sha }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 - - uses: matrix-org/setup-python-poetry@v1 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: poetry-version: "1.3.2" extras: "all" @@ -222,13 +222,13 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: 
Install Rust - uses: dtolnay/rust-toolchain@1.66.0 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 with: components: clippy - - uses: Swatinem/rust-cache@v2 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: cargo clippy -- -D warnings @@ -240,14 +240,14 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@master + uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1) with: toolchain: nightly-2022-12-01 components: clippy - - uses: Swatinem/rust-cache@v2 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: cargo clippy --all-features -- -D warnings @@ -257,15 +257,15 @@ jobs: if: ${{ needs.changes.outputs.rust == 'true' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@master + uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1) with: # We use nightly so that it correctly groups together imports toolchain: nightly-2022-12-01 components: rustfmt - - uses: Swatinem/rust-cache@v2 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: cargo fmt --check @@ -276,8 +276,8 @@ jobs: needs: changes if: ${{ needs.changes.outputs.linting_readme == 'true' }} steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - run: "pip install rstcheck" @@ -301,7 +301,7 @@ jobs: - lint-readme runs-on: ubuntu-latest steps: - - uses: matrix-org/done-action@v3 + - uses: 
matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3 with: needs: ${{ toJSON(needs) }} @@ -324,8 +324,8 @@ jobs: needs: linting-done runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: "3.x" - id: get-matrix @@ -345,7 +345,7 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - run: sudo apt-get -qq install xmlsec1 - name: Set up PostgreSQL ${{ matrix.job.postgres-version }} if: ${{ matrix.job.postgres-version }} @@ -360,10 +360,10 @@ jobs: postgres:${{ matrix.job.postgres-version }} - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: ${{ matrix.job.python-version }} poetry-version: "1.3.2" @@ -399,11 +399,11 @@ jobs: - changes runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 # There aren't wheels for some of the older deps, so we need to install # their build dependencies @@ -412,7 +412,7 @@ jobs: sudo apt-get -qq install build-essential libffi-dev python3-dev \ libxml2-dev libxslt-dev xmlsec1 zlib1g-dev 
libjpeg-dev libwebp-dev - - uses: actions/setup-python@v5 + - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.9' @@ -462,10 +462,10 @@ jobs: extras: ["all"] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 # Install libs necessary for PyPy to build binary wheels for dependencies - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: ${{ matrix.python-version }} poetry-version: "1.3.2" @@ -512,13 +512,13 @@ jobs: job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Prepare test blacklist run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Run SyTest run: /bootstrap.sh synapse @@ -527,7 +527,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }}) @@ -557,9 +557,9 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - run: sudo apt-get -qq install xmlsec1 postgresql-client - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 
with: poetry-version: "1.3.2" extras: "postgres" @@ -601,7 +601,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Add PostgreSQL apt repository # We need a version of pg_dump that can handle the version of # PostgreSQL being tested against. The Ubuntu package repository lags @@ -612,7 +612,7 @@ jobs: wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - sudo apt-get update - run: sudo apt-get -qq install xmlsec1 postgresql-client - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: ${{ matrix.python-version }} poetry-version: "1.3.2" @@ -625,7 +625,7 @@ jobs: PGPASSWORD: postgres PGDATABASE: postgres - name: "Upload schema differences" - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }} with: name: Schema dumps @@ -655,19 +655,19 @@ jobs: database: Postgres steps: - - name: Run actions/checkout@v4 for synapse - uses: actions/checkout@v4 + - name: Checkout synapse codebase + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: path: synapse - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@v5 + - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -690,11 +690,11 @@ jobs: - changes steps: - - uses: actions/checkout@v4 + - 
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@1.66.0 - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: cargo test @@ -708,13 +708,13 @@ jobs: - changes steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@master + uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1) with: toolchain: nightly-2022-12-01 - - uses: Swatinem/rust-cache@v2 + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - run: cargo bench --no-run @@ -733,7 +733,7 @@ jobs: - linting-done runs-on: ubuntu-latest steps: - - uses: matrix-org/done-action@v3 + - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3 with: needs: ${{ toJSON(needs) }} diff --git a/.github/workflows/triage-incoming.yml b/.github/workflows/triage-incoming.yml index 7a369b77fe..1d291a319b 100644 --- a/.github/workflows/triage-incoming.yml +++ b/.github/workflows/triage-incoming.yml @@ -6,7 +6,7 @@ on: jobs: triage: - uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2 + uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3 with: project_id: 'PVT_kwDOAIB0Bs4AFDdZ' content_id: ${{ github.event.issue.node_id }} diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml index d1ac4357b1..c08c674c88 100644 --- a/.github/workflows/triage_labelled.yml +++ b/.github/workflows/triage_labelled.yml @@ -11,7 +11,7 @@ jobs: if: > contains(github.event.issue.labels.*.name, 'X-Needs-Info') steps: - - uses: actions/add-to-project@main + - uses: actions/add-to-project@f5473ace9aeee8b97717b281e26980aa5097023f 
# main (v1.0.2 + 10 commits) id: add_project with: project-url: "https://github.com/orgs/matrix-org/projects/67" diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index cdaa00ef90..45f70e5420 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -40,13 +40,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: "3.x" extras: "all" @@ -64,14 +64,14 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - run: sudo apt-get -qq install xmlsec1 - name: Install Rust - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@v1 + - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 with: python-version: "3.x" extras: "all test" @@ -108,11 +108,11 @@ jobs: - ${{ github.workspace }}:/src steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install Rust - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 + uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) + - uses: 
Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Patch dependencies # Note: The poetry commands want to create a virtualenv in /src/.venv/, @@ -136,7 +136,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) @@ -164,14 +164,14 @@ jobs: steps: - name: Run actions/checkout@v4 for synapse - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: path: synapse - name: Prepare Complement's Prerequisites run: synapse/.ci/scripts/setup_complement_prerequisites.sh - - uses: actions/setup-go@v5 + - uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0 with: cache-dependency-path: complement/go.sum go-version-file: complement/go.mod @@ -206,7 +206,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/changelog.d/18255.misc b/changelog.d/18255.misc new file mode 100644 index 0000000000..49a5b0e7b6 --- /dev/null +++ b/changelog.d/18255.misc @@ -0,0 +1 @@ +Pin GitHub Actions dependencies by commit hash. \ No newline at end of file From 33bcef9dc7eee6684dc1b8df69a30c61cae879e8 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Fri, 21 Mar 2025 15:32:52 +0000 Subject: [PATCH 02/70] Update Poetry to 2.1.1, including updating the lock file version. 
(#18251) --- .ci/scripts/check_lockfile.py | 6 +- .github/workflows/fix_lint.yaml | 3 +- .github/workflows/latest_deps.yml | 4 +- .github/workflows/tests.yml | 30 ++-- .github/workflows/twisted_trunk.yml | 10 +- changelog.d/18251.misc | 1 + debian/build_virtualenv | 2 +- debian/changelog | 6 + docker/Dockerfile | 4 +- docs/development/dependencies.md | 8 +- poetry.lock | 220 ++++++++++++++++++++++++---- 11 files changed, 235 insertions(+), 59 deletions(-) create mode 100644 changelog.d/18251.misc diff --git a/.ci/scripts/check_lockfile.py b/.ci/scripts/check_lockfile.py index 19cec7ddd6..46d3952b4c 100755 --- a/.ci/scripts/check_lockfile.py +++ b/.ci/scripts/check_lockfile.py @@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f: try: lock_version = lockfile["metadata"]["lock-version"] - assert lock_version == "2.0" + assert lock_version == "2.1" except Exception: print( """\ - Lockfile is not version 2.0. You probably need to upgrade poetry on your local box - and re-run `poetry lock --no-update`. See the Poetry cheat sheet at + Lockfile is not version 2.1. You probably need to upgrade poetry on your local box + and re-run `poetry lock`. 
See the Poetry cheat sheet at https://element-hq.github.io/synapse/develop/development/dependencies.html """ ) diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index 3dcda1bab9..fe699c1b2f 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -25,9 +25,10 @@ jobs: - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: install-project: "false" + poetry-version: "2.1.1" - name: Run ruff check continue-on-error: true diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index 740c83758f..b7c1b727c9 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -46,10 +46,10 @@ jobs: # The dev dependencies aren't exposed in the wheel metadata (at least with current # poetry-core versions), so we install with poetry. - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: "3.x" - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: "all" # Dump installed versions for debugging. 
- run: poetry run pip list > before.txt diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index a9fb982b11..ff83d6e365 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -87,10 +87,10 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: "3.x" - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: "all" - run: poetry run scripts-dev/generate_sample_config.sh --check - run: poetry run scripts-dev/config-lint.sh @@ -127,8 +127,9 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: + poetry-version: "2.1.1" install-project: "false" - name: Run ruff check @@ -152,7 +153,7 @@ jobs: - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - name: Setup Poetry - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: # We want to make use of type hints in optional dependencies too. extras: all @@ -161,6 +162,7 @@ jobs: # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775 # To make CI green, err towards caution and install the project. 
install-project: "true" + poetry-version: "2.1.1" # Cribbed from # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17 @@ -210,9 +212,9 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: "all" - run: poetry run scripts-dev/check_pydantic_models.py @@ -363,10 +365,10 @@ jobs: uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0 - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: ${{ matrix.job.python-version }} - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: ${{ matrix.job.extras }} - name: Await PostgreSQL if: ${{ matrix.job.postgres-version }} @@ -465,10 +467,10 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 # Install libs necessary for PyPy to build binary wheels for dependencies - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: ${{ matrix.python-version }} - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: ${{ matrix.extras }} - run: poetry run trial --jobs=2 tests - name: Dump logs @@ -559,9 +561,9 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - 
run: sudo apt-get -qq install xmlsec1 postgresql-client - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: "postgres" - run: .ci/scripts/test_export_data_command.sh env: @@ -612,10 +614,10 @@ jobs: wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - sudo apt-get update - run: sudo apt-get -qq install xmlsec1 postgresql-client - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: ${{ matrix.python-version }} - poetry-version: "1.3.2" + poetry-version: "2.1.1" extras: "postgres" - run: .ci/scripts/test_synapse_port_db.sh id: run_tester_script diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index 45f70e5420..aac1e350a2 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -46,10 +46,11 @@ jobs: uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a # v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: "3.x" extras: "all" + poetry-version: "2.1.1" - run: | poetry remove twisted poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }} @@ -71,10 +72,11 @@ jobs: uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1) - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8 - - uses: matrix-org/setup-python-poetry@4421c92b6223f03ae55560e29aa8ebd39cf6314a 
# v1.2.4 + - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 with: python-version: "3.x" extras: "all test" + poetry-version: "2.1.1" - run: | poetry remove twisted poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk @@ -181,11 +183,11 @@ jobs: run: | set -x DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx - pipx install poetry==1.3.2 + pipx install poetry==2.1.1 poetry remove -n twisted poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk - poetry lock --no-update + poetry lock working-directory: synapse - run: | diff --git a/changelog.d/18251.misc b/changelog.d/18251.misc new file mode 100644 index 0000000000..3b557a06c4 --- /dev/null +++ b/changelog.d/18251.misc @@ -0,0 +1 @@ +Update Poetry to 2.1.1, including updating the lock file version. \ No newline at end of file diff --git a/debian/build_virtualenv b/debian/build_virtualenv index 5fc817b607..9e7fb95c8e 100755 --- a/debian/build_virtualenv +++ b/debian/build_virtualenv @@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)" python3 -m venv "$TEMP_VENV" source "$TEMP_VENV/bin/activate" pip install -U pip -pip install poetry==1.3.2 +pip install poetry==2.1.1 poetry-plugin-export==1.9.0 poetry export \ --extras all \ --extras test \ diff --git a/debian/changelog b/debian/changelog index c50249f081..3d77fe8e92 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.127.0~rc1+nmu1) UNRELEASED; urgency=medium + + * Update Poetry to 2.1.1. + + -- Synapse Packaging team Wed, 19 Mar 2025 17:38:49 +0000 + matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium * New Synapse release 1.127.0rc1. 
diff --git a/docker/Dockerfile b/docker/Dockerfile index 1dd65f2413..27125c4c0b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -22,7 +22,7 @@ ARG DEBIAN_VERSION=bookworm ARG PYTHON_VERSION=3.12 -ARG POETRY_VERSION=1.8.3 +ARG POETRY_VERSION=2.1.1 ### ### Stage 0: generate requirements.txt @@ -56,7 +56,7 @@ ENV UV_LINK_MODE=copy ARG POETRY_VERSION RUN --mount=type=cache,target=/root/.cache/uv \ if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \ - uvx --with poetry-plugin-export==1.8.0 \ + uvx --with poetry-plugin-export==1.9.0 \ poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \ else \ touch /synapse/requirements.txt; \ diff --git a/docs/development/dependencies.md b/docs/development/dependencies.md index e4378231aa..8e29ff3a57 100644 --- a/docs/development/dependencies.md +++ b/docs/development/dependencies.md @@ -187,7 +187,7 @@ useful. ## ...add a new dependency? Either: -- manually update `pyproject.toml`; then `poetry lock --no-update`; or else +- manually update `pyproject.toml`; then `poetry lock`; or else - `poetry add packagename`. See `poetry add --help`; note the `--dev`, `--extras` and `--optional` flags in particular. @@ -202,12 +202,12 @@ poetry remove packagename ``` ought to do the trick. Alternatively, manually update `pyproject.toml` and -`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock` +`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock` files in your commit. ## ...update the version range for an existing dependency? -Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`. +Best done by manually editing `pyproject.toml`, then `poetry lock`. Include the updated `pyproject.toml` and `poetry.lock` in your commit. ## ...update a dependency in the locked environment? 
@@ -233,7 +233,7 @@ poetry add packagename==1.2.3 # Get poetry to recompute the content-hash of pyproject.toml without changing # the locked package versions. -poetry lock --no-update +poetry lock ``` Either way, include the updated `poetry.lock` file in your commit. diff --git a/poetry.lock b/poetry.lock index cbf22d9806..ed98dd9f4e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -17,18 +18,19 @@ version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; 
platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pytest-mypy-plugins ; platform_python_implementation == 
\"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\""] [[package]] name = "authlib" @@ -36,6 +38,8 @@ version = "1.4.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = true python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\"" files = [ {file = "Authlib-1.4.1-py2.py3-none-any.whl", hash = "sha256:edc29c3f6a3e72cd9e9f45fff67fc663a2c364022eb0371c003f22d5405915c1"}, {file = "authlib-1.4.1.tar.gz", hash = "sha256:30ead9ea4993cdbab821dc6e01e818362f92da290c04c7f6a1940f86507a790d"}, @@ -50,6 +54,7 @@ version = "22.10.0" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, @@ -68,6 +73,7 @@ version = "4.3.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, @@ -132,6 +138,7 @@ version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, @@ -149,6 +156,7 @@ version = "2.0.0" description = "Canonical JSON" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "canonicaljson-2.0.0-py3-none-any.whl", hash = "sha256:c38a315de3b5a0532f1ec1f9153cd3d716abfc565a558d00a4835428a34fca5b"}, {file = "canonicaljson-2.0.0.tar.gz", hash = "sha256:e2fdaef1d7fadc5d9cb59bd3d0d41b064ddda697809ac4325dced721d12f113f"}, @@ -160,6 +168,7 @@ version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, @@ -171,6 +180,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -250,6 +260,7 @@ version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" +groups = ["main", "dev"] files = [ {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, @@ -334,6 +345,7 @@ version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -348,6 +360,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -359,6 +373,7 @@ version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -373,6 +388,7 @@ version = "15.1.0" description = "Symbolic constants in Python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, {file = 
"constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, @@ -384,6 +400,7 @@ version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, @@ -433,6 +450,8 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -444,6 +463,7 @@ version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, @@ -453,7 +473,7 @@ files = [ wrapt = ">=1.10,<2" [package.extras] -dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] +dev = ["PyTest (<5) ; python_version < \"3.6\"", "PyTest ; python_version >= \"3.6\"", "PyTest-Cov (<2.6) ; python_version < \"3.6\"", "PyTest-Cov ; python_version >= \"3.6\"", "bump2version (<1)", "configparser (<5) ; python_version < \"3\"", "importlib-metadata (<3) ; python_version < \"3\"", "importlib-resources (<4) ; python_version < \"3\"", "sphinx (<2)", "sphinxcontrib-websupport (<2) ; python_version < \"3\"", "tox", "zipp (<2) ; python_version < \"3\""] [[package]] name = "docutils" @@ -461,6 +481,7 @@ version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, @@ -472,6 +493,8 @@ version = "4.1.5" description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and lxml" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"}, {file = "elementpath-4.1.5.tar.gz", hash = 
"sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"}, @@ -486,6 +509,7 @@ version = "4.0.10" description = "Git Object Database" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, @@ -500,6 +524,7 @@ version = "3.1.44" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "GitPython-3.1.44-py3-none-any.whl", hash = "sha256:9e0e10cda9bed1ee64bc9a6de50e7e38a9c9943241cd7f585f6df3ed28011110"}, {file = "gitpython-3.1.44.tar.gz", hash = "sha256:c87e30b26253bf5418b01b0660f818967f3c503193838337fe5e573331249269"}, @@ -510,7 +535,7 @@ gitdb = ">=4.0.1,<5" [package.extras] doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""] [[package]] name = "hiredis" @@ -518,6 +543,8 @@ version = "3.1.0" description = "Python wrapper for hiredis" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"redis\" or extra == \"all\"" files = [ {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"}, {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"}, @@ 
-636,6 +663,7 @@ version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, @@ -650,6 +678,7 @@ version = "1.5.0" description = "A tool for generating OIDC identities" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "id-1.5.0-py3-none-any.whl", hash = "sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658"}, {file = "id-1.5.0.tar.gz", hash = "sha256:292cb8a49eacbbdbce97244f47a97b4c62540169c976552e497fd57df0734c1d"}, @@ -669,6 +698,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -683,6 +713,7 @@ version = "3.3.0" description = "Iterative JSON parser with standard Python iterator interfaces" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7f7a5250599c366369fbf3bc4e176f5daa28eb6bc7d6130d02462ed335361675"}, {file = "ijson-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f87a7e52f79059f9c58f6886c262061065eb6f7554a587be7ed3aa63e6b71b34"}, @@ -786,6 +817,7 @@ version = "4.2.1" description = "Immutable wrapper around dictionaries (a fork of frozendict)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "immutabledict-4.2.1-py3-none-any.whl", hash = 
"sha256:c56a26ced38c236f79e74af3ccce53772827cef5c3bce7cab33ff2060f756373"}, {file = "immutabledict-4.2.1.tar.gz", hash = "sha256:d91017248981c72eb66c8ff9834e99c2f53562346f23e7f51e7a5ebcf66a3bcc"}, @@ -797,6 +829,8 @@ version = "6.7.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < \"3.12\" or python_version < \"3.10\"" files = [ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, @@ -808,7 +842,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" @@ -816,6 +850,8 @@ version = "5.12.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.10\"" files = [ {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, {file = 
"importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, @@ -826,7 +862,7 @@ zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8 ; python_version < \"3.12\"", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] name = "incremental" @@ -834,6 +870,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -852,6 +889,8 @@ version = "4.8.0" description = "Jaeger Python OpenTracing Tracer implementation" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -871,6 +910,8 @@ version = "3.2.3" description = "Utility functions for Python class constructs" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" files = [ {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = 
"sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"}, {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"}, @@ -881,7 +922,7 @@ more-itertools = "*" [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] name = "jeepney" @@ -889,6 +930,8 @@ version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\"" files = [ {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, @@ -896,7 +939,7 @@ files = [ [package.extras] test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] -trio = ["async_generator", "trio"] +trio = ["async_generator ; python_version == \"3.6\"", "trio"] [[package]] name = "jinja2" @@ -904,6 +947,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -921,6 +965,7 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -942,6 +987,7 @@ version = "2023.6.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jsonschema_specifications-2023.6.1-py3-none-any.whl", hash = "sha256:3d2b82663aff01815f744bb5c7887e2121a63399b49b104a3c96145474d091d7"}, {file = "jsonschema_specifications-2023.6.1.tar.gz", hash = "sha256:ca1c4dd059a9e7b34101cf5b3ab7ff1d18b139f35950d598d629837ef66e8f28"}, @@ -956,6 +1002,8 @@ version = "23.13.1" description = "Store and access your passwords safely." 
optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" files = [ {file = "keyring-23.13.1-py3-none-any.whl", hash = "sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd"}, {file = "keyring-23.13.1.tar.gz", hash = "sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678"}, @@ -971,7 +1019,7 @@ SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] completion = ["shtab"] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8 ; python_version < \"3.12\"", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] name = "ldap3" @@ -979,6 +1027,8 @@ version = "2.9.1" description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -993,6 +1043,8 @@ version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"url-preview\" or extra == \"all\"" files = [ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, @@ -1147,6 +1199,7 @@ version = "0.5.1" description = "Type annotations for the lxml package" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, @@ -1161,6 +1214,7 @@ version = "2.2.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, @@ -1185,6 +1239,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1255,6 +1310,7 @@ version = "1.3.0" description = "Common utilities for Synapse, Sydent and Sygnal" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "matrix_common-1.3.0-py3-none-any.whl", hash = "sha256:524e2785b9b03be4d15f3a8a6b857c5b6af68791ffb1b9918f0ad299abc4db20"}, {file = "matrix_common-1.3.0.tar.gz", hash = "sha256:62e121cccd9f243417b57ec37a76dc44aeb198a7a5c67afd6b8275992ff2abd1"}, @@ -1273,6 +1329,8 @@ version = "0.3.0" description = "An LDAP3 auth provider for Synapse" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" files = [ {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"}, {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"}, @@ -1292,6 +1350,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1303,6 +1362,8 @@ version = "9.1.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\"" files = [ {file = "more-itertools-9.1.0.tar.gz", hash = 
"sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d"}, {file = "more_itertools-9.1.0-py3-none-any.whl", hash = "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3"}, @@ -1314,6 +1375,7 @@ version = "1.1.0" description = "MessagePack serializer" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -1387,6 +1449,7 @@ version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, @@ -1440,6 +1503,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1451,6 +1515,7 @@ version = "1.0.9" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "mypy_zope-1.0.9-py3-none-any.whl", hash = "sha256:6666c1556891a3cb186137519dbd7a58cb30fb72b2504798cad47b35391921ba"}, {file = "mypy_zope-1.0.9.tar.gz", hash = "sha256:37d6985dfb05a4c27b35cff47577fd5bad878db4893ddedf54d165f7389a1cdb"}, @@ -1470,6 +1535,7 @@ version = "1.3.0" description = "A network address manipulation library for Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "netaddr-1.3.0-py3-none-any.whl", hash = "sha256:c2c6a8ebe5554ce33b7d5b3a306b71bbb373e000bbbf2350dd5213cc56e3dbbe"}, {file = "netaddr-1.3.0.tar.gz", hash = "sha256:5c3c3d9895b551b763779ba7db7a03487dc1f8e3b385af819af341ae9ef6e48a"}, @@ -1484,6 +1550,8 @@ version = "2.4.0" description = "OpenTracing API for Python. 
See documentation at http://opentracing.io" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1497,6 +1565,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -1508,6 +1577,7 @@ version = "0.9.0" description = "Parameterized testing with any Python test framework" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "parameterized-0.9.0-py2.py3-none-any.whl", hash = "sha256:4e0758e3d41bea3bbd05ec14fc2c24736723f243b28d702081aef438c9372b1b"}, {file = "parameterized-0.9.0.tar.gz", hash = "sha256:7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1"}, @@ -1522,6 +1592,7 @@ version = "8.13.50" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "phonenumbers-8.13.50-py2.py3-none-any.whl", hash = "sha256:bb95dbc0d9979c51f7ad94bcd780784938958861fbb4b75a2fe39ccd3d58954a"}, {file = "phonenumbers-8.13.50.tar.gz", hash = "sha256:e05ac6fb7b98c6d719a87ea895b9fc153673b4a51f455ec9afaf557ef4629da6"}, @@ -1533,6 +1604,7 @@ version = "11.1.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, @@ -1612,7 +1684,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] -typing = ["typing-extensions"] +typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] [[package]] @@ -1621,6 +1693,7 @@ version = "0.21.0" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, @@ -1635,6 +1708,8 @@ version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"postgres\" or extra == \"all\"" files = [ {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, @@ -1654,6 +1729,8 @@ version = "2.9.0" description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=master" optional = true python-versions = "*" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] @@ -1668,6 +1745,8 @@ version = "1.1" description = "A Simple library to enable psycopg2 compatability" optional = true python-versions = "*" +groups = ["main"] +markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" files = [ {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, ] @@ -1681,6 +1760,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = 
"pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -1692,6 +1772,7 @@ version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -1706,6 +1787,7 @@ version = "2.21" description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main", "dev"] files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1717,6 +1799,7 @@ version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, @@ -1729,7 +1812,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -1737,6 +1820,7 @@ version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, {file = 
"pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, @@ -1849,6 +1933,7 @@ version = "2.5.0" description = "Use the full Github API v3" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, @@ -1868,13 +1953,14 @@ version = "2.15.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, ] [package.extras] -plugins = ["importlib-metadata"] +plugins = ["importlib-metadata ; python_version < \"3.8\""] [[package]] name = "pyicu" @@ -1882,6 +1968,8 @@ version = "2.14" description = "Python extension wrapping the ICU C++ API" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"user-search\" or extra == \"all\"" files = [ {file = "PyICU-2.14.tar.gz", hash = "sha256:acc7eb92bd5c554ed577249c6978450a4feda0aa6f01470152b3a7b382a02132"}, ] @@ -1892,6 +1980,7 @@ version = "2.6.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, @@ -1912,6 +2001,7 @@ version = "0.13.0" description = "Macaroon library for Python" optional = false python-versions = "*" +groups = ["main"] 
files = [ {file = "pymacaroons-0.13.0-py2.py3-none-any.whl", hash = "sha256:3e14dff6a262fdbf1a15e769ce635a8aea72e6f8f91e408f9a97166c53b91907"}, {file = "pymacaroons-0.13.0.tar.gz", hash = "sha256:1e6bba42a5f66c245adf38a5a4006a99dcc06a0703786ea636098667d42903b8"}, @@ -1927,6 +2017,8 @@ version = "1.0.1" description = "A development tool to measure, monitor and analyze the memory behavior of Python objects." optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"cache-memory\" or extra == \"all\"" files = [ {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"}, {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, @@ -1938,6 +2030,7 @@ version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -1964,6 +2057,7 @@ version = "24.3.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, @@ -1982,6 +2076,8 @@ version = "7.5.0" description = "Python implementation of SAML Version 2 Standard" optional = true python-versions = ">=3.9,<4.0" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = 
"pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, @@ -2005,6 +2101,8 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -2019,6 +2117,7 @@ version = "0.0.20" description = "A streaming multipart parser for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, @@ -2030,6 +2129,8 @@ version = "2022.7.1" description = "World timezone definitions, modern and historical" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2041,6 +2142,8 @@ version = "0.2.0" description = "" optional = false python-versions = "*" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"win32\"" files = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = 
"sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, @@ -2052,6 +2155,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2114,6 +2218,7 @@ version = "37.3" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "readme_renderer-37.3-py3-none-any.whl", hash = "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343"}, {file = "readme_renderer-37.3.tar.gz", hash = "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273"}, @@ -2133,6 +2238,7 @@ version = "0.29.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "referencing-0.29.1-py3-none-any.whl", hash = "sha256:d3c8f323ee1480095da44d55917cfb8278d73d6b4d5f677e3e40eb21314ac67f"}, {file = "referencing-0.29.1.tar.gz", hash = "sha256:90cb53782d550ba28d2166ef3f55731f38397def8832baac5d45235f1995e35e"}, @@ -2148,6 +2254,7 @@ version = "2.32.2" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, @@ -2169,6 +2276,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2183,6 +2291,7 @@ version = "2.0.0" description = "Validating URI References per RFC 3986" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, @@ -2197,6 +2306,7 @@ version = "13.3.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" +groups = ["dev"] files = [ {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"}, {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"}, @@ -2215,6 +2325,7 @@ version = "0.8.10" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "rpds_py-0.8.10-cp310-cp310-macosx_10_7_x86_64.whl", hash = 
"sha256:93d06cccae15b3836247319eee7b6f1fdcd6c10dabb4e6d350d27bd0bdca2711"}, {file = "rpds_py-0.8.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3816a890a6a9e9f1de250afa12ca71c9a7a62f2b715a29af6aaee3aea112c181"}, @@ -2321,6 +2432,7 @@ version = "0.7.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, @@ -2348,6 +2460,8 @@ version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\"" files = [ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, @@ -2363,13 +2477,14 @@ version = "2.10.0" description = "A library implementing the 'SemVer' scheme." 
optional = false python-versions = ">=2.7" +groups = ["main"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, ] [package.extras] -dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1) ; python_version == \"3.4\"", "coverage", "flake8", "nose2", "readme-renderer (<25.0) ; python_version == \"3.4\"", "tox", "wheel", "zest.releaser[recommended]"] doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] @@ -2378,6 +2493,8 @@ version = "2.22.0" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"sentry\" or extra == \"all\"" files = [ {file = "sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66"}, {file = "sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944"}, @@ -2434,6 +2551,7 @@ version = "24.2.0" description = "Service identity verification for pyOpenSSL & cryptography." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85"}, {file = "service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09"}, @@ -2458,15 +2576,16 @@ version = "72.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff (<0.4) ; platform_system == \"Windows\"", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "pytest-ruff (>=0.3.2) ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "setuptools-rust" @@ -2474,6 +2593,7 @@ version = "1.10.2" description = "Setuptools Rust extension plugin" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "setuptools_rust-1.10.2-py3-none-any.whl", hash = "sha256:4b39c435ae9670315d522ed08fa0e8cb29f2a6048033966b6be2571a90ce4f1c"}, {file = "setuptools_rust-1.10.2.tar.gz", hash = "sha256:5d73e7eee5f87a6417285b617c97088a7c20d1a70fcea60e3bdc94ff567c29dc"}, @@ -2489,6 +2609,7 @@ version = "1.1.4" description = "Sign JSON with Ed25519 signatures" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "signedjson-1.1.4-py3-none-any.whl", hash = "sha256:45569ec54241c65d2403fe3faf7169be5322547706a231e884ca2b427f23d228"}, {file = "signedjson-1.1.4.tar.gz", hash = "sha256:cd91c56af53f169ef032c62e9c4a3292dc158866933318d0592e3462db3d6492"}, @@ -2508,6 +2629,7 @@ version = "1.16.0" 
description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2519,6 +2641,7 @@ version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, @@ -2530,6 +2653,7 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -2541,6 +2665,8 @@ version = "235" description = "Python interface for libsystemd" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"systemd\"" files = [ {file = "systemd-python-235.tar.gz", hash = "sha256:4e57f39797fd5d9e2d22b8806a252d7c0106c936039d1e71c8c6b8008e695c0a"}, ] @@ -2551,6 +2677,8 @@ version = "1.0.2" description = "Tornado IOLoop Backed Concurrent Futures" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = 
"sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -2565,6 +2693,8 @@ version = "0.16.0" description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] @@ -2583,6 +2713,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -2624,6 +2755,8 @@ version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = true python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"opentracing\" or extra == \"all\"" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -2644,6 +2777,7 @@ version = "24.8.0" description = "Building newsfiles for your project." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "towncrier-24.8.0-py3-none-any.whl", hash = "sha256:9343209592b839209cdf28c339ba45792fbfe9775b5f9c177462fd693e127d8d"}, {file = "towncrier-24.8.0.tar.gz", hash = "sha256:013423ee7eed102b2f393c287d22d95f66f1a3ea10a4baa82d298001a7f18af3"}, @@ -2665,6 +2799,7 @@ version = "24.9.1" description = "High-level Twisted HTTP Client API" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "treq-24.9.1-py3-none-any.whl", hash = "sha256:eee4756fd9a857c77f180fd5202b52c518f2d3e2826dce28b89066c03bfc45d0"}, {file = "treq-24.9.1.tar.gz", hash = "sha256:15da7fc404f3e4ed59d0abe5f8eef4966fabbe618039a2a23bc7c15305cefea8"}, @@ -2688,6 +2823,7 @@ version = "6.1.0" description = "Collection of utilities for publishing packages on PyPI" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "twine-6.1.0-py3-none-any.whl", hash = "sha256:a47f973caf122930bf0fbbf17f80b83bc1602c9ce393c7845f289a3001dc5384"}, {file = "twine-6.1.0.tar.gz", hash = "sha256:be324f6272eff91d07ee93f251edf232fc647935dd585ac003539b42404a8dbd"}, @@ -2714,6 +2850,7 @@ version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, @@ -2732,19 +2869,19 @@ typing-extensions = ">=4.2.0" zope-interface = ">=5" [package.extras] -all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", 
"idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] -gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority 
(>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser 
(>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] -osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] +macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial 
(>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +serial = ["pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\""] test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["appdirs (>=1.4.0)", "appdirs 
(>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] [[package]] name = "txredisapi" @@ -2752,6 +2889,8 @@ version = "1.4.10" description = "non-blocking redis client for python" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"redis\" or extra == \"all\"" files = [ {file = "txredisapi-1.4.10-py3-none-any.whl", hash = "sha256:0a6ea77f27f8cf092f907654f08302a97b48fa35f24e0ad99dfb74115f018161"}, {file = "txredisapi-1.4.10.tar.gz", hash = "sha256:7609a6af6ff4619a3189c0adfb86aeda789afba69eb59fc1e19ac0199e725395"}, @@ 
-2767,6 +2906,7 @@ version = "6.2.0.20241123" description = "Typing stubs for bleach" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_bleach-6.2.0.20241123-py3-none-any.whl", hash = "sha256:c6e58b3646665ca7c6b29890375390f4569e84f0cf5c171e0fe1ddb71a7be86a"}, {file = "types_bleach-6.2.0.20241123.tar.gz", hash = "sha256:dac5fe9015173514da3ac810c1a935619a3ccbcc5d66c4cbf4707eac00539057"}, @@ -2781,6 +2921,7 @@ version = "1.16.0.20240331" description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, @@ -2795,6 +2936,7 @@ version = "0.9.2.20240106" description = "Typing stubs for commonmark" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-commonmark-0.9.2.20240106.tar.gz", hash = "sha256:52a062b71766d6ab258fca2d8e19fb0853796e25ca9afa9d0f67a1e42c93479f"}, {file = "types_commonmark-0.9.2.20240106-py3-none-any.whl", hash = "sha256:606d9de1e3a96cab0b1c0b6cccf4df099116148d1d864d115fde2e27ad6877c3"}, @@ -2806,6 +2948,7 @@ version = "1.1.11.20240228" description = "Typing stubs for html5lib" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-html5lib-1.1.11.20240228.tar.gz", hash = "sha256:22736b7299e605ec4ba539d48691e905fd0c61c3ea610acc59922232dc84cede"}, {file = "types_html5lib-1.1.11.20240228-py3-none-any.whl", hash = "sha256:af5de0125cb0fe5667543b158db83849b22e25c0e36c9149836b095548bf1020"}, @@ -2817,6 +2960,7 @@ version = "4.23.0.20240813" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-jsonschema-4.23.0.20240813.tar.gz", hash = 
"sha256:c93f48206f209a5bc4608d295ac39f172fb98b9e24159ce577dbd25ddb79a1c0"}, {file = "types_jsonschema-4.23.0.20240813-py3-none-any.whl", hash = "sha256:be283e23f0b87547316c2ee6b0fd36d95ea30e921db06478029e10b5b6aa6ac3"}, @@ -2831,6 +2975,7 @@ version = "1.3.0.20240530" description = "Typing stubs for netaddr" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-netaddr-1.3.0.20240530.tar.gz", hash = "sha256:742c2ec1f202b666f544223e2616b34f1f13df80c91e5aeaaa93a72e4d0774ea"}, {file = "types_netaddr-1.3.0.20240530-py3-none-any.whl", hash = "sha256:354998d018e326da4f1d9b005fc91137b7c2c473aaf03c4ef64bf83c6861b440"}, @@ -2842,6 +2987,7 @@ version = "2.4.10.6" description = "Typing stubs for opentracing" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-opentracing-2.4.10.6.tar.gz", hash = "sha256:87a1bdfce9de5e555e30497663583b9b9c3bb494d029ef9806aa1f137c19e744"}, {file = "types_opentracing-2.4.10.6-py3-none-any.whl", hash = "sha256:25914c834db033a4a38fc322df0b5e5e14503b0ac97f78304ae180d721555e97"}, @@ -2853,6 +2999,7 @@ version = "10.2.0.20240822" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-Pillow-10.2.0.20240822.tar.gz", hash = "sha256:559fb52a2ef991c326e4a0d20accb3bb63a7ba8d40eb493e0ecb0310ba52f0d3"}, {file = "types_Pillow-10.2.0.20240822-py3-none-any.whl", hash = "sha256:d9dab025aba07aeb12fd50a6799d4eac52a9603488eca09d7662543983f16c5d"}, @@ -2864,6 +3011,7 @@ version = "2.9.21.20250121" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "types_psycopg2-2.9.21.20250121-py3-none-any.whl", hash = "sha256:b890dc6f5a08b6433f0ff73a4ec9a834deedad3e914f2a4a6fd43df021f745f1"}, {file = "types_psycopg2-2.9.21.20250121.tar.gz", hash = "sha256:2b0e2cd0f3747af1ae25a7027898716d80209604770ef3cbf350fe055b9c349b"}, @@ -2875,6 +3023,7 @@ version = "24.1.0.20240722" 
description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -2890,6 +3039,7 @@ version = "6.0.12.20241230" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, @@ -2901,6 +3051,7 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -2915,6 +3066,7 @@ version = "75.2.0.20241019" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-setuptools-75.2.0.20241019.tar.gz", hash = "sha256:86ea31b5f6df2c6b8f2dc8ae3f72b213607f62549b6fa2ed5866e5299f968694"}, {file = "types_setuptools-75.2.0.20241019-py3-none-any.whl", hash = "sha256:2e48ff3acd4919471e80d5e3f049cce5c177e108d5d36d2d4cee3fa4d4104258"}, @@ -2926,6 +3078,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2937,6 +3090,7 @@ version = "2.1.0" description = "Encode and decode Base64 without \"=\" padding" optional = false python-versions = ">=3.6,<4.0" +groups = ["main"] files = [ {file = "unpaddedbase64-2.1.0-py3-none-any.whl", hash = "sha256:485eff129c30175d2cd6f0cd8d2310dff51e666f7f36175f738d75dfdbd0b1c6"}, {file = "unpaddedbase64-2.1.0.tar.gz", hash = "sha256:7273c60c089de39d90f5d6d4a7883a79e319dc9d9b1c8924a7fab96178a5f005"}, @@ -2948,13 +3102,14 @@ version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2965,6 +3120,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -2976,6 +3132,7 @@ version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +groups = ["dev"] files = [ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, @@ -3060,6 +3217,8 @@ version = "2.4.0" description = "An XML Schema validator and decoder" optional = true python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"saml2\" or extra == \"all\"" files = [ {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"}, {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"}, @@ -3079,6 +3238,8 @@ version = "3.19.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and python_version < \"3.12\" or python_version < \"3.10\"" files = [ {file = "zipp-3.19.1-py3-none-any.whl", hash = "sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091"}, {file = "zipp-3.19.1.tar.gz", hash = "sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f"}, @@ -3094,6 +3255,7 @@ version = "4.6" description = "Very basic event publishing system" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "zope.event-4.6-py2.py3-none-any.whl", hash = "sha256:73d9e3ef750cca14816a9c322c7250b0d7c9dbc337df5d1b807ff8d3d0b9e97c"}, {file = "zope.event-4.6.tar.gz", hash = "sha256:81d98813046fc86cc4136e3698fee628a3282f9c320db18658c21749235fce80"}, @@ -3112,6 +3274,7 @@ version = "7.1.0" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = 
"zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, @@ -3166,6 +3329,7 @@ version = "7.0.1" description = "zope.interface extension for defining data schemas" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "zope.schema-7.0.1-py3-none-any.whl", hash = "sha256:cf006c678793b00e0075ad54d55281c8785ea21e5bc1f5ec0584787719c2aab2"}, {file = "zope.schema-7.0.1.tar.gz", hash = "sha256:ead4dbcb03354d4e410c9a3b904451eb44d90254751b1cbdedf4a61aede9fbb9"}, @@ -3197,6 +3361,6 @@ url-preview = ["lxml"] user-search = ["pyicu"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9.0" content-hash = "d71159b19349fdc0b7cd8e06e8c8778b603fc37b941c6df34ddc31746783d94d" From 1efb826b54f4a8fdfa56c27a32da9c332fa87cc3 Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Fri, 21 Mar 2025 17:09:49 +0000 Subject: [PATCH 03/70] Delete unreferenced state groups in background (#18254) This PR fixes #18154 to avoid de-deltaing state groups which resulted in DB size temporarily increasing until the DB was `VACUUM`'ed. As a result, less state groups will get deleted now. It also attempts to improve performance by not duplicating work when processing state groups it has already processed in previous iterations. ### Pull Request Checklist * [X] Pull request is based on the develop branch * [X] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. 
- End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [X] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Erik Johnston --- changelog.d/18254.feature | 1 + docs/development/database_schema.md | 2 +- synapse/_scripts/synapse_port_db.py | 30 +++ synapse/storage/controllers/purge_events.py | 247 +++++++++++++++++- synapse/storage/databases/state/bg_updates.py | 10 +- synapse/storage/databases/state/deletion.py | 44 +++- synapse/storage/schema/__init__.py | 1 + .../02_delete_unreferenced_state_groups.sql | 16 ++ synapse/types/storage/__init__.py | 4 + tests/storage/test_purge.py | 154 +++++++++++ 10 files changed, 495 insertions(+), 14 deletions(-) create mode 100644 changelog.d/18254.feature create mode 100644 synapse/storage/schema/state/delta/90/02_delete_unreferenced_state_groups.sql diff --git a/changelog.d/18254.feature b/changelog.d/18254.feature new file mode 100644 index 0000000000..62e1b79a15 --- /dev/null +++ b/changelog.d/18254.feature @@ -0,0 +1 @@ +Add background job to clear unreferenced state groups. diff --git a/docs/development/database_schema.md b/docs/development/database_schema.md index 37a06acc12..620d1c16b0 100644 --- a/docs/development/database_schema.md +++ b/docs/development/database_schema.md @@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency infor * Whether the update requires a previous update to be complete. * A rough ordering for which to complete updates. 
-A new background updates needs to be added to the `background_updates` table: +A new background update needs to be added to the `background_updates` table: ```sql INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES diff --git a/synapse/_scripts/synapse_port_db.py b/synapse/_scripts/synapse_port_db.py index 1bb9940180..438b2ff8a0 100755 --- a/synapse/_scripts/synapse_port_db.py +++ b/synapse/_scripts/synapse_port_db.py @@ -192,6 +192,11 @@ APPEND_ONLY_TABLES = [ IGNORED_TABLES = { + # Porting the auto generated sequence in this table is non-trivial. + # None of the entries in this list are mandatory for Synapse to keep working. + # If state group disk space is an issue after the port, the + # `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again. + "state_groups_pending_deletion", # We don't port these tables, as they're a faff and we can regenerate # them anyway. "user_directory", @@ -217,6 +222,15 @@ IGNORED_TABLES = { } +# These background updates will not be applied upon creation of the postgres database. +IGNORED_BACKGROUND_UPDATES = { + # Reapplying this background update to the postgres database is unnecessary after + # already having waited for the SQLite database to complete all running background + # updates. + "mark_unreferenced_state_groups_for_deletion_bg_update", +} + + # Error returned by the run function. Used at the top-level part of the script to # handle errors and return codes. end_error: Optional[str] = None @@ -688,6 +702,20 @@ class Porter: # 0 means off. 1 means full. 2 means incremental. 
return autovacuum_setting != 0 + async def remove_ignored_background_updates_from_database(self) -> None: + def _remove_delete_unreferenced_state_groups_bg_updates( + txn: LoggingTransaction, + ) -> None: + txn.execute( + "DELETE FROM background_updates WHERE update_name = ANY(?)", + (list(IGNORED_BACKGROUND_UPDATES),), + ) + + await self.postgres_store.db_pool.runInteraction( + "remove_delete_unreferenced_state_groups_bg_updates", + _remove_delete_unreferenced_state_groups_bg_updates, + ) + async def run(self) -> None: """Ports the SQLite database to a PostgreSQL database. @@ -733,6 +761,8 @@ class Porter: self.hs_config.database.get_single_database() ) + await self.remove_ignored_background_updates_from_database() + await self.run_background_updates_on_postgres() self.progress.set_state("Creating port tables") diff --git a/synapse/storage/controllers/purge_events.py b/synapse/storage/controllers/purge_events.py index 47cec8c469..c2d4bf8290 100644 --- a/synapse/storage/controllers/purge_events.py +++ b/synapse/storage/controllers/purge_events.py @@ -21,11 +21,19 @@ import itertools import logging -from typing import TYPE_CHECKING, Collection, Mapping, Set +from typing import ( + TYPE_CHECKING, + Collection, + Mapping, + Optional, + Set, +) from synapse.logging.context import nested_logging_context from synapse.metrics.background_process_metrics import wrap_as_background_process +from synapse.storage.database import LoggingTransaction from synapse.storage.databases import Databases +from synapse.types.storage import _BackgroundUpdates if TYPE_CHECKING: from synapse.server import HomeServer @@ -44,6 +52,11 @@ class PurgeEventsStorageController: self._delete_state_groups_loop, 60 * 1000 ) + self.stores.state.db_pool.updates.register_background_update_handler( + _BackgroundUpdates.MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE, + self._background_delete_unrefereneced_state_groups, + ) + async def purge_room(self, room_id: str) -> None: """Deletes all record of 
a room""" @@ -81,7 +94,8 @@ class PurgeEventsStorageController: ) async def _find_unreferenced_groups( - self, state_groups: Collection[int] + self, + state_groups: Collection[int], ) -> Set[int]: """Used when purging history to figure out which state groups can be deleted. @@ -203,3 +217,232 @@ class PurgeEventsStorageController: room_id, groups_to_sequences, ) + + async def _background_delete_unrefereneced_state_groups( + self, progress: dict, batch_size: int + ) -> int: + """This background update will slowly delete any unreferenced state groups""" + + last_checked_state_group = progress.get("last_checked_state_group") + + if last_checked_state_group is None: + # This is the first run. + last_checked_state_group = ( + await self.stores.state.db_pool.simple_select_one_onecol( + table="state_groups", + keyvalues={}, + retcol="MAX(id)", + allow_none=True, + desc="get_max_state_group", + ) + ) + if last_checked_state_group is None: + # There are no state groups so the background process is finished. + await self.stores.state.db_pool.updates._end_background_update( + _BackgroundUpdates.MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE + ) + return batch_size + last_checked_state_group += 1 + + ( + last_checked_state_group, + final_batch, + ) = await self._delete_unreferenced_state_groups_batch( + last_checked_state_group, + batch_size, + ) + + if not final_batch: + # There are more state groups to check. + progress = { + "last_checked_state_group": last_checked_state_group, + } + await self.stores.state.db_pool.updates._background_update_progress( + _BackgroundUpdates.MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE, + progress, + ) + else: + # This background process is finished. 
+ await self.stores.state.db_pool.updates._end_background_update( + _BackgroundUpdates.MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE + ) + + return batch_size + + async def _delete_unreferenced_state_groups_batch( + self, + last_checked_state_group: int, + batch_size: int, + ) -> tuple[int, bool]: + """Looks for unreferenced state groups starting from the last state group + checked and marks them for deletion. + + Args: + last_checked_state_group: The last state group that was checked. + batch_size: How many state groups to process in this iteration. + + Returns: + (last_checked_state_group, final_batch) + """ + + # Find all state groups that can be deleted if any of the original set are deleted. + ( + to_delete, + last_checked_state_group, + final_batch, + ) = await self._find_unreferenced_groups_for_background_deletion( + last_checked_state_group, batch_size + ) + + if len(to_delete) == 0: + return last_checked_state_group, final_batch + + await self.stores.state_deletion.mark_state_groups_as_pending_deletion( + to_delete + ) + + return last_checked_state_group, final_batch + + async def _find_unreferenced_groups_for_background_deletion( + self, + last_checked_state_group: int, + batch_size: int, + ) -> tuple[Set[int], int, bool]: + """Used when deleting unreferenced state groups in the background to figure out + which state groups can be deleted. + To avoid increased DB usage due to de-deltaing state groups, this returns only + state groups which are free standing (ie. no shared edges with referenced groups) or + state groups which do not share edges which result in a future referenced group. + + The following scenarios outline the possibilities based on state group data in + the DB. + + ie. Free standing -> state groups 1-N would be returned: + SG_1 + | + ... + | + SG_N + + ie. Previous reference -> state groups 2-N would be returned: + SG_1 <- referenced by event + | + SG_2 + | + ... + | + SG_N + + ie. 
Future reference -> none of the following state groups would be returned: + SG_1 + | + SG_2 + | + ... + | + SG_N <- referenced by event + + Args: + last_checked_state_group: The last state group that was checked. + batch_size: How many state groups to process in this iteration. + + Returns: + (to_delete, last_checked_state_group, final_batch) + """ + + # If a state group's next edge is not pending deletion then we don't delete the state group. + # If there is no next edge or the next edges are all marked for deletion, then delete + # the state group. + # This holds since we walk backwards from the latest state groups, ensuring that + # we've already checked newer state groups for event references along the way. + def get_next_state_groups_marked_for_deletion_txn( + txn: LoggingTransaction, + ) -> tuple[dict[int, bool], dict[int, int]]: + state_group_sql = """ + SELECT s.id, e.state_group, d.state_group + FROM ( + SELECT id FROM state_groups + WHERE id < ? ORDER BY id DESC LIMIT ? + ) as s + LEFT JOIN state_group_edges AS e ON (s.id = e.prev_state_group) + LEFT JOIN state_groups_pending_deletion AS d ON (e.state_group = d.state_group) + """ + txn.execute(state_group_sql, (last_checked_state_group, batch_size)) + + # Mapping from state group to whether we should delete it. + state_groups_to_deletion: dict[int, bool] = {} + + # Mapping from state group to prev state group. + state_groups_to_prev: dict[int, int] = {} + + for row in txn: + state_group = row[0] + next_edge = row[1] + pending_deletion = row[2] + + if next_edge is not None: + state_groups_to_prev[next_edge] = state_group + + if next_edge is not None and not pending_deletion: + # We have found an edge not marked for deletion. + # Check previous results to see if this group is part of a chain + # within this batch that qualifies for deletion. + # ie. 
batch contains: + # SG_1 -> SG_2 -> SG_3 + # If SG_3 is a candidate for deletion, then SG_2 & SG_1 should also + # be, even though they have edges which may not be marked for + # deletion. + # This relies on SQL results being sorted in DESC order to work. + next_is_deletion_candidate = state_groups_to_deletion.get(next_edge) + if ( + next_is_deletion_candidate is None + or not next_is_deletion_candidate + ): + state_groups_to_deletion[state_group] = False + else: + state_groups_to_deletion.setdefault(state_group, True) + else: + # This state group may be a candidate for deletion + state_groups_to_deletion.setdefault(state_group, True) + + return state_groups_to_deletion, state_groups_to_prev + + ( + state_groups_to_deletion, + state_group_edges, + ) = await self.stores.state.db_pool.runInteraction( + "get_next_state_groups_marked_for_deletion", + get_next_state_groups_marked_for_deletion_txn, + ) + deletion_candidates = { + state_group + for state_group, deletion in state_groups_to_deletion.items() + if deletion + } + + final_batch = False + state_groups = state_groups_to_deletion.keys() + if len(state_groups) < batch_size: + final_batch = True + else: + last_checked_state_group = min(state_groups) + + if len(state_groups) == 0: + return set(), last_checked_state_group, final_batch + + # Determine if any of the remaining state groups are directly referenced. + referenced = await self.stores.main.get_referenced_state_groups( + deletion_candidates + ) + + # Remove state groups from deletion_candidates which are directly referenced or share a + # future edge with a referenced state group within this batch. 
+ def filter_reference_chains(group: Optional[int]) -> None: + while group is not None: + deletion_candidates.discard(group) + group = state_group_edges.get(group) + + for referenced_group in referenced: + filter_reference_chains(referenced_group) + + return deletion_candidates, last_checked_state_group, final_batch diff --git a/synapse/storage/databases/state/bg_updates.py b/synapse/storage/databases/state/bg_updates.py index f7824cba0f..95fd0ae73a 100644 --- a/synapse/storage/databases/state/bg_updates.py +++ b/synapse/storage/databases/state/bg_updates.py @@ -20,7 +20,15 @@ # import logging -from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Tuple, Union +from typing import ( + TYPE_CHECKING, + Dict, + List, + Mapping, + Optional, + Tuple, + Union, +) from synapse.logging.opentracing import tag_args, trace from synapse.storage._base import SQLBaseStore diff --git a/synapse/storage/databases/state/deletion.py b/synapse/storage/databases/state/deletion.py index d4b1c20a45..f77c46f6ae 100644 --- a/synapse/storage/databases/state/deletion.py +++ b/synapse/storage/databases/state/deletion.py @@ -321,18 +321,42 @@ class StateDeletionDataStore: async def mark_state_groups_as_pending_deletion( self, state_groups: Collection[int] ) -> None: - """Mark the given state groups as pending deletion""" + """Mark the given state groups as pending deletion. + + If any of the state groups are already pending deletion, then those records are + left as is. 
+ """ + + await self.db_pool.runInteraction( + "mark_state_groups_as_pending_deletion", + self._mark_state_groups_as_pending_deletion_txn, + state_groups, + ) + + def _mark_state_groups_as_pending_deletion_txn( + self, + txn: LoggingTransaction, + state_groups: Collection[int], + ) -> None: + sql = """ + INSERT INTO state_groups_pending_deletion (state_group, insertion_ts) + VALUES %s + ON CONFLICT (state_group) + DO NOTHING + """ now = self._clock.time_msec() - - await self.db_pool.simple_upsert_many( - table="state_groups_pending_deletion", - key_names=("state_group",), - key_values=[(state_group,) for state_group in state_groups], - value_names=("insertion_ts",), - value_values=[(now,) for _ in state_groups], - desc="mark_state_groups_as_pending_deletion", - ) + rows = [ + ( + state_group, + now, + ) + for state_group in state_groups + ] + if isinstance(txn.database_engine, PostgresEngine): + txn.execute_values(sql % ("?",), rows, fetch=False) + else: + txn.execute_batch(sql % ("(?, ?)",), rows) async def mark_state_groups_as_used(self, state_groups: Collection[int]) -> None: """Mark the given state groups as now being referenced""" diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index f87b1a4a0a..2160edb014 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -161,6 +161,7 @@ Changes in SCHEMA_VERSION = 89 Changes in SCHEMA_VERSION = 90 - Add a column `participant` to `room_memberships` table + - Add background update to delete unreferenced state groups. """ diff --git a/synapse/storage/schema/state/delta/90/02_delete_unreferenced_state_groups.sql b/synapse/storage/schema/state/delta/90/02_delete_unreferenced_state_groups.sql new file mode 100644 index 0000000000..55a038e2b8 --- /dev/null +++ b/synapse/storage/schema/state/delta/90/02_delete_unreferenced_state_groups.sql @@ -0,0 +1,16 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. 
+-- +-- Copyright (C) 2025 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + +-- Add a background update to delete any unreferenced state groups +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (9002, 'mark_unreferenced_state_groups_for_deletion_bg_update', '{}'); diff --git a/synapse/types/storage/__init__.py b/synapse/types/storage/__init__.py index b5fa20a41a..e03ff7ffc8 100644 --- a/synapse/types/storage/__init__.py +++ b/synapse/types/storage/__init__.py @@ -48,3 +48,7 @@ class _BackgroundUpdates: SLIDING_SYNC_MEMBERSHIP_SNAPSHOTS_FIX_FORGOTTEN_COLUMN_BG_UPDATE = ( "sliding_sync_membership_snapshots_fix_forgotten_column_bg_update" ) + + MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE = ( + "mark_unreferenced_state_groups_for_deletion_bg_update" + ) diff --git a/tests/storage/test_purge.py b/tests/storage/test_purge.py index 916e42e731..0aa14fd1f4 100644 --- a/tests/storage/test_purge.py +++ b/tests/storage/test_purge.py @@ -24,6 +24,7 @@ from synapse.api.errors import NotFoundError, SynapseError from synapse.rest.client import room from synapse.server import HomeServer from synapse.types.state import StateFilter +from synapse.types.storage import _BackgroundUpdates from synapse.util import Clock from tests.unittest import HomeserverTestCase @@ -303,3 +304,156 @@ class PurgeTests(HomeserverTestCase): ) ) self.assertEqual(len(state_groups), 1) + + def test_clear_unreferenced_state_groups(self) -> None: + """Test that any unreferenced state groups are automatically cleaned up.""" + + self.helper.send(self.room_id, body="test1") + state1 = self.helper.send_state( + self.room_id, "org.matrix.test", body={"number": 2} + ) + # 
Create enough state events to require multiple batches of + # mark_unreferenced_state_groups_for_deletion_bg_update to be run. + for i in range(200): + self.helper.send_state(self.room_id, "org.matrix.test", body={"number": i}) + self.helper.send(self.room_id, body="test4") + last = self.helper.send(self.room_id, body="test5") + + # Create an unreferenced state group that has no prev group. + unreferenced_free_state_group = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=None, + delta_ids={("org.matrix.test", ""): state1["event_id"]}, + current_state_ids={("org.matrix.test", ""): ""}, + ) + ) + + # Create some unreferenced state groups that have a prev group of one of the + # existing state groups. + prev_group = self.get_success( + self.store._get_state_group_for_event(state1["event_id"]) + ) + unreferenced_end_state_group = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=prev_group, + delta_ids={("org.matrix.test", ""): state1["event_id"]}, + current_state_ids=None, + ) + ) + another_unreferenced_end_state_group = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=unreferenced_end_state_group, + delta_ids={("org.matrix.test", ""): state1["event_id"]}, + current_state_ids=None, + ) + ) + + # Add some other unreferenced state groups which lead to a referenced state + # group. + # These state groups should not get deleted. 
+ chain_state_group = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=None, + delta_ids={("org.matrix.test", ""): ""}, + current_state_ids={("org.matrix.test", ""): ""}, + ) + ) + chain_state_group_2 = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=chain_state_group, + delta_ids={("org.matrix.test", ""): ""}, + current_state_ids=None, + ) + ) + referenced_chain_state_group = self.get_success( + self.state_store.store_state_group( + event_id=last["event_id"], + room_id=self.room_id, + prev_group=chain_state_group_2, + delta_ids={("org.matrix.test", ""): ""}, + current_state_ids=None, + ) + ) + self.get_success( + self.store.db_pool.simple_insert( + "event_to_state_groups", + { + "event_id": "$new_event", + "state_group": referenced_chain_state_group, + }, + ) + ) + + # Insert and run the background update. + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + { + "update_name": _BackgroundUpdates.MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE, + "progress_json": "{}", + }, + ) + ) + self.store.db_pool.updates._all_done = False + self.wait_for_background_updates() + + # Advance so that the background job to delete the state groups runs + self.reactor.advance( + 1 + self.state_deletion_store.DELAY_BEFORE_DELETION_MS / 1000 + ) + + # We expect that the unreferenced free state group has been deleted. + row = self.get_success( + self.state_store.db_pool.simple_select_one_onecol( + table="state_groups", + keyvalues={"id": unreferenced_free_state_group}, + retcol="id", + allow_none=True, + desc="test_purge_unreferenced_state_group", + ) + ) + self.assertIsNone(row) + + # We expect that both unreferenced end state groups have been deleted. 
+ row = self.get_success( + self.state_store.db_pool.simple_select_one_onecol( + table="state_groups", + keyvalues={"id": unreferenced_end_state_group}, + retcol="id", + allow_none=True, + desc="test_purge_unreferenced_state_group", + ) + ) + self.assertIsNone(row) + row = self.get_success( + self.state_store.db_pool.simple_select_one_onecol( + table="state_groups", + keyvalues={"id": another_unreferenced_end_state_group}, + retcol="id", + allow_none=True, + desc="test_purge_unreferenced_state_group", + ) + ) + self.assertIsNone(row) + + # We expect there to now only be one state group for the room, which is + # the state group of the last event (as the only outlier). + state_groups = self.get_success( + self.state_store.db_pool.simple_select_onecol( + table="state_groups", + keyvalues={"room_id": self.room_id}, + retcol="id", + desc="test_purge_unreferenced_state_group", + ) + ) + self.assertEqual(len(state_groups), 210) From d8fef721a07870dc2dd6d86cd2063ea1b3e2a491 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 25 Mar 2025 10:35:01 +0000 Subject: [PATCH 04/70] Correct typo "SAML" -> SSO in mapping providers docs (#18276) --- changelog.d/18276.doc | 1 + docs/sso_mapping_providers.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18276.doc diff --git a/changelog.d/18276.doc b/changelog.d/18276.doc new file mode 100644 index 0000000000..cacf6daf76 --- /dev/null +++ b/changelog.d/18276.doc @@ -0,0 +1 @@ +Correct a small typo in the SSO mapping providers documentation. \ No newline at end of file diff --git a/docs/sso_mapping_providers.md b/docs/sso_mapping_providers.md index d6c4e860ae..4d33c8da75 100644 --- a/docs/sso_mapping_providers.md +++ b/docs/sso_mapping_providers.md @@ -10,7 +10,7 @@ As an example, a SSO service may return the email address to turn that into a displayname when creating a Matrix user for this individual. 
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of variations. As each Synapse configuration may want something different, this is -where SAML mapping providers come into play. +where SSO mapping providers come into play. SSO mapping providers are currently supported for OpenID and SAML SSO configurations. Please see the details below for how to implement your own. From 7af299b3652927a47a592ece0627d203fa7c5c73 Mon Sep 17 00:00:00 2001 From: Olivier 'reivilibre Date: Tue, 25 Mar 2025 12:04:21 +0000 Subject: [PATCH 05/70] 1.127.0 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 3053724c9d..f63eabb58a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# Synapse 1.127.0 (2025-03-25) + +No significant changes since 1.127.0rc1. + + + + # Synapse 1.127.0rc1 (2025-03-18) ### Features diff --git a/debian/changelog b/debian/changelog index c50249f081..2e36b368d0 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.127.0) stable; urgency=medium + + * New Synapse release 1.127.0. + + -- Synapse Packaging team Tue, 25 Mar 2025 12:04:15 +0000 + matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium * New Synapse release 1.127.0rc1. 
diff --git a/pyproject.toml b/pyproject.toml index bc0f526390..6a29362919 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.127.0rc1" +version = "1.127.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From 92a29dcffc4fca0029c2f3ee63a66152c5e54e60 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 25 Mar 2025 09:57:55 -0400 Subject: [PATCH 06/70] Docker: Use an ARG for debian version more often (#18272) --- changelog.d/18272.docker | 1 + docker/Dockerfile-workers | 5 +++-- docker/complement/Dockerfile | 7 +++++-- 3 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelog.d/18272.docker diff --git a/changelog.d/18272.docker b/changelog.d/18272.docker new file mode 100644 index 0000000000..ceec619b6c --- /dev/null +++ b/changelog.d/18272.docker @@ -0,0 +1 @@ +Always specify base image debian versions with a build argument. diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index 2ceb6ab67c..3dec4bba05 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -2,12 +2,13 @@ ARG SYNAPSE_VERSION=latest ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION +ARG DEBIAN_VERSION=bookworm # first of all, we create a base image with an nginx which we can copy into the # target image. For repeated rebuilds, this is much faster than apt installing # each time. 
-FROM docker.io/library/debian:bookworm-slim AS deps_base +FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ @@ -21,7 +22,7 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base # which makes it much easier to copy (but we need to make sure we use an image # based on the same debian version as the synapse image, to make sure we get # the expected version of libc. -FROM docker.io/library/redis:7-bookworm AS redis_base +FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile index ce82c400eb..3e7f808cc5 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile @@ -9,6 +9,9 @@ ARG SYNAPSE_VERSION=latest # This is an intermediate image, to be built locally (not pulled from a registry). ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION +ARG DEBIAN_VERSION=bookworm + +FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base FROM $FROM # First of all, we copy postgres server from the official postgres image, @@ -20,8 +23,8 @@ FROM $FROM # the same debian version as Synapse's docker image (so the versions of the # shared libraries match). 
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql -COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql -COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql +COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql +COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql ENV PATH="${PATH}:/usr/lib/postgresql/13/bin" ENV PGDATA=/var/lib/postgresql/data From bd08a01fc808d31e8c3e4d20b19dd22bd225df4b Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 25 Mar 2025 09:58:40 -0400 Subject: [PATCH 07/70] Dockerfile: set package arch via APT config option (#18271) --- changelog.d/18271.docker | 1 + docker/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18271.docker diff --git a/changelog.d/18271.docker b/changelog.d/18271.docker new file mode 100644 index 0000000000..fcb2ac1e00 --- /dev/null +++ b/changelog.d/18271.docker @@ -0,0 +1 @@ +Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with ":{arch}". 
diff --git a/docker/Dockerfile b/docker/Dockerfile index 27125c4c0b..54aa355370 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -148,7 +148,7 @@ RUN \ for arch in arm64 amd64; do \ mkdir -p /tmp/debs-${arch} && \ cd /tmp/debs-${arch} && \ - apt-get download $(sed "s/$/:${arch}/" /tmp/pkg-list); \ + apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \ done # Extract the debs for each architecture From a227d20c25ff8fce45b9e3c3874cf3004633a9f4 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 25 Mar 2025 10:09:38 -0400 Subject: [PATCH 08/70] Pass args to start_for_complement.sh (#18273) --- changelog.d/18273.docker | 1 + docker/complement/conf/start_for_complement.sh | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18273.docker diff --git a/changelog.d/18273.docker b/changelog.d/18273.docker new file mode 100644 index 0000000000..1da0c3efb1 --- /dev/null +++ b/changelog.d/18273.docker @@ -0,0 +1 @@ +Allow passing arguments to start_for_complement.sh (to be sent to configure_workers_and_start.py). 
diff --git a/docker/complement/conf/start_for_complement.sh b/docker/complement/conf/start_for_complement.sh index cc798a3210..59b30e2051 100755 --- a/docker/complement/conf/start_for_complement.sh +++ b/docker/complement/conf/start_for_complement.sh @@ -5,12 +5,12 @@ set -e echo "Complement Synapse launcher" -echo " Args: $@" +echo " Args: $*" echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR" function log { d=$(date +"%Y-%m-%d %H:%M:%S,%3N") - echo "$d $@" + echo "$d $*" } # Set the server name of the homeserver @@ -131,4 +131,4 @@ export SYNAPSE_TLS_KEY=/conf/server.tls.key # Run the script that writes the necessary config files and starts supervisord, which in turn # starts everything else -exec /configure_workers_and_start.py +exec /configure_workers_and_start.py "$@" From 5e83434f3a9715895c10de4b8174fb02d94d025d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Mar 2025 14:11:51 +0000 Subject: [PATCH 09/70] Bump log from 0.4.26 to 0.4.27 (#18267) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f53d029cbd..20110694a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -223,9 +223,9 @@ checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346" [[package]] name = "log" -version = "0.4.26" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "memchr" From 2277df2a1eb685f85040ef98fa21d41aa4cdd389 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 26 Mar 2025 16:38:15 +0000 Subject: [PATCH 10/70] =?UTF-8?q?Fix=20GHSA-v56r-hwv5-mxg6=20=E2=80=94=20F?= 
=?UTF-8?q?ederation=20denial?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6 Federation denial of service via malformed events. --- synapse/api/constants.py | 9 +++++++-- synapse/events/utils.py | 5 ++--- synapse/events/validator.py | 4 +--- synapse/federation/federation_base.py | 12 +++++++++++- synapse/federation/federation_client.py | 13 +++++-------- synapse/federation/federation_server.py | 21 ++++++++++++-------- synapse/federation/units.py | 26 +++++++++++++++++++++++-- 7 files changed, 63 insertions(+), 27 deletions(-) diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 9806e2b0fe..c564a8635a 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -29,8 +29,13 @@ from typing import Final # the max size of a (canonical-json-encoded) event MAX_PDU_SIZE = 65536 -# the "depth" field on events is limited to 2**63 - 1 -MAX_DEPTH = 2**63 - 1 +# Max/min size of ints in canonical JSON +CANONICALJSON_MAX_INT = (2**53) - 1 +CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT + +# the "depth" field on events is limited to the same as what +# canonicaljson accepts +MAX_DEPTH = CANONICALJSON_MAX_INT # the maximum length for a room alias is 255 characters MAX_ALIAS_LENGTH = 255 diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 54f94add4d..eb18ba2db7 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -40,6 +40,8 @@ import attr from canonicaljson import encode_canonical_json from synapse.api.constants import ( + CANONICALJSON_MAX_INT, + CANONICALJSON_MIN_INT, MAX_PDU_SIZE, EventContentFields, EventTypes, @@ -61,9 +63,6 @@ SPLIT_FIELD_REGEX = re.compile(r"\\*\.") # Find escaped characters, e.g. those with a \ in front of them. 
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)") -CANONICALJSON_MAX_INT = (2**53) - 1 -CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT - # Module API callback that allows adding fields to the unsigned section of # events that are sent to clients. diff --git a/synapse/events/validator.py b/synapse/events/validator.py index 8aa8d7e017..d1fb026cd6 100644 --- a/synapse/events/validator.py +++ b/synapse/events/validator.py @@ -86,9 +86,7 @@ class EventValidator: # Depending on the room version, ensure the data is spec compliant JSON. if event.room_version.strict_canonicaljson: - # Note that only the client controlled portion of the event is - # checked, since we trust the portions of the event we created. - validate_canonicaljson(event.content) + validate_canonicaljson(event.get_pdu_json()) if event.type == EventTypes.Aliases: if "aliases" in event.content: diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index b101a389ef..3796bff5e7 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -20,7 +20,7 @@ # # import logging -from typing import TYPE_CHECKING, Awaitable, Callable, Optional +from typing import TYPE_CHECKING, Awaitable, Callable, List, Optional, Sequence from synapse.api.constants import MAX_DEPTH, EventContentFields, EventTypes, Membership from synapse.api.errors import Codes, SynapseError @@ -29,6 +29,7 @@ from synapse.crypto.event_signing import check_event_content_hash from synapse.crypto.keyring import Keyring from synapse.events import EventBase, make_event_from_dict from synapse.events.utils import prune_event, validate_canonicaljson +from synapse.federation.units import filter_pdus_for_valid_depth from synapse.http.servlet import assert_params_in_dict from synapse.logging.opentracing import log_kv, trace from synapse.types import JsonDict, get_domain_from_id @@ -267,6 +268,15 @@ def _is_invite_via_3pid(event: EventBase) -> bool: ) +def parse_events_from_pdu_json( + 
pdus_json: Sequence[JsonDict], room_version: RoomVersion +) -> List[EventBase]: + return [ + event_from_pdu_json(pdu_json, room_version) + for pdu_json in filter_pdus_for_valid_depth(pdus_json) + ] + + def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventBase: """Construct an EventBase from an event json received over federation diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 7d80ff6998..9fc5b70e9a 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -68,6 +68,7 @@ from synapse.federation.federation_base import ( FederationBase, InvalidEventSignatureError, event_from_pdu_json, + parse_events_from_pdu_json, ) from synapse.federation.transport.client import SendJoinResponse from synapse.http.client import is_unknown_endpoint @@ -349,7 +350,7 @@ class FederationClient(FederationBase): room_version = await self.store.get_room_version(room_id) - pdus = [event_from_pdu_json(p, room_version) for p in transaction_data_pdus] + pdus = parse_events_from_pdu_json(transaction_data_pdus, room_version) # Check signatures and hash of pdus, removing any from the list that fail checks pdus[:] = await self._check_sigs_and_hash_for_pulled_events_and_fetch( @@ -393,9 +394,7 @@ class FederationClient(FederationBase): transaction_data, ) - pdu_list: List[EventBase] = [ - event_from_pdu_json(p, room_version) for p in transaction_data["pdus"] - ] + pdu_list = parse_events_from_pdu_json(transaction_data["pdus"], room_version) if pdu_list and pdu_list[0]: pdu = pdu_list[0] @@ -809,7 +808,7 @@ class FederationClient(FederationBase): room_version = await self.store.get_room_version(room_id) - auth_chain = [event_from_pdu_json(p, room_version) for p in res["auth_chain"]] + auth_chain = parse_events_from_pdu_json(res["auth_chain"], room_version) signed_auth = await self._check_sigs_and_hash_for_pulled_events_and_fetch( destination, auth_chain, room_version=room_version 
@@ -1529,9 +1528,7 @@ class FederationClient(FederationBase): room_version = await self.store.get_room_version(room_id) - events = [ - event_from_pdu_json(e, room_version) for e in content.get("events", []) - ] + events = parse_events_from_pdu_json(content.get("events", []), room_version) signed_events = await self._check_sigs_and_hash_for_pulled_events_and_fetch( destination, events, room_version=room_version diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 1932fa82a4..f9e97ea13e 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -66,7 +66,7 @@ from synapse.federation.federation_base import ( event_from_pdu_json, ) from synapse.federation.persistence import TransactionActions -from synapse.federation.units import Edu, Transaction +from synapse.federation.units import Edu, Transaction, serialize_and_filter_pdus from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME from synapse.http.servlet import assert_params_in_dict from synapse.logging.context import ( @@ -469,7 +469,12 @@ class FederationServer(FederationBase): logger.info("Ignoring PDU: %s", e) continue - event = event_from_pdu_json(p, room_version) + try: + event = event_from_pdu_json(p, room_version) + except SynapseError as e: + logger.info("Ignoring PDU for failing to deserialize: %s", e) + continue + pdus_by_room.setdefault(room_id, []).append(event) if event.origin_server_ts > newest_pdu_ts: @@ -636,8 +641,8 @@ class FederationServer(FederationBase): ) return { - "pdus": [pdu.get_pdu_json() for pdu in pdus], - "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain], + "pdus": serialize_and_filter_pdus(pdus), + "auth_chain": serialize_and_filter_pdus(auth_chain), } async def on_pdu_request( @@ -761,8 +766,8 @@ class FederationServer(FederationBase): event_json = event.get_pdu_json(time_now) resp = { "event": event_json, - "state": [p.get_pdu_json(time_now) for p in state_events], - 
"auth_chain": [p.get_pdu_json(time_now) for p in auth_chain_events], + "state": serialize_and_filter_pdus(state_events, time_now), + "auth_chain": serialize_and_filter_pdus(auth_chain_events, time_now), "members_omitted": caller_supports_partial_state, } @@ -1005,7 +1010,7 @@ class FederationServer(FederationBase): time_now = self._clock.time_msec() auth_pdus = await self.handler.on_event_auth(event_id) - res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]} + res = {"auth_chain": serialize_and_filter_pdus(auth_pdus, time_now)} return 200, res async def on_query_client_keys( @@ -1090,7 +1095,7 @@ class FederationServer(FederationBase): time_now = self._clock.time_msec() - return {"events": [ev.get_pdu_json(time_now) for ev in missing_events]} + return {"events": serialize_and_filter_pdus(missing_events, time_now)} async def on_openid_userinfo(self, token: str) -> Optional[str]: ts_now_ms = self._clock.time_msec() diff --git a/synapse/federation/units.py b/synapse/federation/units.py index d8b67a6a5b..3bb5f824b7 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -24,10 +24,12 @@ server protocol. """ import logging -from typing import List, Optional +from typing import List, Optional, Sequence import attr +from synapse.api.constants import CANONICALJSON_MAX_INT, CANONICALJSON_MIN_INT +from synapse.events import EventBase from synapse.types import JsonDict logger = logging.getLogger(__name__) @@ -104,8 +106,28 @@ class Transaction: result = { "origin": self.origin, "origin_server_ts": self.origin_server_ts, - "pdus": self.pdus, + "pdus": filter_pdus_for_valid_depth(self.pdus), } if self.edus: result["edus"] = self.edus return result + + +def filter_pdus_for_valid_depth(pdus: Sequence[JsonDict]) -> List[JsonDict]: + filtered_pdus = [] + for pdu in pdus: + # Drop PDUs that have a depth that is outside of the range allowed + # by canonical json. 
+ if ( + "depth" in pdu + and CANONICALJSON_MIN_INT <= pdu["depth"] <= CANONICALJSON_MAX_INT + ): + filtered_pdus.append(pdu) + + return filtered_pdus + + +def serialize_and_filter_pdus( + pdus: Sequence[EventBase], time_now: Optional[int] = None +) -> List[JsonDict]: + return filter_pdus_for_valid_depth([pdu.get_pdu_json(time_now) for pdu in pdus]) From 31110f35d92ea8dfcae14d7a8897d6a89277640d Mon Sep 17 00:00:00 2001 From: Eric Eastwood Date: Wed, 26 Mar 2025 14:35:54 -0500 Subject: [PATCH 11/70] Add docs for how to clear out the Poetry wheel cache (#18283) As shared by @reivilibre, https://github.com/element-hq/synapse/pull/18261#issuecomment-2754607816 Relevant Poetry issue around how this should be handled by them: https://github.com/python-poetry/poetry/issues/10304 --- changelog.d/18283.doc | 1 + docs/development/dependencies.md | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 changelog.d/18283.doc diff --git a/changelog.d/18283.doc b/changelog.d/18283.doc new file mode 100644 index 0000000000..77fa08fa6b --- /dev/null +++ b/changelog.d/18283.doc @@ -0,0 +1 @@ +Add docs for how to clear out the Poetry wheel cache. diff --git a/docs/development/dependencies.md b/docs/development/dependencies.md index 8e29ff3a57..fa5ff4dcf7 100644 --- a/docs/development/dependencies.md +++ b/docs/development/dependencies.md @@ -150,6 +150,28 @@ $ poetry shell $ poetry install --extras all ``` +If you want to go even further and remove the Poetry caches: + +```shell +# Find your Poetry cache directory +# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory +$ poetry config cache-dir + +# Remove packages from all cached repositories +$ poetry cache clear --all . 
+ +# Go completely nuclear and clear out everything Poetry cache related +# including the wheel artifacts which is not covered by the above command +# (see https://github.com/python-poetry/poetry/issues/10304) +# +# This is necessary in order to rebuild or fetch new wheels. For example, if you update +# the `icu` library in on your system, you will need to rebuild the PyICU Python package +# in order to incorporate the correct dynamically linked library locations otherwise you +# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory` +$ rm -rf $(poetry config cache-dir) +``` + + ## ...run a command in the `poetry` virtualenv? Use `poetry run cmd args` when you need the python virtualenv context. From ecc09b15f108a49348777c908af0187cf26d281e Mon Sep 17 00:00:00 2001 From: Andrew Morgan Date: Wed, 26 Mar 2025 21:08:00 +0000 Subject: [PATCH 12/70] 1.127.1 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index f63eabb58a..0176c6e45d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# Synapse 1.127.1 (2025-03-26) + +## Security +- Fix [CVE-2025-30355](https://www.cve.org/CVERecord?id=CVE-2025-30355) / [GHSA-v56r-hwv5-mxg6](https://github.com/element-hq/synapse/security/advisories/GHSA-v56r-hwv5-mxg6). **High severity vulnerability affecting federation. The vulnerability has been exploited in the wild.** + + + # Synapse 1.127.0 (2025-03-25) No significant changes since 1.127.0rc1. diff --git a/debian/changelog b/debian/changelog index 2e36b368d0..f25c28c9dc 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.127.1) stable; urgency=medium + + * New Synapse release 1.127.1. + + -- Synapse Packaging team Wed, 26 Mar 2025 21:07:31 +0000 + matrix-synapse-py3 (1.127.0) stable; urgency=medium * New Synapse release 1.127.0. 
diff --git a/pyproject.toml b/pyproject.toml index 6a29362919..e91a75445c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.127.0" +version = "1.127.1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From a39b856cf0e73def842ca9697f57dd9df646f080 Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Thu, 27 Mar 2025 14:56:16 +0000 Subject: [PATCH 13/70] Add DB delta to remove the old state group deletion job (#18284) This background DB delta removes the old state group deletion background update from the `background_updates` table if it exists. The `delete_unreferenced_state_groups_bg_update` update should only exist in that table if a homeserver ran v1.126.0rc1/v1.126.0rc2, and rolled back or forward to any other version of Synapse before letting the update finish. ### Pull Request Checklist * [X] Pull request is based on the develop branch * [X] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [X] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/18284.misc | 1 + .../delta/90/03_remove_old_deletion_bg_update.sql | 15 +++++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 changelog.d/18284.misc create mode 100644 synapse/storage/schema/state/delta/90/03_remove_old_deletion_bg_update.sql diff --git a/changelog.d/18284.misc b/changelog.d/18284.misc new file mode 100644 index 0000000000..69610adc65 --- /dev/null +++ b/changelog.d/18284.misc @@ -0,0 +1 @@ +Add DB delta to remove the old state group deletion job. diff --git a/synapse/storage/schema/state/delta/90/03_remove_old_deletion_bg_update.sql b/synapse/storage/schema/state/delta/90/03_remove_old_deletion_bg_update.sql new file mode 100644 index 0000000000..1cc6d612b6 --- /dev/null +++ b/synapse/storage/schema/state/delta/90/03_remove_old_deletion_bg_update.sql @@ -0,0 +1,15 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2025 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + +-- Remove the old unreferenced state group deletion background update if it exists +DELETE FROM background_updates WHERE update_name = 'delete_unreferenced_state_groups_bg_update'; From d17295e5c3de642ba2c4e47f1bb2be7b2e4c9c06 Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Thu, 27 Mar 2025 17:26:34 +0000 Subject: [PATCH 14/70] Store hashes of media files, and allow quarantining by hash. (#18277) This PR makes a few radical changes to media. 
This now stores the SHA256 hash of each file stored in the database (excluding thumbnails, more on that later). If a set of media is quarantined, any additional uploads of the same file contents or any other files with the same hash will be quarantined at the same time. Currently this does NOT: - De-duplicate media, although a future extension could be to do that. - Run any background jobs to identify the hashes of older files. This could also be a future extension, though the value of doing so is limited to combat the abuse of recent media. - Hash thumbnails. It's assumed that thumbnails are parented to some form of media, so you'd likely be wanting to quarantine the media and the thumbnail at the same time. --- changelog.d/18277.feature | 1 + synapse/media/media_repository.py | 46 ++++- synapse/media/media_storage.py | 84 ++++++++- .../databases/main/media_repository.py | 95 +++++++++- synapse/storage/databases/main/room.py | 172 +++++++++++++++--- synapse/storage/schema/__init__.py | 2 +- .../schema/main/delta/91/01_media_hash.sql | 21 +++ tests/handlers/test_profile.py | 1 + tests/media/test_media_retention.py | 11 +- tests/media/test_media_storage.py | 107 ++++++++++- tests/rest/admin/test_admin.py | 21 ++- tests/rest/admin/test_media.py | 74 ++++++-- tests/rest/client/test_media.py | 3 + tests/rest/media/test_domain_blocking.py | 1 + tests/test_utils/__init__.py | 2 + 15 files changed, 579 insertions(+), 62 deletions(-) create mode 100644 changelog.d/18277.feature create mode 100644 synapse/storage/schema/main/delta/91/01_media_hash.sql diff --git a/changelog.d/18277.feature b/changelog.d/18277.feature new file mode 100644 index 0000000000..3604e732d5 --- /dev/null +++ b/changelog.d/18277.feature @@ -0,0 +1 @@ +Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. 
\ No newline at end of file diff --git a/synapse/media/media_repository.py b/synapse/media/media_repository.py index cf4cba722a..859b30e029 100644 --- a/synapse/media/media_repository.py +++ b/synapse/media/media_repository.py @@ -59,7 +59,11 @@ from synapse.media._base import ( respond_with_responder, ) from synapse.media.filepath import MediaFilePaths -from synapse.media.media_storage import MediaStorage +from synapse.media.media_storage import ( + MediaStorage, + SHA256TransparentIOReader, + SHA256TransparentIOWriter, +) from synapse.media.storage_provider import StorageProviderWrapper from synapse.media.thumbnailer import Thumbnailer, ThumbnailError from synapse.media.url_previewer import UrlPreviewer @@ -301,15 +305,26 @@ class MediaRepository: auth_user: The user_id of the uploader """ file_info = FileInfo(server_name=None, file_id=media_id) - fname = await self.media_storage.store_file(content, file_info) + sha256reader = SHA256TransparentIOReader(content) + # This implements all of IO as it has a passthrough + fname = await self.media_storage.store_file(sha256reader.wrap(), file_info) + sha256 = sha256reader.hexdigest() + should_quarantine = await self.store.get_is_hash_quarantined(sha256) logger.info("Stored local media in file %r", fname) + if should_quarantine: + logger.warn( + "Media has been automatically quarantined as it matched existing quarantined media" + ) + await self.store.update_local_media( media_id=media_id, media_type=media_type, upload_name=upload_name, media_length=content_length, user_id=auth_user, + sha256=sha256, + quarantined_by="system" if should_quarantine else None, ) try: @@ -342,11 +357,19 @@ class MediaRepository: media_id = random_string(24) file_info = FileInfo(server_name=None, file_id=media_id) - - fname = await self.media_storage.store_file(content, file_info) + # This implements all of IO as it has a passthrough + sha256reader = SHA256TransparentIOReader(content) + fname = await 
self.media_storage.store_file(sha256reader.wrap(), file_info) + sha256 = sha256reader.hexdigest() + should_quarantine = await self.store.get_is_hash_quarantined(sha256) logger.info("Stored local media in file %r", fname) + if should_quarantine: + logger.warn( + "Media has been automatically quarantined as it matched existing quarantined media" + ) + await self.store.store_local_media( media_id=media_id, media_type=media_type, @@ -354,6 +377,9 @@ class MediaRepository: upload_name=upload_name, media_length=content_length, user_id=auth_user, + sha256=sha256, + # TODO: Better name? + quarantined_by="system" if should_quarantine else None, ) try: @@ -756,11 +782,13 @@ class MediaRepository: file_info = FileInfo(server_name=server_name, file_id=file_id) async with self.media_storage.store_into_file(file_info) as (f, fname): + sha256writer = SHA256TransparentIOWriter(f) try: length, headers = await self.client.download_media( server_name, media_id, - output_stream=f, + # This implements all of BinaryIO as it has a passthrough + output_stream=sha256writer.wrap(), max_size=self.max_upload_size, max_timeout_ms=max_timeout_ms, download_ratelimiter=download_ratelimiter, @@ -825,6 +853,7 @@ class MediaRepository: upload_name=upload_name, media_length=length, filesystem_id=file_id, + sha256=sha256writer.hexdigest(), ) logger.info("Stored remote media in file %r", fname) @@ -845,6 +874,7 @@ class MediaRepository: last_access_ts=time_now_ms, quarantined_by=None, authenticated=authenticated, + sha256=sha256writer.hexdigest(), ) async def _federation_download_remote_file( @@ -879,11 +909,13 @@ class MediaRepository: file_info = FileInfo(server_name=server_name, file_id=file_id) async with self.media_storage.store_into_file(file_info) as (f, fname): + sha256writer = SHA256TransparentIOWriter(f) try: res = await self.client.federation_download_media( server_name, media_id, - output_stream=f, + # This implements all of BinaryIO as it has a passthrough + 
output_stream=sha256writer.wrap(), max_size=self.max_upload_size, max_timeout_ms=max_timeout_ms, download_ratelimiter=download_ratelimiter, @@ -954,6 +986,7 @@ class MediaRepository: upload_name=upload_name, media_length=length, filesystem_id=file_id, + sha256=sha256writer.hexdigest(), ) logger.debug("Stored remote media in file %r", fname) @@ -974,6 +1007,7 @@ class MediaRepository: last_access_ts=time_now_ms, quarantined_by=None, authenticated=authenticated, + sha256=sha256writer.hexdigest(), ) def _get_thumbnail_requirements( diff --git a/synapse/media/media_storage.py b/synapse/media/media_storage.py index c25d1a9ba3..afd33c02a1 100644 --- a/synapse/media/media_storage.py +++ b/synapse/media/media_storage.py @@ -19,6 +19,7 @@ # # import contextlib +import hashlib import json import logging import os @@ -70,6 +71,88 @@ logger = logging.getLogger(__name__) CRLF = b"\r\n" +class SHA256TransparentIOWriter: + """Will generate a SHA256 hash from a source stream transparently. + + Args: + source: Source stream. + """ + + def __init__(self, source: BinaryIO): + self._hash = hashlib.sha256() + self._source = source + + def write(self, buffer: Union[bytes, bytearray]) -> int: + """Wrapper for source.write() + + Args: + buffer + + Returns: + the value of source.write() + """ + res = self._source.write(buffer) + self._hash.update(buffer) + return res + + def hexdigest(self) -> str: + """The digest of the written or read value. + + Returns: + The digest in hex formaat. + """ + return self._hash.hexdigest() + + def wrap(self) -> BinaryIO: + # This class implements a subset the IO interface and passes through everything else via __getattr__ + return cast(BinaryIO, self) + + # Passthrough any other calls + def __getattr__(self, attr_name: str) -> Any: + return getattr(self._source, attr_name) + + +class SHA256TransparentIOReader: + """Will generate a SHA256 hash from a source stream transparently. + + Args: + source: Source IO stream. 
+ """ + + def __init__(self, source: IO): + self._hash = hashlib.sha256() + self._source = source + + def read(self, n: int = -1) -> bytes: + """Wrapper for source.read() + + Args: + n + + Returns: + the value of source.read() + """ + bytes = self._source.read(n) + self._hash.update(bytes) + return bytes + + def hexdigest(self) -> str: + """The digest of the written or read value. + + Returns: + The digest in hex formaat. + """ + return self._hash.hexdigest() + + def wrap(self) -> IO: + # This class implements a subset the IO interface and passes through everything else via __getattr__ + return cast(IO, self) + + # Passthrough any other calls + def __getattr__(self, attr_name: str) -> Any: + return getattr(self._source, attr_name) + + class MediaStorage: """Responsible for storing/fetching files from local sources. @@ -107,7 +190,6 @@ class MediaStorage: Returns: the file path written to in the primary media store """ - async with self.store_into_file(file_info) as (f, fname): # Write to the main media repository await self.write_to_file(source, f) diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index 7a96e25432..8a5ba44922 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -19,6 +19,7 @@ # [This file includes modifications made by New Vector Limited] # # +import logging from enum import Enum from typing import ( TYPE_CHECKING, @@ -51,6 +52,8 @@ BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2 = ( "media_repository_drop_index_wo_method_2" ) +logger = logging.getLogger(__name__) + @attr.s(slots=True, frozen=True, auto_attribs=True) class LocalMedia: @@ -65,6 +68,7 @@ class LocalMedia: safe_from_quarantine: bool user_id: Optional[str] authenticated: Optional[bool] + sha256: Optional[str] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -79,6 +83,7 @@ class RemoteMedia: last_access_ts: int quarantined_by: Optional[str] 
authenticated: Optional[bool] + sha256: Optional[str] @attr.s(slots=True, frozen=True, auto_attribs=True) @@ -154,6 +159,26 @@ class MediaRepositoryBackgroundUpdateStore(SQLBaseStore): unique=True, ) + self.db_pool.updates.register_background_index_update( + update_name="local_media_repository_sha256_idx", + index_name="local_media_repository_sha256", + table="local_media_repository", + where_clause="sha256 IS NOT NULL", + columns=[ + "sha256", + ], + ) + + self.db_pool.updates.register_background_index_update( + update_name="remote_media_cache_sha256_idx", + index_name="remote_media_cache_sha256", + table="remote_media_cache", + where_clause="sha256 IS NOT NULL", + columns=[ + "sha256", + ], + ) + self.db_pool.updates.register_background_update_handler( BG_UPDATE_REMOVE_MEDIA_REPO_INDEX_WITHOUT_METHOD_2, self._drop_media_index_without_method, @@ -221,6 +246,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): "safe_from_quarantine", "user_id", "authenticated", + "sha256", ), allow_none=True, desc="get_local_media", @@ -239,6 +265,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): safe_from_quarantine=row[7], user_id=row[8], authenticated=row[9], + sha256=row[10], ) async def get_local_media_by_user_paginate( @@ -295,7 +322,8 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): quarantined_by, safe_from_quarantine, user_id, - authenticated + authenticated, + sha256 FROM local_media_repository WHERE user_id = ? 
ORDER BY {order_by_column} {order}, media_id ASC @@ -320,6 +348,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): safe_from_quarantine=bool(row[8]), user_id=row[9], authenticated=row[10], + sha256=row[11], ) for row in txn ] @@ -449,6 +478,8 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): media_length: int, user_id: UserID, url_cache: Optional[str] = None, + sha256: Optional[str] = None, + quarantined_by: Optional[str] = None, ) -> None: if self.hs.config.media.enable_authenticated_media: authenticated = True @@ -466,6 +497,8 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): "user_id": user_id.to_string(), "url_cache": url_cache, "authenticated": authenticated, + "sha256": sha256, + "quarantined_by": quarantined_by, }, desc="store_local_media", ) @@ -477,20 +510,28 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): upload_name: Optional[str], media_length: int, user_id: UserID, + sha256: str, url_cache: Optional[str] = None, + quarantined_by: Optional[str] = None, ) -> None: + updatevalues = { + "media_type": media_type, + "upload_name": upload_name, + "media_length": media_length, + "url_cache": url_cache, + "sha256": sha256, + } + + # This should never be un-set by this function. 
+ if quarantined_by is not None: + updatevalues["quarantined_by"] = quarantined_by + await self.db_pool.simple_update_one( "local_media_repository", keyvalues={ - "user_id": user_id.to_string(), "media_id": media_id, }, - updatevalues={ - "media_type": media_type, - "upload_name": upload_name, - "media_length": media_length, - "url_cache": url_cache, - }, + updatevalues=updatevalues, desc="update_local_media", ) @@ -657,6 +698,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): "last_access_ts", "quarantined_by", "authenticated", + "sha256", ), allow_none=True, desc="get_cached_remote_media", @@ -674,6 +716,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): last_access_ts=row[5], quarantined_by=row[6], authenticated=row[7], + sha256=row[8], ) async def store_cached_remote_media( @@ -685,6 +728,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): time_now_ms: int, upload_name: Optional[str], filesystem_id: str, + sha256: Optional[str], ) -> None: if self.hs.config.media.enable_authenticated_media: authenticated = True @@ -703,6 +747,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): "filesystem_id": filesystem_id, "last_access_ts": time_now_ms, "authenticated": authenticated, + "sha256": sha256, }, desc="store_cached_remote_media", ) @@ -946,3 +991,37 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): await self.db_pool.runInteraction( "delete_url_cache_media", _delete_url_cache_media_txn ) + + async def get_is_hash_quarantined(self, sha256: str) -> bool: + """Get whether a specific sha256 hash digest matches any quarantined media. + + Returns: + None if the media_id doesn't exist. + """ + + def get_matching_media_txn( + txn: LoggingTransaction, table: str, sha256: str + ) -> bool: + # Return on first match + sql = """ + SELECT 1 + FROM local_media_repository + WHERE sha256 = ? 
AND quarantined_by IS NOT NULL + + UNION ALL + + SELECT 1 + FROM remote_media_cache + WHERE sha256 = ? AND quarantined_by IS NOT NULL + LIMIT 1 + """ + txn.execute(sql, (sha256, sha256)) + row = txn.fetchone() + return row is not None + + return await self.db_pool.runInteraction( + "get_matching_media_txn", + get_matching_media_txn, + "local_media_repository", + sha256, + ) diff --git a/synapse/storage/databases/main/room.py b/synapse/storage/databases/main/room.py index d673adba16..56217fccdf 100644 --- a/synapse/storage/databases/main/room.py +++ b/synapse/storage/databases/main/room.py @@ -51,11 +51,15 @@ from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.config.homeserver import HomeServerConfig from synapse.events import EventBase from synapse.replication.tcp.streams.partial_state import UnPartialStatedRoomStream -from synapse.storage._base import db_to_json, make_in_list_sql_clause +from synapse.storage._base import ( + db_to_json, + make_in_list_sql_clause, +) from synapse.storage.database import ( DatabasePool, LoggingDatabaseConnection, LoggingTransaction, + make_tuple_in_list_sql_clause, ) from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore from synapse.storage.types import Cursor @@ -1127,6 +1131,109 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): return local_media_ids + def _quarantine_local_media_txn( + self, + txn: LoggingTransaction, + hashes: Set[str], + media_ids: Set[str], + quarantined_by: Optional[str], + ) -> int: + """Quarantine and unquarantine local media items. 
+ + Args: + txn (cursor) + hashes: A set of sha256 hashes for any media that should be quarantined + media_ids: A set of media IDs for any media that should be quarantined + quarantined_by: The ID of the user who initiated the quarantine request + If it is `None` media will be removed from quarantine + Returns: + The total number of media items quarantined + """ + total_media_quarantined = 0 + + # Effectively a legacy path, update any media that was explicitly named. + if media_ids: + sql_many_clause_sql, sql_many_clause_args = make_in_list_sql_clause( + txn.database_engine, "media_id", media_ids + ) + sql = f""" + UPDATE local_media_repository + SET quarantined_by = ? + WHERE {sql_many_clause_sql}""" + + if quarantined_by is not None: + sql += " AND safe_from_quarantine = FALSE" + + txn.execute(sql, [quarantined_by] + sql_many_clause_args) + # Note that a rowcount of -1 can be used to indicate no rows were affected. + total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0 + + # Update any media that was identified via hash. + if hashes: + sql_many_clause_sql, sql_many_clause_args = make_in_list_sql_clause( + txn.database_engine, "sha256", hashes + ) + sql = f""" + UPDATE local_media_repository + SET quarantined_by = ? 
+ WHERE {sql_many_clause_sql}""" + + if quarantined_by is not None: + sql += " AND safe_from_quarantine = FALSE" + + txn.execute(sql, [quarantined_by] + sql_many_clause_args) + total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0 + + return total_media_quarantined + + def _quarantine_remote_media_txn( + self, + txn: LoggingTransaction, + hashes: Set[str], + media: Set[Tuple[str, str]], + quarantined_by: Optional[str], + ) -> int: + """Quarantine and unquarantine remote items + + Args: + txn (cursor) + hashes: A set of sha256 hashes for any media that should be quarantined + media_ids: A set of tuples (media_origin, media_id) for any media that should be quarantined + quarantined_by: The ID of the user who initiated the quarantine request + If it is `None` media will be removed from quarantine + Returns: + The total number of media items quarantined + """ + total_media_quarantined = 0 + + if media: + sql_in_list_clause, sql_args = make_tuple_in_list_sql_clause( + txn.database_engine, + ("media_origin", "media_id"), + media, + ) + sql = f""" + UPDATE remote_media_cache + SET quarantined_by = ? + WHERE {sql_in_list_clause}""" + + txn.execute(sql, [quarantined_by] + sql_args) + total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0 + + total_media_quarantined = 0 + if hashes: + sql_many_clause_sql, sql_many_clause_args = make_in_list_sql_clause( + txn.database_engine, "sha256", hashes + ) + sql = f""" + UPDATE remote_media_cache + SET quarantined_by = ? 
+ WHERE {sql_many_clause_sql}""" + txn.execute(sql, [quarantined_by] + sql_many_clause_args) + total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0 + + return total_media_quarantined + def _quarantine_media_txn( self, txn: LoggingTransaction, @@ -1146,40 +1253,49 @@ class RoomWorkerStore(CacheInvalidationWorkerStore): Returns: The total number of media items quarantined """ + hashes = set() + media_ids = set() + remote_media = set() - # Update all the tables to set the quarantined_by flag - sql = """ - UPDATE local_media_repository - SET quarantined_by = ? - WHERE media_id = ? - """ - - # set quarantine - if quarantined_by is not None: - sql += "AND safe_from_quarantine = FALSE" - txn.executemany( - sql, [(quarantined_by, media_id) for media_id in local_mxcs] + # First, determine the hashes of the media we want to delete. + # We also want the media_ids for any media that lacks a hash. + if local_mxcs: + hash_sql_many_clause_sql, hash_sql_many_clause_args = ( + make_in_list_sql_clause(txn.database_engine, "media_id", local_mxcs) ) - # remove from quarantine - else: - txn.executemany( - sql, [(quarantined_by, media_id) for media_id in local_mxcs] + hash_sql = f"SELECT sha256, media_id FROM local_media_repository WHERE {hash_sql_many_clause_sql}" + if quarantined_by is not None: + hash_sql += " AND safe_from_quarantine = FALSE" + + txn.execute(hash_sql, hash_sql_many_clause_args) + for sha256, media_id in txn: + if sha256: + hashes.add(sha256) + else: + media_ids.add(media_id) + + # Do the same for remote media + if remote_mxcs: + hash_sql_in_list_clause, hash_sql_args = make_tuple_in_list_sql_clause( + txn.database_engine, + ("media_origin", "media_id"), + remote_mxcs, ) - # Note that a rowcount of -1 can be used to indicate no rows were affected. 
- total_media_quarantined = txn.rowcount if txn.rowcount > 0 else 0 + hash_sql = f"SELECT sha256, media_origin, media_id FROM remote_media_cache WHERE {hash_sql_in_list_clause}" + txn.execute(hash_sql, hash_sql_args) + for sha256, media_origin, media_id in txn: + if sha256: + hashes.add(sha256) + else: + remote_media.add((media_origin, media_id)) - txn.executemany( - """ - UPDATE remote_media_cache - SET quarantined_by = ? - WHERE media_origin = ? AND media_id = ? - """, - [(quarantined_by, origin, media_id) for origin, media_id in remote_mxcs], + count = self._quarantine_local_media_txn(txn, hashes, media_ids, quarantined_by) + count += self._quarantine_remote_media_txn( + txn, hashes, remote_media, quarantined_by ) - total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0 - return total_media_quarantined + return count async def block_room(self, room_id: str, user_id: str) -> None: """Marks the room as blocked. diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 2160edb014..ad683a3a07 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -19,7 +19,7 @@ # # -SCHEMA_VERSION = 90 # remember to update the list below when updating +SCHEMA_VERSION = 91 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the diff --git a/synapse/storage/schema/main/delta/91/01_media_hash.sql b/synapse/storage/schema/main/delta/91/01_media_hash.sql new file mode 100644 index 0000000000..2dbd2c7df7 --- /dev/null +++ b/synapse/storage/schema/main/delta/91/01_media_hash.sql @@ -0,0 +1,21 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. 
+-- +-- Copyright (C) 2025 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . + +-- Store the SHA256 content hash of media files. +ALTER TABLE local_media_repository ADD COLUMN sha256 TEXT; +ALTER TABLE remote_media_cache ADD COLUMN sha256 TEXT; + +-- Add a background updates to handle creating the new index. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (9101, 'local_media_repository_sha256_idx', '{}'), + (9101, 'remote_media_cache_sha256_idx', '{}'); \ No newline at end of file diff --git a/tests/handlers/test_profile.py b/tests/handlers/test_profile.py index cb1c6fbb80..2b9b56da95 100644 --- a/tests/handlers/test_profile.py +++ b/tests/handlers/test_profile.py @@ -369,6 +369,7 @@ class ProfileTestCase(unittest.HomeserverTestCase): time_now_ms=self.clock.time_msec(), upload_name=None, filesystem_id="xyz", + sha256="abcdefg12345", ) ) diff --git a/tests/media/test_media_retention.py b/tests/media/test_media_retention.py index 417d17ebd2..d8f4f57c8c 100644 --- a/tests/media/test_media_retention.py +++ b/tests/media/test_media_retention.py @@ -31,6 +31,9 @@ from synapse.rest.client import login, register, room from synapse.server import HomeServer from synapse.types import UserID from synapse.util import Clock +from synapse.util.stringutils import ( + random_string, +) from tests import unittest from tests.unittest import override_config @@ -65,7 +68,6 @@ class MediaRetentionTestCase(unittest.HomeserverTestCase): # quarantined media) into both the local store and the remote cache, plus # one additional local media that is marked as protected from quarantine. 
media_repository = hs.get_media_repository() - test_media_content = b"example string" def _create_media_and_set_attributes( last_accessed_ms: Optional[int], @@ -73,12 +75,14 @@ class MediaRetentionTestCase(unittest.HomeserverTestCase): is_protected: Optional[bool] = False, ) -> MXCUri: # "Upload" some media to the local media store + # If the meda + random_content = bytes(random_string(24), "utf-8") mxc_uri: MXCUri = self.get_success( media_repository.create_content( media_type="text/plain", upload_name=None, - content=io.BytesIO(test_media_content), - content_length=len(test_media_content), + content=io.BytesIO(random_content), + content_length=len(random_content), auth_user=UserID.from_string(test_user_id), ) ) @@ -129,6 +133,7 @@ class MediaRetentionTestCase(unittest.HomeserverTestCase): time_now_ms=clock.time_msec(), upload_name="testfile.txt", filesystem_id="abcdefg12345", + sha256=random_string(24), ) ) diff --git a/tests/media/test_media_storage.py b/tests/media/test_media_storage.py index c2e0e592d7..35e16a99ba 100644 --- a/tests/media/test_media_storage.py +++ b/tests/media/test_media_storage.py @@ -42,6 +42,7 @@ from twisted.web.resource import Resource from synapse.api.errors import Codes, HttpResponseException from synapse.api.ratelimiting import Ratelimiter from synapse.events import EventBase +from synapse.http.client import ByteWriteable from synapse.http.types import QueryParams from synapse.logging.context import make_deferred_yieldable from synapse.media._base import FileInfo, ThumbnailInfo @@ -59,7 +60,7 @@ from synapse.util import Clock from tests import unittest from tests.server import FakeChannel -from tests.test_utils import SMALL_CMYK_JPEG, SMALL_PNG +from tests.test_utils import SMALL_CMYK_JPEG, SMALL_PNG, SMALL_PNG_SHA256 from tests.unittest import override_config from tests.utils import default_config @@ -1257,3 +1258,107 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase): ) assert channel.code == 502 assert 
channel.json_body["errcode"] == "M_TOO_LARGE" + + +def read_body( + response: IResponse, stream: ByteWriteable, max_size: Optional[int] +) -> Deferred: + d: Deferred = defer.Deferred() + stream.write(SMALL_PNG) + d.callback(len(SMALL_PNG)) + return d + + +class MediaHashesTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets, + login.register_servlets, + media.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.user = self.register_user("user", "pass") + self.tok = self.login("user", "pass") + self.store = hs.get_datastores().main + self.client = hs.get_federation_http_client() + + def create_resource_dict(self) -> Dict[str, Resource]: + resources = super().create_resource_dict() + resources["/_matrix/media"] = self.hs.get_media_repository_resource() + return resources + + def test_ensure_correct_sha256(self) -> None: + """Check that the hash does not change""" + media = self.helper.upload_media(SMALL_PNG, tok=self.tok, expect_code=200) + mxc = media.get("content_uri") + assert mxc + store_media = self.get_success(self.store.get_local_media(mxc[11:])) + assert store_media + self.assertEqual( + store_media.sha256, + SMALL_PNG_SHA256, + ) + + def test_ensure_multiple_correct_sha256(self) -> None: + """Check that two media items have the same hash.""" + media_a = self.helper.upload_media(SMALL_PNG, tok=self.tok, expect_code=200) + mxc_a = media_a.get("content_uri") + assert mxc_a + store_media_a = self.get_success(self.store.get_local_media(mxc_a[11:])) + assert store_media_a + + media_b = self.helper.upload_media(SMALL_PNG, tok=self.tok, expect_code=200) + mxc_b = media_b.get("content_uri") + assert mxc_b + store_media_b = self.get_success(self.store.get_local_media(mxc_b[11:])) + assert store_media_b + + self.assertNotEqual( + store_media_a.media_id, + store_media_b.media_id, + ) + self.assertEqual( + store_media_a.sha256, + store_media_b.sha256, + ) + + @override_config( + { + 
"enable_authenticated_media": False, + } + ) + # mock actually reading file body + @patch( + "synapse.http.matrixfederationclient.read_body_with_max_size", + read_body, + ) + def test_ensure_correct_sha256_federated(self) -> None: + """Check that federated media have the same hash.""" + + # Mock getting a file over federation + async def _send_request(*args: Any, **kwargs: Any) -> IResponse: + resp = MagicMock(spec=IResponse) + resp.code = 200 + resp.length = 500 + resp.headers = Headers({"Content-Type": ["application/octet-stream"]}) + resp.phrase = b"OK" + return resp + + self.client._send_request = _send_request # type: ignore + + # first request should go through + channel = self.make_request( + "GET", + "/_matrix/media/v3/download/remote.org/abc", + shorthand=False, + access_token=self.tok, + ) + assert channel.code == 200 + store_media = self.get_success( + self.store.get_cached_remote_media("remote.org", "abc") + ) + assert store_media + self.assertEqual( + store_media.sha256, + SMALL_PNG_SHA256, + ) diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 5483f8f37f..fc2a6c569b 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -20,7 +20,7 @@ # import urllib.parse -from typing import Dict +from typing import Dict, cast from parameterized import parameterized @@ -32,6 +32,7 @@ from synapse.http.server import JsonResource from synapse.rest.admin import VersionServlet from synapse.rest.client import login, media, room from synapse.server import HomeServer +from synapse.types import UserID from synapse.util import Clock from tests import unittest @@ -227,10 +228,25 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase): # Upload some media response_1 = self.helper.upload_media(SMALL_PNG, tok=non_admin_user_tok) response_2 = self.helper.upload_media(SMALL_PNG, tok=non_admin_user_tok) + response_3 = self.helper.upload_media(SMALL_PNG, tok=non_admin_user_tok) # Extract media IDs 
server_and_media_id_1 = response_1["content_uri"][6:] server_and_media_id_2 = response_2["content_uri"][6:] + server_and_media_id_3 = response_3["content_uri"][6:] + + # Remove the hash from the media to simulate historic media. + self.get_success( + self.hs.get_datastores().main.update_local_media( + media_id=server_and_media_id_3.split("/")[1], + media_type="image/png", + upload_name=None, + media_length=123, + user_id=UserID.from_string(non_admin_user), + # Hack to force some media to have no hash. + sha256=cast(str, None), + ) + ) # Quarantine all media by this user url = "/_synapse/admin/v1/user/%s/media/quarantine" % urllib.parse.quote( @@ -244,12 +260,13 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase): self.pump(1.0) self.assertEqual(200, channel.code, msg=channel.json_body) self.assertEqual( - channel.json_body, {"num_quarantined": 2}, "Expected 2 quarantined items" + channel.json_body, {"num_quarantined": 3}, "Expected 3 quarantined items" ) # Attempt to access each piece of media self._ensure_quarantined(admin_user_tok, server_and_media_id_1) self._ensure_quarantined(admin_user_tok, server_and_media_id_2) + self._ensure_quarantined(admin_user_tok, server_and_media_id_3) def test_cannot_quarantine_safe_media(self) -> None: self.register_user("user_admin", "pass", admin=True) diff --git a/tests/rest/admin/test_media.py b/tests/rest/admin/test_media.py index 19c244cfcf..da0e9749aa 100644 --- a/tests/rest/admin/test_media.py +++ b/tests/rest/admin/test_media.py @@ -35,7 +35,7 @@ from synapse.server import HomeServer from synapse.util import Clock from tests import unittest -from tests.test_utils import SMALL_PNG +from tests.test_utils import SMALL_CMYK_JPEG, SMALL_PNG from tests.unittest import override_config VALID_TIMESTAMP = 1609459200000 # 2021-01-01 in milliseconds @@ -598,23 +598,27 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests): class QuarantineMediaByIDTestCase(_AdminMediaTests): + def upload_media_and_return_media_id(self, 
data: bytes) -> str: + # Upload some media into the room + response = self.helper.upload_media( + data, + tok=self.admin_user_tok, + expect_code=200, + ) + # Extract media ID from the response + server_and_media_id = response["content_uri"][6:] # Cut off 'mxc://' + return server_and_media_id.split("/")[1] + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.store = hs.get_datastores().main self.server_name = hs.hostname self.admin_user = self.register_user("admin", "pass", admin=True) self.admin_user_tok = self.login("admin", "pass") - - # Upload some media into the room - response = self.helper.upload_media( - SMALL_PNG, - tok=self.admin_user_tok, - expect_code=200, - ) - # Extract media ID from the response - server_and_media_id = response["content_uri"][6:] # Cut off 'mxc://' - self.media_id = server_and_media_id.split("/")[1] - + self.media_id = self.upload_media_and_return_media_id(SMALL_PNG) + self.media_id_2 = self.upload_media_and_return_media_id(SMALL_PNG) + self.media_id_3 = self.upload_media_and_return_media_id(SMALL_PNG) + self.media_id_other = self.upload_media_and_return_media_id(SMALL_CMYK_JPEG) self.url = "/_synapse/admin/v1/media/%s/%s/%s" @parameterized.expand(["quarantine", "unquarantine"]) @@ -686,6 +690,52 @@ class QuarantineMediaByIDTestCase(_AdminMediaTests): assert media_info is not None self.assertFalse(media_info.quarantined_by) + def test_quarantine_media_match_hash(self) -> None: + """ + Tests that quarantining removes all media with the same hash + """ + + media_info = self.get_success(self.store.get_local_media(self.media_id)) + assert media_info is not None + self.assertFalse(media_info.quarantined_by) + + # quarantining + channel = self.make_request( + "POST", + self.url % ("quarantine", self.server_name, self.media_id), + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertFalse(channel.json_body) + + # Test that ALL similar media was 
quarantined. + for media in [self.media_id, self.media_id_2, self.media_id_3]: + media_info = self.get_success(self.store.get_local_media(media)) + assert media_info is not None + self.assertTrue(media_info.quarantined_by) + + # Test that other media was not. + media_info = self.get_success(self.store.get_local_media(self.media_id_other)) + assert media_info is not None + self.assertFalse(media_info.quarantined_by) + + # remove from quarantine + channel = self.make_request( + "POST", + self.url % ("unquarantine", self.server_name, self.media_id), + access_token=self.admin_user_tok, + ) + + self.assertEqual(200, channel.code, msg=channel.json_body) + self.assertFalse(channel.json_body) + + # Test that ALL similar media is now reset. + for media in [self.media_id, self.media_id_2, self.media_id_3]: + media_info = self.get_success(self.store.get_local_media(media)) + assert media_info is not None + self.assertFalse(media_info.quarantined_by) + def test_quarantine_protected_media(self) -> None: """ Tests that quarantining from protected media fails diff --git a/tests/rest/client/test_media.py b/tests/rest/client/test_media.py index 0e3e370ee8..1ea2a5c884 100644 --- a/tests/rest/client/test_media.py +++ b/tests/rest/client/test_media.py @@ -137,6 +137,7 @@ class MediaDomainBlockingTests(unittest.HomeserverTestCase): time_now_ms=clock.time_msec(), upload_name="test.png", filesystem_id=file_id, + sha256=file_id, ) ) self.register_user("user", "password") @@ -2593,6 +2594,7 @@ class AuthenticatedMediaTestCase(unittest.HomeserverTestCase): time_now_ms=self.clock.time_msec(), upload_name="remote_test.png", filesystem_id=file_id, + sha256=file_id, ) ) @@ -2725,6 +2727,7 @@ class AuthenticatedMediaTestCase(unittest.HomeserverTestCase): time_now_ms=self.clock.time_msec(), upload_name="remote_test.png", filesystem_id=file_id, + sha256=file_id, ) ) diff --git a/tests/rest/media/test_domain_blocking.py b/tests/rest/media/test_domain_blocking.py index 49d81f4b28..26453f70dd 100644 
--- a/tests/rest/media/test_domain_blocking.py +++ b/tests/rest/media/test_domain_blocking.py @@ -61,6 +61,7 @@ class MediaDomainBlockingTests(unittest.HomeserverTestCase): time_now_ms=clock.time_msec(), upload_name="test.png", filesystem_id=file_id, + sha256=file_id, ) ) diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py index e3611852b7..3e6fd03600 100644 --- a/tests/test_utils/__init__.py +++ b/tests/test_utils/__init__.py @@ -139,6 +139,8 @@ SMALL_PNG = unhexlify( b"0000001f15c4890000000a49444154789c63000100000500010d" b"0a2db40000000049454e44ae426082" ) +# The SHA256 hexdigest for the above bytes. +SMALL_PNG_SHA256 = "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a" # A small CMYK-encoded JPEG image used in some tests. # From 3c188231c76ee8c05a6a40d12ccfdebada86b406 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marcel=20Pennewi=C3=9F?= Date: Thu, 27 Mar 2025 18:31:37 +0100 Subject: [PATCH 15/70] Update admin_faq - Fix how to obtain access token (#18225) Riot is now known as element and Access token moved to Help & About --- changelog.d/18225.doc | 1 + docs/usage/administration/admin_faq.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18225.doc diff --git a/changelog.d/18225.doc b/changelog.d/18225.doc new file mode 100644 index 0000000000..854e275120 --- /dev/null +++ b/changelog.d/18225.doc @@ -0,0 +1 @@ +Fix how to obtain access token and change naming from riot to element diff --git a/docs/usage/administration/admin_faq.md b/docs/usage/administration/admin_faq.md index 0dce3d3e37..1be432000c 100644 --- a/docs/usage/administration/admin_faq.md +++ b/docs/usage/administration/admin_faq.md @@ -160,7 +160,7 @@ Using the following curl command: ```console curl -H 'Authorization: Bearer ' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/ ``` -`` - can be obtained in riot by looking in the riot settings, down the bottom is: +`` - can be obtained in element by looking in All 
settings, clicking Help & About and down the bottom is: Access Token:\ `` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org From 76ffd3ba018f822d2309c1a69ab37251ce587f83 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 09:55:30 +0200 Subject: [PATCH 16/70] Bump actions/cache from 4.2.2 to 4.2.3 (#18266) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/cache](https://github.com/actions/cache) from 4.2.2 to 4.2.3.
Release notes

Sourced from actions/cache's releases.

v4.2.3

What's Changed

  • Update to use @​actions/cache 4.0.3 package & prepare for new release by @​salmanmkc in actions/cache#1577 (SAS tokens for cache entries are now masked in debug logs)

New Contributors

Full Changelog: https://github.com/actions/cache/compare/v4.2.2...v4.2.3

Changelog

Sourced from actions/cache's changelog.

Releases

4.2.3

  • Bump @actions/cache to v4.0.3 (obfuscates SAS token in debug logs for cache entries)

4.2.2

  • Bump @actions/cache to v4.0.2

4.2.1

  • Bump @actions/cache to v4.0.1

4.2.0

TLDR; The cache backend service has been rewritten from the ground up for improved performance and reliability. actions/cache now integrates with the new cache service (v2) APIs.

The new service will gradually roll out as of February 1st, 2025. The legacy service will also be sunset on the same date. Changes in these release are fully backward compatible.

We are deprecating some versions of this action. We recommend upgrading to version v4 or v3 as soon as possible before February 1st, 2025. (Upgrade instructions below).

If you are using pinned SHAs, please use the SHAs of versions v4.2.0 or v3.4.0

If you do not upgrade, all workflow runs using any of the deprecated actions/cache will fail.

Upgrading to the recommended versions will not break your workflows.

4.1.2

  • Add GitHub Enterprise Cloud instances hostname filters to inform API endpoint choices - #1474
  • Security fix: Bump braces from 3.0.2 to 3.0.3 - #1475

4.1.1

  • Restore original behavior of cache-hit output - #1467

4.1.0

  • Ensure cache-hit output is set when a cache is missed - #1404
  • Deprecate save-always input - #1452

4.0.2

  • Fixed restore fail-on-cache-miss not working.

4.0.1

  • Updated isGhes check

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/cache&package-manager=github_actions&previous-version=4.2.2&new-version=4.2.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release-artifacts.yml | 2 +- .github/workflows/tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 9985084a73..dda2b834fc 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -66,7 +66,7 @@ jobs: install: true - name: Set up docker layer caching - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ff83d6e365..4193d3e2f9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -167,7 +167,7 @@ jobs: # Cribbed from # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17 - name: Restore/persist mypy's cache - uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4.2.2 + uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3 with: path: | .mypy_cache From 4dba011c316139eb58971d4fa706f9aca36f02ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 09:56:53 +0200 Subject: [PATCH 17/70] Bump dawidd6/action-download-artifact from 8 to 9 (#18204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [dawidd6/action-download-artifact](https://github.com/dawidd6/action-download-artifact) from 8 to 9.
Release notes

Sourced from dawidd6/action-download-artifact's releases.

v9

What's Changed

New Contributors

Full Changelog: https://github.com/dawidd6/action-download-artifact/compare/v8...v9

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=dawidd6/action-download-artifact&package-manager=github_actions&previous-version=8&new-version=9)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs-pr-netlify.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs-pr-netlify.yaml b/.github/workflows/docs-pr-netlify.yaml index d1b789c8d0..8a06ad6362 100644 --- a/.github/workflows/docs-pr-netlify.yaml +++ b/.github/workflows/docs-pr-netlify.yaml @@ -14,7 +14,7 @@ jobs: # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess: - name: 📥 Download artifact - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 + uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9 with: workflow: docs-pr.yaml run_id: ${{ github.event.workflow_run.id }} From 3bc04d05a4454e59c55688a26cea3f542f2697ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 09:58:27 +0200 Subject: [PATCH 18/70] Bump pygithub from 2.5.0 to 2.6.1 (#18243) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pygithub](https://github.com/pygithub/pygithub) from 2.5.0 to 2.6.1.
Release notes

Sourced from pygithub's releases.

v2.6.1

Bug Fixes

Maintenance

Full Changelog: https://github.com/PyGithub/PyGithub/compare/v2.6.0...v2.6.1

v2.6.0

Breaking Changes

  • Rework Views and Clones by @​EnricoMi in PyGithub/PyGithub#3168: View and clones traffic information returned by Repository.get_views_traffic and Repository.get_clones_traffic now return proper PyGithub objects, instead of a dict, with all information that used to be provided by the dict:

Code like

repo.get_views_traffic().["views"].timestamp
repo.get_clones_traffic().["clones"].timestamp

should be replaced with

repo.get_views_traffic().views.timestamp
repo.get_clones_traffic().clones.timestamp

New Features

Improvements

... (truncated)

Changelog

Sourced from pygithub's changelog.

Version 2.6.1 (February 21, 2025)

Bug Fixes ^^^^^^^^^

  • Fix broken pickle support for Auth classes ([#3211](https://github.com/pygithub/pygithub/issues/3211) <https://github.com/PyGithub/PyGithub/pull/3211>) (f975552a <https://github.com/PyGithub/PyGithub/commit/f975552a>)
  • Remove schema from Deployment, remove message attribute ([#3223](https://github.com/pygithub/pygithub/issues/3223) <https://github.com/PyGithub/PyGithub/pull/3223>) (d12e7d4c <https://github.com/PyGithub/PyGithub/commit/d12e7d4c>)
  • Fix incorrect deprecated import ([#3225](https://github.com/pygithub/pygithub/issues/3225) <https://github.com/PyGithub/PyGithub/pull/3225>) (93297440 <https://github.com/PyGithub/PyGithub/commit/93297440>)
  • Add CodeSecurityConfigRepository returned by get_repos_for_code_security_config ([#3219](https://github.com/pygithub/pygithub/issues/3219) <https://github.com/PyGithub/PyGithub/pull/3219>) (f997a2f6 <https://github.com/PyGithub/PyGithub/commit/f997a2f6>)
  • Make GitTag.verification return GitCommitVerification ([#3226](https://github.com/pygithub/pygithub/issues/3226) <https://github.com/PyGithub/PyGithub/pull/3226>) (048a1a38 <https://github.com/PyGithub/PyGithub/commit/048a1a38>)

Maintenance ^^^^^^^^^^^

  • Mention removal of AppAuth.private_key in changelog ([#3212](https://github.com/pygithub/pygithub/issues/3212) <https://github.com/PyGithub/PyGithub/pull/3212>) (f5dc1c76 <https://github.com/PyGithub/PyGithub/commit/f5dc1c76>)

Version 2.6.0 (February 15, 2025)

Breaking Changes ^^^^^^^^^^^^^^^^

  • Rework Views and Clones ([#3168](https://github.com/pygithub/pygithub/issues/3168) <https://github.com/PyGithub/PyGithub/pull/3168>) (f7d52249 <https://github.com/PyGithub/PyGithub/commit/f7d52249>):

    View and clones traffic information returned by Repository.get_views_traffic and Repository.get_clones_traffic now return proper PyGithub objects, instead of a dict, with all information that used to be provided by the dict:

Code like

.. code-block:: python

repo.get_views_traffic().["views"].timestamp repo.get_clones_traffic().["clones"].timestamp

should be replaced with

.. code-block:: python

repo.get_views_traffic().views.timestamp repo.get_clones_traffic().clones.timestamp

  • Add GitCommitVerification class ([#3028](https://github.com/pygithub/pygithub/issues/3028) <https://github.com/PyGithub/PyGithub/pull/3028>) (822e6d71 <https://github.com/PyGithub/PyGithub/commit/822e6d71>):

    Changes the return value of GitTag.verification and GitCommit.verification from dict to GitCommitVerification.

    Code like

    .. code-block:: python

    tag.verification["reason"] commit.verification["reason"]

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pygithub&package-manager=pip&previous-version=2.5.0&new-version=2.6.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 52 ++++++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/poetry.lock b/poetry.lock index ed98dd9f4e..cfbfdc427a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -39,7 +39,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" files = [ {file = "Authlib-1.4.1-py2.py3-none-any.whl", hash = "sha256:edc29c3f6a3e72cd9e9f45fff67fc663a2c364022eb0371c003f22d5405915c1"}, {file = "authlib-1.4.1.tar.gz", hash = "sha256:30ead9ea4993cdbab821dc6e01e818362f92da290c04c7f6a1940f86507a790d"}, @@ -451,7 +451,7 @@ description = "XML bomb protection for Python stdlib modules" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -494,7 +494,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"}, {file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"}, @@ -544,7 +544,7 @@ 
description = "Python wrapper for hiredis" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"redis\"" files = [ {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"}, {file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"}, @@ -890,7 +890,7 @@ description = "Jaeger Python OpenTracing Tracer implementation" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"}, ] @@ -1028,7 +1028,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"}, {file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"}, @@ -1044,7 +1044,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"url-preview\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"url-preview\"" files = [ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, @@ -1330,7 +1330,7 @@ description = "An LDAP3 auth provider for Synapse" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\"" files = [ {file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"}, {file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"}, @@ -1551,7 +1551,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"}, ] @@ -1709,7 +1709,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"postgres\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"postgres\"" files = [ {file = "psycopg2-2.9.10-cp310-cp310-win32.whl", hash = "sha256:5df2b672140f95adb453af93a7d669d7a7bf0a56bcd26f1502329166f4a61716"}, {file = "psycopg2-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:c6f7b8561225f9e711a9c47087388a97fdc948211c10a4bccbf0ba68ab7b3b5a"}, @@ -1730,7 +1730,7 @@ description = ".. 
image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"}, ] @@ -1746,7 +1746,7 @@ description = "A Simple library to enable psycopg2 compatability" optional = true python-versions = "*" groups = ["main"] -markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")" +markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")" files = [ {file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"}, ] @@ -1929,14 +1929,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygithub" -version = "2.5.0" +version = "2.6.1" description = "Use the full Github API v3" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "PyGithub-2.5.0-py3-none-any.whl", hash = "sha256:b0b635999a658ab8e08720bdd3318893ff20e2275f6446fcf35bf3f44f2c0fd2"}, - {file = "pygithub-2.5.0.tar.gz", hash = "sha256:e1613ac508a9be710920d26eb18b1905ebd9926aa49398e88151c1b526aad3cf"}, + {file = "PyGithub-2.6.1-py3-none-any.whl", hash = "sha256:6f2fa6d076ccae475f9fc392cc6cdbd54db985d4f69b8833a28397de75ed6ca3"}, + {file = "pygithub-2.6.1.tar.gz", hash = "sha256:b5c035392991cca63959e9453286b41b54d83bf2de2daa7d7ff7e4312cebf3bf"}, ] [package.dependencies] @@ -1969,7 +1969,7 @@ description = "Python extension wrapping the ICU C++ API" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"user-search\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"user-search\"" files = [ {file = 
"PyICU-2.14.tar.gz", hash = "sha256:acc7eb92bd5c554ed577249c6978450a4feda0aa6f01470152b3a7b382a02132"}, ] @@ -2018,7 +2018,7 @@ description = "A development tool to measure, monitor and analyze the memory beh optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"cache-memory\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"cache-memory\"" files = [ {file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"}, {file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"}, @@ -2077,7 +2077,7 @@ description = "Python implementation of SAML Version 2 Standard" optional = true python-versions = ">=3.9,<4.0" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"}, {file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"}, @@ -2102,7 +2102,7 @@ description = "Extensions to the standard Python datetime module" optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -2130,7 +2130,7 @@ description = "World timezone definitions, modern and historical" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = 
"sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, @@ -2494,7 +2494,7 @@ description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = ">=3.6" groups = ["main"] -markers = "extra == \"sentry\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"sentry\"" files = [ {file = "sentry_sdk-2.22.0-py2.py3-none-any.whl", hash = "sha256:3d791d631a6c97aad4da7074081a57073126c69487560c6f8bffcf586461de66"}, {file = "sentry_sdk-2.22.0.tar.gz", hash = "sha256:b4bf43bb38f547c84b2eadcefbe389b36ef75f3f38253d7a74d6b928c07ae944"}, @@ -2678,7 +2678,7 @@ description = "Tornado IOLoop Backed Concurrent Futures" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"}, {file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"}, @@ -2694,7 +2694,7 @@ description = "Python bindings for the Apache Thrift RPC system" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"}, ] @@ -2756,7 +2756,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"opentracing\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"opentracing\"" files = [ {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = 
"sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, @@ -2890,7 +2890,7 @@ description = "non-blocking redis client for python" optional = true python-versions = "*" groups = ["main"] -markers = "extra == \"redis\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"redis\"" files = [ {file = "txredisapi-1.4.10-py3-none-any.whl", hash = "sha256:0a6ea77f27f8cf092f907654f08302a97b48fa35f24e0ad99dfb74115f018161"}, {file = "txredisapi-1.4.10.tar.gz", hash = "sha256:7609a6af6ff4619a3189c0adfb86aeda789afba69eb59fc1e19ac0199e725395"}, @@ -3218,7 +3218,7 @@ description = "An XML Schema validator and decoder" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"saml2\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"saml2\"" files = [ {file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"}, {file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"}, From 9f8ed145356026c83d4af8d25ed60fbec8d99b32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 08:08:57 +0000 Subject: [PATCH 19/70] Bump actions/download-artifact from 4.2.0 to 4.2.1 (#18268) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 4.2.0 to 4.2.1.
Release notes

Sourced from actions/download-artifact's releases.

v4.2.1

What's Changed

Full Changelog: https://github.com/actions/download-artifact/compare/v4.2.0...v4.2.1

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/download-artifact&package-manager=github_actions&previous-version=4.2.0&new-version=4.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/release-artifacts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index dda2b834fc..e9e2ac2606 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -203,7 +203,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all workflow run artifacts - uses: actions/download-artifact@b14cf4c92620c250e1c074ab0a5800e37df86765 # v4.2.0 + uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 - name: Build a tarball for the debs # We need to merge all the debs uploads into one folder, then compress # that. From 02eed668b844b887e519be3a8995e1a7ebe7ff3f Mon Sep 17 00:00:00 2001 From: Will Hunt Date: Tue, 1 Apr 2025 11:43:05 +0100 Subject: [PATCH 20/70] Document media hashing changes (#18296) Essentially document the change in behaviour in #18277 ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/18296.doc | 1 + docs/admin_api/media_admin_api.md | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog.d/18296.doc diff --git a/changelog.d/18296.doc b/changelog.d/18296.doc new file mode 100644 index 0000000000..3604e732d5 --- /dev/null +++ b/changelog.d/18296.doc @@ -0,0 +1 @@ +Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. \ No newline at end of file diff --git a/docs/admin_api/media_admin_api.md b/docs/admin_api/media_admin_api.md index 30833f3109..1177711c1e 100644 --- a/docs/admin_api/media_admin_api.md +++ b/docs/admin_api/media_admin_api.md @@ -46,6 +46,14 @@ to any local media, and any locally-cached copies of remote media. The media file itself (and any thumbnails) is not deleted from the server. +Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media +is quarantined, Synapse will: + + - Quarantine any media with a matching hash that has already been uploaded. + - Quarantine any future media. + - Quarantine any existing cached remote media. + - Quarantine any future remote media. + ## Quarantining media by ID This API quarantines a single piece of local or remote media. From 1deb6e03e0388c2acc76d74257968253107c40a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 14:12:58 +0200 Subject: [PATCH 21/70] Bump pyo3-log from 0.12.1 to 0.12.2 (#18269) Bumps [pyo3-log](https://github.com/vorner/pyo3-log) from 0.12.1 to 0.12.2.
Changelog

Sourced from pyo3-log's changelog.

0.12.2

  • Allow pyo3 0.24.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pyo3-log&package-manager=cargo&previous-version=0.12.1&new-version=0.12.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 20110694a5..1b17e9910a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -316,9 +316,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.12.1" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be5bb22b77965a7b5394e9aae9897a0607b51df5167561ffc3b02643b4200bc7" +checksum = "4b78e4983ba15bc62833a0e0941d965bc03690163f1127864f1408db25063466" dependencies = [ "arc-swap", "log", From cf02b8fea5659154274a50d98831d1e8e71cc758 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 12:31:59 +0000 Subject: [PATCH 22/70] Bump actions/setup-python from 5.4.0 to 5.5.0 (#18298) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5.4.0 to 5.5.0.
Release notes

Sourced from actions/setup-python's releases.

v5.5.0

What's Changed

Enhancements:

Bug fixes:

Dependency updates:

New Contributors

Full Changelog: https://github.com/actions/setup-python/compare/v5...v5.5.0

Commits
  • 8d9ed9a Add e2e Testing for free threaded and Bump @​action/cache from 4.0.0 to 4.0.3 ...
  • 19e4675 Add support for .tool-versions file in setup-python (#1043)
  • 6fd11e1 Bump @​actions/glob from 0.4.0 to 0.5.0 (#1015)
  • 9e62be8 Support free threaded Python versions like '3.13t' (#973)
  • 6ca8e85 Bump @​vercel/ncc from 0.38.1 to 0.38.3 (#1016)
  • 8039c45 fix: install PyPy on Linux ARM64 (#1011)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-python&package-manager=github_actions&previous-version=5.4.0&new-version=5.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs-pr.yaml | 2 +- .github/workflows/docs.yaml | 2 +- .github/workflows/latest_deps.yml | 2 +- .github/workflows/poetry_lockfile.yaml | 2 +- .github/workflows/release-artifacts.yml | 8 ++++---- .github/workflows/tests.yml | 12 ++++++------ 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 1b416407d8..4a865930ac 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -24,7 +24,7 @@ jobs: mdbook-version: '0.4.17' - name: Setup python - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 121c29571a..05ae608d06 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -64,7 +64,7 @@ jobs: run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js - name: Setup python - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index b7c1b727c9..e37e0c8598 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -86,7 +86,7 @@ jobs: -e POSTGRES_PASSWORD=postgres \ -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \ postgres:${{ matrix.postgres-version }} - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - run: pip install .[all,test] diff --git 
a/.github/workflows/poetry_lockfile.yaml b/.github/workflows/poetry_lockfile.yaml index b3251d379e..31b9147e98 100644 --- a/.github/workflows/poetry_lockfile.yaml +++ b/.github/workflows/poetry_lockfile.yaml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: '3.x' - run: pip install tomli diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index e9e2ac2606..9fd38e49a6 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -28,7 +28,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: '3.x' - id: set-distros @@ -74,7 +74,7 @@ jobs: ${{ runner.os }}-buildx- - name: Set up python - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: '3.x' @@ -132,7 +132,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: # setup-python@v4 doesn't impose a default python version. Need to use 3.x # here, because `python` on osx points to Python 2.7. 
@@ -177,7 +177,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: '3.10' diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4193d3e2f9..9df1ec506b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -102,7 +102,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'" @@ -112,7 +112,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - run: .ci/scripts/check_lockfile.py @@ -192,7 +192,7 @@ jobs: with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 0 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - run: "pip install 'towncrier>=18.6.0rc1'" @@ -279,7 +279,7 @@ jobs: if: ${{ needs.changes.outputs.linting_readme == 'true' }} steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - run: "pip install rstcheck" @@ -327,7 +327,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - id: get-matrix @@ -414,7 +414,7 @@ jobs: sudo apt-get -qq install build-essential libffi-dev python3-dev \ libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev - - uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: '3.9' From f638a76ba4ef242794493146b944529d3a2bd5ad Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 1 Apr 2025 08:32:34 -0400 Subject: [PATCH 23/70] Avoid relying on rsync during Docker build (#18287) Use targeted COPY commands instead of rsync to avoid having a symlinked /lib as the destination of a COPY (which buildkit does not support). ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/18287.docker | 1 + docker/Dockerfile | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) create mode 100644 changelog.d/18287.docker diff --git a/changelog.d/18287.docker b/changelog.d/18287.docker new file mode 100644 index 0000000000..ef45ad72ba --- /dev/null +++ b/changelog.d/18287.docker @@ -0,0 +1 @@ +Avoid needing to download & use rsync in a build layer. diff --git a/docker/Dockerfile b/docker/Dockerfile index 54aa355370..15c458fa28 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -134,7 +134,6 @@ RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ apt-get update -qq && \ - apt-get install -y --no-install-recommends rsync && \ apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \ curl \ gosu \ @@ -152,10 +151,6 @@ RUN \ done # Extract the debs for each architecture -# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the -# libraries to the right place, else the `COPY` won't work. -# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is -# already present in the runtime image. 
RUN \ for arch in arm64 amd64; do \ mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \ @@ -165,8 +160,6 @@ RUN \ dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \ dpkg --extract $deb /install-${arch}; \ done; \ - rsync -avr /install-${arch}/lib/ /install-${arch}/usr/lib; \ - rm -rf /install-${arch}/lib /install-${arch}/lib64; \ done @@ -183,7 +176,14 @@ LABEL org.opencontainers.image.documentation='https://github.com/element-hq/syna LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git' LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later' -COPY --from=runtime-deps /install-${TARGETARCH} / +# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the +# libraries to the right place, else the `COPY` won't work. +# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is +# already present in the runtime image. +COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib +COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc +COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr +COPY --from=runtime-deps /install-${TARGETARCH}/var /var COPY --from=builder /install /usr/local COPY ./docker/start.py /start.py COPY ./docker/conf /conf From 90f346183a267723c750f3f9bfe0d492cefc1cea Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 1 Apr 2025 08:32:56 -0400 Subject: [PATCH 24/70] Use uv pip to install supervisor in workers image (#18275) --- changelog.d/18275.docker | 1 + docker/Dockerfile-workers | 11 ++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18275.docker diff --git a/changelog.d/18275.docker b/changelog.d/18275.docker new file mode 100644 index 0000000000..154226e5f6 --- /dev/null +++ b/changelog.d/18275.docker @@ -0,0 +1 @@ +Use uv pip to install supervisor in the worker image. 
diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index 3dec4bba05..dd0bf59994 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -27,10 +27,15 @@ FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM - # Install supervisord with pip instead of apt, to avoid installing a second + # Install supervisord with uv pip instead of apt, to avoid installing a second # copy of python. - RUN --mount=type=cache,target=/root/.cache/pip \ - pip install supervisor~=4.2 + # --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache + # (mounted as --mount=type=cache) and the target directory. + RUN \ + --mount=type=bind,from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/uv \ + --mount=type=cache,target=/root/.cache/uv \ + /uv pip install --link-mode=copy --prefix="/usr/local" supervisor~=4.2 + RUN mkdir -p /etc/supervisor/conf.d # Copy over redis and nginx From 2f812c2eb61c147b3ca8db47ed56725a5333a74e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 12:42:01 +0000 Subject: [PATCH 25/70] Bump jinja2 from 3.1.5 to 3.1.6 (#18223) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.5 to 3.1.6.
Release notes

Sourced from jinja2's releases.

3.1.6

This is the Jinja 3.1.6 security release, which fixes security issues but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.

PyPI: https://pypi.org/project/Jinja2/3.1.6/ Changes: https://jinja.palletsprojects.com/en/stable/changes/#version-3-1-6

Changelog

Sourced from jinja2's changelog.

Version 3.1.6

Released 2025-03-05

  • The |attr filter does not bypass the environment's attribute lookup, allowing the sandbox to apply its checks. :ghsa:cpwx-vrp4-4pq7
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=jinja2&package-manager=pip&previous-version=3.1.5&new-version=3.1.6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/element-hq/synapse/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index cfbfdc427a..236ff892ad 100644 --- a/poetry.lock +++ b/poetry.lock @@ -943,14 +943,14 @@ trio = ["async_generator ; python_version == \"3.6\"", "trio"] [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] From 7ace290f07c7e5a7ec6c6c7de4447ead62d55b7a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 13:13:58 +0000 Subject: [PATCH 26/70] Bump actions/add-to-project from f5473ace9aeee8b97717b281e26980aa5097023f to 280af8ae1f83a494cfad2cb10f02f6d13529caa9 (#18303) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/add-to-project](https://github.com/actions/add-to-project) from f5473ace9aeee8b97717b281e26980aa5097023f to 280af8ae1f83a494cfad2cb10f02f6d13529caa9.
Commits
  • 280af8a Merge pull request #688 from actions/dependabot/npm_and_yarn/vercel/ncc-0.38.3
  • a5abfeb Update licensed cache and dist/ directory
  • f30c2e6 Bump @​vercel/ncc from 0.38.1 to 0.38.3
  • 81dd5ce Merge pull request #687 from actions/dependabot/npm_and_yarn/types/jest-29.5.14
  • 122a803 Bump @​types/jest from 29.5.12 to 29.5.14
  • 29c72ac Merge pull request #686 from actions/dependabot/npm_and_yarn/types/node-22.13.14
  • 46316d9 Bump @​types/node from 16.18.101 to 22.13.14
  • 95df5ae Merge pull request #685 from actions/dependabot/npm_and_yarn/eslint-plugin-je...
  • f14f229 Bump eslint-plugin-jest from 28.6.0 to 28.11.0
  • cc69618 Exit without failure if nothing to commit
  • Additional commits viewable in compare view

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/triage_labelled.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml index c08c674c88..feab5906e0 100644 --- a/.github/workflows/triage_labelled.yml +++ b/.github/workflows/triage_labelled.yml @@ -11,7 +11,7 @@ jobs: if: > contains(github.event.issue.labels.*.name, 'X-Needs-Info') steps: - - uses: actions/add-to-project@f5473ace9aeee8b97717b281e26980aa5097023f # main (v1.0.2 + 10 commits) + - uses: actions/add-to-project@280af8ae1f83a494cfad2cb10f02f6d13529caa9 # main (v1.0.2 + 10 commits) id: add_project with: project-url: "https://github.com/orgs/matrix-org/projects/67" From 80b62d7903eddf5abbb92afb849ed12d49269334 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 13:30:44 +0000 Subject: [PATCH 27/70] Bump actions/upload-artifact from 4.6.1 to 4.6.2 (#18304) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.6.1 to 4.6.2.
Release notes

Sourced from actions/upload-artifact's releases.

v4.6.2

What's Changed

New Contributors

Full Changelog: https://github.com/actions/upload-artifact/compare/v4...v4.6.2

Commits
  • ea165f8 Merge pull request #685 from salmanmkc/salmanmkc/3-new-upload-artifacts-release
  • 0839620 Prepare for new release of actions/upload-artifact with new toolkit cache ver...
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/upload-artifact&package-manager=github_actions&previous-version=4.6.1&new-version=4.6.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/docs-pr.yaml | 2 +- .github/workflows/latest_deps.yml | 2 +- .github/workflows/release-artifacts.yml | 6 +++--- .github/workflows/tests.yml | 4 ++-- .github/workflows/twisted_trunk.yml | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docs-pr.yaml b/.github/workflows/docs-pr.yaml index 4a865930ac..616ef0f9cf 100644 --- a/.github/workflows/docs-pr.yaml +++ b/.github/workflows/docs-pr.yaml @@ -39,7 +39,7 @@ jobs: cp book/welcome_and_overview.html book/index.html - name: Upload Artifact - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: book path: book diff --git a/.github/workflows/latest_deps.yml b/.github/workflows/latest_deps.yml index e37e0c8598..e7378ec0d3 100644 --- a/.github/workflows/latest_deps.yml +++ b/.github/workflows/latest_deps.yml @@ -164,7 +164,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 9fd38e49a6..573264229f 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -101,7 +101,7 @@ jobs: echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT" - name: Upload debs as artifacts - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: debs-${{ 
steps.artifact-name.outputs.ARTIFACT_NAME }} path: debs/* @@ -165,7 +165,7 @@ jobs: CARGO_NET_GIT_FETCH_WITH_CLI: true CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI - - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: Wheel-${{ matrix.os }}-${{ matrix.arch }} path: ./wheelhouse/*.whl @@ -186,7 +186,7 @@ jobs: - name: Build sdist run: python -m build --sdist - - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: Sdist path: dist/*.tar.gz diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9df1ec506b..bb2e80a908 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -529,7 +529,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }}) @@ -627,7 +627,7 @@ jobs: PGPASSWORD: postgres PGDATABASE: postgres - name: "Upload schema differences" - uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }} with: name: Schema dumps diff --git a/.github/workflows/twisted_trunk.yml b/.github/workflows/twisted_trunk.yml index aac1e350a2..0176f17401 100644 --- a/.github/workflows/twisted_trunk.yml +++ b/.github/workflows/twisted_trunk.yml @@ -138,7 +138,7 @@ jobs: if: ${{ always() }} run: /sytest/scripts/tap_to_gha.pl /logs/results.tap - name: Upload SyTest logs - uses: 
actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 if: ${{ always() }} with: name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) From 1709234311d2395d4dd432b997db387bade1c677 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Tue, 1 Apr 2025 13:31:19 +0000 Subject: [PATCH 28/70] Add an access token introspection cache to make Matrix Authentication Service integration (MSC3861) more efficient. (#18231) Evolution of https://github.com/element-hq/synapse/commit/cd78f3d2ee15ccf3e8229a1f529e0e2c16e15c45 This cache does not have any explicit invalidation, but this is deemed acceptable (see code comment). We may still prefer to add it eventually, letting us bump up the Time-To-Live (TTL) on the cache as we currently set a 2 minute expiry to balance the fact that we have no explicit invalidation. This cache makes several things more efficient: - reduces number of outbound requests from Synapse, reducing CPU utilisation + network I/O - reduces request handling time in Synapse, which improves client-visible latency - reduces load on MAS and its database --- Other than that, this PR also introduces support for `expires_in` (seconds) on the introspection response. This lets the cached responses expire at the proper expiry time of the access token, whilst avoiding clock skew issues. 
Corresponds to: https://github.com/element-hq/matrix-authentication-service/pull/4241 --------- Signed-off-by: Olivier 'reivilibre --- changelog.d/18231.feature | 1 + synapse/api/auth/msc3861_delegated.py | 113 ++++++++++++++++++++---- tests/handlers/test_oauth_delegation.py | 38 ++++++++ 3 files changed, 135 insertions(+), 17 deletions(-) create mode 100644 changelog.d/18231.feature diff --git a/changelog.d/18231.feature b/changelog.d/18231.feature new file mode 100644 index 0000000000..7fa65e4fa6 --- /dev/null +++ b/changelog.d/18231.feature @@ -0,0 +1 @@ +Add an access token introspection cache to make Matrix Authentication Service integration (MSC3861) more efficient. \ No newline at end of file diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index e6bf271a1f..74e526123f 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -19,6 +19,7 @@ # # import logging +from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional from urllib.parse import urlencode @@ -47,6 +48,7 @@ from synapse.logging.context import make_deferred_yieldable from synapse.types import Requester, UserID, create_requester from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall +from synapse.util.caches.response_cache import ResponseCache if TYPE_CHECKING: from synapse.rest.admin.experimental_features import ExperimentalFeature @@ -76,6 +78,61 @@ def scope_to_list(scope: str) -> List[str]: return scope.strip().split(" ") +@dataclass +class IntrospectionResult: + _inner: IntrospectionToken + + # when we retrieved this token, + # in milliseconds since the Unix epoch + retrieved_at_ms: int + + def is_active(self, now_ms: int) -> bool: + if not self._inner.get("active"): + return False + + expires_in = self._inner.get("expires_in") + if expires_in is None: + return True + if not isinstance(expires_in, int): + raise 
InvalidClientTokenError("token `expires_in` is not an int") + + absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms + return now_ms < absolute_expiry_ms + + def get_scope_list(self) -> List[str]: + value = self._inner.get("scope") + if not isinstance(value, str): + return [] + return scope_to_list(value) + + def get_sub(self) -> Optional[str]: + value = self._inner.get("sub") + if not isinstance(value, str): + return None + return value + + def get_username(self) -> Optional[str]: + value = self._inner.get("username") + if not isinstance(value, str): + return None + return value + + def get_name(self) -> Optional[str]: + value = self._inner.get("name") + if not isinstance(value, str): + return None + return value + + def get_device_id(self) -> Optional[str]: + value = self._inner.get("device_id") + if value is not None and not isinstance(value, str): + raise AuthError( + 500, + "Invalid device ID in introspection result", + ) + return value + + class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc] """An implementation of the private_key_jwt client auth method that includes a kid header. @@ -121,6 +178,31 @@ class MSC3861DelegatedAuth(BaseAuth): self._hostname = hs.hostname self._admin_token: Callable[[], Optional[str]] = self._config.admin_token + # # Token Introspection Cache + # This remembers what users/devices are represented by which access tokens, + # in order to reduce overall system load: + # - on Synapse (as requests are relatively expensive) + # - on the network + # - on MAS + # + # Since there is no invalidation mechanism currently, + # the entries expire after 2 minutes. + # This does mean tokens can be treated as valid by Synapse + # for longer than reality. + # + # Ideally, tokens should logically be invalidated in the following circumstances: + # - If a session logout happens. + # In this case, MAS will delete the device within Synapse + # anyway and this is good enough as an invalidation. 
+ # - If the client refreshes their token in MAS. + # In this case, the device still exists and it's not the end of the world for + # the old access token to continue working for a short time. + self._introspection_cache: ResponseCache[str] = ResponseCache( + self._clock, + "token_introspection", + timeout_ms=120_000, + ) + self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata]( self._load_metadata ) @@ -193,7 +275,7 @@ class MSC3861DelegatedAuth(BaseAuth): metadata = await self._issuer_metadata.get() return metadata.get("introspection_endpoint") - async def _introspect_token(self, token: str) -> IntrospectionToken: + async def _introspect_token(self, token: str) -> IntrospectionResult: """ Send a token to the introspection endpoint and returns the introspection response @@ -266,7 +348,9 @@ class MSC3861DelegatedAuth(BaseAuth): "The introspection endpoint returned an invalid JSON response." ) - return IntrospectionToken(**resp) + return IntrospectionResult( + IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec() + ) async def is_server_admin(self, requester: Requester) -> bool: return "urn:synapse:admin:*" in requester.scope @@ -344,7 +428,9 @@ class MSC3861DelegatedAuth(BaseAuth): ) try: - introspection_result = await self._introspect_token(token) + introspection_result = await self._introspection_cache.wrap( + token, self._introspect_token, token + ) except Exception: logger.exception("Failed to introspect token") raise SynapseError(503, "Unable to introspect the access token") @@ -353,11 +439,11 @@ class MSC3861DelegatedAuth(BaseAuth): # TODO: introspection verification should be more extensive, especially: # - verify the audience - if not introspection_result.get("active"): + if not introspection_result.is_active(self._clock.time_msec()): raise InvalidClientTokenError("Token is not active") # Let's look at the scope - scope: List[str] = scope_to_list(introspection_result.get("scope", "")) + scope: List[str] = 
introspection_result.get_scope_list() # Determine type of user based on presence of particular scopes has_user_scope = SCOPE_MATRIX_API in scope @@ -367,7 +453,7 @@ class MSC3861DelegatedAuth(BaseAuth): raise InvalidClientTokenError("No scope in token granting user rights") # Match via the sub claim - sub: Optional[str] = introspection_result.get("sub") + sub: Optional[str] = introspection_result.get_sub() if sub is None: raise InvalidClientTokenError( "Invalid sub claim in the introspection result" @@ -381,7 +467,7 @@ class MSC3861DelegatedAuth(BaseAuth): # or the external_id was never recorded # TODO: claim mapping should be configurable - username: Optional[str] = introspection_result.get("username") + username: Optional[str] = introspection_result.get_username() if username is None or not isinstance(username, str): raise AuthError( 500, @@ -399,7 +485,7 @@ class MSC3861DelegatedAuth(BaseAuth): # TODO: claim mapping should be configurable # If present, use the name claim as the displayname - name: Optional[str] = introspection_result.get("name") + name: Optional[str] = introspection_result.get_name() await self.store.register_user( user_id=user_id.to_string(), create_profile_with_displayname=name @@ -414,15 +500,8 @@ class MSC3861DelegatedAuth(BaseAuth): # MAS 0.15+ will give us the device ID as an explicit value for compatibility sessions # If present, we get it from here, if not we get it in thee scope - device_id = introspection_result.get("device_id") - if device_id is not None: - # We got the device ID explicitly, just sanity check that it's a string - if not isinstance(device_id, str): - raise AuthError( - 500, - "Invalid device ID in introspection result", - ) - else: + device_id = introspection_result.get_device_id() + if device_id is None: # Find device_ids in scope # We only allow a single device_id in the scope, so we find them all in the # scope list, and raise if there are more than one. 
The OIDC server should be diff --git a/tests/handlers/test_oauth_delegation.py b/tests/handlers/test_oauth_delegation.py index 5f8c25557a..034a1594d9 100644 --- a/tests/handlers/test_oauth_delegation.py +++ b/tests/handlers/test_oauth_delegation.py @@ -539,6 +539,44 @@ class MSC3861OAuthDelegation(HomeserverTestCase): error = self.get_failure(self.auth.get_user_by_req(request), SynapseError) self.assertEqual(error.value.code, 503) + def test_cached_expired_introspection(self) -> None: + """The handler should raise an error if the introspection response gives + an expiry time, the introspection response is cached and then the entry is + re-requested after it has expired.""" + + self.http_client.request = introspection_mock = AsyncMock( + return_value=FakeResponse.json( + code=200, + payload={ + "active": True, + "sub": SUBJECT, + "scope": " ".join( + [ + MATRIX_USER_SCOPE, + f"{MATRIX_DEVICE_SCOPE_PREFIX}AABBCC", + ] + ), + "username": USERNAME, + "expires_in": 60, + }, + ) + ) + request = Mock(args={}) + request.args[b"access_token"] = [b"mockAccessToken"] + request.requestHeaders.getRawHeaders = mock_getRawHeaders() + + # The first CS-API request causes a successful introspection + self.get_success(self.auth.get_user_by_req(request)) + self.assertEqual(introspection_mock.call_count, 1) + + # Sleep for 60 seconds so the token expires. + self.reactor.advance(60.0) + + # Now the CS-API request fails because the token expired + self.get_failure(self.auth.get_user_by_req(request), InvalidClientTokenError) + # Ensure another introspection request was not sent + self.assertEqual(introspection_mock.call_count, 1) + def make_device_keys(self, user_id: str, device_id: str) -> JsonDict: # We only generate a master key to simplify the test. 
master_signing_key = generate_signing_key(device_id) From 87d374c639f82bd4bd398e0b0345f2c7b73031aa Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 1 Apr 2025 09:36:13 -0400 Subject: [PATCH 29/70] Tweaks to prefix-log (#18274) - Explicitly use `mawk` instead of `awk`, since an extension of the former is used - Use `fflush` to reduce interleaving the output of different processes & streams - Move the `mawk` command to a shell function, instead of writing it twice - Look up the `SUPERVISOR_PROCESS_NAME` environment variable in `mawk`, instead of reading it in the shell & using complex quoting to pass it to `mawk` ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Quentin Gliech --- changelog.d/18274.docker | 1 + docker/prefix-log | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18274.docker diff --git a/changelog.d/18274.docker b/changelog.d/18274.docker new file mode 100644 index 0000000000..57b81d8f4e --- /dev/null +++ b/changelog.d/18274.docker @@ -0,0 +1 @@ +Make some improvements to the prefix-log script in the workers image. diff --git a/docker/prefix-log b/docker/prefix-log index 32dddbbfd4..2a38de5686 100755 --- a/docker/prefix-log +++ b/docker/prefix-log @@ -10,6 +10,9 @@ # '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on # stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce # confusion due to to interleaving of the different processes. -exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1) -exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2) +prefixer() { + mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }' +} +exec 1> >(prefixer) +exec 2> >(prefixer >&2) exec "$@" From 8eb991b746731d72891cf28ad298d527d28c724e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 15:36:25 +0200 Subject: [PATCH 30/70] Bump authlib from 1.4.1 to 1.5.1 (#18306) Bumps [authlib](https://github.com/lepture/authlib) from 1.4.1 to 1.5.1.
Release notes

Sourced from authlib's releases.

Version 1.5.1

Released on Feb 28, 2025

  • Fix RFC9207 iss parameter. #715

Version 1.5.0

  • Fix token introspection auth method for clients. #662
  • Optional typ claim in JWT tokens. #696
  • JWT validation leeway. #689
  • Implement server-side RFC9207. #700 #701
  • generate_id_token can take a kid parameter. #702
  • More detailed InvalidClientError. #706
  • OpenID Connect Dynamic Client Registration implementation. #707
Changelog

Sourced from authlib's changelog.

Version 1.5.1

Released on Feb 28, 2025

  • Fix RFC9207 iss parameter. :pr:715

Version 1.5.0

Released on Feb 25, 2025

  • Fix token introspection auth method for clients. :pr:662
  • Optional typ claim in JWT tokens. :pr:696
  • JWT validation leeway. :pr:689
  • Implement server-side :rfc:RFC9207 <9207>. :issue:700 :pr:701
  • generate_id_token can take a kid parameter. :pr:702
  • More detailed InvalidClientError. :pr:706
  • OpenID Connect Dynamic Client Registration implementation. :pr:707
Commits
  • 4eafdc2 chore: release 1.5.1
  • 0e7e344 Merge pull request #715 from azmeuk/rfc9207
  • b57932b fix: RFC9207 iss parameter
  • 7833a88 Merge pull request #713 from geigerzaehler/full-entropy
  • 642dfa3 doc: fix an example import for rfc9207
  • 5c507a8 fix: Use full entropy from specified oct key size
  • 2d0396e chore: release 1.5.0
  • da87c8b doc: update changelog
  • b79d868 Merge pull request #662 from AdamWill/oauth2-fix-introspect-endpoint
  • 24c2bd8 chore: add a dependency group for the documentation
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=authlib&package-manager=pip&previous-version=1.4.1&new-version=1.5.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 236ff892ad..7bff50e153 100644 --- a/poetry.lock +++ b/poetry.lock @@ -34,15 +34,15 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a [[package]] name = "authlib" -version = "1.4.1" +version = "1.5.1" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = true python-versions = ">=3.9" groups = ["main"] markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\"" files = [ - {file = "Authlib-1.4.1-py2.py3-none-any.whl", hash = "sha256:edc29c3f6a3e72cd9e9f45fff67fc663a2c364022eb0371c003f22d5405915c1"}, - {file = "authlib-1.4.1.tar.gz", hash = "sha256:30ead9ea4993cdbab821dc6e01e818362f92da290c04c7f6a1940f86507a790d"}, + {file = "authlib-1.5.1-py2.py3-none-any.whl", hash = "sha256:8408861cbd9b4ea2ff759b00b6f02fd7d81ac5a56d0b2b22c08606c6049aae11"}, + {file = "authlib-1.5.1.tar.gz", hash = "sha256:5cbc85ecb0667312c1cdc2f9095680bb735883b123fb509fde1e65b1c5df972e"}, ] [package.dependencies] From fdbcb821ff7c11b091e60563d57dc5a4ad9a0372 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Apr 2025 13:56:32 +0000 Subject: [PATCH 31/70] Bump phonenumbers from 8.13.50 to 9.0.2 (#18299) Bumps [phonenumbers](https://github.com/daviddrysdale/python-phonenumbers) from 8.13.50 to 9.0.2.
Commits
  • 73ef5e6 Prep for 9.0.2 release
  • 528a98b Generated files for metadata
  • 28f5958 Merge metadata changes from upstream 9.0.2
  • 25ae49c Prep for 9.0.1 release
  • b8a1459 Generated files for metadata
  • f6cd233 Merge metadata changes from upstream 9.0.1
  • c46f104 Prep for 9.0.0 release
  • d542ec2 Generated files for metadata
  • a4da80e Merge metadata changes from upstream 9.0.0
  • 45c822e Prep for 8.13.55 release
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=phonenumbers&package-manager=pip&previous-version=8.13.50&new-version=9.0.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7bff50e153..2bf511e8a6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1588,14 +1588,14 @@ dev = ["jinja2"] [[package]] name = "phonenumbers" -version = "8.13.50" +version = "9.0.2" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." optional = false python-versions = "*" groups = ["main"] files = [ - {file = "phonenumbers-8.13.50-py2.py3-none-any.whl", hash = "sha256:bb95dbc0d9979c51f7ad94bcd780784938958861fbb4b75a2fe39ccd3d58954a"}, - {file = "phonenumbers-8.13.50.tar.gz", hash = "sha256:e05ac6fb7b98c6d719a87ea895b9fc153673b4a51f455ec9afaf557ef4629da6"}, + {file = "phonenumbers-9.0.2-py2.py3-none-any.whl", hash = "sha256:dbcec6bdfdf3973f60b81dc0fcac3f7b1638f877ac42da4d7b46724ed413e2b9"}, + {file = "phonenumbers-9.0.2.tar.gz", hash = "sha256:f590ee2b729bdd9873ca2d52989466add14c9953b48805c0aeb408348d4d6224"}, ] [[package]] From fa53a8512ac58419b87955e155a1b80aa6d0419a Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 1 Apr 2025 16:21:35 +0200 Subject: [PATCH 32/70] Make sure media hashes are not queried until the index is up (#18302) --- changelog.d/18302.feature | 1 + synapse/storage/databases/main/media_repository.py | 9 +++++++++ .../storage/schema/main/delta/91/01_media_hash.sql | 13 ++++++++++--- 3 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18302.feature diff --git a/changelog.d/18302.feature b/changelog.d/18302.feature new file mode 100644 index 0000000000..3604e732d5 --- /dev/null +++ b/changelog.d/18302.feature @@ -0,0 +1 @@ +Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. 
\ No newline at end of file diff --git a/synapse/storage/databases/main/media_repository.py b/synapse/storage/databases/main/media_repository.py index 8a5ba44922..04866524e3 100644 --- a/synapse/storage/databases/main/media_repository.py +++ b/synapse/storage/databases/main/media_repository.py @@ -999,6 +999,15 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore): None if the media_id doesn't exist. """ + # If we don't have the index yet, performance tanks, so we return False. + # In the background updates, remote_media_cache_sha256_idx is created + # after local_media_repository_sha256_idx, which is why we only need to + # check for the completion of the former. + if not await self.db_pool.updates.has_completed_background_update( + "remote_media_cache_sha256_idx" + ): + return False + def get_matching_media_txn( txn: LoggingTransaction, table: str, sha256: str ) -> bool: diff --git a/synapse/storage/schema/main/delta/91/01_media_hash.sql b/synapse/storage/schema/main/delta/91/01_media_hash.sql index 2dbd2c7df7..34a372f1ed 100644 --- a/synapse/storage/schema/main/delta/91/01_media_hash.sql +++ b/synapse/storage/schema/main/delta/91/01_media_hash.sql @@ -16,6 +16,13 @@ ALTER TABLE local_media_repository ADD COLUMN sha256 TEXT; ALTER TABLE remote_media_cache ADD COLUMN sha256 TEXT; -- Add a background updates to handle creating the new index. -INSERT INTO background_updates (ordering, update_name, progress_json) VALUES - (9101, 'local_media_repository_sha256_idx', '{}'), - (9101, 'remote_media_cache_sha256_idx', '{}'); \ No newline at end of file +-- +-- Note that the ordering of the update is not following the usual scheme. 
This +-- is because when upgrading from Synapse 1.127, this index is fairly important +-- to have up quickly, so that it doesn't tank performance, which is why it is +-- scheduled before other background updates in the 1.127 -> 1.128 upgrade +INSERT INTO + background_updates (ordering, update_name, progress_json) +VALUES + (8890, 'local_media_repository_sha256_idx', '{}'), + (8891, 'remote_media_cache_sha256_idx', '{}'); From 01efc49554dec58aa27fc29e538bde56f4ef2617 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 1 Apr 2025 16:41:42 +0200 Subject: [PATCH 33/70] 1.128.0rc1 --- CHANGES.md | 52 +++++++++++++++++++++++++++++++++++++++ changelog.d/18068.misc | 1 - changelog.d/18074.bugfix | 1 - changelog.d/18225.doc | 1 - changelog.d/18231.feature | 1 - changelog.d/18251.misc | 1 - changelog.d/18254.feature | 1 - changelog.d/18255.misc | 1 - changelog.d/18271.docker | 1 - changelog.d/18272.docker | 1 - changelog.d/18273.docker | 1 - changelog.d/18274.docker | 1 - changelog.d/18275.docker | 1 - changelog.d/18276.doc | 1 - changelog.d/18277.feature | 1 - changelog.d/18283.doc | 1 - changelog.d/18284.misc | 1 - changelog.d/18287.docker | 1 - changelog.d/18296.doc | 1 - changelog.d/18302.feature | 1 - debian/changelog | 5 ++-- pyproject.toml | 2 +- 22 files changed, 56 insertions(+), 22 deletions(-) delete mode 100644 changelog.d/18068.misc delete mode 100644 changelog.d/18074.bugfix delete mode 100644 changelog.d/18225.doc delete mode 100644 changelog.d/18231.feature delete mode 100644 changelog.d/18251.misc delete mode 100644 changelog.d/18254.feature delete mode 100644 changelog.d/18255.misc delete mode 100644 changelog.d/18271.docker delete mode 100644 changelog.d/18272.docker delete mode 100644 changelog.d/18273.docker delete mode 100644 changelog.d/18274.docker delete mode 100644 changelog.d/18275.docker delete mode 100644 changelog.d/18276.doc delete mode 100644 changelog.d/18277.feature delete mode 100644 changelog.d/18283.doc delete mode 100644 
changelog.d/18284.misc delete mode 100644 changelog.d/18287.docker delete mode 100644 changelog.d/18296.doc delete mode 100644 changelog.d/18302.feature diff --git a/CHANGES.md b/CHANGES.md index 0176c6e45d..e591deff67 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,55 @@ +# Synapse 1.128.0rc1 (2025-04-01) + +### Features + +- Add an access token introspection cache to make Matrix Authentication Service integration ([MSC3861](https://github.com/matrix-org/matrix-doc/pull/3861)) more efficient. ([\#18231](https://github.com/element-hq/synapse/issues/18231)) +- Add background job to clear unreferenced state groups. ([\#18254](https://github.com/element-hq/synapse/issues/18254)) +- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. ([\#18277](https://github.com/element-hq/synapse/issues/18277), [\#18302](https://github.com/element-hq/synapse/issues/18302)) + +### Bugfixes + +- Add index to sliding sync ([MSC4186](https://github.com/matrix-org/matrix-doc/pull/4186)) membership snapshot table, to fix a performance issue. ([\#18074](https://github.com/element-hq/synapse/issues/18074)) + +### Updates to the Docker image + +- Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with `:{arch}`. ([\#18271](https://github.com/element-hq/synapse/issues/18271)) +- Always specify base image debian versions with a build argument. ([\#18272](https://github.com/element-hq/synapse/issues/18272)) +- Allow passing arguments to `start_for_complement.sh (to be sent to `configure_workers_and_start.py`). ([\#18273](https://github.com/element-hq/synapse/issues/18273)) +- Make some improvements to the `prefix-log` script in the workers image. ([\#18274](https://github.com/element-hq/synapse/issues/18274)) +- Use `uv pip` to install `supervisor` in the worker image. 
([\#18275](https://github.com/element-hq/synapse/issues/18275)) +- Avoid needing to download & use `rsync` in a build layer. ([\#18287](https://github.com/element-hq/synapse/issues/18287)) + +### Improved Documentation + +- Fix how to obtain access token and change naming from riot to element ([\#18225](https://github.com/element-hq/synapse/issues/18225)) +- Correct a small typo in the SSO mapping providers documentation. ([\#18276](https://github.com/element-hq/synapse/issues/18276)) +- Add docs for how to clear out the Poetry wheel cache. ([\#18283](https://github.com/element-hq/synapse/issues/18283)) +- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. ([\#18296](https://github.com/element-hq/synapse/issues/18296)) + +### Internal Changes + +- Add a column `participant` to `room_memberships` table. ([\#18068](https://github.com/element-hq/synapse/issues/18068)) +- Update Poetry to 2.1.1, including updating the lock file version. ([\#18251](https://github.com/element-hq/synapse/issues/18251)) +- Pin GitHub Actions dependencies by commit hash. ([\#18255](https://github.com/element-hq/synapse/issues/18255)) +- Add DB delta to remove the old state group deletion job. ([\#18284](https://github.com/element-hq/synapse/issues/18284)) + + + +### Updates to locked dependencies + +* Bump actions/add-to-project from f5473ace9aeee8b97717b281e26980aa5097023f to 280af8ae1f83a494cfad2cb10f02f6d13529caa9. ([\#18303](https://github.com/element-hq/synapse/issues/18303)) +* Bump actions/cache from 4.2.2 to 4.2.3. ([\#18266](https://github.com/element-hq/synapse/issues/18266)) +* Bump actions/download-artifact from 4.2.0 to 4.2.1. ([\#18268](https://github.com/element-hq/synapse/issues/18268)) +* Bump actions/setup-python from 5.4.0 to 5.5.0. ([\#18298](https://github.com/element-hq/synapse/issues/18298)) +* Bump actions/upload-artifact from 4.6.1 to 4.6.2. 
([\#18304](https://github.com/element-hq/synapse/issues/18304)) +* Bump authlib from 1.4.1 to 1.5.1. ([\#18306](https://github.com/element-hq/synapse/issues/18306)) +* Bump dawidd6/action-download-artifact from 8 to 9. ([\#18204](https://github.com/element-hq/synapse/issues/18204)) +* Bump jinja2 from 3.1.5 to 3.1.6. ([\#18223](https://github.com/element-hq/synapse/issues/18223)) +* Bump log from 0.4.26 to 0.4.27. ([\#18267](https://github.com/element-hq/synapse/issues/18267)) +* Bump phonenumbers from 8.13.50 to 9.0.2. ([\#18299](https://github.com/element-hq/synapse/issues/18299)) +* Bump pygithub from 2.5.0 to 2.6.1. ([\#18243](https://github.com/element-hq/synapse/issues/18243)) +* Bump pyo3-log from 0.12.1 to 0.12.2. ([\#18269](https://github.com/element-hq/synapse/issues/18269)) + # Synapse 1.127.1 (2025-03-26) ## Security diff --git a/changelog.d/18068.misc b/changelog.d/18068.misc deleted file mode 100644 index af6f78f549..0000000000 --- a/changelog.d/18068.misc +++ /dev/null @@ -1 +0,0 @@ -Add a column `participant` to `room_memberships` table. \ No newline at end of file diff --git a/changelog.d/18074.bugfix b/changelog.d/18074.bugfix deleted file mode 100644 index 03e0b95988..0000000000 --- a/changelog.d/18074.bugfix +++ /dev/null @@ -1 +0,0 @@ -Add index to sliding sync membership snapshot table, to fix a performance issue. \ No newline at end of file diff --git a/changelog.d/18225.doc b/changelog.d/18225.doc deleted file mode 100644 index 854e275120..0000000000 --- a/changelog.d/18225.doc +++ /dev/null @@ -1 +0,0 @@ -Fix how to obtain access token and change naming from riot to element diff --git a/changelog.d/18231.feature b/changelog.d/18231.feature deleted file mode 100644 index 7fa65e4fa6..0000000000 --- a/changelog.d/18231.feature +++ /dev/null @@ -1 +0,0 @@ -Add an access token introspection cache to make Matrix Authentication Service integration (MSC3861) more efficient. 
\ No newline at end of file diff --git a/changelog.d/18251.misc b/changelog.d/18251.misc deleted file mode 100644 index 3b557a06c4..0000000000 --- a/changelog.d/18251.misc +++ /dev/null @@ -1 +0,0 @@ -Update Poetry to 2.1.1, including updating the lock file version. \ No newline at end of file diff --git a/changelog.d/18254.feature b/changelog.d/18254.feature deleted file mode 100644 index 62e1b79a15..0000000000 --- a/changelog.d/18254.feature +++ /dev/null @@ -1 +0,0 @@ -Add background job to clear unreferenced state groups. diff --git a/changelog.d/18255.misc b/changelog.d/18255.misc deleted file mode 100644 index 49a5b0e7b6..0000000000 --- a/changelog.d/18255.misc +++ /dev/null @@ -1 +0,0 @@ -Pin GitHub Actions dependencies by commit hash. \ No newline at end of file diff --git a/changelog.d/18271.docker b/changelog.d/18271.docker deleted file mode 100644 index fcb2ac1e00..0000000000 --- a/changelog.d/18271.docker +++ /dev/null @@ -1 +0,0 @@ -Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with ":{arch}". diff --git a/changelog.d/18272.docker b/changelog.d/18272.docker deleted file mode 100644 index ceec619b6c..0000000000 --- a/changelog.d/18272.docker +++ /dev/null @@ -1 +0,0 @@ -Always specify base image debian versions with a build argument. diff --git a/changelog.d/18273.docker b/changelog.d/18273.docker deleted file mode 100644 index 1da0c3efb1..0000000000 --- a/changelog.d/18273.docker +++ /dev/null @@ -1 +0,0 @@ -Allow passing arguments to start_for_complement.sh (to be sent to configure_workers_and_start.py). diff --git a/changelog.d/18274.docker b/changelog.d/18274.docker deleted file mode 100644 index 57b81d8f4e..0000000000 --- a/changelog.d/18274.docker +++ /dev/null @@ -1 +0,0 @@ -Make some improvements to the prefix-log script in the workers image. 
diff --git a/changelog.d/18275.docker b/changelog.d/18275.docker deleted file mode 100644 index 154226e5f6..0000000000 --- a/changelog.d/18275.docker +++ /dev/null @@ -1 +0,0 @@ -Use uv pip to install supervisor in the worker image. diff --git a/changelog.d/18276.doc b/changelog.d/18276.doc deleted file mode 100644 index cacf6daf76..0000000000 --- a/changelog.d/18276.doc +++ /dev/null @@ -1 +0,0 @@ -Correct a small typo in the SSO mapping providers documentation. \ No newline at end of file diff --git a/changelog.d/18277.feature b/changelog.d/18277.feature deleted file mode 100644 index 3604e732d5..0000000000 --- a/changelog.d/18277.feature +++ /dev/null @@ -1 +0,0 @@ -Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. \ No newline at end of file diff --git a/changelog.d/18283.doc b/changelog.d/18283.doc deleted file mode 100644 index 77fa08fa6b..0000000000 --- a/changelog.d/18283.doc +++ /dev/null @@ -1 +0,0 @@ -Add docs for how to clear out the Poetry wheel cache. diff --git a/changelog.d/18284.misc b/changelog.d/18284.misc deleted file mode 100644 index 69610adc65..0000000000 --- a/changelog.d/18284.misc +++ /dev/null @@ -1 +0,0 @@ -Add DB delta to remove the old state group deletion job. diff --git a/changelog.d/18287.docker b/changelog.d/18287.docker deleted file mode 100644 index ef45ad72ba..0000000000 --- a/changelog.d/18287.docker +++ /dev/null @@ -1 +0,0 @@ -Avoid needing to download & use rsync in a build layer. diff --git a/changelog.d/18296.doc b/changelog.d/18296.doc deleted file mode 100644 index 3604e732d5..0000000000 --- a/changelog.d/18296.doc +++ /dev/null @@ -1 +0,0 @@ -Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. 
\ No newline at end of file diff --git a/changelog.d/18302.feature b/changelog.d/18302.feature deleted file mode 100644 index 3604e732d5..0000000000 --- a/changelog.d/18302.feature +++ /dev/null @@ -1 +0,0 @@ -Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. \ No newline at end of file diff --git a/debian/changelog b/debian/changelog index 645001ca52..6e338ecd74 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,9 @@ -matrix-synapse-py3 (1.128.0~rc1+nmu1) UNRELEASED; urgency=medium +matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium * Update Poetry to 2.1.1. + * New synapse release 1.128.0rc1. - -- Synapse Packaging team Wed, 19 Mar 2025 17:38:49 +0000 + -- Synapse Packaging team Tue, 01 Apr 2025 14:35:33 +0000 matrix-synapse-py3 (1.127.1) stable; urgency=medium diff --git a/pyproject.toml b/pyproject.toml index e91a75445c..da469aab2c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.127.1" +version = "1.128.0rc1" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From b8b3896b1d6ce26525e5f4e8676914ff1f38ff88 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 1 Apr 2025 16:45:11 +0200 Subject: [PATCH 34/70] Fix rendering of the changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index e591deff67..c8d879dc3a 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -14,7 +14,7 @@ - Specify the architecture of installed packages via an APT config option, which is more reliable than appending package names with `:{arch}`. ([\#18271](https://github.com/element-hq/synapse/issues/18271)) - Always specify base image debian versions with a build argument. 
([\#18272](https://github.com/element-hq/synapse/issues/18272)) -- Allow passing arguments to `start_for_complement.sh (to be sent to `configure_workers_and_start.py`). ([\#18273](https://github.com/element-hq/synapse/issues/18273)) +- Allow passing arguments to `start_for_complement.sh` (to be sent to `configure_workers_and_start.py`). ([\#18273](https://github.com/element-hq/synapse/issues/18273)) - Make some improvements to the `prefix-log` script in the workers image. ([\#18274](https://github.com/element-hq/synapse/issues/18274)) - Use `uv pip` to install `supervisor` in the worker image. ([\#18275](https://github.com/element-hq/synapse/issues/18275)) - Avoid needing to download & use `rsync` in a build layer. ([\#18287](https://github.com/element-hq/synapse/issues/18287)) From 770768614b0743d03258000934a159c69026bcdc Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 1 Apr 2025 16:49:19 +0200 Subject: [PATCH 35/70] Merge changelog entries --- CHANGES.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index c8d879dc3a..35c3f22aaa 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,7 +4,7 @@ - Add an access token introspection cache to make Matrix Authentication Service integration ([MSC3861](https://github.com/matrix-org/matrix-doc/pull/3861)) more efficient. ([\#18231](https://github.com/element-hq/synapse/issues/18231)) - Add background job to clear unreferenced state groups. ([\#18254](https://github.com/element-hq/synapse/issues/18254)) -- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. ([\#18277](https://github.com/element-hq/synapse/issues/18277), [\#18302](https://github.com/element-hq/synapse/issues/18302)) +- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. 
([\#18277](https://github.com/element-hq/synapse/issues/18277), [\#18302](https://github.com/element-hq/synapse/issues/18302), [\#18296](https://github.com/element-hq/synapse/issues/18296)) ### Bugfixes @@ -24,7 +24,6 @@ - Fix how to obtain access token and change naming from riot to element ([\#18225](https://github.com/element-hq/synapse/issues/18225)) - Correct a small typo in the SSO mapping providers documentation. ([\#18276](https://github.com/element-hq/synapse/issues/18276)) - Add docs for how to clear out the Poetry wheel cache. ([\#18283](https://github.com/element-hq/synapse/issues/18283)) -- Hashes of media files are now tracked by Synapse. Media quarantines will now apply to all files with the same hash. ([\#18296](https://github.com/element-hq/synapse/issues/18296)) ### Internal Changes From 5c84f258095535aaa2a4a04c850f439fd00735cc Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Tue, 1 Apr 2025 11:51:00 -0400 Subject: [PATCH 36/70] complement-synapse: COPY existing dir from base (#18294) The base postgres image already has the /var/run/postgresql directory, and COPY can set file ownership with chown=, so COPY it instead of making it from scratch & manually setting its ownership. ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/18294.docker | 1 + docker/complement/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18294.docker diff --git a/changelog.d/18294.docker b/changelog.d/18294.docker new file mode 100644 index 0000000000..cc40ca90c0 --- /dev/null +++ b/changelog.d/18294.docker @@ -0,0 +1 @@ +Optimize the build of the complement-synapse image. diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile index 3e7f808cc5..dd029c5fbc 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile @@ -25,7 +25,7 @@ FROM $FROM RUN adduser --system --uid 999 postgres --home /var/lib/postgresql COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql -RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql +COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql ENV PATH="${PATH}:/usr/lib/postgresql/13/bin" ENV PGDATA=/var/lib/postgresql/data From 0e3c0aeee833e52121b3167de486dff34018ab27 Mon Sep 17 00:00:00 2001 From: Jason Little Date: Wed, 2 Apr 2025 09:37:50 -0500 Subject: [PATCH 37/70] Disable Postgres statement timeouts while purging room state (#18133) --- changelog.d/18133.misc | 1 + synapse/storage/databases/state/store.py | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 changelog.d/18133.misc diff --git a/changelog.d/18133.misc b/changelog.d/18133.misc new file mode 100644 index 0000000000..151ceb2cab --- /dev/null +++ b/changelog.d/18133.misc @@ -0,0 +1 @@ +Disable statement timeout during room purge. 
diff --git a/synapse/storage/databases/state/store.py b/synapse/storage/databases/state/store.py index 90d7beb92f..c1a66dcba0 100644 --- a/synapse/storage/databases/state/store.py +++ b/synapse/storage/databases/state/store.py @@ -48,6 +48,7 @@ from synapse.storage.database import ( LoggingTransaction, ) from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStore +from synapse.storage.engines import PostgresEngine from synapse.storage.types import Cursor from synapse.storage.util.sequence import build_sequence_generator from synapse.types import MutableStateMap, StateKey, StateMap @@ -914,6 +915,12 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore): ) -> None: # Delete all edges that reference a state group linked to room_id logger.info("[purge] removing %s from state_group_edges", room_id) + + if isinstance(self.database_engine, PostgresEngine): + # Disable statement timeouts for this transaction; purging rooms can + # take a while! + txn.execute("SET LOCAL statement_timeout = 0") + txn.execute( """ DELETE FROM state_group_edges AS sge WHERE sge.state_group IN ( From b30fcb03cc976c444c84b5c7137df81439b7f60e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 8 Apr 2025 14:09:59 +0100 Subject: [PATCH 38/70] 1.128.0 --- CHANGES.md | 7 +++++++ debian/changelog | 6 ++++++ pyproject.toml | 2 +- 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 35c3f22aaa..4ac25a3786 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,10 @@ +# Synapse 1.128.0 (2025-04-08) + +No significant changes since 1.128.0rc1. + + + + # Synapse 1.128.0rc1 (2025-04-01) ### Features diff --git a/debian/changelog b/debian/changelog index 6e338ecd74..56839ac5b4 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.128.0) stable; urgency=medium + + * New Synapse release 1.128.0. 
+ + -- Synapse Packaging team Tue, 08 Apr 2025 14:09:54 +0100 + matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium * Update Poetry to 2.1.1. diff --git a/pyproject.toml b/pyproject.toml index da469aab2c..8f8eb46e68 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.128.0rc1" +version = "1.128.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" From dd05cc55eedbf086ae224a13c9ae9f0332d96b1f Mon Sep 17 00:00:00 2001 From: Olivier D Date: Thu, 10 Apr 2025 15:39:27 +0200 Subject: [PATCH 39/70] Add passthrough_authorization_parameters support to OIDC configuration (#18232) # Add passthrough_authorization_parameters support to OIDC configuration This PR adds `the passthrough_authorization_parameters` option to OIDC configuration, allowing specific query parameters (like `login_hint`) to be passed from the redirect endpoint to the authorization grant URL. This enables clients to provide additional context to identity providers during authentication flows. # Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Quentin Gliech --- changelog.d/18232.feature | 1 + .../configuration/config_documentation.md | 4 +++ synapse/config/oidc.py | 6 +++++ synapse/handlers/oidc.py | 12 ++++++++- tests/handlers/test_oidc.py | 26 +++++++++++++++++++ 5 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18232.feature diff --git a/changelog.d/18232.feature b/changelog.d/18232.feature new file mode 100644 index 0000000000..ba5059ba80 --- /dev/null +++ b/changelog.d/18232.feature @@ -0,0 +1 @@ +Add `passthrough_authorization_parameters` in OIDC configuration to allow to pass parameters to the authorization grant URL. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index d2d282f203..73fd9622ce 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -3672,6 +3672,9 @@ Options for each entry include: * `additional_authorization_parameters`: String to string dictionary that will be passed as additional parameters to the authorization grant URL. +* `passthrough_authorization_parameters`: List of parameters that will be passed through from the redirect endpoint + to the authorization grant URL. + * `allow_existing_users`: set to true to allow a user logging in via OIDC to match a pre-existing account instead of failing. This could be used if switching from password logins to OIDC. Defaults to false. 
@@ -3798,6 +3801,7 @@ oidc_providers: jwks_uri: "https://accounts.example.com/.well-known/jwks.json" additional_authorization_parameters: acr_values: 2fa + passthrough_authorization_parameters: ["login_hint"] skip_verification: true enable_registration: true user_mapping_provider: diff --git a/synapse/config/oidc.py b/synapse/config/oidc.py index 8ba0ba2c36..3ddf65a3e9 100644 --- a/synapse/config/oidc.py +++ b/synapse/config/oidc.py @@ -356,6 +356,9 @@ def _parse_oidc_config_dict( additional_authorization_parameters=oidc_config.get( "additional_authorization_parameters", {} ), + passthrough_authorization_parameters=oidc_config.get( + "passthrough_authorization_parameters", [] + ), ) @@ -501,3 +504,6 @@ class OidcProviderConfig: # Additional parameters that will be passed to the authorization grant URL additional_authorization_parameters: Mapping[str, str] + + # Allow query parameters to the redirect endpoint that will be passed to the authorization grant URL + passthrough_authorization_parameters: Collection[str] diff --git a/synapse/handlers/oidc.py b/synapse/handlers/oidc.py index 18efdd9f6e..c4cf0636a3 100644 --- a/synapse/handlers/oidc.py +++ b/synapse/handlers/oidc.py @@ -467,6 +467,10 @@ class OidcProvider: self._sso_handler.register_identity_provider(self) + self.passthrough_authorization_parameters = ( + provider.passthrough_authorization_parameters + ) + def _validate_metadata(self, m: OpenIDProviderMetadata) -> None: """Verifies the provider metadata. @@ -1005,7 +1009,6 @@ class OidcProvider: when everything is done (or None for UI Auth) ui_auth_session_id: The session ID of the ongoing UI Auth (or None if this is a login). - Returns: The redirect URL to the authorization endpoint. 
@@ -1078,6 +1081,13 @@ class OidcProvider: ) ) + # add passthrough additional authorization parameters + passthrough_authorization_parameters = self.passthrough_authorization_parameters + for parameter in passthrough_authorization_parameters: + parameter_value = parse_string(request, parameter) + if parameter_value: + additional_authorization_parameters.update({parameter: parameter_value}) + authorization_endpoint = metadata.get("authorization_endpoint") return prepare_grant_uri( authorization_endpoint, diff --git a/tests/handlers/test_oidc.py b/tests/handlers/test_oidc.py index cfd9969563..a7cead83d0 100644 --- a/tests/handlers/test_oidc.py +++ b/tests/handlers/test_oidc.py @@ -484,6 +484,32 @@ class OidcHandlerTestCase(HomeserverTestCase): self.assertEqual(code_verifier, "") self.assertEqual(redirect, "http://client/redirect") + @override_config( + { + "oidc_config": { + **DEFAULT_CONFIG, + "passthrough_authorization_parameters": ["additional_parameter"], + } + } + ) + def test_passthrough_parameters(self) -> None: + """The redirect request has additional parameters, one is authorized, one is not""" + req = Mock(spec=["cookies", "args"]) + req.cookies = [] + req.args = {} + req.args[b"additional_parameter"] = ["a_value".encode("utf-8")] + req.args[b"not_authorized_parameter"] = ["any".encode("utf-8")] + + url = urlparse( + self.get_success( + self.provider.handle_redirect_request(req, b"http://client/redirect") + ) + ) + + params = parse_qs(url.query) + self.assertEqual(params["additional_parameter"], ["a_value"]) + self.assertNotIn("not_authorized_parameters", params) + @override_config({"oidc_config": DEFAULT_CONFIG}) def test_redirect_request_with_code_challenge(self) -> None: """The redirect request has the right arguments & generates a valid session cookie.""" From ae701e17090324ea5182450f42fbc9cfa9352835 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 14 Apr 2025 17:54:47 +0100 Subject: [PATCH 40/70] Add caches to new hot path functions (#18337) We 
call these two functions for every authed request when using delegated auth. --- changelog.d/18337.misc | 1 + synapse/handlers/device.py | 2 ++ synapse/storage/databases/main/devices.py | 9 ++++++- .../storage/databases/main/registration.py | 26 +++++++++---------- 4 files changed, 23 insertions(+), 15 deletions(-) create mode 100644 changelog.d/18337.misc diff --git a/changelog.d/18337.misc b/changelog.d/18337.misc new file mode 100644 index 0000000000..b78276fe76 --- /dev/null +++ b/changelog.d/18337.misc @@ -0,0 +1 @@ +Add cache to storage functions used to auth requests when using delegated auth. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index d9622080b4..1efd039f22 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -163,6 +163,8 @@ class DeviceWorkerHandler: raise errors.NotFoundError() ips = await self.store.get_last_client_ip_by_device(user_id, device_id) + + device = dict(device) _update_device_from_client_ips(device, ips) set_tag("device", str(device)) diff --git a/synapse/storage/databases/main/devices.py b/synapse/storage/databases/main/devices.py index 0b6d1f2b05..3f0b2f5d84 100644 --- a/synapse/storage/databases/main/devices.py +++ b/synapse/storage/databases/main/devices.py @@ -282,9 +282,10 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore): "count_devices_by_users", count_devices_by_users_txn, user_ids ) + @cached() async def get_device( self, user_id: str, device_id: str - ) -> Optional[Dict[str, Any]]: + ) -> Optional[Mapping[str, Any]]: """Retrieve a device. Only returns devices that are not marked as hidden. 
@@ -1817,6 +1818,8 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): }, desc="store_device", ) + await self.invalidate_cache_and_stream("get_device", (user_id, device_id)) + if not inserted: # if the device already exists, check if it's a real device, or # if the device ID is reserved by something else @@ -1882,6 +1885,9 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): values=device_ids, keyvalues={"user_id": user_id}, ) + self._invalidate_cache_and_stream_bulk( + txn, self.get_device, [(user_id, device_id) for device_id in device_ids] + ) for batch in batch_iter(device_ids, 100): await self.db_pool.runInteraction( @@ -1915,6 +1921,7 @@ class DeviceStore(DeviceWorkerStore, DeviceBackgroundUpdateStore): updatevalues=updates, desc="update_device", ) + await self.invalidate_cache_and_stream("get_device", (user_id, device_id)) async def update_remote_device_list_cache_entry( self, user_id: str, device_id: str, content: JsonDict, stream_id: str diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index 8380930c70..eadbf4901c 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -759,6 +759,9 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): external_id: id on that system user_id: complete mxid that it is mapped to """ + self._invalidate_cache_and_stream( + txn, self.get_user_by_external_id, (auth_provider, external_id) + ) self.db_pool.simple_insert_txn( txn, @@ -789,6 +792,9 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): }, desc="remove_user_external_id", ) + await self.invalidate_cache_and_stream( + "get_user_by_external_id", (auth_provider, external_id) + ) async def replace_user_external_id( self, @@ -809,29 +815,20 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): ExternalIDReuseException if the new external_id could not be mapped. 
""" - def _remove_user_external_ids_txn( + def _replace_user_external_id_txn( txn: LoggingTransaction, - user_id: str, ) -> None: - """Remove all mappings from external user ids to a mxid - If these mappings are not found, this method does nothing. - - Args: - user_id: complete mxid that it is mapped to - """ - self.db_pool.simple_delete_txn( txn, table="user_external_ids", keyvalues={"user_id": user_id}, ) - def _replace_user_external_id_txn( - txn: LoggingTransaction, - ) -> None: - _remove_user_external_ids_txn(txn, user_id) - for auth_provider, external_id in record_external_ids: + self._invalidate_cache_and_stream( + txn, self.get_user_by_external_id, (auth_provider, external_id) + ) + self._record_user_external_id_txn( txn, auth_provider, @@ -847,6 +844,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): except self.database_engine.module.IntegrityError: raise ExternalIDReuseException() + @cached() async def get_user_by_external_id( self, auth_provider: str, external_id: str ) -> Optional[str]: From a832375bfb5b0327e73d2b5cf9104b555308e827 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Tue, 15 Apr 2025 07:49:08 -0700 Subject: [PATCH 41/70] Add total event, unencrypted message, and e2ee event counts to stats reporting (#18260) Co-authored-by: Eric Eastwood --- changelog.d/18260.feature | 1 + .../reporting_homeserver_usage_statistics.md | 9 +- synapse/app/phone_stats_home.py | 41 ++- .../databases/main/events_bg_updates.py | 290 +++++++++++++++++- synapse/storage/databases/main/metrics.py | 38 +++ synapse/storage/schema/__init__.py | 8 +- .../schema/main/delta/92/01_event_stats.sql | 33 ++ synapse/types/storage/__init__.py | 2 + tests/metrics/test_phone_home_stats.py | 258 ++++++++++++++++ tests/storage/test_event_stats.py | 237 ++++++++++++++ 10 files changed, 907 insertions(+), 10 deletions(-) create mode 100644 changelog.d/18260.feature create mode 100644 
synapse/storage/schema/main/delta/92/01_event_stats.sql create mode 100644 tests/metrics/test_phone_home_stats.py create mode 100644 tests/storage/test_event_stats.py diff --git a/changelog.d/18260.feature b/changelog.d/18260.feature new file mode 100644 index 0000000000..e44e3dc990 --- /dev/null +++ b/changelog.d/18260.feature @@ -0,0 +1 @@ +Add `total_event_count`, `total_message_count`, and `total_e2ee_event_count` fields to the homeserver usage statistics. diff --git a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md index 4c0dbb5acd..cdec798410 100644 --- a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md +++ b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md @@ -30,10 +30,13 @@ The following statistics are sent to the configured reporting endpoint: | `python_version` | string | The Python version number in use (e.g "3.7.1"). Taken from `sys.version_info`. | | `total_users` | int | The number of registered users on the homeserver. | | `total_nonbridged_users` | int | The number of users, excluding those created by an Application Service. | -| `daily_user_type_native` | int | The number of native users created in the last 24 hours. | +| `daily_user_type_native` | int | The number of native, non-guest users created in the last 24 hours. | | `daily_user_type_guest` | int | The number of guest users created in the last 24 hours. | | `daily_user_type_bridged` | int | The number of users created by Application Services in the last 24 hours. | | `total_room_count` | int | The total number of rooms present on the homeserver. | +| `total_event_count` | int | The total number of events present on the homeserver. | +| `total_message_count` | int | The total number of non-state events with type `m.room.message` present on the homeserver. 
| +| `total_e2ee_event_count` | int | The total number of non-state events with type `m.room.encrypted` present on the homeserver. This can be used as a slight over-estimate for the number of encrypted messages. | | `daily_active_users` | int | The number of unique users[^1] that have used the homeserver in the last 24 hours. | | `monthly_active_users` | int | The number of unique users[^1] that have used the homeserver in the last 30 days. | | `daily_active_rooms` | int | The number of rooms that have had a (state) event with the type `m.room.message` sent in them in the last 24 hours. | @@ -50,8 +53,8 @@ The following statistics are sent to the configured reporting endpoint: | `cache_factor` | int | The configured [`global factor`](../../configuration/config_documentation.md#caching) value for caching. | | `event_cache_size` | int | The configured [`event_cache_size`](../../configuration/config_documentation.md#caching) value for caching. | | `database_engine` | string | The database engine that is in use. Either "psycopg2" meaning PostgreSQL is in use, or "sqlite3" for SQLite3. | -| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.0, and "3.38.5" for SQLite 3.38.5 installed on the system. | -| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. | +| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.0, and "3.38.5" for SQLite 3.38.5 installed on the system. | +| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. | [^1]: Native matrix users and guests are always counted. 
If the diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py index f602bbbeea..fe55838416 100644 --- a/synapse/app/phone_stats_home.py +++ b/synapse/app/phone_stats_home.py @@ -34,6 +34,22 @@ if TYPE_CHECKING: logger = logging.getLogger("synapse.app.homeserver") +ONE_MINUTE_SECONDS = 60 +ONE_HOUR_SECONDS = 60 * ONE_MINUTE_SECONDS + +MILLISECONDS_PER_SECOND = 1000 + +INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS +""" +We wait 5 minutes to send the first set of stats as the server can be quite busy the +first few minutes +""" + +PHONE_HOME_INTERVAL_SECONDS = 3 * ONE_HOUR_SECONDS +""" +Phone home stats are sent every 3 hours +""" + # Contains the list of processes we will be monitoring # currently either 0 or 1 _stats_process: List[Tuple[int, "resource.struct_rusage"]] = [] @@ -121,6 +137,9 @@ async def phone_stats_home( room_count = await store.get_room_count() stats["total_room_count"] = room_count + stats["total_event_count"] = await store.count_total_events() + stats["total_message_count"] = await store.count_total_messages() + stats["total_e2ee_event_count"] = await store.count_total_e2ee_events() stats["daily_active_users"] = common_metrics.daily_active_users stats["monthly_active_users"] = await store.count_monthly_users() @@ -185,12 +204,14 @@ def start_phone_stats_home(hs: "HomeServer") -> None: # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call( - hs.get_datastores().main.generate_user_daily_visits, 5 * 60 * 1000 + hs.get_datastores().main.generate_user_daily_visits, + 5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND, ) # monthly active user limiting functionality clock.looping_call( - hs.get_datastores().main.reap_monthly_active_users, 1000 * 60 * 60 + hs.get_datastores().main.reap_monthly_active_users, + ONE_HOUR_SECONDS * MILLISECONDS_PER_SECOND, ) hs.get_datastores().main.reap_monthly_active_users() @@ -216,12 +237,20 @@ def 
start_phone_stats_home(hs: "HomeServer") -> None: if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only: generate_monthly_active_users() - clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) + clock.looping_call( + generate_monthly_active_users, + 5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND, + ) # End of monthly active user settings if hs.config.metrics.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") - clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000, hs, stats) + clock.looping_call( + phone_stats_home, + PHONE_HOME_INTERVAL_SECONDS * MILLISECONDS_PER_SECOND, + hs, + stats, + ) # We need to defer this init for the cases that we daemonize # otherwise the process ID we get is that of the non-daemon process @@ -229,4 +258,6 @@ def start_phone_stats_home(hs: "HomeServer") -> None: # We wait 5 minutes to send the first set of stats as the server can # be quite busy the first few minutes - clock.call_later(5 * 60, phone_stats_home, hs, stats) + clock.call_later( + INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS, phone_stats_home, hs, stats + ) diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index 4b0bdd79c6..b821d1c1b4 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -47,7 +47,7 @@ from synapse.storage.databases.main.events_worker import ( ) from synapse.storage.databases.main.state_deltas import StateDeltasStore from synapse.storage.databases.main.stream import StreamWorkerStore -from synapse.storage.engines import PostgresEngine +from synapse.storage.engines import PostgresEngine, Sqlite3Engine from synapse.storage.types import Cursor from synapse.types import JsonDict, RoomStreamToken, StateMap, StrCollection from synapse.types.handlers import SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES @@ -311,6 +311,12 @@ class 
EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS self._sliding_sync_membership_snapshots_fix_forgotten_column_bg_update, ) + # Add a background update to add triggers which track event counts. + self.db_pool.updates.register_background_update_handler( + _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + self._event_stats_populate_counts_bg_update, + ) + # We want this to run on the main database at startup before we start processing # events. # @@ -2547,6 +2553,288 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS return num_rows + async def _event_stats_populate_counts_bg_update( + self, progress: JsonDict, batch_size: int + ) -> int: + """ + Background update to populate the `event_stats` table with initial + values, and register DB triggers to continue updating it. + + We first register TRIGGERs on rows being added/removed from the `events` table, + which will keep the event counts continuously updated. We also mark the stopping + point for the main population step so we don't double count events. + + Then we will iterate through the `events` table in batches and update event + counts until we reach the stopping point. + + This data is intended to be used by the phone-home stats to keep track + of total event and message counts. A trigger is preferred to counting + rows in the `events` table, as said table can grow quite large. + + It is also preferable to adding an index on the `events` table, as even + an index can grow large. And calculating total counts would require + querying that entire index. + """ + # The last event `stream_ordering` we processed (starting place of this next + # batch). + last_event_stream_ordering = progress.get( + "last_event_stream_ordering", -(1 << 31) + ) + # The event `stream_ordering` we should stop at. This is used to avoid double + # counting events that are already accounted for because of the triggers. 
+ stop_event_stream_ordering: Optional[int] = progress.get( + "stop_event_stream_ordering", None + ) + + def _add_triggers_txn( + txn: LoggingTransaction, + ) -> Optional[int]: + """ + Adds the triggers to the `events` table to keep the `event_stats` counts + up-to-date. + + Also populates the `stop_event_stream_ordering` background update progress + value. This marks the point at which we added the triggers, so we can avoid + double counting events that are already accounted for in the population + step. + + Returns: + The latest event `stream_ordering` in the `events` table when the triggers + were added or `None` if the `events` table is empty. + """ + + # Each time an event is inserted into the `events` table, update the stats. + # + # We're using `AFTER` triggers as we want to count successful inserts/deletes and + # not the ones that could potentially fail. + if isinstance(txn.database_engine, Sqlite3Engine): + txn.execute( + """ + CREATE TRIGGER IF NOT EXISTS event_stats_events_insert_trigger + AFTER INSERT ON events + BEGIN + -- Always increment total_event_count + UPDATE event_stats SET total_event_count = total_event_count + 1; + + -- Increment unencrypted_message_count for m.room.message events + UPDATE event_stats + SET unencrypted_message_count = unencrypted_message_count + 1 + WHERE NEW.type = 'm.room.message' AND NEW.state_key IS NULL; + + -- Increment e2ee_event_count for m.room.encrypted events + UPDATE event_stats + SET e2ee_event_count = e2ee_event_count + 1 + WHERE NEW.type = 'm.room.encrypted' AND NEW.state_key IS NULL; + END; + """ + ) + + txn.execute( + """ + CREATE TRIGGER IF NOT EXISTS event_stats_events_delete_trigger + AFTER DELETE ON events + BEGIN + -- Always decrement total_event_count + UPDATE event_stats SET total_event_count = total_event_count - 1; + + -- Decrement unencrypted_message_count for m.room.message events + UPDATE event_stats + SET unencrypted_message_count = unencrypted_message_count - 1 + WHERE OLD.type = 
'm.room.message' AND OLD.state_key IS NULL; + + -- Decrement e2ee_event_count for m.room.encrypted events + UPDATE event_stats + SET e2ee_event_count = e2ee_event_count - 1 + WHERE OLD.type = 'm.room.encrypted' AND OLD.state_key IS NULL; + END; + """ + ) + elif isinstance(txn.database_engine, PostgresEngine): + txn.execute( + """ + CREATE OR REPLACE FUNCTION event_stats_increment_counts() RETURNS trigger AS $BODY$ + BEGIN + IF TG_OP = 'INSERT' THEN + -- Always increment total_event_count + UPDATE event_stats SET total_event_count = total_event_count + 1; + + -- Increment unencrypted_message_count for m.room.message events + IF NEW.type = 'm.room.message' AND NEW.state_key IS NULL THEN + UPDATE event_stats SET unencrypted_message_count = unencrypted_message_count + 1; + END IF; + + -- Increment e2ee_event_count for m.room.encrypted events + IF NEW.type = 'm.room.encrypted' AND NEW.state_key IS NULL THEN + UPDATE event_stats SET e2ee_event_count = e2ee_event_count + 1; + END IF; + + -- We're not modifying the row being inserted/deleted, so we return it unchanged. + RETURN NEW; + + ELSIF TG_OP = 'DELETE' THEN + -- Always decrement total_event_count + UPDATE event_stats SET total_event_count = total_event_count - 1; + + -- Decrement unencrypted_message_count for m.room.message events + IF OLD.type = 'm.room.message' AND OLD.state_key IS NULL THEN + UPDATE event_stats SET unencrypted_message_count = unencrypted_message_count - 1; + END IF; + + -- Decrement e2ee_event_count for m.room.encrypted events + IF OLD.type = 'm.room.encrypted' AND OLD.state_key IS NULL THEN + UPDATE event_stats SET e2ee_event_count = e2ee_event_count - 1; + END IF; + + -- "The usual idiom in DELETE triggers is to return OLD." + -- (https://www.postgresql.org/docs/current/plpgsql-trigger.html) + RETURN OLD; + END IF; + + RAISE EXCEPTION 'update_event_stats() was run with unexpected operation (%%). 
' + 'This indicates a trigger misconfiguration as this function should only' + 'run with INSERT/DELETE operations.', TG_OP; + END; + $BODY$ LANGUAGE plpgsql; + """ + ) + + # We could use `CREATE OR REPLACE TRIGGER` but that's only available in Postgres + # 14 (https://www.postgresql.org/docs/14/sql-createtrigger.html) + txn.execute( + """ + DO + $$BEGIN + CREATE TRIGGER event_stats_increment_counts_trigger + AFTER INSERT OR DELETE ON events + FOR EACH ROW + EXECUTE PROCEDURE event_stats_increment_counts(); + EXCEPTION + -- This acts as a "CREATE TRIGGER IF NOT EXISTS" for Postgres + WHEN duplicate_object THEN + NULL; + END;$$; + """ + ) + else: + raise NotImplementedError("Unknown database engine") + + # Find the latest `stream_ordering` in the `events` table. We need to do + # this in the same transaction as where we add the triggers so we don't miss + # any events. + txn.execute( + """ + SELECT stream_ordering + FROM events + ORDER BY stream_ordering DESC + LIMIT 1 + """ + ) + row = cast(Optional[Tuple[int]], txn.fetchone()) + + # Update the progress + if row is not None: + (max_stream_ordering,) = row + self.db_pool.updates._background_update_progress_txn( + txn, + _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + {"stop_event_stream_ordering": max_stream_ordering}, + ) + return max_stream_ordering + + return None + + # First, add the triggers to keep the `event_stats` values up-to-date. + # + # If we don't have a `stop_event_stream_ordering` yet, we need to add the + # triggers to the `events` table and set the stopping point so we don't + # double count `events` later. + if stop_event_stream_ordering is None: + stop_event_stream_ordering = await self.db_pool.runInteraction( + "_event_stats_populate_counts_bg_update_add_triggers", + _add_triggers_txn, + ) + + # If there is no `stop_event_stream_ordering`, then there are no events + # in the `events` table and we can end the background update altogether. 
+ if stop_event_stream_ordering is None: + await self.db_pool.updates._end_background_update( + _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE + ) + return batch_size + + def _populate_txn( + txn: LoggingTransaction, + ) -> int: + """ + Updates the `event_stats` table from this batch of events. + """ + + # Increment the counts based on the events present in this batch. + txn.execute( + """ + WITH event_batch AS ( + SELECT * + FROM events + WHERE stream_ordering > ? AND stream_ordering <= ? + ORDER BY stream_ordering ASC + LIMIT ? + ), + batch_stats AS ( + SELECT + MAX(stream_ordering) AS max_stream_ordering, + COALESCE(COUNT(*), 0) AS total_event_count, + COALESCE(SUM(CASE WHEN type = 'm.room.message' AND state_key IS NULL THEN 1 ELSE 0 END), 0) AS unencrypted_message_count, + COALESCE(SUM(CASE WHEN type = 'm.room.encrypted' AND state_key IS NULL THEN 1 ELSE 0 END), 0) AS e2ee_event_count + FROM event_batch + + UNION ALL + + SELECT null, 0, 0, 0 + WHERE NOT EXISTS (SELECT 1 FROM event_batch) + LIMIT 1 + ) + UPDATE event_stats + SET + total_event_count = total_event_count + (SELECT total_event_count FROM batch_stats), + unencrypted_message_count = unencrypted_message_count + (SELECT unencrypted_message_count FROM batch_stats), + e2ee_event_count = e2ee_event_count + (SELECT e2ee_event_count FROM batch_stats) + RETURNING + (SELECT total_event_count FROM batch_stats) AS total_event_count, + (SELECT max_stream_ordering FROM batch_stats) AS max_stream_ordering + """, + (last_event_stream_ordering, stop_event_stream_ordering, batch_size), + ) + + # Get the results of the update + (total_event_count, max_stream_ordering) = cast( + Tuple[int, Optional[int]], txn.fetchone() + ) + + # Update the progress + self.db_pool.updates._background_update_progress_txn( + txn, + _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + { + "last_event_stream_ordering": max_stream_ordering, + "stop_event_stream_ordering": stop_event_stream_ordering, + }, + ) + + return 
total_event_count + + num_rows_processed = await self.db_pool.runInteraction( + "_event_stats_populate_counts_bg_update", + _populate_txn, + ) + + # No more rows to process, so our background update is complete. + if not num_rows_processed: + await self.db_pool.updates._end_background_update( + _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE + ) + + return batch_size + def _resolve_stale_data_in_sliding_sync_tables( txn: LoggingTransaction, diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py index 9ce1100b5c..a9cecc4bc1 100644 --- a/synapse/storage/databases/main/metrics.py +++ b/synapse/storage/databases/main/metrics.py @@ -126,6 +126,44 @@ class ServerMetricsStore(EventPushActionsWorkerStore, SQLBaseStore): return await self.db_pool.runInteraction("count_e2ee_messages", _count_messages) + async def count_total_events(self) -> int: + """ + Returns the total number of events present on the server. + """ + + return await self.db_pool.simple_select_one_onecol( + table="event_stats", + keyvalues={}, + retcol="total_event_count", + desc="count_total_events", + ) + + async def count_total_messages(self) -> int: + """ + Returns the total number of `m.room.message` events present on the + server. + """ + + return await self.db_pool.simple_select_one_onecol( + table="event_stats", + keyvalues={}, + retcol="unencrypted_message_count", + desc="count_total_messages", + ) + + async def count_total_e2ee_events(self) -> int: + """ + Returns the total number of `m.room.encrypted` events present on the + server. 
+ """ + + return await self.db_pool.simple_select_one_onecol( + table="event_stats", + keyvalues={}, + retcol="e2ee_event_count", + desc="count_total_e2ee_events", + ) + async def count_daily_sent_e2ee_messages(self) -> int: def _count_messages(txn: LoggingTransaction) -> int: # This is good enough as if you have silly characters in your own diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index ad683a3a07..7474ba4542 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -19,7 +19,7 @@ # # -SCHEMA_VERSION = 91 # remember to update the list below when updating +SCHEMA_VERSION = 92 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -162,6 +162,12 @@ Changes in SCHEMA_VERSION = 89 Changes in SCHEMA_VERSION = 90 - Add a column `participant` to `room_memberships` table - Add background update to delete unreferenced state groups. + +Changes in SCHEMA_VERSION = 91 + - TODO + +Changes in SCHEMA_VERSION = 92 + - Add `event_stats` table to store global event statistics like total counts """ diff --git a/synapse/storage/schema/main/delta/92/01_event_stats.sql b/synapse/storage/schema/main/delta/92/01_event_stats.sql new file mode 100644 index 0000000000..4bded03578 --- /dev/null +++ b/synapse/storage/schema/main/delta/92/01_event_stats.sql @@ -0,0 +1,33 @@ +-- +-- This file is licensed under the Affero General Public License (AGPL) version 3. +-- +-- Copyright (C) 2025 New Vector, Ltd +-- +-- This program is free software: you can redistribute it and/or modify +-- it under the terms of the GNU Affero General Public License as +-- published by the Free Software Foundation, either version 3 of the +-- License, or (at your option) any later version. +-- +-- See the GNU Affero General Public License for more details: +-- . 
+ + +-- Create the `event_stats` table to store these statistics. +CREATE TABLE event_stats ( + total_event_count INTEGER NOT NULL DEFAULT 0, + unencrypted_message_count INTEGER NOT NULL DEFAULT 0, + e2ee_event_count INTEGER NOT NULL DEFAULT 0 +); + +-- Insert initial values into the table. +INSERT INTO event_stats ( + total_event_count, + unencrypted_message_count, + e2ee_event_count +) VALUES (0, 0, 0); + +-- Add a background update to populate the `event_stats` table with the current counts +-- from the `events` table and add triggers to keep this count up-to-date. +INSERT INTO background_updates (ordering, update_name, progress_json) VALUES + (9201, 'event_stats_populate_counts_bg_update', '{}'); + diff --git a/synapse/types/storage/__init__.py b/synapse/types/storage/__init__.py index e03ff7ffc8..73d19d91ed 100644 --- a/synapse/types/storage/__init__.py +++ b/synapse/types/storage/__init__.py @@ -52,3 +52,5 @@ class _BackgroundUpdates: MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE = ( "mark_unreferenced_state_groups_for_deletion_bg_update" ) + + EVENT_STATS_POPULATE_COUNTS_BG_UPDATE = "event_stats_populate_counts_bg_update" diff --git a/tests/metrics/test_phone_home_stats.py b/tests/metrics/test_phone_home_stats.py new file mode 100644 index 0000000000..1b3eafed5f --- /dev/null +++ b/tests/metrics/test_phone_home_stats.py @@ -0,0 +1,258 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+ +import logging +from unittest.mock import AsyncMock + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.app.phone_stats_home import ( + PHONE_HOME_INTERVAL_SECONDS, + start_phone_stats_home, +) +from synapse.rest import admin, login, register, room +from synapse.server import HomeServer +from synapse.types import JsonDict +from synapse.util import Clock + +from tests import unittest +from tests.server import ThreadedMemoryReactorClock + +TEST_REPORT_STATS_ENDPOINT = "https://fake.endpoint/stats" +TEST_SERVER_CONTEXT = "test-server-context" + + +class PhoneHomeStatsTestCase(unittest.HomeserverTestCase): + servlets = [ + admin.register_servlets_for_client_rest_resource, + room.register_servlets, + register.register_servlets, + login.register_servlets, + ] + + def make_homeserver( + self, reactor: ThreadedMemoryReactorClock, clock: Clock + ) -> HomeServer: + # Configure the homeserver to enable stats reporting. + config = self.default_config() + config["report_stats"] = True + config["report_stats_endpoint"] = TEST_REPORT_STATS_ENDPOINT + + # Configure the server context so we can check it ends up being reported + config["server_context"] = TEST_SERVER_CONTEXT + + # Allow guests to be registered + config["allow_guest_access"] = True + + hs = self.setup_test_homeserver(config=config) + + # Replace the proxied http client with a mock, so we can inspect outbound requests to + # the configured stats endpoint. + self.put_json_mock = AsyncMock(return_value={}) + hs.get_proxied_http_client().put_json = self.put_json_mock # type: ignore[method-assign] + return hs + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + + # Wait for the background updates to add the database triggers that keep the + # `event_stats` table up-to-date. 
+ self.wait_for_background_updates() + + # Force stats reporting to occur + start_phone_stats_home(hs=hs) + + super().prepare(reactor, clock, hs) + + def _get_latest_phone_home_stats(self) -> JsonDict: + # Wait for `phone_stats_home` to be called again + a healthy margin (50s). + self.reactor.advance(2 * PHONE_HOME_INTERVAL_SECONDS + 50) + + # Extract the reported stats from our http client mock + mock_calls = self.put_json_mock.call_args_list + report_stats_calls = [] + for call in mock_calls: + if call.args[0] == TEST_REPORT_STATS_ENDPOINT: + report_stats_calls.append(call) + + self.assertGreaterEqual( + (len(report_stats_calls)), + 1, + "Expected at-least one call to the report_stats endpoint", + ) + + # Extract the phone home stats from the call + phone_home_stats = report_stats_calls[0].args[1] + + return phone_home_stats + + def _perform_user_actions(self) -> None: + """ + Perform some actions on the homeserver that would bump the phone home + stats. + """ + + # Create some users + user_1_mxid = self.register_user( + username="test_user_1", + password="test", + ) + user_2_mxid = self.register_user( + username="test_user_2", + password="test", + ) + # Note: `self.register_user` does not support guest registration, and updating the + # Admin API it calls to add a new parameter would cause the `mac` parameter to fail + # in a backwards-incompatible manner. Hence, we make a manual request here. 
+ _guest_user_mxid = self.make_request( + method="POST", + path="/_matrix/client/v3/register?kind=guest", + content={ + "username": "guest_user", + "password": "test", + }, + shorthand=False, + ) + + # Log in to each user + user_1_token = self.login(username=user_1_mxid, password="test") + user_2_token = self.login(username=user_2_mxid, password="test") + + # Create a room between the two users + room_1_id = self.helper.create_room_as( + is_public=False, + tok=user_1_token, + ) + + # Mark this room as end-to-end encrypted + self.helper.send_state( + room_id=room_1_id, + event_type="m.room.encryption", + body={ + "algorithm": "m.megolm.v1.aes-sha2", + "rotation_period_ms": 604800000, + "rotation_period_msgs": 100, + }, + state_key="", + tok=user_1_token, + ) + + # User 1 invites user 2 + self.helper.invite( + room=room_1_id, + src=user_1_mxid, + targ=user_2_mxid, + tok=user_1_token, + ) + + # User 2 joins + self.helper.join( + room=room_1_id, + user=user_2_mxid, + tok=user_2_token, + ) + + # User 1 sends 10 unencrypted messages + for _ in range(10): + self.helper.send( + room_id=room_1_id, + body="Zoinks Scoob! 
A message!", + tok=user_1_token, + ) + + # User 2 sends 5 encrypted "messages" + for _ in range(5): + self.helper.send_event( + room_id=room_1_id, + type="m.room.encrypted", + content={ + "algorithm": "m.olm.v1.curve25519-aes-sha2", + "sender_key": "some_key", + "ciphertext": { + "some_key": { + "type": 0, + "body": "encrypted_payload", + }, + }, + }, + tok=user_2_token, + ) + + def test_phone_home_stats(self) -> None: + """ + Test that the phone home stats contain the stats we expect based on + the scenario carried out in `prepare` + """ + # Do things to bump the stats + self._perform_user_actions() + + # Wait for the stats to be reported + phone_home_stats = self._get_latest_phone_home_stats() + + self.assertEqual( + phone_home_stats["homeserver"], self.hs.config.server.server_name + ) + + self.assertTrue(isinstance(phone_home_stats["memory_rss"], int)) + self.assertTrue(isinstance(phone_home_stats["cpu_average"], int)) + + self.assertEqual(phone_home_stats["server_context"], TEST_SERVER_CONTEXT) + + self.assertTrue(isinstance(phone_home_stats["timestamp"], int)) + self.assertTrue(isinstance(phone_home_stats["uptime_seconds"], int)) + self.assertTrue(isinstance(phone_home_stats["python_version"], str)) + + # We expect only our test users to exist on the homeserver + self.assertEqual(phone_home_stats["total_users"], 3) + self.assertEqual(phone_home_stats["total_nonbridged_users"], 3) + self.assertEqual(phone_home_stats["daily_user_type_native"], 2) + self.assertEqual(phone_home_stats["daily_user_type_guest"], 1) + self.assertEqual(phone_home_stats["daily_user_type_bridged"], 0) + self.assertEqual(phone_home_stats["total_room_count"], 1) + self.assertEqual(phone_home_stats["total_event_count"], 24) + self.assertEqual(phone_home_stats["total_message_count"], 10) + self.assertEqual(phone_home_stats["total_e2ee_event_count"], 5) + self.assertEqual(phone_home_stats["daily_active_users"], 2) + self.assertEqual(phone_home_stats["monthly_active_users"], 2) + 
self.assertEqual(phone_home_stats["daily_active_rooms"], 1) + self.assertEqual(phone_home_stats["daily_active_e2ee_rooms"], 1) + self.assertEqual(phone_home_stats["daily_messages"], 10) + self.assertEqual(phone_home_stats["daily_e2ee_messages"], 5) + self.assertEqual(phone_home_stats["daily_sent_messages"], 10) + self.assertEqual(phone_home_stats["daily_sent_e2ee_messages"], 5) + + # Our users have not been around for >30 days, hence these are all 0. + self.assertEqual(phone_home_stats["r30v2_users_all"], 0) + self.assertEqual(phone_home_stats["r30v2_users_android"], 0) + self.assertEqual(phone_home_stats["r30v2_users_ios"], 0) + self.assertEqual(phone_home_stats["r30v2_users_electron"], 0) + self.assertEqual(phone_home_stats["r30v2_users_web"], 0) + self.assertEqual( + phone_home_stats["cache_factor"], self.hs.config.caches.global_factor + ) + self.assertEqual( + phone_home_stats["event_cache_size"], + self.hs.config.caches.event_cache_size, + ) + self.assertEqual( + phone_home_stats["database_engine"], + self.hs.config.database.databases[0].config["name"], + ) + self.assertEqual( + phone_home_stats["database_server_version"], + self.hs.get_datastores().main.database_engine.server_version, + ) + + synapse_logger = logging.getLogger("synapse") + log_level = synapse_logger.getEffectiveLevel() + self.assertEqual(phone_home_stats["log_level"], logging.getLevelName(log_level)) diff --git a/tests/storage/test_event_stats.py b/tests/storage/test_event_stats.py new file mode 100644 index 0000000000..791ed27018 --- /dev/null +++ b/tests/storage/test_event_stats.py @@ -0,0 +1,237 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# See the GNU Affero General Public License for more details: +# . + + +from twisted.test.proto_helpers import MemoryReactor + +from synapse.rest import admin, login, register, room +from synapse.server import HomeServer +from synapse.types.storage import _BackgroundUpdates +from synapse.util import Clock + +from tests import unittest + + +class EventStatsTestCase(unittest.HomeserverTestCase): + """ + Tests for the `event_stats` table + """ + + servlets = [ + admin.register_servlets_for_client_rest_resource, + room.register_servlets, + register.register_servlets, + login.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + + # Wait for the background updates to add the database triggers that keep the + # `event_stats` table up-to-date. + # + # This also prevents background updates running during the tests and messing + # with the results. + self.wait_for_background_updates() + + super().prepare(reactor, clock, hs) + + def _perform_user_actions(self) -> None: + """ + Perform some actions on the homeserver that would bump the event counts. + """ + # Create some users + user_1_mxid = self.register_user( + username="test_user_1", + password="test", + ) + user_2_mxid = self.register_user( + username="test_user_2", + password="test", + ) + # Note: `self.register_user` does not support guest registration, and updating the + # Admin API it calls to add a new parameter would cause the `mac` parameter to fail + # in a backwards-incompatible manner. Hence, we make a manual request here. 
+ _guest_user_mxid = self.make_request( + method="POST", + path="/_matrix/client/v3/register?kind=guest", + content={ + "username": "guest_user", + "password": "test", + }, + shorthand=False, + ) + + # Log in to each user + user_1_token = self.login(username=user_1_mxid, password="test") + user_2_token = self.login(username=user_2_mxid, password="test") + + # Create a room between the two users + room_1_id = self.helper.create_room_as( + is_public=False, + tok=user_1_token, + ) + + # Mark this room as end-to-end encrypted + self.helper.send_state( + room_id=room_1_id, + event_type="m.room.encryption", + body={ + "algorithm": "m.megolm.v1.aes-sha2", + "rotation_period_ms": 604800000, + "rotation_period_msgs": 100, + }, + state_key="", + tok=user_1_token, + ) + + # User 1 invites user 2 + self.helper.invite( + room=room_1_id, + src=user_1_mxid, + targ=user_2_mxid, + tok=user_1_token, + ) + + # User 2 joins + self.helper.join( + room=room_1_id, + user=user_2_mxid, + tok=user_2_token, + ) + + # User 1 sends 10 unencrypted messages + for _ in range(10): + self.helper.send( + room_id=room_1_id, + body="Zoinks Scoob! A message!", + tok=user_1_token, + ) + + # User 2 sends 5 encrypted "messages" + for _ in range(5): + self.helper.send_event( + room_id=room_1_id, + type="m.room.encrypted", + content={ + "algorithm": "m.olm.v1.curve25519-aes-sha2", + "sender_key": "some_key", + "ciphertext": { + "some_key": { + "type": 0, + "body": "encrypted_payload", + }, + }, + }, + tok=user_2_token, + ) + + def test_background_update_with_events(self) -> None: + """ + Test that the background update to populate the `event_stats` table works + correctly when there are events in the database. + """ + # Do things to bump the stats + self._perform_user_actions() + + # Keep in mind: These are already populated as the background update has already + # ran once when Synapse started and added the database triggers which are + # incrementing things as new events come in. 
+ self.assertEqual(self.get_success(self.store.count_total_events()), 24) + self.assertEqual(self.get_success(self.store.count_total_messages()), 10) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 5) + + # Run the background update again + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + { + "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + "progress_json": "{}", + }, + ) + ) + self.store.db_pool.updates._all_done = False + self.wait_for_background_updates() + + # We expect these values to double as the background update is being run *again* + # and will double-count the `events`. + self.assertEqual(self.get_success(self.store.count_total_events()), 48) + self.assertEqual(self.get_success(self.store.count_total_messages()), 20) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 10) + + def test_background_update_without_events(self) -> None: + """ + Test that the background update to populate the `event_stats` table works + correctly without events in the database. + """ + # Keep in mind: These are already populated as the background update has already + # ran once when Synapse started and added the database triggers which are + # incrementing things as new events come in. + # + # In this case, no events have been sent, so we expect the counts to be 0. 
+ self.assertEqual(self.get_success(self.store.count_total_events()), 0) + self.assertEqual(self.get_success(self.store.count_total_messages()), 0) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 0) + + # Run the background update again + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + { + "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + "progress_json": "{}", + }, + ) + ) + self.store.db_pool.updates._all_done = False + self.wait_for_background_updates() + + self.assertEqual(self.get_success(self.store.count_total_events()), 0) + self.assertEqual(self.get_success(self.store.count_total_messages()), 0) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 0) + + def test_background_update_resume_progress(self) -> None: + """ + Test that the background update to populate the `event_stats` table works + correctly to resume from `progress_json`. + """ + # Do things to bump the stats + self._perform_user_actions() + + # Keep in mind: These are already populated as the background update has already + # ran once when Synapse started and added the database triggers which are + # incrementing things as new events come in. 
+ self.assertEqual(self.get_success(self.store.count_total_events()), 24) + self.assertEqual(self.get_success(self.store.count_total_messages()), 10) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 5) + + # Run the background update again + self.get_success( + self.store.db_pool.simple_insert( + "background_updates", + { + "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, + "progress_json": '{ "last_event_stream_ordering": 14, "stop_event_stream_ordering": 21 }', + }, + ) + ) + self.store.db_pool.updates._all_done = False + self.wait_for_background_updates() + + # We expect these values to increase as the background update is being run + # *again* and will double-count some of the `events` over the range specified + # by the `progress_json`. + self.assertEqual(self.get_success(self.store.count_total_events()), 24 + 7) + self.assertEqual(self.get_success(self.store.count_total_messages()), 16) + self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 6) From 19b0e23c3d0af4a372194a6510281bd4ca3c1489 Mon Sep 17 00:00:00 2001 From: reivilibre Date: Tue, 15 Apr 2025 14:58:30 +0000 Subject: [PATCH 42/70] Fix the token introspection cache logging access tokens when MAS integration is in use. (#18335) The `ResponseCache` logs keys by default. Let's not do that for access tokens. --------- Signed-off-by: Olivier 'reivilibre --- changelog.d/18335.bugfix | 1 + synapse/api/auth/msc3861_delegated.py | 2 ++ synapse/util/caches/response_cache.py | 33 +++++++++++++++++++-------- 3 files changed, 26 insertions(+), 10 deletions(-) create mode 100644 changelog.d/18335.bugfix diff --git a/changelog.d/18335.bugfix b/changelog.d/18335.bugfix new file mode 100644 index 0000000000..50df5a1b1d --- /dev/null +++ b/changelog.d/18335.bugfix @@ -0,0 +1 @@ +Fix the token introspection cache logging access tokens when MAS integration is in use. 
\ No newline at end of file diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 74e526123f..cc2c79fa96 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -201,6 +201,8 @@ class MSC3861DelegatedAuth(BaseAuth): self._clock, "token_introspection", timeout_ms=120_000, + # don't log because the keys are access tokens + enable_logging=False, ) self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata]( diff --git a/synapse/util/caches/response_cache.py b/synapse/util/caches/response_cache.py index 96b7ca83dc..54b99134b9 100644 --- a/synapse/util/caches/response_cache.py +++ b/synapse/util/caches/response_cache.py @@ -101,7 +101,13 @@ class ResponseCache(Generic[KV]): used rather than trying to compute a new response. """ - def __init__(self, clock: Clock, name: str, timeout_ms: float = 0): + def __init__( + self, + clock: Clock, + name: str, + timeout_ms: float = 0, + enable_logging: bool = True, + ): self._result_cache: Dict[KV, ResponseCacheEntry] = {} self.clock = clock @@ -109,6 +115,7 @@ class ResponseCache(Generic[KV]): self._name = name self._metrics = register_cache("response_cache", name, self, resizable=False) + self._enable_logging = enable_logging def size(self) -> int: return len(self._result_cache) @@ -246,9 +253,12 @@ class ResponseCache(Generic[KV]): """ entry = self._get(key) if not entry: - logger.debug( - "[%s]: no cached result for [%s], calculating new one", self._name, key - ) + if self._enable_logging: + logger.debug( + "[%s]: no cached result for [%s], calculating new one", + self._name, + key, + ) context = ResponseCacheContext(cache_key=key) if cache_context: kwargs["cache_context"] = context @@ -269,12 +279,15 @@ class ResponseCache(Generic[KV]): return await make_deferred_yieldable(entry.result.observe()) result = entry.result.observe() - if result.called: - logger.info("[%s]: using completed cached result for [%s]", self._name, key) - 
else: - logger.info( - "[%s]: using incomplete cached result for [%s]", self._name, key - ) + if self._enable_logging: + if result.called: + logger.info( + "[%s]: using completed cached result for [%s]", self._name, key + ) + else: + logger.info( + "[%s]: using incomplete cached result for [%s]", self._name, key + ) span_context = entry.opentracing_span_context with start_active_span_follows_from( From 45420b1d42416a3461f08aa3e6752c160c93366a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 15 Apr 2025 16:02:27 +0100 Subject: [PATCH 43/70] Fix `force_tracing_for_users` config when using MAS (#18334) This is a copy of what we do for internal auth, and we should figure out a way to deduplicate some of this stuff: https://github.com/element-hq/synapse/blob/dd05cc55eedbf086ae224a13c9ae9f0332d96b1f/synapse/api/auth/internal.py#L62-L110 --- changelog.d/18334.bugfix | 1 + synapse/api/auth/msc3861_delegated.py | 51 +++++++++++++++++++++++++++ 2 files changed, 52 insertions(+) create mode 100644 changelog.d/18334.bugfix diff --git a/changelog.d/18334.bugfix b/changelog.d/18334.bugfix new file mode 100644 index 0000000000..d82e522cb8 --- /dev/null +++ b/changelog.d/18334.bugfix @@ -0,0 +1 @@ +Fix `force_tracing_for_users` config when using delegated auth. 
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index cc2c79fa96..0598286cf4 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -45,6 +45,7 @@ from synapse.api.errors import ( ) from synapse.http.site import SynapseRequest from synapse.logging.context import make_deferred_yieldable +from synapse.logging.opentracing import active_span, force_tracing, start_active_span from synapse.types import Requester, UserID, create_requester from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall @@ -177,6 +178,7 @@ class MSC3861DelegatedAuth(BaseAuth): self._http_client = hs.get_proxied_http_client() self._hostname = hs.hostname self._admin_token: Callable[[], Optional[str]] = self._config.admin_token + self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users # # Token Introspection Cache # This remembers what users/devices are represented by which access tokens, @@ -363,6 +365,55 @@ class MSC3861DelegatedAuth(BaseAuth): allow_guest: bool = False, allow_expired: bool = False, allow_locked: bool = False, + ) -> Requester: + """Get a registered user's ID. + + Args: + request: An HTTP request with an access_token query parameter. + allow_guest: If False, will raise an AuthError if the user making the + request is a guest. + allow_expired: If True, allow the request through even if the account + is expired, or session token lifetime has ended. Note that + /login will deliver access tokens regardless of expiration. + + Returns: + Resolves to the requester + Raises: + InvalidClientCredentialsError if no user by that token exists or the token + is invalid. 
+ AuthError if access is denied for the user in the access token + """ + parent_span = active_span() + with start_active_span("get_user_by_req"): + requester = await self._wrapped_get_user_by_req( + request, allow_guest, allow_expired, allow_locked + ) + + if parent_span: + if requester.authenticated_entity in self._force_tracing_for_users: + # request tracing is enabled for this user, so we need to force it + # tracing on for the parent span (which will be the servlet span). + # + # It's too late for the get_user_by_req span to inherit the setting, + # so we also force it on for that. + force_tracing() + force_tracing(parent_span) + parent_span.set_tag( + "authenticated_entity", requester.authenticated_entity + ) + parent_span.set_tag("user_id", requester.user.to_string()) + if requester.device_id is not None: + parent_span.set_tag("device_id", requester.device_id) + if requester.app_service is not None: + parent_span.set_tag("appservice_id", requester.app_service.id) + return requester + + async def _wrapped_get_user_by_req( + self, + request: SynapseRequest, + allow_guest: bool = False, + allow_expired: bool = False, + allow_locked: bool = False, ) -> Requester: access_token = self.get_access_token_from_request(request) From 2c7a61e311002ebec0e3f5aff054f46dfb0015c5 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Tue, 15 Apr 2025 17:30:45 +0200 Subject: [PATCH 44/70] Don't cache introspection failures (#18339) --- changelog.d/18339.bugfix | 1 + synapse/api/auth/msc3861_delegated.py | 12 +++++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18339.bugfix diff --git a/changelog.d/18339.bugfix b/changelog.d/18339.bugfix new file mode 100644 index 0000000000..09d6d73420 --- /dev/null +++ b/changelog.d/18339.bugfix @@ -0,0 +1 @@ +Stop caching introspection failures when delegating auth to MAS. 
diff --git a/synapse/api/auth/msc3861_delegated.py b/synapse/api/auth/msc3861_delegated.py index 0598286cf4..9ded3366e3 100644 --- a/synapse/api/auth/msc3861_delegated.py +++ b/synapse/api/auth/msc3861_delegated.py @@ -49,7 +49,7 @@ from synapse.logging.opentracing import active_span, force_tracing, start_active from synapse.types import Requester, UserID, create_requester from synapse.util import json_decoder from synapse.util.caches.cached_call import RetryOnExceptionCachedCall -from synapse.util.caches.response_cache import ResponseCache +from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext if TYPE_CHECKING: from synapse.rest.admin.experimental_features import ExperimentalFeature @@ -279,7 +279,9 @@ class MSC3861DelegatedAuth(BaseAuth): metadata = await self._issuer_metadata.get() return metadata.get("introspection_endpoint") - async def _introspect_token(self, token: str) -> IntrospectionResult: + async def _introspect_token( + self, token: str, cache_context: ResponseCacheContext[str] + ) -> IntrospectionResult: """ Send a token to the introspection endpoint and returns the introspection response @@ -295,6 +297,8 @@ class MSC3861DelegatedAuth(BaseAuth): Returns: The introspection response """ + # By default, we shouldn't cache the result unless we know it's valid + cache_context.should_cache = False introspection_endpoint = await self._introspection_endpoint() raw_headers: Dict[str, str] = { "Content-Type": "application/x-www-form-urlencoded", @@ -352,6 +356,8 @@ class MSC3861DelegatedAuth(BaseAuth): "The introspection endpoint returned an invalid JSON response." 
) + # We had a valid response, so we can cache it + cache_context.should_cache = True return IntrospectionResult( IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec() ) @@ -482,7 +488,7 @@ class MSC3861DelegatedAuth(BaseAuth): try: introspection_result = await self._introspection_cache.wrap( - token, self._introspect_token, token + token, self._introspect_token, token, cache_context=True ) except Exception: logger.exception("Failed to introspect token") From 0046d7278bd8e350dcef40b95a05e116e6e66d90 Mon Sep 17 00:00:00 2001 From: Quentin Gliech Date: Wed, 16 Apr 2025 09:34:58 +0200 Subject: [PATCH 45/70] Fix ExternalIDReuse exception for concurrent transactions (#18342) --- changelog.d/18342.bugfix | 1 + .../storage/databases/main/registration.py | 29 +++++++++++++++---- 2 files changed, 24 insertions(+), 6 deletions(-) create mode 100644 changelog.d/18342.bugfix diff --git a/changelog.d/18342.bugfix b/changelog.d/18342.bugfix new file mode 100644 index 0000000000..6fa2fa679a --- /dev/null +++ b/changelog.d/18342.bugfix @@ -0,0 +1 @@ +Fix `ExternalIDReuse` exception after migrating to MAS on workers with a high traffic. diff --git a/synapse/storage/databases/main/registration.py b/synapse/storage/databases/main/registration.py index eadbf4901c..c43f31353b 100644 --- a/synapse/storage/databases/main/registration.py +++ b/synapse/storage/databases/main/registration.py @@ -763,16 +763,33 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore): txn, self.get_user_by_external_id, (auth_provider, external_id) ) - self.db_pool.simple_insert_txn( + # This INSERT ... ON CONFLICT DO NOTHING statement will cause a + # 'could not serialize access due to concurrent update' + # if the row is added concurrently by another transaction. + # This is exactly what we want, as it makes the transaction get retried + # in a new snapshot where we can check for a genuine conflict. 
+ was_inserted = self.db_pool.simple_upsert_txn( txn, table="user_external_ids", - values={ - "auth_provider": auth_provider, - "external_id": external_id, - "user_id": user_id, - }, + keyvalues={"auth_provider": auth_provider, "external_id": external_id}, + values={}, + insertion_values={"user_id": user_id}, ) + if not was_inserted: + existing_id = self.db_pool.simple_select_one_onecol_txn( + txn, + table="user_external_ids", + keyvalues={"auth_provider": auth_provider, "user_id": user_id}, + retcol="external_id", + allow_none=True, + ) + + if existing_id != external_id: + raise ExternalIDReuseException( + f"{user_id!r} has external id {existing_id!r} for {auth_provider} but trying to add {external_id!r}" + ) + async def remove_user_external_id( self, auth_provider: str, external_id: str, user_id: str ) -> None: From c16a981f22dd559b56caa94a46392c206be9a265 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 16 Apr 2025 14:14:56 +0100 Subject: [PATCH 46/70] Fix query for room participation (#18345) Follow on from #18068 Currently the subquery in `UPDATE` is pointless, as it will still just update all `room_membership` rows. Instead, we should look at the current membership event ID (which is easily retrieved from `local_current_membership`). We also add a `AND NOT participant` to noop the `UPDATE` when the `participant` flag is already set. cc @H-Shay --- changelog.d/18345.bugfix | 1 + synapse/storage/databases/main/roommember.py | 20 ++++++++------------ 2 files changed, 9 insertions(+), 12 deletions(-) create mode 100644 changelog.d/18345.bugfix diff --git a/changelog.d/18345.bugfix b/changelog.d/18345.bugfix new file mode 100644 index 0000000000..c8a001d4a3 --- /dev/null +++ b/changelog.d/18345.bugfix @@ -0,0 +1 @@ +Fix minor performance regression caused by tracking of room participation. Regressed in v1.128.0. 
diff --git a/synapse/storage/databases/main/roommember.py b/synapse/storage/databases/main/roommember.py index a0a6dcd04e..dfa7dd48d9 100644 --- a/synapse/storage/databases/main/roommember.py +++ b/synapse/storage/databases/main/roommember.py @@ -1622,14 +1622,11 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): sql = """ UPDATE room_memberships SET participant = true - WHERE (user_id, room_id) IN ( - SELECT user_id, room_id - FROM room_memberships - WHERE user_id = ? - AND room_id = ? - ORDER BY event_stream_ordering DESC - LIMIT 1 + WHERE event_id IN ( + SELECT event_id FROM local_current_membership + WHERE user_id = ? AND room_id = ? ) + AND NOT participant """ txn.execute(sql, (user_id, room_id)) @@ -1651,11 +1648,10 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore): ) -> bool: sql = """ SELECT participant - FROM room_memberships - WHERE user_id = ? - AND room_id = ? - ORDER BY event_stream_ordering DESC - LIMIT 1 + FROM local_current_membership AS l + INNER JOIN room_memberships AS r USING (event_id) + WHERE l.user_id = ? + AND l.room_id = ? 
""" txn.execute(sql, (user_id, room_id)) res = txn.fetchone() From 89cb613a4ef321d2eb52f13b94d1f1fc3205bad1 Mon Sep 17 00:00:00 2001 From: Devon Hudson Date: Wed, 16 Apr 2025 16:41:41 +0000 Subject: [PATCH 47/70] Revert "Add total event, unencrypted message, and e2ee event counts to stats reporting" (#18346) Reverts element-hq/synapse#18260 It is causing a failure when building release debs for `debian:bullseye` with the following error: ``` sqlite3.OperationalError: near "RETURNING": syntax error ``` --- changelog.d/18260.feature | 1 - .../reporting_homeserver_usage_statistics.md | 9 +- synapse/app/phone_stats_home.py | 41 +-- .../databases/main/events_bg_updates.py | 290 +----------------- synapse/storage/databases/main/metrics.py | 38 --- synapse/storage/schema/__init__.py | 8 +- .../schema/main/delta/92/01_event_stats.sql | 33 -- synapse/types/storage/__init__.py | 2 - tests/metrics/test_phone_home_stats.py | 258 ---------------- tests/storage/test_event_stats.py | 237 -------------- 10 files changed, 10 insertions(+), 907 deletions(-) delete mode 100644 changelog.d/18260.feature delete mode 100644 synapse/storage/schema/main/delta/92/01_event_stats.sql delete mode 100644 tests/metrics/test_phone_home_stats.py delete mode 100644 tests/storage/test_event_stats.py diff --git a/changelog.d/18260.feature b/changelog.d/18260.feature deleted file mode 100644 index e44e3dc990..0000000000 --- a/changelog.d/18260.feature +++ /dev/null @@ -1 +0,0 @@ -Add `total_event_count`, `total_message_count`, and `total_e2ee_event_count` fields to the homeserver usage statistics. 
diff --git a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md index cdec798410..4c0dbb5acd 100644 --- a/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md +++ b/docs/usage/administration/monitoring/reporting_homeserver_usage_statistics.md @@ -30,13 +30,10 @@ The following statistics are sent to the configured reporting endpoint: | `python_version` | string | The Python version number in use (e.g "3.7.1"). Taken from `sys.version_info`. | | `total_users` | int | The number of registered users on the homeserver. | | `total_nonbridged_users` | int | The number of users, excluding those created by an Application Service. | -| `daily_user_type_native` | int | The number of native, non-guest users created in the last 24 hours. | +| `daily_user_type_native` | int | The number of native users created in the last 24 hours. | | `daily_user_type_guest` | int | The number of guest users created in the last 24 hours. | | `daily_user_type_bridged` | int | The number of users created by Application Services in the last 24 hours. | | `total_room_count` | int | The total number of rooms present on the homeserver. | -| `total_event_count` | int | The total number of events present on the homeserver. | -| `total_message_count` | int | The total number of non-state events with type `m.room.message` present on the homeserver. | -| `total_e2ee_event_count` | int | The total number of non-state events with type `m.room.encrypted` present on the homeserver. This can be used as a slight over-estimate for the number of encrypted messages. | | `daily_active_users` | int | The number of unique users[^1] that have used the homeserver in the last 24 hours. | | `monthly_active_users` | int | The number of unique users[^1] that have used the homeserver in the last 30 days. 
| | `daily_active_rooms` | int | The number of rooms that have had a (state) event with the type `m.room.message` sent in them in the last 24 hours. | @@ -53,8 +50,8 @@ The following statistics are sent to the configured reporting endpoint: | `cache_factor` | int | The configured [`global factor`](../../configuration/config_documentation.md#caching) value for caching. | | `event_cache_size` | int | The configured [`event_cache_size`](../../configuration/config_documentation.md#caching) value for caching. | | `database_engine` | string | The database engine that is in use. Either "psycopg2" meaning PostgreSQL is in use, or "sqlite3" for SQLite3. | -| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.0, and "3.38.5" for SQLite 3.38.5 installed on the system. | -| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. | +| `database_server_version` | string | The version of the database server. Examples being "10.10" for PostgreSQL server version 10.0, and "3.38.5" for SQLite 3.38.5 installed on the system. | +| `log_level` | string | The log level in use. Examples are "INFO", "WARNING", "ERROR", "DEBUG", etc. | [^1]: Native matrix users and guests are always counted. 
If the diff --git a/synapse/app/phone_stats_home.py b/synapse/app/phone_stats_home.py index fe55838416..f602bbbeea 100644 --- a/synapse/app/phone_stats_home.py +++ b/synapse/app/phone_stats_home.py @@ -34,22 +34,6 @@ if TYPE_CHECKING: logger = logging.getLogger("synapse.app.homeserver") -ONE_MINUTE_SECONDS = 60 -ONE_HOUR_SECONDS = 60 * ONE_MINUTE_SECONDS - -MILLISECONDS_PER_SECOND = 1000 - -INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS -""" -We wait 5 minutes to send the first set of stats as the server can be quite busy the -first few minutes -""" - -PHONE_HOME_INTERVAL_SECONDS = 3 * ONE_HOUR_SECONDS -""" -Phone home stats are sent every 3 hours -""" - # Contains the list of processes we will be monitoring # currently either 0 or 1 _stats_process: List[Tuple[int, "resource.struct_rusage"]] = [] @@ -137,9 +121,6 @@ async def phone_stats_home( room_count = await store.get_room_count() stats["total_room_count"] = room_count - stats["total_event_count"] = await store.count_total_events() - stats["total_message_count"] = await store.count_total_messages() - stats["total_e2ee_event_count"] = await store.count_total_e2ee_events() stats["daily_active_users"] = common_metrics.daily_active_users stats["monthly_active_users"] = await store.count_monthly_users() @@ -204,14 +185,12 @@ def start_phone_stats_home(hs: "HomeServer") -> None: # If you increase the loop period, the accuracy of user_daily_visits # table will decrease clock.looping_call( - hs.get_datastores().main.generate_user_daily_visits, - 5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND, + hs.get_datastores().main.generate_user_daily_visits, 5 * 60 * 1000 ) # monthly active user limiting functionality clock.looping_call( - hs.get_datastores().main.reap_monthly_active_users, - ONE_HOUR_SECONDS * MILLISECONDS_PER_SECOND, + hs.get_datastores().main.reap_monthly_active_users, 1000 * 60 * 60 ) hs.get_datastores().main.reap_monthly_active_users() @@ -237,20 +216,12 @@ def 
start_phone_stats_home(hs: "HomeServer") -> None: if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only: generate_monthly_active_users() - clock.looping_call( - generate_monthly_active_users, - 5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND, - ) + clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000) # End of monthly active user settings if hs.config.metrics.report_stats: logger.info("Scheduling stats reporting for 3 hour intervals") - clock.looping_call( - phone_stats_home, - PHONE_HOME_INTERVAL_SECONDS * MILLISECONDS_PER_SECOND, - hs, - stats, - ) + clock.looping_call(phone_stats_home, 3 * 60 * 60 * 1000, hs, stats) # We need to defer this init for the cases that we daemonize # otherwise the process ID we get is that of the non-daemon process @@ -258,6 +229,4 @@ def start_phone_stats_home(hs: "HomeServer") -> None: # We wait 5 minutes to send the first set of stats as the server can # be quite busy the first few minutes - clock.call_later( - INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS, phone_stats_home, hs, stats - ) + clock.call_later(5 * 60, phone_stats_home, hs, stats) diff --git a/synapse/storage/databases/main/events_bg_updates.py b/synapse/storage/databases/main/events_bg_updates.py index b821d1c1b4..4b0bdd79c6 100644 --- a/synapse/storage/databases/main/events_bg_updates.py +++ b/synapse/storage/databases/main/events_bg_updates.py @@ -47,7 +47,7 @@ from synapse.storage.databases.main.events_worker import ( ) from synapse.storage.databases.main.state_deltas import StateDeltasStore from synapse.storage.databases.main.stream import StreamWorkerStore -from synapse.storage.engines import PostgresEngine, Sqlite3Engine +from synapse.storage.engines import PostgresEngine from synapse.storage.types import Cursor from synapse.types import JsonDict, RoomStreamToken, StateMap, StrCollection from synapse.types.handlers import SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES @@ -311,12 +311,6 @@ class 
EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS self._sliding_sync_membership_snapshots_fix_forgotten_column_bg_update, ) - # Add a background update to add triggers which track event counts. - self.db_pool.updates.register_background_update_handler( - _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - self._event_stats_populate_counts_bg_update, - ) - # We want this to run on the main database at startup before we start processing # events. # @@ -2553,288 +2547,6 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS return num_rows - async def _event_stats_populate_counts_bg_update( - self, progress: JsonDict, batch_size: int - ) -> int: - """ - Background update to populate the `event_stats` table with initial - values, and register DB triggers to continue updating it. - - We first register TRIGGERs on rows being added/removed from the `events` table, - which will keep the event counts continuously updated. We also mark the stopping - point for the main population step so we don't double count events. - - Then we will iterate through the `events` table in batches and update event - counts until we reach the stopping point. - - This data is intended to be used by the phone-home stats to keep track - of total event and message counts. A trigger is preferred to counting - rows in the `events` table, as said table can grow quite large. - - It is also preferable to adding an index on the `events` table, as even - an index can grow large. And calculating total counts would require - querying that entire index. - """ - # The last event `stream_ordering` we processed (starting place of this next - # batch). - last_event_stream_ordering = progress.get( - "last_event_stream_ordering", -(1 << 31) - ) - # The event `stream_ordering` we should stop at. This is used to avoid double - # counting events that are already accounted for because of the triggers. 
- stop_event_stream_ordering: Optional[int] = progress.get( - "stop_event_stream_ordering", None - ) - - def _add_triggers_txn( - txn: LoggingTransaction, - ) -> Optional[int]: - """ - Adds the triggers to the `events` table to keep the `event_stats` counts - up-to-date. - - Also populates the `stop_event_stream_ordering` background update progress - value. This marks the point at which we added the triggers, so we can avoid - double counting events that are already accounted for in the population - step. - - Returns: - The latest event `stream_ordering` in the `events` table when the triggers - were added or `None` if the `events` table is empty. - """ - - # Each time an event is inserted into the `events` table, update the stats. - # - # We're using `AFTER` triggers as we want to count successful inserts/deletes and - # not the ones that could potentially fail. - if isinstance(txn.database_engine, Sqlite3Engine): - txn.execute( - """ - CREATE TRIGGER IF NOT EXISTS event_stats_events_insert_trigger - AFTER INSERT ON events - BEGIN - -- Always increment total_event_count - UPDATE event_stats SET total_event_count = total_event_count + 1; - - -- Increment unencrypted_message_count for m.room.message events - UPDATE event_stats - SET unencrypted_message_count = unencrypted_message_count + 1 - WHERE NEW.type = 'm.room.message' AND NEW.state_key IS NULL; - - -- Increment e2ee_event_count for m.room.encrypted events - UPDATE event_stats - SET e2ee_event_count = e2ee_event_count + 1 - WHERE NEW.type = 'm.room.encrypted' AND NEW.state_key IS NULL; - END; - """ - ) - - txn.execute( - """ - CREATE TRIGGER IF NOT EXISTS event_stats_events_delete_trigger - AFTER DELETE ON events - BEGIN - -- Always decrement total_event_count - UPDATE event_stats SET total_event_count = total_event_count - 1; - - -- Decrement unencrypted_message_count for m.room.message events - UPDATE event_stats - SET unencrypted_message_count = unencrypted_message_count - 1 - WHERE OLD.type = 
'm.room.message' AND OLD.state_key IS NULL; - - -- Decrement e2ee_event_count for m.room.encrypted events - UPDATE event_stats - SET e2ee_event_count = e2ee_event_count - 1 - WHERE OLD.type = 'm.room.encrypted' AND OLD.state_key IS NULL; - END; - """ - ) - elif isinstance(txn.database_engine, PostgresEngine): - txn.execute( - """ - CREATE OR REPLACE FUNCTION event_stats_increment_counts() RETURNS trigger AS $BODY$ - BEGIN - IF TG_OP = 'INSERT' THEN - -- Always increment total_event_count - UPDATE event_stats SET total_event_count = total_event_count + 1; - - -- Increment unencrypted_message_count for m.room.message events - IF NEW.type = 'm.room.message' AND NEW.state_key IS NULL THEN - UPDATE event_stats SET unencrypted_message_count = unencrypted_message_count + 1; - END IF; - - -- Increment e2ee_event_count for m.room.encrypted events - IF NEW.type = 'm.room.encrypted' AND NEW.state_key IS NULL THEN - UPDATE event_stats SET e2ee_event_count = e2ee_event_count + 1; - END IF; - - -- We're not modifying the row being inserted/deleted, so we return it unchanged. - RETURN NEW; - - ELSIF TG_OP = 'DELETE' THEN - -- Always decrement total_event_count - UPDATE event_stats SET total_event_count = total_event_count - 1; - - -- Decrement unencrypted_message_count for m.room.message events - IF OLD.type = 'm.room.message' AND OLD.state_key IS NULL THEN - UPDATE event_stats SET unencrypted_message_count = unencrypted_message_count - 1; - END IF; - - -- Decrement e2ee_event_count for m.room.encrypted events - IF OLD.type = 'm.room.encrypted' AND OLD.state_key IS NULL THEN - UPDATE event_stats SET e2ee_event_count = e2ee_event_count - 1; - END IF; - - -- "The usual idiom in DELETE triggers is to return OLD." - -- (https://www.postgresql.org/docs/current/plpgsql-trigger.html) - RETURN OLD; - END IF; - - RAISE EXCEPTION 'update_event_stats() was run with unexpected operation (%%). 
' - 'This indicates a trigger misconfiguration as this function should only' - 'run with INSERT/DELETE operations.', TG_OP; - END; - $BODY$ LANGUAGE plpgsql; - """ - ) - - # We could use `CREATE OR REPLACE TRIGGER` but that's only available in Postgres - # 14 (https://www.postgresql.org/docs/14/sql-createtrigger.html) - txn.execute( - """ - DO - $$BEGIN - CREATE TRIGGER event_stats_increment_counts_trigger - AFTER INSERT OR DELETE ON events - FOR EACH ROW - EXECUTE PROCEDURE event_stats_increment_counts(); - EXCEPTION - -- This acts as a "CREATE TRIGGER IF NOT EXISTS" for Postgres - WHEN duplicate_object THEN - NULL; - END;$$; - """ - ) - else: - raise NotImplementedError("Unknown database engine") - - # Find the latest `stream_ordering` in the `events` table. We need to do - # this in the same transaction as where we add the triggers so we don't miss - # any events. - txn.execute( - """ - SELECT stream_ordering - FROM events - ORDER BY stream_ordering DESC - LIMIT 1 - """ - ) - row = cast(Optional[Tuple[int]], txn.fetchone()) - - # Update the progress - if row is not None: - (max_stream_ordering,) = row - self.db_pool.updates._background_update_progress_txn( - txn, - _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - {"stop_event_stream_ordering": max_stream_ordering}, - ) - return max_stream_ordering - - return None - - # First, add the triggers to keep the `event_stats` values up-to-date. - # - # If we don't have a `stop_event_stream_ordering` yet, we need to add the - # triggers to the `events` table and set the stopping point so we don't - # double count `events` later. - if stop_event_stream_ordering is None: - stop_event_stream_ordering = await self.db_pool.runInteraction( - "_event_stats_populate_counts_bg_update_add_triggers", - _add_triggers_txn, - ) - - # If there is no `stop_event_stream_ordering`, then there are no events - # in the `events` table and we can end the background update altogether. 
- if stop_event_stream_ordering is None: - await self.db_pool.updates._end_background_update( - _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE - ) - return batch_size - - def _populate_txn( - txn: LoggingTransaction, - ) -> int: - """ - Updates the `event_stats` table from this batch of events. - """ - - # Increment the counts based on the events present in this batch. - txn.execute( - """ - WITH event_batch AS ( - SELECT * - FROM events - WHERE stream_ordering > ? AND stream_ordering <= ? - ORDER BY stream_ordering ASC - LIMIT ? - ), - batch_stats AS ( - SELECT - MAX(stream_ordering) AS max_stream_ordering, - COALESCE(COUNT(*), 0) AS total_event_count, - COALESCE(SUM(CASE WHEN type = 'm.room.message' AND state_key IS NULL THEN 1 ELSE 0 END), 0) AS unencrypted_message_count, - COALESCE(SUM(CASE WHEN type = 'm.room.encrypted' AND state_key IS NULL THEN 1 ELSE 0 END), 0) AS e2ee_event_count - FROM event_batch - - UNION ALL - - SELECT null, 0, 0, 0 - WHERE NOT EXISTS (SELECT 1 FROM event_batch) - LIMIT 1 - ) - UPDATE event_stats - SET - total_event_count = total_event_count + (SELECT total_event_count FROM batch_stats), - unencrypted_message_count = unencrypted_message_count + (SELECT unencrypted_message_count FROM batch_stats), - e2ee_event_count = e2ee_event_count + (SELECT e2ee_event_count FROM batch_stats) - RETURNING - (SELECT total_event_count FROM batch_stats) AS total_event_count, - (SELECT max_stream_ordering FROM batch_stats) AS max_stream_ordering - """, - (last_event_stream_ordering, stop_event_stream_ordering, batch_size), - ) - - # Get the results of the update - (total_event_count, max_stream_ordering) = cast( - Tuple[int, Optional[int]], txn.fetchone() - ) - - # Update the progress - self.db_pool.updates._background_update_progress_txn( - txn, - _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - { - "last_event_stream_ordering": max_stream_ordering, - "stop_event_stream_ordering": stop_event_stream_ordering, - }, - ) - - return 
total_event_count - - num_rows_processed = await self.db_pool.runInteraction( - "_event_stats_populate_counts_bg_update", - _populate_txn, - ) - - # No more rows to process, so our background update is complete. - if not num_rows_processed: - await self.db_pool.updates._end_background_update( - _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE - ) - - return batch_size - def _resolve_stale_data_in_sliding_sync_tables( txn: LoggingTransaction, diff --git a/synapse/storage/databases/main/metrics.py b/synapse/storage/databases/main/metrics.py index a9cecc4bc1..9ce1100b5c 100644 --- a/synapse/storage/databases/main/metrics.py +++ b/synapse/storage/databases/main/metrics.py @@ -126,44 +126,6 @@ class ServerMetricsStore(EventPushActionsWorkerStore, SQLBaseStore): return await self.db_pool.runInteraction("count_e2ee_messages", _count_messages) - async def count_total_events(self) -> int: - """ - Returns the total number of events present on the server. - """ - - return await self.db_pool.simple_select_one_onecol( - table="event_stats", - keyvalues={}, - retcol="total_event_count", - desc="count_total_events", - ) - - async def count_total_messages(self) -> int: - """ - Returns the total number of `m.room.message` events present on the - server. - """ - - return await self.db_pool.simple_select_one_onecol( - table="event_stats", - keyvalues={}, - retcol="unencrypted_message_count", - desc="count_total_messages", - ) - - async def count_total_e2ee_events(self) -> int: - """ - Returns the total number of `m.room.encrypted` events present on the - server. 
- """ - - return await self.db_pool.simple_select_one_onecol( - table="event_stats", - keyvalues={}, - retcol="e2ee_event_count", - desc="count_total_e2ee_events", - ) - async def count_daily_sent_e2ee_messages(self) -> int: def _count_messages(txn: LoggingTransaction) -> int: # This is good enough as if you have silly characters in your own diff --git a/synapse/storage/schema/__init__.py b/synapse/storage/schema/__init__.py index 7474ba4542..ad683a3a07 100644 --- a/synapse/storage/schema/__init__.py +++ b/synapse/storage/schema/__init__.py @@ -19,7 +19,7 @@ # # -SCHEMA_VERSION = 92 # remember to update the list below when updating +SCHEMA_VERSION = 91 # remember to update the list below when updating """Represents the expectations made by the codebase about the database schema This should be incremented whenever the codebase changes its requirements on the @@ -162,12 +162,6 @@ Changes in SCHEMA_VERSION = 89 Changes in SCHEMA_VERSION = 90 - Add a column `participant` to `room_memberships` table - Add background update to delete unreferenced state groups. - -Changes in SCHEMA_VERSION = 91 - - TODO - -Changes in SCHEMA_VERSION = 92 - - Add `event_stats` table to store global event statistics like total counts """ diff --git a/synapse/storage/schema/main/delta/92/01_event_stats.sql b/synapse/storage/schema/main/delta/92/01_event_stats.sql deleted file mode 100644 index 4bded03578..0000000000 --- a/synapse/storage/schema/main/delta/92/01_event_stats.sql +++ /dev/null @@ -1,33 +0,0 @@ --- --- This file is licensed under the Affero General Public License (AGPL) version 3. --- --- Copyright (C) 2025 New Vector, Ltd --- --- This program is free software: you can redistribute it and/or modify --- it under the terms of the GNU Affero General Public License as --- published by the Free Software Foundation, either version 3 of the --- License, or (at your option) any later version. --- --- See the GNU Affero General Public License for more details: --- . 
- - --- Create the `event_stats` table to store these statistics. -CREATE TABLE event_stats ( - total_event_count INTEGER NOT NULL DEFAULT 0, - unencrypted_message_count INTEGER NOT NULL DEFAULT 0, - e2ee_event_count INTEGER NOT NULL DEFAULT 0 -); - --- Insert initial values into the table. -INSERT INTO event_stats ( - total_event_count, - unencrypted_message_count, - e2ee_event_count -) VALUES (0, 0, 0); - --- Add a background update to populate the `event_stats` table with the current counts --- from the `events` table and add triggers to keep this count up-to-date. -INSERT INTO background_updates (ordering, update_name, progress_json) VALUES - (9201, 'event_stats_populate_counts_bg_update', '{}'); - diff --git a/synapse/types/storage/__init__.py b/synapse/types/storage/__init__.py index 73d19d91ed..e03ff7ffc8 100644 --- a/synapse/types/storage/__init__.py +++ b/synapse/types/storage/__init__.py @@ -52,5 +52,3 @@ class _BackgroundUpdates: MARK_UNREFERENCED_STATE_GROUPS_FOR_DELETION_BG_UPDATE = ( "mark_unreferenced_state_groups_for_deletion_bg_update" ) - - EVENT_STATS_POPULATE_COUNTS_BG_UPDATE = "event_stats_populate_counts_bg_update" diff --git a/tests/metrics/test_phone_home_stats.py b/tests/metrics/test_phone_home_stats.py deleted file mode 100644 index 1b3eafed5f..0000000000 --- a/tests/metrics/test_phone_home_stats.py +++ /dev/null @@ -1,258 +0,0 @@ -# -# This file is licensed under the Affero General Public License (AGPL) version 3. -# -# Copyright (C) 2025 New Vector, Ltd -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# See the GNU Affero General Public License for more details: -# . 
- -import logging -from unittest.mock import AsyncMock - -from twisted.test.proto_helpers import MemoryReactor - -from synapse.app.phone_stats_home import ( - PHONE_HOME_INTERVAL_SECONDS, - start_phone_stats_home, -) -from synapse.rest import admin, login, register, room -from synapse.server import HomeServer -from synapse.types import JsonDict -from synapse.util import Clock - -from tests import unittest -from tests.server import ThreadedMemoryReactorClock - -TEST_REPORT_STATS_ENDPOINT = "https://fake.endpoint/stats" -TEST_SERVER_CONTEXT = "test-server-context" - - -class PhoneHomeStatsTestCase(unittest.HomeserverTestCase): - servlets = [ - admin.register_servlets_for_client_rest_resource, - room.register_servlets, - register.register_servlets, - login.register_servlets, - ] - - def make_homeserver( - self, reactor: ThreadedMemoryReactorClock, clock: Clock - ) -> HomeServer: - # Configure the homeserver to enable stats reporting. - config = self.default_config() - config["report_stats"] = True - config["report_stats_endpoint"] = TEST_REPORT_STATS_ENDPOINT - - # Configure the server context so we can check it ends up being reported - config["server_context"] = TEST_SERVER_CONTEXT - - # Allow guests to be registered - config["allow_guest_access"] = True - - hs = self.setup_test_homeserver(config=config) - - # Replace the proxied http client with a mock, so we can inspect outbound requests to - # the configured stats endpoint. - self.put_json_mock = AsyncMock(return_value={}) - hs.get_proxied_http_client().put_json = self.put_json_mock # type: ignore[method-assign] - return hs - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.store = hs.get_datastores().main - - # Wait for the background updates to add the database triggers that keep the - # `event_stats` table up-to-date. 
- self.wait_for_background_updates() - - # Force stats reporting to occur - start_phone_stats_home(hs=hs) - - super().prepare(reactor, clock, hs) - - def _get_latest_phone_home_stats(self) -> JsonDict: - # Wait for `phone_stats_home` to be called again + a healthy margin (50s). - self.reactor.advance(2 * PHONE_HOME_INTERVAL_SECONDS + 50) - - # Extract the reported stats from our http client mock - mock_calls = self.put_json_mock.call_args_list - report_stats_calls = [] - for call in mock_calls: - if call.args[0] == TEST_REPORT_STATS_ENDPOINT: - report_stats_calls.append(call) - - self.assertGreaterEqual( - (len(report_stats_calls)), - 1, - "Expected at-least one call to the report_stats endpoint", - ) - - # Extract the phone home stats from the call - phone_home_stats = report_stats_calls[0].args[1] - - return phone_home_stats - - def _perform_user_actions(self) -> None: - """ - Perform some actions on the homeserver that would bump the phone home - stats. - """ - - # Create some users - user_1_mxid = self.register_user( - username="test_user_1", - password="test", - ) - user_2_mxid = self.register_user( - username="test_user_2", - password="test", - ) - # Note: `self.register_user` does not support guest registration, and updating the - # Admin API it calls to add a new parameter would cause the `mac` parameter to fail - # in a backwards-incompatible manner. Hence, we make a manual request here. 
- _guest_user_mxid = self.make_request( - method="POST", - path="/_matrix/client/v3/register?kind=guest", - content={ - "username": "guest_user", - "password": "test", - }, - shorthand=False, - ) - - # Log in to each user - user_1_token = self.login(username=user_1_mxid, password="test") - user_2_token = self.login(username=user_2_mxid, password="test") - - # Create a room between the two users - room_1_id = self.helper.create_room_as( - is_public=False, - tok=user_1_token, - ) - - # Mark this room as end-to-end encrypted - self.helper.send_state( - room_id=room_1_id, - event_type="m.room.encryption", - body={ - "algorithm": "m.megolm.v1.aes-sha2", - "rotation_period_ms": 604800000, - "rotation_period_msgs": 100, - }, - state_key="", - tok=user_1_token, - ) - - # User 1 invites user 2 - self.helper.invite( - room=room_1_id, - src=user_1_mxid, - targ=user_2_mxid, - tok=user_1_token, - ) - - # User 2 joins - self.helper.join( - room=room_1_id, - user=user_2_mxid, - tok=user_2_token, - ) - - # User 1 sends 10 unencrypted messages - for _ in range(10): - self.helper.send( - room_id=room_1_id, - body="Zoinks Scoob! 
A message!", - tok=user_1_token, - ) - - # User 2 sends 5 encrypted "messages" - for _ in range(5): - self.helper.send_event( - room_id=room_1_id, - type="m.room.encrypted", - content={ - "algorithm": "m.olm.v1.curve25519-aes-sha2", - "sender_key": "some_key", - "ciphertext": { - "some_key": { - "type": 0, - "body": "encrypted_payload", - }, - }, - }, - tok=user_2_token, - ) - - def test_phone_home_stats(self) -> None: - """ - Test that the phone home stats contain the stats we expect based on - the scenario carried out in `prepare` - """ - # Do things to bump the stats - self._perform_user_actions() - - # Wait for the stats to be reported - phone_home_stats = self._get_latest_phone_home_stats() - - self.assertEqual( - phone_home_stats["homeserver"], self.hs.config.server.server_name - ) - - self.assertTrue(isinstance(phone_home_stats["memory_rss"], int)) - self.assertTrue(isinstance(phone_home_stats["cpu_average"], int)) - - self.assertEqual(phone_home_stats["server_context"], TEST_SERVER_CONTEXT) - - self.assertTrue(isinstance(phone_home_stats["timestamp"], int)) - self.assertTrue(isinstance(phone_home_stats["uptime_seconds"], int)) - self.assertTrue(isinstance(phone_home_stats["python_version"], str)) - - # We expect only our test users to exist on the homeserver - self.assertEqual(phone_home_stats["total_users"], 3) - self.assertEqual(phone_home_stats["total_nonbridged_users"], 3) - self.assertEqual(phone_home_stats["daily_user_type_native"], 2) - self.assertEqual(phone_home_stats["daily_user_type_guest"], 1) - self.assertEqual(phone_home_stats["daily_user_type_bridged"], 0) - self.assertEqual(phone_home_stats["total_room_count"], 1) - self.assertEqual(phone_home_stats["total_event_count"], 24) - self.assertEqual(phone_home_stats["total_message_count"], 10) - self.assertEqual(phone_home_stats["total_e2ee_event_count"], 5) - self.assertEqual(phone_home_stats["daily_active_users"], 2) - self.assertEqual(phone_home_stats["monthly_active_users"], 2) - 
self.assertEqual(phone_home_stats["daily_active_rooms"], 1) - self.assertEqual(phone_home_stats["daily_active_e2ee_rooms"], 1) - self.assertEqual(phone_home_stats["daily_messages"], 10) - self.assertEqual(phone_home_stats["daily_e2ee_messages"], 5) - self.assertEqual(phone_home_stats["daily_sent_messages"], 10) - self.assertEqual(phone_home_stats["daily_sent_e2ee_messages"], 5) - - # Our users have not been around for >30 days, hence these are all 0. - self.assertEqual(phone_home_stats["r30v2_users_all"], 0) - self.assertEqual(phone_home_stats["r30v2_users_android"], 0) - self.assertEqual(phone_home_stats["r30v2_users_ios"], 0) - self.assertEqual(phone_home_stats["r30v2_users_electron"], 0) - self.assertEqual(phone_home_stats["r30v2_users_web"], 0) - self.assertEqual( - phone_home_stats["cache_factor"], self.hs.config.caches.global_factor - ) - self.assertEqual( - phone_home_stats["event_cache_size"], - self.hs.config.caches.event_cache_size, - ) - self.assertEqual( - phone_home_stats["database_engine"], - self.hs.config.database.databases[0].config["name"], - ) - self.assertEqual( - phone_home_stats["database_server_version"], - self.hs.get_datastores().main.database_engine.server_version, - ) - - synapse_logger = logging.getLogger("synapse") - log_level = synapse_logger.getEffectiveLevel() - self.assertEqual(phone_home_stats["log_level"], logging.getLevelName(log_level)) diff --git a/tests/storage/test_event_stats.py b/tests/storage/test_event_stats.py deleted file mode 100644 index 791ed27018..0000000000 --- a/tests/storage/test_event_stats.py +++ /dev/null @@ -1,237 +0,0 @@ -# -# This file is licensed under the Affero General Public License (AGPL) version 3. -# -# Copyright (C) 2025 New Vector, Ltd -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. 
-# -# See the GNU Affero General Public License for more details: -# . - - -from twisted.test.proto_helpers import MemoryReactor - -from synapse.rest import admin, login, register, room -from synapse.server import HomeServer -from synapse.types.storage import _BackgroundUpdates -from synapse.util import Clock - -from tests import unittest - - -class EventStatsTestCase(unittest.HomeserverTestCase): - """ - Tests for the `event_stats` table - """ - - servlets = [ - admin.register_servlets_for_client_rest_resource, - room.register_servlets, - register.register_servlets, - login.register_servlets, - ] - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.store = hs.get_datastores().main - - # Wait for the background updates to add the database triggers that keep the - # `event_stats` table up-to-date. - # - # This also prevents background updates running during the tests and messing - # with the results. - self.wait_for_background_updates() - - super().prepare(reactor, clock, hs) - - def _perform_user_actions(self) -> None: - """ - Perform some actions on the homeserver that would bump the event counts. - """ - # Create some users - user_1_mxid = self.register_user( - username="test_user_1", - password="test", - ) - user_2_mxid = self.register_user( - username="test_user_2", - password="test", - ) - # Note: `self.register_user` does not support guest registration, and updating the - # Admin API it calls to add a new parameter would cause the `mac` parameter to fail - # in a backwards-incompatible manner. Hence, we make a manual request here. 
- _guest_user_mxid = self.make_request( - method="POST", - path="/_matrix/client/v3/register?kind=guest", - content={ - "username": "guest_user", - "password": "test", - }, - shorthand=False, - ) - - # Log in to each user - user_1_token = self.login(username=user_1_mxid, password="test") - user_2_token = self.login(username=user_2_mxid, password="test") - - # Create a room between the two users - room_1_id = self.helper.create_room_as( - is_public=False, - tok=user_1_token, - ) - - # Mark this room as end-to-end encrypted - self.helper.send_state( - room_id=room_1_id, - event_type="m.room.encryption", - body={ - "algorithm": "m.megolm.v1.aes-sha2", - "rotation_period_ms": 604800000, - "rotation_period_msgs": 100, - }, - state_key="", - tok=user_1_token, - ) - - # User 1 invites user 2 - self.helper.invite( - room=room_1_id, - src=user_1_mxid, - targ=user_2_mxid, - tok=user_1_token, - ) - - # User 2 joins - self.helper.join( - room=room_1_id, - user=user_2_mxid, - tok=user_2_token, - ) - - # User 1 sends 10 unencrypted messages - for _ in range(10): - self.helper.send( - room_id=room_1_id, - body="Zoinks Scoob! A message!", - tok=user_1_token, - ) - - # User 2 sends 5 encrypted "messages" - for _ in range(5): - self.helper.send_event( - room_id=room_1_id, - type="m.room.encrypted", - content={ - "algorithm": "m.olm.v1.curve25519-aes-sha2", - "sender_key": "some_key", - "ciphertext": { - "some_key": { - "type": 0, - "body": "encrypted_payload", - }, - }, - }, - tok=user_2_token, - ) - - def test_background_update_with_events(self) -> None: - """ - Test that the background update to populate the `event_stats` table works - correctly when there are events in the database. - """ - # Do things to bump the stats - self._perform_user_actions() - - # Keep in mind: These are already populated as the background update has already - # ran once when Synapse started and added the database triggers which are - # incrementing things as new events come in. 
- self.assertEqual(self.get_success(self.store.count_total_events()), 24) - self.assertEqual(self.get_success(self.store.count_total_messages()), 10) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 5) - - # Run the background update again - self.get_success( - self.store.db_pool.simple_insert( - "background_updates", - { - "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - "progress_json": "{}", - }, - ) - ) - self.store.db_pool.updates._all_done = False - self.wait_for_background_updates() - - # We expect these values to double as the background update is being run *again* - # and will double-count the `events`. - self.assertEqual(self.get_success(self.store.count_total_events()), 48) - self.assertEqual(self.get_success(self.store.count_total_messages()), 20) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 10) - - def test_background_update_without_events(self) -> None: - """ - Test that the background update to populate the `event_stats` table works - correctly without events in the database. - """ - # Keep in mind: These are already populated as the background update has already - # ran once when Synapse started and added the database triggers which are - # incrementing things as new events come in. - # - # In this case, no events have been sent, so we expect the counts to be 0. 
- self.assertEqual(self.get_success(self.store.count_total_events()), 0) - self.assertEqual(self.get_success(self.store.count_total_messages()), 0) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 0) - - # Run the background update again - self.get_success( - self.store.db_pool.simple_insert( - "background_updates", - { - "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - "progress_json": "{}", - }, - ) - ) - self.store.db_pool.updates._all_done = False - self.wait_for_background_updates() - - self.assertEqual(self.get_success(self.store.count_total_events()), 0) - self.assertEqual(self.get_success(self.store.count_total_messages()), 0) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 0) - - def test_background_update_resume_progress(self) -> None: - """ - Test that the background update to populate the `event_stats` table works - correctly to resume from `progress_json`. - """ - # Do things to bump the stats - self._perform_user_actions() - - # Keep in mind: These are already populated as the background update has already - # ran once when Synapse started and added the database triggers which are - # incrementing things as new events come in. 
- self.assertEqual(self.get_success(self.store.count_total_events()), 24) - self.assertEqual(self.get_success(self.store.count_total_messages()), 10) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 5) - - # Run the background update again - self.get_success( - self.store.db_pool.simple_insert( - "background_updates", - { - "update_name": _BackgroundUpdates.EVENT_STATS_POPULATE_COUNTS_BG_UPDATE, - "progress_json": '{ "last_event_stream_ordering": 14, "stop_event_stream_ordering": 21 }', - }, - ) - ) - self.store.db_pool.updates._all_done = False - self.wait_for_background_updates() - - # We expect these values to increase as the background update is being run - # *again* and will double-count some of the `events` over the range specified - # by the `progress_json`. - self.assertEqual(self.get_success(self.store.count_total_events()), 24 + 7) - self.assertEqual(self.get_success(self.store.count_total_messages()), 16) - self.assertEqual(self.get_success(self.store.count_total_e2ee_events()), 6) From 33824495ba520f57eae3687db847175b40f71d73 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Apr 2025 15:08:33 +0100 Subject: [PATCH 48/70] Move GET /devices/ off main process (#18355) We can't move PUT/DELETE as they do need to happen on main process (due to notification of device changes). --------- Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/18355.feature | 1 + docs/workers.md | 1 + synapse/rest/client/devices.py | 20 ++++++++++++++++++-- 3 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18355.feature diff --git a/changelog.d/18355.feature b/changelog.d/18355.feature new file mode 100644 index 0000000000..4813f0a291 --- /dev/null +++ b/changelog.d/18355.feature @@ -0,0 +1 @@ +Add support for handling `GET /devices/` on workers. 
diff --git a/docs/workers.md b/docs/workers.md index 08ee493da9..def902d24c 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -280,6 +280,7 @@ Additionally, the following REST endpoints can be handled for GET requests: ^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/ ^/_matrix/client/unstable/org.matrix.msc4140/delayed_events + ^/_matrix/client/(api/v1|r0|v3|unstable)/devices/ # Account data requests ^/_matrix/client/(r0|v3|unstable)/.*/tags diff --git a/synapse/rest/client/devices.py b/synapse/rest/client/devices.py index 4607b23494..0b075cc2f2 100644 --- a/synapse/rest/client/devices.py +++ b/synapse/rest/client/devices.py @@ -143,11 +143,11 @@ class DeviceRestServlet(RestServlet): self.hs = hs self.auth = hs.get_auth() handler = hs.get_device_handler() - assert isinstance(handler, DeviceHandler) self.device_handler = handler self.auth_handler = hs.get_auth_handler() self._msc3852_enabled = hs.config.experimental.msc3852_enabled self._msc3861_oauth_delegation_enabled = hs.config.experimental.msc3861.enabled + self._is_main_process = hs.config.worker.worker_app is None async def on_GET( self, request: SynapseRequest, device_id: str @@ -179,6 +179,14 @@ class DeviceRestServlet(RestServlet): async def on_DELETE( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: + # Can only be run on main process, as changes to device lists must + # happen on main. + if not self._is_main_process: + error_message = "DELETE on /devices/ must be routed to main process" + logger.error(error_message) + raise SynapseError(500, error_message) + assert isinstance(self.device_handler, DeviceHandler) + requester = await self.auth.get_user_by_req(request) try: @@ -223,6 +231,14 @@ class DeviceRestServlet(RestServlet): async def on_PUT( self, request: SynapseRequest, device_id: str ) -> Tuple[int, JsonDict]: + # Can only be run on main process, as changes to device lists must + # happen on main. 
+ if not self._is_main_process: + error_message = "PUT on /devices/ must be routed to main process" + logger.error(error_message) + raise SynapseError(500, error_message) + assert isinstance(self.device_handler, DeviceHandler) + requester = await self.auth.get_user_by_req(request, allow_guest=True) body = parse_and_validate_json_object_from_request(request, self.PutBody) @@ -585,9 +601,9 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: ): DeleteDevicesRestServlet(hs).register(http_server) DevicesRestServlet(hs).register(http_server) + DeviceRestServlet(hs).register(http_server) if hs.config.worker.worker_app is None: - DeviceRestServlet(hs).register(http_server) if hs.config.experimental.msc2697_enabled: DehydratedDeviceServlet(hs).register(http_server) ClaimDehydratedDeviceServlet(hs).register(http_server) From 5b89c9264380da8f9cc55460f8215758fe570010 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 25 Apr 2025 15:18:22 +0100 Subject: [PATCH 49/70] Allow /rooms/ admin API to be on workers (#18360) Tested by https://github.com/matrix-org/sytest/pull/1400 --- changelog.d/18360.misc | 1 + docs/workers.md | 1 + synapse/app/generic_worker.py | 3 ++- synapse/rest/admin/__init__.py | 5 +++-- 4 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18360.misc diff --git a/changelog.d/18360.misc b/changelog.d/18360.misc new file mode 100644 index 0000000000..e5bf4f536f --- /dev/null +++ b/changelog.d/18360.misc @@ -0,0 +1 @@ +Allow `/rooms/` admin API to be run on workers. diff --git a/docs/workers.md b/docs/workers.md index def902d24c..9ebcc886b1 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -249,6 +249,7 @@ information. 
^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$ ^/_matrix/client/(r0|v3|unstable)/capabilities$ ^/_matrix/client/(r0|v3|unstable)/notifications$ + ^/_synapse/admin/v1/rooms/ # Encryption requests ^/_matrix/client/(r0|v3|unstable)/keys/query$ diff --git a/synapse/app/generic_worker.py b/synapse/app/generic_worker.py index a528c3890d..e4120ed424 100644 --- a/synapse/app/generic_worker.py +++ b/synapse/app/generic_worker.py @@ -52,7 +52,7 @@ from synapse.logging.context import LoggingContext from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource from synapse.rest import ClientRestResource -from synapse.rest.admin import register_servlets_for_media_repo +from synapse.rest.admin import AdminRestResource, register_servlets_for_media_repo from synapse.rest.health import HealthResource from synapse.rest.key.v2 import KeyResource from synapse.rest.synapse.client import build_synapse_client_resource_tree @@ -190,6 +190,7 @@ class GenericWorkerServer(HomeServer): resources.update(build_synapse_client_resource_tree(self)) resources["/.well-known"] = well_known_resource(self) + resources["/_synapse/admin"] = AdminRestResource(self) elif name == "federation": resources[FEDERATION_PREFIX] = TransportLayerServer(self) diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index f3c99663e8..5977ded4a0 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -275,7 +275,9 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: """ Register all the admin servlets. """ - # Admin servlets aren't registered on workers. + RoomRestServlet(hs).register(http_server) + + # Admin servlets below may not work on workers. 
if hs.config.worker.worker_app is not None: return @@ -283,7 +285,6 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: BlockRoomRestServlet(hs).register(http_server) ListRoomRestServlet(hs).register(http_server) RoomStateRestServlet(hs).register(http_server) - RoomRestServlet(hs).register(http_server) RoomRestV2Servlet(hs).register(http_server) RoomMembersRestServlet(hs).register(http_server) DeleteRoomStatusByDeleteIdRestServlet(hs).register(http_server) From 1482ad1917ef5e022b2d2238d30be74f50b47953 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:05:43 +0100 Subject: [PATCH 50/70] Bump sigstore/cosign-installer from 3.8.1 to 3.8.2 (#18366) --- .github/workflows/docker.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 052dcf800b..c617753c7a 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -30,7 +30,7 @@ jobs: run: docker buildx inspect - name: Install Cosign - uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a # v3.8.1 + uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2 - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 From 2ff977a6c39caa24f35c58f2f5acd948dbdf122b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:05:55 +0100 Subject: [PATCH 51/70] Bump actions/add-to-project from 280af8ae1f83a494cfad2cb10f02f6d13529caa9 to 5b1a254a3546aef88e0a7724a77a623fa2e47c36 (#18365) --- .github/workflows/triage_labelled.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/triage_labelled.yml b/.github/workflows/triage_labelled.yml index feab5906e0..e506be393f 100644 --- a/.github/workflows/triage_labelled.yml +++ 
b/.github/workflows/triage_labelled.yml @@ -11,7 +11,7 @@ jobs: if: > contains(github.event.issue.labels.*.name, 'X-Needs-Info') steps: - - uses: actions/add-to-project@280af8ae1f83a494cfad2cb10f02f6d13529caa9 # main (v1.0.2 + 10 commits) + - uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits) id: add_project with: project-url: "https://github.com/orgs/matrix-org/projects/67" From a87981f673fe944690202cc4067a02f0c666eee4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:06:13 +0100 Subject: [PATCH 52/70] Bump actions/download-artifact from 4.2.1 to 4.3.0 (#18364) --- .github/workflows/release-artifacts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index 573264229f..e0b8f2faf4 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -203,7 +203,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download all workflow run artifacts - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1 + uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0 - name: Build a tarball for the debs # We need to merge all the debs uploads into one folder, then compress # that. 
From 4c958c679a9c20930adfa25e64fc237fbf526591 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:06:26 +0100 Subject: [PATCH 53/70] Bump stefanzweifel/git-auto-commit-action from 5.1.0 to 5.2.0 (#18354) --- .github/workflows/fix_lint.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fix_lint.yaml b/.github/workflows/fix_lint.yaml index fe699c1b2f..923e96a624 100644 --- a/.github/workflows/fix_lint.yaml +++ b/.github/workflows/fix_lint.yaml @@ -44,6 +44,6 @@ jobs: - run: cargo fmt continue-on-error: true - - uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5.1.0 + - uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0 with: commit_message: "Attempt to fix linting" From 39e17856a37570bda2fa912c6751e31bad6f970b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:06:36 +0100 Subject: [PATCH 54/70] Bump anyhow from 1.0.97 to 1.0.98 (#18336) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1b17e9910a..e1c381e273 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13,9 +13,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "arc-swap" From 2ef782462011044718b0b3848f0cd33e5b2e1827 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:07:06 +0100 Subject: [PATCH 55/70] Bump pyo3-log from 0.12.2 to 0.12.3 (#18317) --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock 
b/Cargo.lock index e1c381e273..822eb2cdba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -316,9 +316,9 @@ dependencies = [ [[package]] name = "pyo3-log" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b78e4983ba15bc62833a0e0941d965bc03690163f1127864f1408db25063466" +checksum = "7079e412e909af5d6be7c04a7f29f6a2837a080410e1c529c9dee2c367383db4" dependencies = [ "arc-swap", "log", From b0795d0cb670b2e8e66839e729cce42eb681832e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:07:15 +0100 Subject: [PATCH 56/70] Bump types-psycopg2 from 2.9.21.20250121 to 2.9.21.20250318 (#18316) Bumps [types-psycopg2](https://github.com/python/typeshed) from 2.9.21.20250121 to 2.9.21.20250318.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=types-psycopg2&package-manager=pip&previous-version=2.9.21.20250121&new-version=2.9.21.20250318)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2bf511e8a6..51e73bae54 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3007,14 +3007,14 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.20250121" +version = "2.9.21.20250318" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_psycopg2-2.9.21.20250121-py3-none-any.whl", hash = "sha256:b890dc6f5a08b6433f0ff73a4ec9a834deedad3e914f2a4a6fd43df021f745f1"}, - {file = "types_psycopg2-2.9.21.20250121.tar.gz", hash = "sha256:2b0e2cd0f3747af1ae25a7027898716d80209604770ef3cbf350fe055b9c349b"}, + {file = "types_psycopg2-2.9.21.20250318-py3-none-any.whl", hash = "sha256:7296d111ad950bbd2fc979a1ab0572acae69047f922280e77db657c00d2c79c0"}, + {file = "types_psycopg2-2.9.21.20250318.tar.gz", hash = "sha256:eb6eac5bfb16adfd5f16b818918b9e26a40ede147e0f2bbffdf53a6ef7025a87"}, ] [[package]] From 7346760aed018eaf46a0bff2d0459b39881d2af5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:07:33 +0100 Subject: [PATCH 57/70] Bump pyopenssl from 24.3.0 to 25.0.0 (#18315) --- poetry.lock | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 51e73bae54..c6a6ce9826 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2053,18 +2053,19 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "24.3.0" +version = "25.0.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, - {file = 
"pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, + {file = "pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90"}, + {file = "pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16"}, ] [package.dependencies] cryptography = ">=41.0.5,<45" +typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""} [package.extras] docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"] From 75832f25b08a058d01acde334033f76edc131ad5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:07:49 +0100 Subject: [PATCH 58/70] Bump types-jsonschema from 4.23.0.20240813 to 4.23.0.20241208 (#18305) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c6a6ce9826..abd97a785b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2957,14 +2957,14 @@ files = [ [[package]] name = "types-jsonschema" -version = "4.23.0.20240813" +version = "4.23.0.20241208" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "types-jsonschema-4.23.0.20240813.tar.gz", hash = "sha256:c93f48206f209a5bc4608d295ac39f172fb98b9e24159ce577dbd25ddb79a1c0"}, - {file = "types_jsonschema-4.23.0.20240813-py3-none-any.whl", hash = "sha256:be283e23f0b87547316c2ee6b0fd36d95ea30e921db06478029e10b5b6aa6ac3"}, + {file = "types_jsonschema-4.23.0.20241208-py3-none-any.whl", hash = "sha256:87934bd9231c99d8eff94cacfc06ba668f7973577a9bd9e1f9de957c5737313e"}, + {file = "types_jsonschema-4.23.0.20241208.tar.gz", hash = "sha256:e8b15ad01f290ecf6aea53f93fbdf7d4730e4600313e89e8a7f95622f7e87b7c"}, ] [package.dependencies] From 0384fd72eeaa77dd56b52f38f7b339b95babe8dd Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Apr 2025 10:08:20 +0100 Subject: [PATCH 59/70] Bump softprops/action-gh-release from 1 to 2 (#18264) --- .github/workflows/release-artifacts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-artifacts.yml b/.github/workflows/release-artifacts.yml index e0b8f2faf4..e03c9d2bd5 100644 --- a/.github/workflows/release-artifacts.yml +++ b/.github/workflows/release-artifacts.yml @@ -213,7 +213,7 @@ jobs: tar -cvJf debs.tar.xz debs - name: Attach to release # Pinned to work around https://github.com/softprops/action-gh-release/issues/445 - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15 + uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: From e47de2b32de6183fd0cb91dda9b232de5d263345 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Apr 2025 14:08:11 +0100 Subject: [PATCH 60/70] Do not retry push during backoff period (#18363) This fixes a bug where if a pusher gets told about a new event to push it will ignore the backoff and immediately retry sending any pending push. --- changelog.d/18363.bugfix | 1 + synapse/push/httppusher.py | 6 +++ tests/push/test_http.py | 78 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 85 insertions(+) create mode 100644 changelog.d/18363.bugfix diff --git a/changelog.d/18363.bugfix b/changelog.d/18363.bugfix new file mode 100644 index 0000000000..bfa336d52f --- /dev/null +++ b/changelog.d/18363.bugfix @@ -0,0 +1 @@ +Fix longstanding bug where Synapse would immediately retry a failing push endpoint when a new event is received, ignoring any backoff timers. 
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 69790ecab5..7df8a128c9 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -205,6 +205,12 @@ class HttpPusher(Pusher): if self._is_processing: return + # Check if we are trying, but failing, to contact the pusher. If so, we + # don't try and start processing immediately and instead wait for the + # retry loop to try again later (which is controlled by the timer). + if self.failing_since and self.timed_call and self.timed_call.active(): + return + run_as_background_process("httppush.process", self._process) async def _process(self) -> None: diff --git a/tests/push/test_http.py b/tests/push/test_http.py index 5c235bbe53..b42fd284b6 100644 --- a/tests/push/test_http.py +++ b/tests/push/test_http.py @@ -1167,3 +1167,81 @@ class HTTPPusherTests(HomeserverTestCase): self.assertEqual( self.push_attempts[0][2]["notification"]["counts"]["unread"], 1 ) + + def test_push_backoff(self) -> None: + """ + The HTTP pusher will backoff correctly if it fails to contact the pusher. 
+ """ + + # Register the user who gets notified + user_id = self.register_user("user", "pass") + access_token = self.login("user", "pass") + + # Register the user who sends the message + other_user_id = self.register_user("otheruser", "pass") + other_access_token = self.login("otheruser", "pass") + + # Register the pusher + user_tuple = self.get_success( + self.hs.get_datastores().main.get_user_by_access_token(access_token) + ) + assert user_tuple is not None + device_id = user_tuple.device_id + + self.get_success( + self.hs.get_pusherpool().add_or_update_pusher( + user_id=user_id, + device_id=device_id, + kind="http", + app_id="m.http", + app_display_name="HTTP Push Notifications", + device_display_name="pushy push", + pushkey="a@example.com", + lang=None, + data={"url": "http://example.com/_matrix/push/v1/notify"}, + ) + ) + + # Create a room with the other user + room = self.helper.create_room_as(user_id, tok=access_token) + self.helper.join(room=room, user=other_user_id, tok=other_access_token) + + # The other user sends some messages + self.helper.send(room, body="Message 1", tok=other_access_token) + + # One push was attempted to be sent + self.assertEqual(len(self.push_attempts), 1) + self.assertEqual( + self.push_attempts[0][1], "http://example.com/_matrix/push/v1/notify" + ) + self.assertEqual( + self.push_attempts[0][2]["notification"]["content"]["body"], "Message 1" + ) + self.push_attempts[0][0].callback({}) + self.pump() + + # Send another message, this time it fails + self.helper.send(room, body="Message 2", tok=other_access_token) + self.assertEqual(len(self.push_attempts), 2) + self.push_attempts[1][0].errback(Exception("couldn't connect")) + self.pump() + + # Sending yet another message doesn't trigger a push immediately + self.helper.send(room, body="Message 3", tok=other_access_token) + self.pump() + self.assertEqual(len(self.push_attempts), 2) + + # .. 
but waiting for a bit will cause more pushes + self.reactor.advance(10) + self.assertEqual(len(self.push_attempts), 3) + self.assertEqual( + self.push_attempts[2][2]["notification"]["content"]["body"], "Message 2" + ) + self.push_attempts[2][0].callback({}) + self.pump() + + self.assertEqual(len(self.push_attempts), 4) + self.assertEqual( + self.push_attempts[3][2]["notification"]["content"]["body"], "Message 3" + ) + self.push_attempts[3][0].callback({}) From ad140130cc3db503de3fd15aa2923417f46b700b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Apr 2025 14:08:22 +0100 Subject: [PATCH 61/70] Slight performance increase when using the ratelimiter (#18369) See the commits. --- changelog.d/18369.misc | 1 + synapse/api/ratelimiting.py | 19 ++++++++----------- synapse/rest/client/sync.py | 7 +++---- tests/api/test_ratelimiting.py | 4 +--- 4 files changed, 13 insertions(+), 18 deletions(-) create mode 100644 changelog.d/18369.misc diff --git a/changelog.d/18369.misc b/changelog.d/18369.misc new file mode 100644 index 0000000000..f4c0e5f006 --- /dev/null +++ b/changelog.d/18369.misc @@ -0,0 +1 @@ +Slight performance increase when using the ratelimiter. diff --git a/synapse/api/ratelimiting.py b/synapse/api/ratelimiting.py index 229329a5ae..8665b3b765 100644 --- a/synapse/api/ratelimiting.py +++ b/synapse/api/ratelimiting.py @@ -20,8 +20,7 @@ # # -from collections import OrderedDict -from typing import Hashable, Optional, Tuple +from typing import Dict, Hashable, Optional, Tuple from synapse.api.errors import LimitExceededError from synapse.config.ratelimiting import RatelimitSettings @@ -80,12 +79,14 @@ class Ratelimiter: self.store = store self._limiter_name = cfg.key - # An ordered dictionary representing the token buckets tracked by this rate + # A dictionary representing the token buckets tracked by this rate # limiter. 
Each entry maps a key of arbitrary type to a tuple representing: # * The number of tokens currently in the bucket, # * The time point when the bucket was last completely empty, and # * The rate_hz (leak rate) of this particular bucket. - self.actions: OrderedDict[Hashable, Tuple[float, float, float]] = OrderedDict() + self.actions: Dict[Hashable, Tuple[float, float, float]] = {} + + self.clock.looping_call(self._prune_message_counts, 60 * 1000) def _get_key( self, requester: Optional[Requester], key: Optional[Hashable] @@ -169,9 +170,6 @@ class Ratelimiter: rate_hz = rate_hz if rate_hz is not None else self.rate_hz burst_count = burst_count if burst_count is not None else self.burst_count - # Remove any expired entries - self._prune_message_counts(time_now_s) - # Check if there is an existing count entry for this key action_count, time_start, _ = self._get_action_counts(key, time_now_s) @@ -246,13 +244,12 @@ class Ratelimiter: action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s) self.actions[key] = (action_count + n_actions, time_start, rate_hz) - def _prune_message_counts(self, time_now_s: float) -> None: + def _prune_message_counts(self) -> None: """Remove message count entries that have not exceeded their defined rate_hz limit - - Args: - time_now_s: The current time """ + time_now_s = self.clock.time() + # We create a copy of the key list here as the dictionary is modified during # the loop for key in list(self.actions.keys()): diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py index 4fb9c0c8e7..bac02122d0 100644 --- a/synapse/rest/client/sync.py +++ b/synapse/rest/client/sync.py @@ -24,7 +24,7 @@ from collections import defaultdict from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union from synapse.api.constants import AccountDataTypes, EduTypes, Membership, PresenceState -from synapse.api.errors import Codes, LimitExceededError, StoreError, SynapseError +from synapse.api.errors import 
Codes, StoreError, SynapseError from synapse.api.filtering import FilterCollection from synapse.api.presence import UserPresenceState from synapse.api.ratelimiting import Ratelimiter @@ -248,9 +248,8 @@ class SyncRestServlet(RestServlet): await self._server_notices_sender.on_user_syncing(user.to_string()) # ignore the presence update if the ratelimit is exceeded but do not pause the request - try: - await self._presence_per_user_limiter.ratelimit(requester, pause=0.0) - except LimitExceededError: + allowed, _ = await self._presence_per_user_limiter.can_do_action(requester) + if not allowed: affect_presence = False logger.debug("User set_presence ratelimit exceeded; ignoring it.") else: diff --git a/tests/api/test_ratelimiting.py b/tests/api/test_ratelimiting.py index a59e168db1..1a1cbde74e 100644 --- a/tests/api/test_ratelimiting.py +++ b/tests/api/test_ratelimiting.py @@ -220,9 +220,7 @@ class TestRatelimiter(unittest.HomeserverTestCase): self.assertIn("test_id_1", limiter.actions) - self.get_success_or_raise( - limiter.can_do_action(None, key="test_id_2", _time_now_s=10) - ) + self.reactor.advance(60) self.assertNotIn("test_id_1", limiter.actions) From 4eaab31757f096a04f4278d722cdef1eb92a1743 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 29 Apr 2025 14:08:32 +0100 Subject: [PATCH 62/70] Minor performance improvements to notifier/replication (#18367) These are some improvements to `on_new_event` which is a hot path. Not sure how much this will save, but maybe like ~5%? Possibly easier to review commit-by-commit --- changelog.d/18367.misc | 1 + synapse/notifier.py | 59 +++++++++++++++++++++--------------------- 2 files changed, 31 insertions(+), 29 deletions(-) create mode 100644 changelog.d/18367.misc diff --git a/changelog.d/18367.misc b/changelog.d/18367.misc new file mode 100644 index 0000000000..2e8b897fa6 --- /dev/null +++ b/changelog.d/18367.misc @@ -0,0 +1 @@ +Minor performance improvements to the notifier. 
diff --git a/synapse/notifier.py b/synapse/notifier.py index 88f531182a..1914d0c914 100644 --- a/synapse/notifier.py +++ b/synapse/notifier.py @@ -66,7 +66,6 @@ from synapse.types import ( from synapse.util.async_helpers import ( timeout_deferred, ) -from synapse.util.metrics import Measure from synapse.util.stringutils import shortstr from synapse.visibility import filter_events_for_client @@ -520,20 +519,22 @@ class Notifier: users = users or [] rooms = rooms or [] - with Measure(self.clock, "on_new_event"): - user_streams: Set[_NotifierUserStream] = set() + user_streams: Set[_NotifierUserStream] = set() - log_kv( - { - "waking_up_explicit_users": len(users), - "waking_up_explicit_rooms": len(rooms), - "users": shortstr(users), - "rooms": shortstr(rooms), - "stream": stream_key, - "stream_id": new_token, - } - ) + log_kv( + { + "waking_up_explicit_users": len(users), + "waking_up_explicit_rooms": len(rooms), + "users": shortstr(users), + "rooms": shortstr(rooms), + "stream": stream_key, + "stream_id": new_token, + } + ) + # Only calculate which user streams to wake up if there are, in fact, + # any user streams registered. + if self.user_to_user_stream or self.room_to_user_streams: for user in users: user_stream = self.user_to_user_stream.get(str(user)) if user_stream is not None: @@ -565,25 +566,25 @@ class Notifier: # We resolve all these deferreds in one go so that we only need to # call `PreserveLoggingContext` once, as it has a bunch of overhead # (to calculate performance stats) - with PreserveLoggingContext(): - for listener in listeners: - listener.callback(current_token) + if listeners: + with PreserveLoggingContext(): + for listener in listeners: + listener.callback(current_token) - users_woken_by_stream_counter.labels(stream_key).inc(len(user_streams)) + if user_streams: + users_woken_by_stream_counter.labels(stream_key).inc(len(user_streams)) - self.notify_replication() + self.notify_replication() - # Notify appservices. 
- try: - self.appservice_handler.notify_interested_services_ephemeral( - stream_key, - new_token, - users, - ) - except Exception: - logger.exception( - "Error notifying application services of ephemeral events" - ) + # Notify appservices. + try: + self.appservice_handler.notify_interested_services_ephemeral( + stream_key, + new_token, + users, + ) + except Exception: + logger.exception("Error notifying application services of ephemeral events") def on_new_replication_data(self) -> None: """Used to inform replication listeners that something has happened From f79811ed80bebaa5b187637af6d16d413b07166e Mon Sep 17 00:00:00 2001 From: Kim Brose <2803622+HarHarLinks@users.noreply.github.com> Date: Wed, 30 Apr 2025 15:27:08 +0200 Subject: [PATCH 63/70] Fix typo in docs about `push` (#18320) --- changelog.d/18320.doc | 1 + docs/usage/configuration/config_documentation.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/18320.doc diff --git a/changelog.d/18320.doc b/changelog.d/18320.doc new file mode 100644 index 0000000000..d84c279940 --- /dev/null +++ b/changelog.d/18320.doc @@ -0,0 +1 @@ +Fix typo in docs about the `push` config option. Contributed by @HarHarLinks. diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 73fd9622ce..19dc9dd356 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -4018,7 +4018,7 @@ This option has a number of sub-options. They are as follows: * `include_content`: Clients requesting push notifications can either have the body of the message sent in the notification poke along with other details like the sender, or just the event ID and room ID (`event_id_only`). 
- If clients choose the to have the body sent, this option controls whether the + If clients choose to have the body sent, this option controls whether the notification request includes the content of the event (other details like the sender are still included). If `event_id_only` is enabled, it has no effect. From 4097ada89fefe12e7ec6d2b7a3bfbc61e64e14a0 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 30 Apr 2025 09:54:30 -0400 Subject: [PATCH 64/70] Optimize `Dockerfile-workers` (#18292) - Use a `uv:python` image for the first build layer, to reduce the number of intermediate images required, as the main Dockerfile uses that image already - Use a cache mount for `apt` commands - Skip a pointless install of `redis-server`, since the redis Docker image is copied from instead - Move some RUN steps out of the final image layer & into the build layer Depends on https://github.com/element-hq/synapse/pull/18275 ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --- changelog.d/18292.docker | 1 + docker/Dockerfile-workers | 50 +++++++++++++++++++++------------------ 2 files changed, 28 insertions(+), 23 deletions(-) create mode 100644 changelog.d/18292.docker diff --git a/changelog.d/18292.docker b/changelog.d/18292.docker new file mode 100644 index 0000000000..cdb95b369b --- /dev/null +++ b/changelog.d/18292.docker @@ -0,0 +1 @@ +Optimize the build of the workers image. diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index dd0bf59994..a7f576184d 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -3,18 +3,37 @@ ARG SYNAPSE_VERSION=latest ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION ARG DEBIAN_VERSION=bookworm +ARG PYTHON_VERSION=3.12 -# first of all, we create a base image with an nginx which we can copy into the +# first of all, we create a base image with dependencies which we can copy into the # target image. For repeated rebuilds, this is much faster than apt installing # each time. -FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base +FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base + + # Tell apt to keep downloaded package files, as we're using cache mounts. 
+ RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache + RUN \ --mount=type=cache,target=/var/cache/apt,sharing=locked \ --mount=type=cache,target=/var/lib/apt,sharing=locked \ apt-get update -qq && \ DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \ - redis-server nginx-light + nginx-light + + RUN \ + # remove default page + rm /etc/nginx/sites-enabled/default && \ + # have nginx log to stderr/out + ln -sf /dev/stdout /var/log/nginx/access.log && \ + ln -sf /dev/stderr /var/log/nginx/error.log + + # --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache + # (mounted as --mount=type=cache) and the target directory. + RUN --mount=type=cache,target=/root/.cache/uv \ + uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2 + + RUN mkdir -p /uv/etc/supervisor/conf.d # Similarly, a base to copy the redis server from. # @@ -27,31 +46,16 @@ FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base # now build the final image, based on the the regular Synapse docker image FROM $FROM - # Install supervisord with uv pip instead of apt, to avoid installing a second - # copy of python. - # --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache - # (mounted as --mount=type=cache) and the target directory. 
- RUN \ - --mount=type=bind,from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/uv \ - --mount=type=cache,target=/root/.cache/uv \ - /uv pip install --link-mode=copy --prefix="/usr/local" supervisor~=4.2 - - RUN mkdir -p /etc/supervisor/conf.d - - # Copy over redis and nginx + # Copy over dependencies COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin - + COPY --from=deps_base /uv / COPY --from=deps_base /usr/sbin/nginx /usr/sbin COPY --from=deps_base /usr/share/nginx /usr/share/nginx COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx COPY --from=deps_base /etc/nginx /etc/nginx - RUN rm /etc/nginx/sites-enabled/default - RUN mkdir /var/log/nginx /var/lib/nginx - RUN chown www-data /var/lib/nginx - - # have nginx log to stderr/out - RUN ln -sf /dev/stdout /var/log/nginx/access.log - RUN ln -sf /dev/stderr /var/log/nginx/error.log + COPY --from=deps_base /var/log/nginx /var/log/nginx + # chown to allow non-root user to write to http-*-temp-path dirs + COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx # Copy Synapse worker, nginx and supervisord configuration template files COPY ./docker/conf-workers/* /conf/ From 7563b2a2a316a7b249ef847ddbf5b63064eb1cc2 Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 30 Apr 2025 10:22:09 -0400 Subject: [PATCH 65/70] configure_workers_and_start.py: unify python path (#18291) Use absolute path for python in script shebang, and invoke child python processes with sys.executable. This is consistent with the absolute path used to invoke python elsewhere (like in the supervisor config). ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." 
instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Quentin Gliech --- changelog.d/18291.docker | 1 + docker/configure_workers_and_start.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/18291.docker diff --git a/changelog.d/18291.docker b/changelog.d/18291.docker new file mode 100644 index 0000000000..b94c0e80e3 --- /dev/null +++ b/changelog.d/18291.docker @@ -0,0 +1 @@ +In configure_workers_and_start.py, use the same absolute path of Python in the interpreter shebang, and invoke child Python processes with `sys.executable`. diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 6d73e8feaa..ff5cff3221 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/local/bin/python # # This file is licensed under the Affero General Public License (AGPL) version 3. # @@ -604,7 +604,7 @@ def generate_base_homeserver_config() -> None: # start.py already does this for us, so just call that. 
# note that this script is copied in in the official, monolith dockerfile os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT) - subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True) + subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True) def parse_worker_types( From 5ab05e7b95a687967fe99be33cb33a9c62fee34b Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 30 Apr 2025 10:26:08 -0400 Subject: [PATCH 66/70] docker: use shebangs to invoke generated scripts (#18295) When generating scripts from templates, don't add a leading newline so that their shebangs may be handled correctly. ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. 
* [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Quentin Gliech --- changelog.d/18295.docker | 1 + docker/Dockerfile-workers | 2 +- docker/complement/Dockerfile | 2 +- docker/configure_workers_and_start.py | 5 ++++- 4 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18295.docker diff --git a/changelog.d/18295.docker b/changelog.d/18295.docker new file mode 100644 index 0000000000..239def1f54 --- /dev/null +++ b/changelog.d/18295.docker @@ -0,0 +1 @@ +When generating container scripts from templates, don't add a leading newline so that their shebangs may be handled correctly. diff --git a/docker/Dockerfile-workers b/docker/Dockerfile-workers index a7f576184d..6d0fc1440b 100644 --- a/docker/Dockerfile-workers +++ b/docker/Dockerfile-workers @@ -74,4 +74,4 @@ FROM $FROM # Replace the healthcheck with one which checks *all* the workers. The script # is generated by configure_workers_and_start.py. 
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \ - CMD /bin/sh /healthcheck.sh + CMD ["/healthcheck.sh"] diff --git a/docker/complement/Dockerfile b/docker/complement/Dockerfile index dd029c5fbc..6ed084fe5d 100644 --- a/docker/complement/Dockerfile +++ b/docker/complement/Dockerfile @@ -58,4 +58,4 @@ ENTRYPOINT ["/start_for_complement.sh"] # Update the healthcheck to have a shorter check interval HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \ - CMD /bin/sh /healthcheck.sh + CMD ["/healthcheck.sh"] diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index ff5cff3221..8f96e57e50 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -376,9 +376,11 @@ def convert(src: str, dst: str, **template_vars: object) -> None: # # We use append mode in case the files have already been written to by something else # (for instance, as part of the instructions in a dockerfile). + exists = os.path.isfile(dst) with open(dst, "a") as outfile: # In case the existing file doesn't end with a newline - outfile.write("\n") + if exists: + outfile.write("\n") outfile.write(rendered) @@ -998,6 +1000,7 @@ def generate_worker_files( "/healthcheck.sh", healthcheck_urls=healthcheck_urls, ) + os.chmod("/healthcheck.sh", 0o755) # Ensure the logging directory exists log_dir = data_dir + "/logs" From 7be6c711d4a57f990003613c0b9715e3ac1502cb Mon Sep 17 00:00:00 2001 From: Andrew Ferrazzutti Date: Wed, 30 Apr 2025 11:53:15 -0400 Subject: [PATCH 67/70] start_for_complement.sh: use more shell builtins (#18293) Avoid calling external tools when shell builtins suffice. ### Pull Request Checklist * [x] Pull request is based on the develop branch * [x] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should: - Be a short description of your change which makes sense to users. 
"Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.". - Use markdown where necessary, mostly for `code blocks`. - End with either a period (.) or an exclamation mark (!). - Start with a capital letter. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. * [x] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) --------- Co-authored-by: Quentin Gliech --- changelog.d/18293.docker | 1 + docker/complement/conf/start_for_complement.sh | 11 +++++------ 2 files changed, 6 insertions(+), 6 deletions(-) create mode 100644 changelog.d/18293.docker diff --git a/changelog.d/18293.docker b/changelog.d/18293.docker new file mode 100644 index 0000000000..df47a68bfe --- /dev/null +++ b/changelog.d/18293.docker @@ -0,0 +1 @@ +In start_for_complement.sh, replace some external program calls with shell builtins. diff --git a/docker/complement/conf/start_for_complement.sh b/docker/complement/conf/start_for_complement.sh index 59b30e2051..a5e06396e2 100755 --- a/docker/complement/conf/start_for_complement.sh +++ b/docker/complement/conf/start_for_complement.sh @@ -9,7 +9,7 @@ echo " Args: $*" echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR" function log { - d=$(date +"%Y-%m-%d %H:%M:%S,%3N") + d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ }) echo "$d $*" } @@ -103,12 +103,11 @@ fi # Note that both the key and certificate are in PEM format (not DER). # First generate a configuration file to set up a Subject Alternative Name. 
-cat > /conf/server.tls.conf < /conf/server.tls.conf # Generate an RSA key openssl genrsa -out /conf/server.tls.key 2048 @@ -123,8 +122,8 @@ openssl x509 -req -in /conf/server.tls.csr \ -out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN # Assert that we have a Subject Alternative Name in the certificate. -# (grep will exit with 1 here if there isn't a SAN in the certificate.) -openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS: +# (the test will exit with 1 here if there isn't a SAN in the certificate.) +[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]] export SYNAPSE_TLS_CERT=/conf/server.tls.crt export SYNAPSE_TLS_KEY=/conf/server.tls.key From d59bbd8b6b342d41641fddf99035d38e3939f18c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Lav=C3=A9n?= Date: Wed, 30 Apr 2025 18:13:09 +0200 Subject: [PATCH 68/70] Added Pocket ID to openid.md (#18237) --- changelog.d/18237.doc | 1 + docs/openid.md | 27 +++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 changelog.d/18237.doc diff --git a/changelog.d/18237.doc b/changelog.d/18237.doc new file mode 100644 index 0000000000..872f7cab7d --- /dev/null +++ b/changelog.d/18237.doc @@ -0,0 +1 @@ +Add documentation for configuring [Pocket ID](https://github.com/pocket-id/pocket-id) as an OIDC provider. \ No newline at end of file diff --git a/docs/openid.md b/docs/openid.md index 5a3d7e9fba..f86ba189c7 100644 --- a/docs/openid.md +++ b/docs/openid.md @@ -23,6 +23,7 @@ such as [Github][github-idp]. [auth0]: https://auth0.com/ [authentik]: https://goauthentik.io/ [lemonldap]: https://lemonldap-ng.org/ +[pocket-id]: https://pocket-id.org/ [okta]: https://www.okta.com/ [dex-idp]: https://github.com/dexidp/dex [keycloak-idp]: https://www.keycloak.org/docs/latest/server_admin/#sso-protocols @@ -624,6 +625,32 @@ oidc_providers: Note that the fields `client_id` and `client_secret` are taken from the CURL response above. 
+### Pocket ID + +[Pocket ID][pocket-id] is a simple OIDC provider that allows users to authenticate with their passkeys. +1. Go to `OIDC Clients` +2. Click on `Add OIDC Client` +3. Add a name, for example `Synapse` +4. Add `"https://auth.example.org/_synapse/client/oidc/callback` to `Callback URLs` # Replace `auth.example.org` with your domain +5. Click on `Save` +6. Note down your `Client ID` and `Client secret`, these will be used later + +Synapse config: + +```yaml +oidc_providers: + - idp_id: pocket_id + idp_name: Pocket ID + issuer: "https://auth.example.org/" # Replace with your domain + client_id: "your-client-id" # Replace with the "Client ID" you noted down before + client_secret: "your-client-secret" # Replace with the "Client secret" you noted down before + scopes: ["openid", "profile"] + user_mapping_provider: + config: + localpart_template: "{{ user.preferred_username }}" + display_name_template: "{{ user.name }}" +``` + ### Shibboleth with OIDC Plugin [Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities. From 2965c9970c0b2742885dc345f6d70df7d5686423 Mon Sep 17 00:00:00 2001 From: Sebastian Spaeth Date: Thu, 1 May 2025 16:11:59 +0200 Subject: [PATCH 69/70] docs/workers.md: Add ^/_matrix/federation/v1/event/ to list of delegatable endpoints (#18377) --- changelog.d/18377.doc | 1 + docker/configure_workers_and_start.py | 1 + docs/upgrade.md | 10 ++++++++++ docs/workers.md | 1 + 4 files changed, 13 insertions(+) create mode 100644 changelog.d/18377.doc diff --git a/changelog.d/18377.doc b/changelog.d/18377.doc new file mode 100644 index 0000000000..ceb2b64e5d --- /dev/null +++ b/changelog.d/18377.doc @@ -0,0 +1 @@ +Add `/_matrix/federation/v1/version` to list of federation endpoints that can be handled by workers. 
diff --git a/docker/configure_workers_and_start.py b/docker/configure_workers_and_start.py index 8f96e57e50..df34d51f77 100755 --- a/docker/configure_workers_and_start.py +++ b/docker/configure_workers_and_start.py @@ -202,6 +202,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = { "app": "synapse.app.generic_worker", "listener_resources": ["federation"], "endpoint_patterns": [ + "^/_matrix/federation/v1/version$", "^/_matrix/federation/(v1|v2)/event/", "^/_matrix/federation/(v1|v2)/state/", "^/_matrix/federation/(v1|v2)/state_ids/", diff --git a/docs/upgrade.md b/docs/upgrade.md index 07a9641fdd..d508e2231e 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -117,6 +117,16 @@ each upgrade are complete before moving on to the next upgrade, to avoid stacking them up. You can monitor the currently running background updates with [the Admin API](usage/administration/admin_api/background_updates.html#status). +# Upgrading to v1.130.0 + +## Documented endpoint which can be delegated to a federation worker + +The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation +worker. This is not new behaviour, but had not been documented yet. The +[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has +been updated to include it. Make sure to check your reverse proxy rules if you +are using workers. + # Upgrading to v1.126.0 ## Room list publication rules change diff --git a/docs/workers.md b/docs/workers.md index 9ebcc886b1..2597e78217 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -200,6 +200,7 @@ information. 
^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$ # Federation requests + ^/_matrix/federation/v1/version$ ^/_matrix/federation/v1/event/ ^/_matrix/federation/v1/state/ ^/_matrix/federation/v1/state_ids/ From 6dc1ecd35972c95ce62c5e0563245845c9c64e49 Mon Sep 17 00:00:00 2001 From: Shay Date: Thu, 1 May 2025 11:30:00 -0700 Subject: [PATCH 70/70] Add an Admin API endpoint to fetch scheduled tasks (#18214) --- changelog.d/18214.feature | 1 + docs/admin_api/scheduled_tasks.md | 54 +++++++ synapse/rest/admin/__init__.py | 2 + synapse/rest/admin/scheduled_tasks.py | 70 +++++++++ tests/rest/admin/test_scheduled_tasks.py | 192 +++++++++++++++++++++++ 5 files changed, 319 insertions(+) create mode 100644 changelog.d/18214.feature create mode 100644 docs/admin_api/scheduled_tasks.md create mode 100644 synapse/rest/admin/scheduled_tasks.py create mode 100644 tests/rest/admin/test_scheduled_tasks.py diff --git a/changelog.d/18214.feature b/changelog.d/18214.feature new file mode 100644 index 0000000000..751cb7d383 --- /dev/null +++ b/changelog.d/18214.feature @@ -0,0 +1 @@ +Add an Admin API endpoint `GET /_synapse/admin/v1/scheduled_tasks` to fetch scheduled tasks. \ No newline at end of file diff --git a/docs/admin_api/scheduled_tasks.md b/docs/admin_api/scheduled_tasks.md new file mode 100644 index 0000000000..1708871a6d --- /dev/null +++ b/docs/admin_api/scheduled_tasks.md @@ -0,0 +1,54 @@ +# Show scheduled tasks + +This API returns information about scheduled tasks. + +To use it, you will need to authenticate by providing an `access_token` +for a server admin: see [Admin API](../usage/administration/admin_api/). 
+
+The API is:
+```
+GET /_synapse/admin/v1/scheduled_tasks
+```
+
+It returns a JSON body like the following:
+
+```json
+{
+    "scheduled_tasks": [
+        {
+            "id": "GSA124oegf1",
+            "action": "shutdown_room",
+            "status": "complete",
+            "timestamp_ms": 23423523,
+            "resource_id": "!roomid",
+            "result": "some result",
+            "error": null
+        }
+    ]
+}
+```
+
+**Query parameters:**
+
+* `action_name`: string - Is optional. Returns only the scheduled tasks with the given action name.
+* `resource_id`: string - Is optional. Returns only the scheduled tasks with the given resource id.
+* `status`: string - Is optional. Returns only the scheduled tasks matching the given status, one of
+  - "scheduled" - Task is scheduled but not active
+  - "active" - Task is active and probably running, and if not will be run on next scheduler loop run
+  - "complete" - Task has completed successfully
+  - "failed" - Task is over and either returned a failed status, or had an exception
+
+* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp earlier than the specified one.
+
+**Response**
+
+The following fields are returned in the JSON response body along with a `200` HTTP status code:
+
+* `id`: string - ID of scheduled task.
+* `action`: string - The name of the scheduled task's action.
+* `status`: string - The status of the scheduled task.
+* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task - If the status is "scheduled" then this represents when it should be launched.
+  Otherwise it represents the last time this task got a change of state.
+* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one +* `result`: Optional Json - Any result of the scheduled task, if given +* `error`: Optional string - If the task has the status "failed", the error associated with this failure diff --git a/synapse/rest/admin/__init__.py b/synapse/rest/admin/__init__.py index 5977ded4a0..cf809d1a27 100644 --- a/synapse/rest/admin/__init__.py +++ b/synapse/rest/admin/__init__.py @@ -86,6 +86,7 @@ from synapse.rest.admin.rooms import ( RoomStateRestServlet, RoomTimestampToEventRestServlet, ) +from synapse.rest.admin.scheduled_tasks import ScheduledTasksRestServlet from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet from synapse.rest.admin.statistics import ( LargestRoomsStatistics, @@ -338,6 +339,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None: BackgroundUpdateStartJobRestServlet(hs).register(http_server) ExperimentalFeaturesRestServlet(hs).register(http_server) SuspendAccountRestServlet(hs).register(http_server) + ScheduledTasksRestServlet(hs).register(http_server) def register_servlets_for_client_rest_resource( diff --git a/synapse/rest/admin/scheduled_tasks.py b/synapse/rest/admin/scheduled_tasks.py new file mode 100644 index 0000000000..2ae13021b9 --- /dev/null +++ b/synapse/rest/admin/scheduled_tasks.py @@ -0,0 +1,70 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+# +# +# +from typing import TYPE_CHECKING, Tuple + +from synapse.http.servlet import RestServlet, parse_integer, parse_string +from synapse.http.site import SynapseRequest +from synapse.rest.admin import admin_patterns, assert_requester_is_admin +from synapse.types import JsonDict, TaskStatus + +if TYPE_CHECKING: + from synapse.server import HomeServer + + +class ScheduledTasksRestServlet(RestServlet): + """Get a list of scheduled tasks and their statuses + optionally filtered by action name, resource id, status, and max timestamp + """ + + PATTERNS = admin_patterns("/scheduled_tasks$") + + def __init__(self, hs: "HomeServer"): + self._auth = hs.get_auth() + self._store = hs.get_datastores().main + + async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]: + await assert_requester_is_admin(self._auth, request) + + # extract query params + action_name = parse_string(request, "action_name") + resource_id = parse_string(request, "resource_id") + status = parse_string(request, "job_status") + max_timestamp = parse_integer(request, "max_timestamp") + + actions = [action_name] if action_name else None + statuses = [TaskStatus(status)] if status else None + + tasks = await self._store.get_scheduled_tasks( + actions=actions, + resource_id=resource_id, + statuses=statuses, + max_timestamp=max_timestamp, + ) + + json_tasks = [] + for task in tasks: + result_task = { + "id": task.id, + "action": task.action, + "status": task.status, + "timestamp_ms": task.timestamp, + "resource_id": task.resource_id, + "result": task.result, + "error": task.error, + } + json_tasks.append(result_task) + + return 200, {"scheduled_tasks": json_tasks} diff --git a/tests/rest/admin/test_scheduled_tasks.py b/tests/rest/admin/test_scheduled_tasks.py new file mode 100644 index 0000000000..9654e9322b --- /dev/null +++ b/tests/rest/admin/test_scheduled_tasks.py @@ -0,0 +1,192 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. 
+# +# Copyright (C) 2025 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +# +# +from typing import Mapping, Optional, Tuple + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.errors import Codes +from synapse.rest.client import login +from synapse.server import HomeServer +from synapse.types import JsonMapping, ScheduledTask, TaskStatus +from synapse.util import Clock + +from tests import unittest + + +class ScheduledTasksAdminApiTestCase(unittest.HomeserverTestCase): + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.admin_user = self.register_user("admin", "pass", admin=True) + self.admin_user_tok = self.login("admin", "pass") + self._task_scheduler = hs.get_task_scheduler() + + # create and schedule a few tasks + async def _test_task( + task: ScheduledTask, + ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + return TaskStatus.ACTIVE, None, None + + async def _finished_test_task( + task: ScheduledTask, + ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + return TaskStatus.COMPLETE, None, None + + async def _failed_test_task( + task: ScheduledTask, + ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]: + return TaskStatus.FAILED, None, "Everything failed" + + self._task_scheduler.register_action(_test_task, "test_task") + self.get_success( + self._task_scheduler.schedule_task("test_task", resource_id="test") + ) + + self._task_scheduler.register_action(_finished_test_task, "finished_test_task") + self.get_success( + 
self._task_scheduler.schedule_task( + "finished_test_task", resource_id="finished_task" + ) + ) + + self._task_scheduler.register_action(_failed_test_task, "failed_test_task") + self.get_success( + self._task_scheduler.schedule_task( + "failed_test_task", resource_id="failed_task" + ) + ) + + def check_scheduled_tasks_response(self, scheduled_tasks: Mapping) -> list: + result = [] + for task in scheduled_tasks: + if task["resource_id"] == "test": + self.assertEqual(task["status"], TaskStatus.ACTIVE) + self.assertEqual(task["action"], "test_task") + result.append(task) + if task["resource_id"] == "finished_task": + self.assertEqual(task["status"], TaskStatus.COMPLETE) + self.assertEqual(task["action"], "finished_test_task") + result.append(task) + if task["resource_id"] == "failed_task": + self.assertEqual(task["status"], TaskStatus.FAILED) + self.assertEqual(task["action"], "failed_test_task") + result.append(task) + + return result + + def test_requester_is_not_admin(self) -> None: + """ + If the user is not a server admin, an error 403 is returned. + """ + + self.register_user("user", "pass", admin=False) + other_user_tok = self.login("user", "pass") + + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks", + content={}, + access_token=other_user_tok, + ) + + self.assertEqual(403, channel.code, msg=channel.json_body) + self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"]) + + def test_scheduled_tasks(self) -> None: + """ + Test that endpoint returns scheduled tasks. 
+ """ + + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks", + content={}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + scheduled_tasks = channel.json_body["scheduled_tasks"] + + # make sure we got back all the scheduled tasks + found_tasks = self.check_scheduled_tasks_response(scheduled_tasks) + self.assertEqual(len(found_tasks), 3) + + def test_filtering_scheduled_tasks(self) -> None: + """ + Test that filtering the scheduled tasks response via query params works as expected. + """ + # filter via job_status + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks?job_status=active", + content={}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + scheduled_tasks = channel.json_body["scheduled_tasks"] + found_tasks = self.check_scheduled_tasks_response(scheduled_tasks) + + # only the active task should have been returned + self.assertEqual(len(found_tasks), 1) + self.assertEqual(found_tasks[0]["status"], "active") + + # filter via action_name + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks?action_name=test_task", + content={}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + scheduled_tasks = channel.json_body["scheduled_tasks"] + + # only test_task should have been returned + found_tasks = self.check_scheduled_tasks_response(scheduled_tasks) + self.assertEqual(len(found_tasks), 1) + self.assertEqual(found_tasks[0]["action"], "test_task") + + # filter via max_timestamp + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks?max_timestamp=0", + content={}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + scheduled_tasks = channel.json_body["scheduled_tasks"] + found_tasks = self.check_scheduled_tasks_response(scheduled_tasks) + + # none should have been 
returned + self.assertEqual(len(found_tasks), 0) + + # filter via resource id + channel = self.make_request( + "GET", + "/_synapse/admin/v1/scheduled_tasks?resource_id=failed_task", + content={}, + access_token=self.admin_user_tok, + ) + self.assertEqual(200, channel.code, msg=channel.json_body) + scheduled_tasks = channel.json_body["scheduled_tasks"] + found_tasks = self.check_scheduled_tasks_response(scheduled_tasks) + + # only the task with the matching resource id should have been returned + self.assertEqual(len(found_tasks), 1) + self.assertEqual(found_tasks[0]["resource_id"], "failed_task")