Compare commits

34 Commits

| SHA1 | Date |
|---|---|
| 5b2036c61b | 1756352106 |
| 41938d6fd2 | |
| f4320b5a49 | |
| 3989d22a37 | |
| 0395b71e25 | |
| 29fd0116a5 | |
| 0f2b29511f | |
| 466994743a | |
| df24e0f302 | |
| 048629dd13 | |
| 7347cc436e | |
| 3f636386a6 | |
| 1f7f16477d | |
| dfd00a986f | |
| cdf286d405 | |
| 3aaa2e80b2 | |
| ba774e2311 | |
| acafac3bb6 | |
| 8b0083cad9 | |
| 09fd2645c2 | |
| 891983f3f4 | |
| a096fba969 | |
| e8710e7c5e | |
| 978ae0b080 | |
| 93e658bd13 | |
| d688daf41c | |
| aff90a5245 | |
| 83023ce1e0 | |
| 39316672da | |
| f86918e562 | |
| 3d28e2213f | |
| 0dfc21ca9f | |
| ffd0b4c079 | |
```diff
@@ -7,4 +7,4 @@ if command -v yum &> /dev/null; then
 fi

 # Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
+curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal
```
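This change means the bootstrap now installs whatever the current stable toolchain is at install time, rather than a pinned 1.82.0. A quick sanity check after running the installer (standard rustup commands, not part of the diff) is:

```bash
# Confirm which toolchain the installer made the default:
rustup show active-toolchain
rustc --version
```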
@@ -1,146 +0,0 @@ (file deleted)

```python
#!/usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
# compatible wheel, if so rename the wheel before repairing it.

import argparse
import os
import subprocess
from zipfile import ZipFile

from packaging.tags import Tag
from packaging.utils import parse_wheel_filename
from packaging.version import Version


def check_is_abi3_compatible(wheel_file: str) -> None:
    """Check the contents of the built wheel for any `.so` files that are *not*
    abi3 compatible.
    """

    with ZipFile(wheel_file, "r") as wheel:
        for file in wheel.namelist():
            if not file.endswith(".so"):
                continue

            if not file.endswith(".abi3.so"):
                raise Exception(f"Found non-abi3 lib: {file}")


def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
    """Replaces the cpython wheel file with a ABI3 compatible wheel"""

    if tag.abi == "abi3":
        # Nothing to do.
        return wheel_file

    check_is_abi3_compatible(wheel_file)

    # HACK: it seems that some older versions of pip will consider a wheel marked
    # as macosx_11_0 as incompatible with Big Sur. I haven't done the full archaeology
    # here; there are some clues in
    # https://github.com/pantsbuild/pants/pull/12857
    # https://github.com/pypa/pip/issues/9138
    # https://github.com/pypa/packaging/pull/319
    # Empirically this seems to work, note that macOS 11 and 10.16 are the same,
    # both versions are valid for backwards compatibility.
    platform = tag.platform.replace("macosx_11_0", "macosx_10_16")
    abi3_tag = Tag(tag.interpreter, "abi3", platform)

    dirname = os.path.dirname(wheel_file)
    new_wheel_file = os.path.join(
        dirname,
        f"{name}-{version}-{abi3_tag}.whl",
    )

    os.rename(wheel_file, new_wheel_file)

    print("Renamed wheel to", new_wheel_file)

    return new_wheel_file


def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
    """Entry point"""

    # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
    # normalizes the package name (i.e. it converts matrix_synapse ->
    # matrix-synapse), which is not what we want.
    _, version, build, tags = parse_wheel_filename(os.path.basename(wheel_file))
    name = os.path.basename(wheel_file).split("-")[0]

    if len(tags) != 1:
        # We expect only a wheel file with only a single tag
        raise Exception(f"Unexpectedly found multiple tags: {tags}")

    tag = next(iter(tags))

    if build:
        # We don't use build tags in Synapse
        raise Exception(f"Unexpected build tag: {build}")

    # If the wheel is for cpython then convert it into an abi3 wheel.
    if tag.interpreter.startswith("cp"):
        wheel_file = cpython(wheel_file, name, version, tag)

    # Finally, repair the wheel.
    if archs is not None:
        # If we are given archs then we are on macos and need to use
        # `delocate-listdeps`.
        subprocess.run(["delocate-listdeps", wheel_file], check=True)
        subprocess.run(
            ["delocate-wheel", "--require-archs", archs, "-w", dest_dir, wheel_file],
            check=True,
        )
    else:
        subprocess.run(["auditwheel", "repair", "-w", dest_dir, wheel_file], check=True)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Tag wheel as abi3 and repair it.")

    parser.add_argument(
        "--wheel-dir",
        "-w",
        metavar="WHEEL_DIR",
        help="Directory to store delocated wheels",
        required=True,
    )

    parser.add_argument(
        "--require-archs",
        metavar="archs",
        default=None,
    )

    parser.add_argument(
        "wheel_file",
        metavar="WHEEL_FILE",
    )

    args = parser.parse_args()

    wheel_file = args.wheel_file
    wheel_dir = args.wheel_dir
    archs = args.require_archs

    main(wheel_file, wheel_dir, archs)
```
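The deleted wrapper takes the same `-w`/wheel-file interface as `auditwheel repair`, so it can slot in as cibuildwheel's repair command. A hypothetical invocation (the script path and wheel names below are placeholders, not taken from this diff):

```bash
# Linux: rename to abi3 if applicable, then run `auditwheel repair`:
python .ci/repair_wheel.py -w wheelhouse/ \
  dist/matrix_synapse-1.0.0-cp310-cp310-manylinux_2_17_x86_64.whl

# macOS: additionally have delocate require the given architectures:
python .ci/repair_wheel.py --require-archs x86_64 -w wheelhouse/ \
  dist/matrix_synapse-1.0.0-cp310-cp310-macosx_11_0_x86_64.whl
```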
@@ -1,39 +0,0 @@ (file deleted)

```bash
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `jammy` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

set -ex

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
# https://github.com/python-poetry/poetry/issues/3527
# https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - `-E` use extended regex syntax.
# - Don't modify the line that defines required Python versions.
# - Replace all lower and tilde bounds with exact bounds.
# - Replace all caret bounds with exact bounds.
# - Delete all lines referring to psycopg2 - so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.

sed -i -E '
/^\s*requires-python\s*=/b
s/[~>]=/==/g
s/\^/==/g
/psycopg2/d
s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
/systemd/d
' pyproject.toml

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
```
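The sed program's effect is easiest to see on a couple of sample dependency lines (the lines below are hypothetical, not quoted from Synapse's pyproject.toml):

```bash
# `>=`/`~=` bounds and caret bounds both become exact pins:
printf 'jsonschema = ">=3.0.0"\nattrs = "^21.1.0"\n' \
  | sed -E 's/[~>]=/==/g; s/\^/==/g'
# Output:
#   jsonschema = "==3.0.0"
#   attrs = "==21.1.0"
```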
`.gitattributes` (vendored, new file, 1 line)

@@ -0,0 +1 @@

```
.github/workflows/* merge=ours
```
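A note on `merge=ours`: the attribute only takes effect once a merge driver named `ours` is defined; git's documented pattern is a driver that always keeps the local version:

```bash
# Define the `ours` merge driver referenced by the .gitattributes line above;
# `true` simply succeeds, leaving our copy of .github/workflows/* in place.
git config merge.ours.driver true
```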
`.github/dependabot.yml` (vendored, 69 lines changed)

@@ -1,23 +1,92 @@

```yaml
version: 2
# As dependabot is currently only run on a weekly basis, we raise the
# open-pull-requests-limit to 10 (from the default of 5) to better ensure we
# don't continuously grow a backlog of updates.
updates:
  - # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
    package-ecosystem: "pip"
    directory: "/"
    open-pull-requests-limit: 10
    schedule:
      interval: "weekly"
    # Group patch updates to packages together into a single PR, as they rarely
    # if ever contain breaking changes that need to be reviewed separately.
    #
    # Less PRs means a streamlined review process.
    #
    # Python packages follow semantic versioning, and tend to only introduce
    # breaking changes in major version bumps. Thus, we'll group minor and patch
    # versions together.
    groups:
      minor-and-patches:
        applies-to: version-updates
        patterns:
          - "*"
        update-types:
          - "minor"
          - "patch"
    # Prevent pulling packages that were recently updated to help mitigate
    # supply chain attacks. 14 days was taken from the recommendation at
    # https://blog.yossarian.net/2025/11/21/We-should-all-be-using-dependency-cooldowns
    # where the author noted that 9/10 attacks would have been mitigated by a
    # two week cooldown.
    #
    # The cooldown only applies to general updates; security updates will still
    # be pulled in as soon as possible.
    cooldown:
      default-days: 14

  - package-ecosystem: "docker"
    directory: "/docker"
    open-pull-requests-limit: 10
    schedule:
      interval: "weekly"
    # For container versions, breaking changes are also typically only introduced in major
    # package bumps.
    groups:
      minor-and-patches:
        applies-to: version-updates
        patterns:
          - "*"
        update-types:
          - "minor"
          - "patch"
    cooldown:
      default-days: 14

  - package-ecosystem: "github-actions"
    directory: "/"
    open-pull-requests-limit: 10
    schedule:
      interval: "weekly"
    # Similarly for GitHub Actions, breaking changes are typically only introduced in major
    # package bumps.
    groups:
      minor-and-patches:
        applies-to: version-updates
        patterns:
          - "*"
        update-types:
          - "minor"
          - "patch"
    cooldown:
      default-days: 14

  - package-ecosystem: "cargo"
    directory: "/"
    open-pull-requests-limit: 10
    versioning-strategy: "lockfile-only"
    schedule:
      interval: "weekly"
    # The Rust ecosystem is special in that breaking changes are often introduced
    # in minor version bumps, as packages typically stay pre-1.0 for a long time.
    # Thus we specifically keep minor version bumps separate in their own PRs.
    groups:
      patches:
        applies-to: version-updates
        patterns:
          - "*"
        update-types:
          - "patch"
    cooldown:
      default-days: 14
```
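To sanity-check a `dependabot.yml` like this before pushing, one option (a suggestion, not part of this PR) is `check-jsonschema`, which ships a vendored copy of the Dependabot config schema:

```bash
pip install check-jsonschema
check-jsonschema --builtin-schema vendor.dependabot .github/dependabot.yml
```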
`.github/workflows/docker.yml` (vendored, file deleted, 155 lines)

@@ -1,155 +0,0 @@

```yaml
# GitHub actions workflow which builds and publishes the docker images.

name: Build docker images

on:
  push:
    tags: ["v*"]
    branches: [master, main, develop]
  workflow_dispatch:

permissions:
  contents: read
  packages: write
  id-token: write # needed for signing the images with GitHub OIDC Token
jobs:
  build:
    name: Build and push image for ${{ matrix.platform }}
    runs-on: ${{ matrix.runs_on }}
    strategy:
      matrix:
        include:
          - platform: linux/amd64
            runs_on: ubuntu-24.04
            suffix: linux-amd64
          - platform: linux/arm64
            runs_on: ubuntu-24.04-arm
            suffix: linux-arm64
    steps:
      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Extract version from pyproject.toml
        # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
        # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
        shell: bash
        run: |
          echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV

      - name: Log in to DockerHub
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          push: true
          labels: |
            gitsha1=${{ github.sha }}
            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
          tags: |
            docker.io/matrixdotorg/synapse
            ghcr.io/element-hq/synapse
          file: "docker/Dockerfile"
          platforms: ${{ matrix.platform }}
          outputs: type=image,push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v5
        with:
          name: digests-${{ matrix.suffix }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    name: Push merged images to ${{ matrix.repository }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        repository:
          - docker.io/matrixdotorg/synapse
          - ghcr.io/element-hq/synapse

    needs:
      - build
    steps:
      - name: Download digests
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true

      - name: Log in to DockerHub
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'docker.io') }}
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Log in to GHCR
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

      - name: Install Cosign
        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0

      - name: Calculate docker image tag
        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: ${{ matrix.repository }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
            type=pep440,pattern={{raw}}
            type=sha

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        env:
          REPOSITORY: ${{ matrix.repository }}
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf "$REPOSITORY@sha256:%s " *)

      - name: Sign each manifest
        env:
          REPOSITORY: ${{ matrix.repository }}
        run: |
          DIGESTS=""
          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
          done
          cosign sign --yes $DIGESTS
```
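The build/merge split above is the usual "push by digest, then assemble a manifest list" pattern for multi-arch images: each per-arch job pushes an untagged image and hands its digest to the merge job, which stitches them into one tag. A minimal standalone equivalent (the repository and digests are placeholders):

```bash
# Combine two per-arch digests into a single multi-arch tag:
docker buildx imagetools create \
  -t example.io/acme/app:latest \
  example.io/acme/app@sha256:<amd64-digest> \
  example.io/acme/app@sha256:<arm64-digest>
```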
`.github/workflows/docs-pr-netlify.yaml` (vendored, file deleted, 34 lines)

@@ -1,34 +0,0 @@

```yaml
name: Deploy documentation PR preview

on:
  workflow_run:
    workflows: [ "Prepare documentation PR preview" ]
    types:
      - completed

jobs:
  netlify:
    if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
    runs-on: ubuntu-latest
    steps:
      # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
      # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
      - name: 📥 Download artifact
        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
        with:
          workflow: docs-pr.yaml
          run_id: ${{ github.event.workflow_run.id }}
          name: book
          path: book

      - name: 📤 Deploy to Netlify
        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
        with:
          path: book
          owner: ${{ github.event.workflow_run.head_repository.owner.login }}
          branch: ${{ github.event.workflow_run.head_branch }}
          revision: ${{ github.event.workflow_run.head_sha }}
          token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
          site_id: ${{ secrets.NETLIFY_SITE_ID }}
          desc: Documentation preview
          deployment_env: PR Documentation Preview
```
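For reference, the GitHub CLI can also fetch artifacts from another run, which is a simpler alternative to the third-party action used above (generic `gh` usage, not what this workflow does):

```bash
# Fetch the `book` artifact from the run that triggered this workflow:
gh run download "$RUN_ID" --name book --dir book
```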
`.github/workflows/docs-pr.yaml` (vendored, file deleted, 71 lines)

@@ -1,71 +0,0 @@

```yaml
name: Prepare documentation PR preview

on:
  pull_request:
    paths:
      - docs/**
      - book.toml
      - .github/workflows/docs-pr.yaml
      - scripts-dev/schema_versions.py

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      - name: Upload Artifact
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: book
          path: book
          # We'll only use this in a workflow_run, then we're done with it
          retention-days: 1

  link-check:
    name: Check links in documentation
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Setup htmltest
        run: |
          wget https://github.com/wjdp/htmltest/releases/download/v0.17.0/htmltest_0.17.0_linux_amd64.tar.gz
          echo '775c597ee74899d6002cd2d93076f897f4ba68686bceabe2e5d72e84c57bc0fb  htmltest_0.17.0_linux_amd64.tar.gz' | sha256sum -c
          tar zxf htmltest_0.17.0_linux_amd64.tar.gz

      - name: Test links with htmltest
        # Build the book with `./` as the site URL (to make checks on 404.html possible)
        # Then run htmltest (without checking external links since that involves the network and is slow).
        run: |
          MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
          ./htmltest book --skip-external
```
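The `MDBOOK_OUTPUT__HTML__SITE_URL` variable uses mdbook's convention of overriding any `book.toml` key from the environment, with double underscores marking nesting:

```bash
# Equivalent to setting site-url = "./" under [output.html] in book.toml:
MDBOOK_OUTPUT__HTML__SITE_URL="./" mdbook build
```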
`.github/workflows/docs.yaml` (vendored, file deleted, 99 lines)

@@ -1,99 +0,0 @@

```yaml
name: Deploy the documentation

on:
  push:
    branches:
      # For bleeding-edge documentation
      - develop
      # For documentation specific to a release
      - 'release-v*'
      # stable docs
      - master

  workflow_dispatch:

jobs:
  pre:
    name: Calculate variables for GitHub Pages deployment
    runs-on: ubuntu-latest
    steps:
      # Figure out the target directory.
      #
      # The target directory depends on the name of the branch
      #
      - name: Get the target directory name
        id: vars
        run: |
          # first strip the 'refs/heads/' prefix with some shell foo
          branch="${GITHUB_REF#refs/heads/}"

          case $branch in
              release-*)
                  # strip 'release-' from the name for release branches.
                  branch="${branch#release-}"
                  ;;
              master)
                  # deploy to "latest" for the master branch.
                  branch="latest"
                  ;;
          esac

          # finally, set the 'branch-version' var.
          echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
    outputs:
      branch-version: ${{ steps.vars.outputs.branch-version }}

  ################################################################################
  pages-docs:
    name: GitHub Pages
    runs-on: ubuntu-latest
    needs:
      - pre
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Fetch all history so that the schema_versions script works.
          fetch-depth: 0

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
        with:
          mdbook-version: '0.4.17'

      - name: Set version of docs
        run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js

      - name: Setup python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      - name: Prepare and publish schema files
        run: |
          sudo apt-get update && sudo apt-get install -y yq
          mkdir -p book/schema
          # Remove developer notice before publishing.
          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
          # Copy schema files that are independent from current Synapse version.
          cp -r -t book/schema schema/v*/
          # Convert config schema from YAML source file to JSON.
          yq < schema/synapse-config.schema.yaml \
            > book/schema/synapse-config.schema.json

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
          destination_dir: ./${{ needs.pre.outputs.branch-version }}
```
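A quick trace of the branch-to-directory mapping in the `pre` job:

```bash
# refs/heads/develop        -> develop   (bleeding-edge docs)
# refs/heads/release-v1.100 -> v1.100    ('release-' prefix stripped)
# refs/heads/master         -> latest    (stable docs)
branch="${GITHUB_REF#refs/heads/}"
```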
`.github/workflows/fix_lint.yaml` (vendored, file deleted, 52 lines)

@@ -1,52 +0,0 @@

```yaml
# A helper workflow to automatically fixup any linting errors on a PR. Must be
# triggered manually.

name: Attempt to automatically fix linting errors

on:
  workflow_dispatch:

env:
  # We use nightly so that `fmt` correctly groups together imports, and
  # clippy correctly fixes up the benchmarks.
  RUST_VERSION: nightly-2025-06-24

jobs:
  fixup:
    name: Fix up
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
          components: clippy, rustfmt
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          install-project: "false"
          poetry-version: "2.1.1"

      - name: Run ruff check
        continue-on-error: true
        run: poetry run ruff check --fix .

      - name: Run ruff format
        continue-on-error: true
        run: poetry run ruff format --quiet .

      - run: cargo clippy --all-features --fix -- -D warnings
        continue-on-error: true

      - run: cargo fmt
        continue-on-error: true

      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
        with:
          commit_message: "Attempt to fix linting"
```
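The same fixups can be run locally before reaching for this workflow (assuming poetry and a nightly Rust toolchain are installed; `--allow-dirty` is needed because clippy refuses to apply fixes to an unclean tree):

```bash
poetry run ruff check --fix .
poetry run ruff format .
cargo +nightly clippy --all-features --fix --allow-dirty -- -D warnings
cargo +nightly fmt
```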
`.github/workflows/latest_deps.yml` (vendored, file deleted, 243 lines)

@@ -1,243 +0,0 @@

```yaml
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
  schedule:
    - cron: 0 7 * * *
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  RUST_VERSION: 1.87.0

jobs:
  check_repo:
    # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
    # only useful to the Synapse core team.
    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
    # of the workflow will be skipped as well.
    runs-on: ubuntu-latest
    outputs:
      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
    steps:
      - id: check_condition
        run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"

  mypy:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          poetry-version: "2.1.1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
      # `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --without dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
      - name: Remove unhelpful options from mypy config
        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - database: "sqlite"
          - database: "postgres"
            postgres-version: "14"

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
      volumes:
        - ${{ github.workspace }}:/src
    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: bookworm

          - sytest-tag: bookworm
            postgres: postgres
            workers: workers
            redis: redis
    env:
      POSTGRES: ${{ matrix.postgres && 1}}
      WORKERS: ${{ matrix.workers && 1 }}
      REDIS: ${{ matrix.redis && 1 }}
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
        working-directory: /src
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  complement:
    needs: check_repo
    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Check out synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      - run: |
          set -o pipefail
          TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # Open an issue if the build fails, so we know about it.
  # Only do this if we're not experimenting with this action in a PR.
  open-issue:
    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
    needs:
      # TODO: should mypy be included here? It feels more brittle than the others.
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
```
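The `${{ matrix.postgres && 1 }}` expressions lean on short-circuit evaluation: when the matrix key is absent the expression is empty, otherwise it evaluates to `1`. The consuming side then only needs a non-empty test, along these lines (a sketch, not code from this diff):

```bash
# How a test entrypoint typically consumes such flags:
if [ -n "$POSTGRES" ]; then
  echo "running sytest against postgres"
fi
```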
`.github/workflows/poetry_lockfile.yaml` (vendored, file deleted, 24 lines)

@@ -1,24 +0,0 @@

```yaml
on:
  push:
    branches: ["develop", "release-*"]
    paths:
      - poetry.lock
  pull_request:
    paths:
      - poetry.lock

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  check-sdists:
    name: "Check locked dependencies have sdists"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: '3.x'
      - run: pip install tomli
      - run: ./scripts-dev/check_locked_deps_have_sdists.py
```
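The sdist check exists because some projects publish only wheels, which breaks source-based installs. A manual spot check for a single locked package (name and version below are placeholders) is:

```bash
# Fails if the package has no sdist on PyPI:
pip download --no-deps --no-binary :all: somepackage==1.2.3
```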
`.github/workflows/push_complement_image.yml` (vendored, file deleted, 74 lines)

@@ -1,74 +0,0 @@

```yaml
# This task does not run complement tests, see tests.yaml instead.
# This task does not build docker images for synapse for use on docker hub, see docker.yaml instead

name: Store complement-synapse image in ghcr.io
on:
  push:
    branches: [ "master" ]
  schedule:
    - cron: '0 5 * * *'
  workflow_dispatch:
    inputs:
      branch:
        required: true
        default: 'develop'
        type: choice
        options:
          - develop
          - master

# Only run this action once per pull request/branch; restart if a new commit arrives.
# C.f. https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
# and https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build:
    name: Build and push complement image
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout specific branch (debug build)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'workflow_dispatch'
        with:
          ref: ${{ inputs.branch }}
      - name: Checkout clean copy of develop (scheduled build)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'schedule'
        with:
          ref: develop
      - name: Checkout clean copy of master (on-push)
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        if: github.event_name == 'push'
        with:
          ref: master
      - name: Login to registry
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Work out labels for complement image
        id: meta
        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
        with:
          images: ghcr.io/${{ github.repository }}/complement-synapse
          tags: |
            type=schedule,pattern=nightly,enable=${{ github.event_name == 'schedule'}}
            type=raw,value=develop,enable=${{ github.event_name == 'schedule' || inputs.branch == 'develop' }}
            type=raw,value=latest,enable=${{ github.event_name == 'push' || inputs.branch == 'master' }}
            type=sha,format=long
      - name: Run scripts-dev/complement.sh to generate complement-synapse:latest image.
        run: scripts-dev/complement.sh --build-only
      - name: Tag and push generated image
        run: |
          for TAG in ${{ join(fromJson(steps.meta.outputs.json).tags, ' ') }}; do
            echo "tag and push $TAG"
            docker tag complement-synapse $TAG
            docker push $TAG
          done
```
`.github/workflows/release-artifacts.yml` (vendored, file deleted, 208 lines)

@@ -1,208 +0,0 @@

```yaml
# GitHub actions workflow which builds the release artifacts.

name: Build release artifacts

on:
  # we build on PRs and develop to (hopefully) get early warning
  # of things breaking (but only build one set of debs). PRs skip
  # building wheels on macOS & ARM.
  pull_request:
  push:
    branches: ["develop", "release-*"]

    # we do the full build on tags.
    tags: ["v*"]
  merge_group:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  get-distros:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
              dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "distros=$dists" >> "$GITHUB_OUTPUT"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}

  # now build the packages with a matrix build.
  build-debs:
    needs: get-distros
    name: "Build .deb packages"
    runs-on: ubuntu-latest
    strategy:
      matrix:
        distro: ${{ fromJson(needs.get-distros.outputs.distros) }}

    steps:
      - name: Checkout
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
          ./src/scripts-dev/build_debian_packages.py \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
            --docker-build-arg=--load \
            "${{ matrix.distro }}"
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Artifact name
        id: artifact-name
        # We can't have colons in the upload name of the artifact, so we convert
        # e.g. `debian:sid` to `sid`.
        env:
          DISTRO: ${{ matrix.distro }}
        run: |
          echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
          path: debs/*

  build-wheels:
    name: Build wheels on ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os:
          - ubuntu-24.04
          - ubuntu-24.04-arm
        # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
        # It is not read by the rest of the workflow.
        is_pr:
          - ${{ startsWith(github.ref, 'refs/pull/') }}

        exclude:
          # Don't build aarch64 wheels on PR CI.
          - is_pr: true
            os: "ubuntu-24.04-arm"

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          # setup-python@v4 doesn't impose a default python version. Need to use 3.x
          # here, because `python` on osx points to Python 2.7.
          python-version: "3.x"

      - name: Install cibuildwheel
        run: python -m pip install cibuildwheel==3.2.1

      - name: Only build a single wheel on PR
        if: startsWith(github.ref, 'refs/pull/')
        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV

      - name: Build wheels
        run: python -m cibuildwheel --output-dir wheelhouse
        env:
          # The platforms that we build for are determined by the
          # `tool.cibuildwheel.skip` option in `pyproject.toml`.

          # We skip testing wheels for the following platforms in CI:
          #
          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
          # musl: (TODO: investigate).
          CIBW_TEST_SKIP: pp3*-* *musl*

      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: Wheel-${{ matrix.os }}
          path: ./wheelhouse/*.whl

  build-sdist:
    name: Build sdist
    runs-on: ubuntu-latest
    if: ${{ !startsWith(github.ref, 'refs/pull/') }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.10"

      - run: pip install build

      - name: Build sdist
        run: python -m build --sdist

      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: Sdist
          path: dist/*.tar.gz

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
    name: "Attach assets to release"
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-wheels
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
      - name: Build a tarball for the debs
        # We need to merge all the debs uploads into one folder, then compress
        # that.
        run: |
          mkdir debs
          mv debs*/* debs/
          tar -cvJf debs.tar.xz debs
      - name: Attach to release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh release upload "${{ github.ref_name }}" \
            Sdist/* \
            Wheel*/* \
            debs.tar.xz \
            --repo ${{ github.repository }}
```
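The PR fast-path works by narrowing cibuildwheel's build selector; the same narrowing works locally:

```bash
# Build only the CPython 3.10 manylinux wheels, as the PR branch of the
# workflow above does:
CIBW_BUILD="cp310-manylinux_*" python -m cibuildwheel --output-dir wheelhouse
```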
`.github/workflows/schema.yaml` (vendored, file deleted, 57 lines)

@@ -1,57 +0,0 @@

```yaml
name: Schema

on:
  pull_request:
    paths:
      - schema/**
      - docs/usage/configuration/config_documentation.md
  push:
    branches: ["develop", "release-*"]
  workflow_dispatch:

jobs:
  validate-schema:
    name: Ensure Synapse config schema is valid
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - name: Install check-jsonschema
        run: pip install check-jsonschema==0.33.0

      - name: Validate meta schema
        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
      - name: Validate schema
        run: |-
          # Please bump on introduction of a new meta schema.
          LATEST_META_SCHEMA_VERSION=v1
          check-jsonschema \
            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
            schema/synapse-config.schema.yaml
      - name: Validate default config
        # Populates the empty instance with default values and checks against the schema.
        run: |-
          echo "{}" | check-jsonschema \
            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -

  check-doc-generation:
    name: Ensure generated documentation is up-to-date
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - name: Install PyYAML
        run: pip install PyYAML==6.0.2

      - name: Regenerate config documentation
        run: |
          scripts-dev/gen_config_documentation.py \
            schema/synapse-config.schema.yaml \
            > docs/usage/configuration/config_documentation.md
      - name: Error in case of any differences
        # Errors if there are now any modified files (untracked files are ignored).
        run: 'git diff --exit-code'
```
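The regenerate-then-`git diff --exit-code` sequence in `check-doc-generation` is a generic idiom for keeping generated files honest: rebuild the artifact, then fail if the committed copy differs. It works for any generator (the command below is a placeholder):

```bash
./my-generator.sh > docs/generated.md   # hypothetical generator
git diff --exit-code -- docs/generated.md
```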
792
.github/workflows/tests.yml
vendored
792
.github/workflows/tests.yml
vendored
@@ -1,792 +0,0 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
pull_request:
|
||||
merge_group:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
RUST_VERSION: 1.87.0
|
||||
|
||||
jobs:
|
||||
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
|
||||
# don't modify Rust code.
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
||||
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
|
||||
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
|
||||
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
||||
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
|
||||
steps:
|
||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
id: filter
|
||||
# We only check on PRs
|
||||
if: startsWith(github.ref, 'refs/pull/')
|
||||
with:
|
||||
filters: |
|
||||
rust:
|
||||
- 'rust/**'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- '.rustfmt.toml'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
trial:
|
||||
- 'synapse/**'
|
||||
- 'tests/**'
|
||||
- 'rust/**'
|
||||
- '.ci/scripts/calculate_jobs.py'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
integration:
|
||||
- 'synapse/**'
|
||||
- 'rust/**'
|
||||
- 'docker/**'
|
||||
- 'Cargo.toml'
|
||||
- 'Cargo.lock'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- 'docker/**'
|
||||
- '.ci/**'
|
||||
- 'scripts-dev/complement.sh'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
linting:
|
||||
- 'synapse/**'
|
||||
- 'docker/**'
|
||||
- 'tests/**'
|
||||
- 'scripts-dev/**'
|
||||
- 'contrib/**'
|
||||
- 'synmark/**'
|
||||
- 'stubs/**'
|
||||
- '.ci/**'
|
||||
- 'mypy.ini'
|
||||
- 'pyproject.toml'
|
||||
- 'poetry.lock'
|
||||
- '.github/workflows/tests.yml'
|
||||
|
||||
linting_readme:
|
||||
- 'README.rst'
|
||||
|
||||
check-sampleconfig:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
poetry-version: "2.1.1"
|
||||
extras: "all"
|
||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||
- run: poetry run scripts-dev/config-lint.sh
|
||||
|
||||
check-schema-delta:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
check-lockfile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: .ci/scripts/check_lockfile.py
|
||||
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
poetry-version: "2.1.1"
|
||||
install-project: "false"
|
||||
|
||||
- name: Run ruff check
|
||||
run: poetry run ruff check --output-format=github .
|
||||
|
||||
- name: Run ruff format
|
||||
run: poetry run ruff format --check .
|
||||
|
||||
lint-mypy:
|
||||
runs-on: ubuntu-latest
|
||||
name: Typechecking
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
# We want to make use of type hints in optional dependencies too.
|
||||
extras: all
|
||||
# We have seen odd mypy failures that were resolved when we started
|
||||
# installing the project again:
|
||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
||||
# To make CI green, err towards caution and install the project.
|
||||
install-project: "true"
|
||||
poetry-version: "2.1.1"
|
||||
|
||||
# Cribbed from
|
||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
||||
- name: Restore/persist mypy's cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: |
|
||||
.mypy_cache
|
||||
key: mypy-cache-${{ github.context.sha }}
|
||||
restore-keys: mypy-cache-
|
||||
|
||||
- name: Run mypy
|
||||
run: poetry run mypy
|
||||
|
||||
  lint-crlf:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

  lint-newsfile:
    # Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
    if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.number }}

  lint-clippy:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          components: clippy
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo clippy -- -D warnings

  # We also lint against a nightly rustc so that we can lint the benchmark
  # suite, which requires a nightly compiler.
  lint-clippy-nightly:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: nightly-2025-04-23
          components: clippy
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo clippy --all-features -- -D warnings

  lint-rust:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Setup Poetry
        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          # Install like a normal project from source with all optional dependencies
          extras: all
          install-project: "true"
          poetry-version: "2.1.1"

      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
        # The `::error::` syntax uses GitHub Actions' error annotations, see
        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
        run: |
          if git diff --quiet Cargo.lock; then
            echo "Cargo.lock is up to date"
          else
            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
            git diff --exit-code Cargo.lock
            exit 1
          fi

  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
  # for some of the unstable options we use in `.rustfmt.toml`.
  lint-rustfmt:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.rust == 'true' }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          # We use nightly so that we can use some unstable options that we use in
          # `.rustfmt.toml`.
          toolchain: nightly-2025-04-23
          components: rustfmt
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo fmt --check

  # This is to detect issues with the rst file, which can otherwise cause issues
  # when uploading packages to PyPI.
  lint-readme:
    runs-on: ubuntu-latest
    needs: changes
    if: ${{ needs.changes.outputs.linting_readme == 'true' }}
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - run: "pip install rstcheck"
      - run: "rstcheck --report-level=WARNING README.rst"

  # Dummy step to gate other tests on without repeating the whole list
  linting-done:
    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
    needs:
      - lint
      - lint-mypy
      - lint-crlf
      - lint-newsfile
      - check-sampleconfig
      - check-schema-delta
      - check-lockfile
      - lint-clippy
      - lint-clippy-nightly
      - lint-rust
      - lint-rustfmt
      - lint-readme
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
        with:
          needs: ${{ toJSON(needs) }}

          # Various bits are skipped if there were no applicable changes.
          skippable: |
            check-sampleconfig
            check-schema-delta
            lint
            lint-mypy
            lint-newsfile
            lint-clippy
            lint-clippy-nightly
            lint-rust
            lint-rustfmt
            lint-readme

  calculate-test-jobs:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: "3.x"
      - id: get-matrix
        run: .ci/scripts/calculate_jobs.py
    outputs:
      trial_test_matrix: ${{ steps.get-matrix.outputs.trial_test_matrix }}
      sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}

  trial:
    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
    needs:
      - calculate-test-jobs
      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
        if: ${{ matrix.job.postgres-version }}
        # 1. Mount postgres data files onto a tmpfs in-memory filesystem to reduce overhead of docker's overlayfs layer.
        # 2. Expose the unix socket for postgres. This removes latency of using docker-proxy for connections.
        run: |
          docker run -d -p 5432:5432 \
            --tmpfs /var/lib/postgres:rw,size=6144m \
            --mount 'type=bind,src=/var/run/postgresql,dst=/var/run/postgresql' \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.job.postgres-version }}

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: ${{ matrix.job.python-version }}
          poetry-version: "2.1.1"
          extras: ${{ matrix.job.extras }}
      - name: Await PostgreSQL
        if: ${{ matrix.job.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: poetry run trial --jobs=6 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: /var/run/postgresql
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, so always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best-effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-olddeps:
    # Note: sqlite only; no postgres
    if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      # There aren't wheels for some of the older deps, so we need to install
      # their build dependencies
      - run: |
          sudo apt-get -qq update
          sudo apt-get -qq install build-essential libffi-dev python3-dev \
            libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev

      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: '3.10'

      - name: Prepare old deps
        if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
        run: .ci/scripts/prepare_old_deps.sh

      # Note: we install using `pip` here, not poetry. `poetry install` ignores the
      # build-system section (https://github.com/python-poetry/poetry/issues/6154), but
      # we explicitly want to test that you can `pip install` using the oldest version
      # of poetry-core and setuptools-rust.
      - run: pip install .[all,test]

      # We nuke the local copy, as we've installed synapse into the virtualenv
      # (rather than use an editable install, which we no longer support). If we
      # don't do this then python can't find the native lib.
      - run: rm -rf synapse/

      # Sanity check we can import/run Synapse
      - run: python -m synapse.app.homeserver --help

      - run: python -m twisted.trial -j6 tests
      - name: Dump logs
        # Logs are most useful when the command fails, so always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best-effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'
    # Note: sqlite only; no postgres. Completely untested since poetry move.
    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["pypy-3.10"]
        extras: ["all"]

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: ${{ matrix.python-version }}
          poetry-version: "2.1.1"
          extras: ${{ matrix.extras }}
      - run: poetry run trial --jobs=2 tests
      - name: Dump logs
        # Logs are most useful when the command fails, so always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best-effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
    needs:
      - calculate-test-jobs
      - changes
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        # If this is a pull request to a release branch, use that branch as default branch for sytest, else use develop
        # This works because the release script always creates a branch on the sytest repo with the same name as the release branch
        SYTEST_DEFAULT_BRANCH: ${{ startsWith(github.base_ref, 'release-') && github.base_ref || 'develop' }}
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1 }}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  export-data:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
    needs: [linting-done, portdb, changes]
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}

    services:
      postgres:
        image: postgres
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          poetry-version: "2.1.1"
          extras: "postgres"
      - run: .ci/scripts/test_export_data_command.sh
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres

  portdb:
    if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }} # Allow previous steps to be skipped, but not fail
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - python-version: "3.10"
            postgres-version: "14"

          - python-version: "3.14"
            postgres-version: "17"

    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - name: Add PostgreSQL apt repository
        # We need a version of pg_dump that can handle the version of
        # PostgreSQL being tested against. The Ubuntu package repository lags
        # behind new releases, so we have to use the PostgreSQL apt repository.
        # Steps taken from https://www.postgresql.org/download/linux/ubuntu/
        run: |
          sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
          wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
          sudo apt-get update
      - run: sudo apt-get -qq install xmlsec1 postgresql-client
      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: ${{ matrix.python-version }}
          poetry-version: "2.1.1"
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh
        id: run_tester_script
        env:
          PGHOST: localhost
          PGUSER: postgres
          PGPASSWORD: postgres
          PGDATABASE: postgres
      - name: "Upload schema differences"
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
        with:
          name: Schema dumps
          path: |
            unported.sql
            ported.sql
            schema_diff

  complement:
    if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
    needs:
      - linting-done
      - changes
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Checkout synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      # use p=1 concurrency as GHA boxes are underpowered and don't like running tons of synapses at once.
      - run: |
          set -o pipefail
          COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -p 1 -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        env:
          POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
          WORKERS: ${{ (matrix.arrangement == 'workers') && 1 || '' }}
        name: Run Complement Tests

  cargo-test:
    if: ${{ needs.changes.outputs.rust == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo test

  # We want to ensure that the cargo benchmarks still compile, which requires a
  # nightly compiler.
  cargo-bench:
    if: ${{ needs.changes.outputs.rust == 'true' }}
    runs-on: ubuntu-latest
    needs:
      - linting-done
      - changes

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: nightly-2022-12-01
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - run: cargo bench --no-run

  # A job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
      - cargo-test
      - cargo-bench
      - linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
        with:
          needs: ${{ toJSON(needs) }}

          # Various bits are skipped if there were no applicable changes.
          # The newsfile lint may be skipped on non-PR builds.
          skippable: |
            trial
            trial-olddeps
            sytest
            portdb
            export-data
            complement
            lint-newsfile
            cargo-test
            cargo-bench
.github/workflows/triage-incoming.yml
@@ -1,14 +0,0 @@
name: Move new issues into the issue triage board

on:
  issues:
    types: [ opened ]

jobs:
  triage:
    uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
    with:
      project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
      content_id: ${{ github.event.issue.node_id }}
    secrets:
      github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
.github/workflows/triage_labelled.yml
@@ -1,31 +0,0 @@
name: Move labelled issues to correct projects

on:
  issues:
    types: [ labeled ]

jobs:
  move_needs_info:
    runs-on: ubuntu-latest
    if: >
      contains(github.event.issue.labels.*.name, 'X-Needs-Info')
    permissions:
      contents: read
    env:
      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
      PROJECT_OWNER: matrix-org
      # Backend issue triage board.
      # https://github.com/orgs/matrix-org/projects/67/views/1
      PROJECT_NUMBER: 67
      ISSUE_URL: ${{ github.event.issue.html_url }}
      # This field is case-sensitive.
      TARGET_STATUS: Needs info
    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          # Only clone the script file we care about, instead of the whole repo.
          sparse-checkout: .ci/scripts/triage_labelled_issue.sh

      - name: Ensure issue exists on the board, then set Status
        run: .ci/scripts/triage_labelled_issue.sh
.github/workflows/twisted_trunk.yml
@@ -1,226 +0,0 @@
name: Twisted Trunk

on:
  schedule:
    - cron: 0 8 * * *

  workflow_dispatch:
    # NB: inputs are only present when this workflow is dispatched manually.
    # (The default below is the default field value in the form to trigger
    # a manual dispatch). Otherwise the inputs will evaluate to null.
    inputs:
      twisted_ref:
        description: Commit, branch or tag to checkout from upstream Twisted.
        required: false
        default: 'trunk'
        type: string

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  RUST_VERSION: 1.87.0

jobs:
  check_repo:
    # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
    # only useful to the Synapse core team.
    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
    # of the workflow will be skipped as well.
    if: github.repository == 'element-hq/synapse'
    runs-on: ubuntu-latest
    outputs:
      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
    steps:
      - id: check_condition
        run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"

  mypy:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          extras: "all"
          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
          poetry install --no-interaction --extras "all test"
      - name: Remove unhelpful options from mypy config
        run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
      - run: poetry run mypy

  trial:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - run: sudo apt-get -qq install xmlsec1

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
        with:
          python-version: "3.x"
          extras: "all test"
          poetry-version: "2.1.1"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - run: poetry run trial --jobs 2 tests

      - name: Dump logs
        # Logs are most useful when the command fails, so always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        # This keeps logs colocated with failing jobs
        # It also ignores find's exit code; this is a best-effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    needs: check_repo
    if: needs.check_repo.outputs.should_run_workflow == 'true'
    runs-on: ubuntu-latest
    container:
      # We're using bookworm because that's what Debian oldstable is at the time of writing.
      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
      image: matrixdotorg/sytest-synapse:bookworm
      volumes:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

      - name: Install Rust
        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
        with:
          toolchain: ${{ env.RUST_VERSION }}
      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2

      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        # but the sytest-synapse container expects it to be in /venv/.
        # We symlink it before running poetry so that poetry actually
        # ends up installing to `/venv`.
        run: |
          ln -s -T /venv /src/.venv
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
        env:
          # Use offline mode to avoid reinstalling the pinned version of
          # twisted.
          OFFLINE: 1
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  complement:
    needs: check_repo
    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        include:
          - arrangement: monolith
            database: SQLite

          - arrangement: monolith
            database: Postgres

          - arrangement: workers
            database: Postgres

    steps:
      - name: Checkout synapse codebase
        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
        with:
          path: synapse

      - name: Prepare Complement's Prerequisites
        run: synapse/.ci/scripts/setup_complement_prerequisites.sh

      - uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
        with:
          cache-dependency-path: complement/go.sum
          go-version-file: complement/go.mod

      # This step is specific to the 'Twisted trunk' test run:
      - name: Patch dependencies
        run: |
          set -x
          DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
          pipx install poetry==2.1.1

          poetry remove -n twisted
          poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry lock
        working-directory: synapse

      - run: |
          set -o pipefail
          TEST_ONLY_SKIP_DEP_HASH_VERIFICATION=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
        shell: bash
        name: Run Complement Tests

  # Open an issue if the build fails, so we know about it.
  open-issue:
    if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
    needs:
      - mypy
      - trial
      - sytest
      - complement

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
      - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/twisted_trunk_build_failed_issue_template.md
changelog.d/18402.misc
@@ -0,0 +1 @@
Group together dependabot update PRs to reduce the review load.

changelog.d/19187.misc
@@ -0,0 +1 @@
Fix `HomeServer.shutdown()` failing if the homeserver hasn't been set up yet.

changelog.d/19206.bugfix
@@ -0,0 +1 @@
Fix a sliding sync performance slowdown for long-lived connections.

changelog.d/19212.misc
@@ -0,0 +1 @@
Respond with useful error codes when `Content-Length` headers are invalid.

changelog.d/19231.bugfix
@@ -0,0 +1 @@
Fix a bug where Mastodon posts (and possibly other embeds) had the wrong description for URL previews.

changelog.d/19232.misc
@@ -0,0 +1 @@
Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`.

changelog.d/19234.misc
@@ -0,0 +1 @@
Switch the build backend from `poetry-core` to `maturin`.

changelog.d/19253.misc
@@ -0,0 +1 @@
Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10.

changelog.d/19254.removal
@@ -0,0 +1 @@
Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden.

changelog.d/19258.misc
@@ -0,0 +1 @@
Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks.

changelog.d/19260.feature
@@ -0,0 +1 @@
Add `memberships` endpoint to the admin API. This is useful for forensics and T&S purposes.

changelog.d/19262.misc
@@ -0,0 +1 @@
Drop the broken Netlify documentation workflow until a new one is implemented.

changelog.d/19267.bugfix
@@ -0,0 +1 @@
Fix a bug where `Duration` was logged incorrectly.

changelog.d/19268.feature
@@ -0,0 +1 @@
Add an admin API for retrieving a paginated list of quarantined media.

changelog.d/19270.doc
@@ -0,0 +1 @@
Document the importance of `public_baseurl` when configuring OpenID Connect authentication.

changelog.d/19274.bugfix
@@ -0,0 +1 @@
Fix a bug introduced in 1.143.0 that broke support for versions of `zope-interface` older than 6.2.

changelog.d/19275.feature
@@ -0,0 +1 @@
Server admins can bypass the quarantine media check when downloading media by setting the `admin_unsafely_bypass_quarantine` query parameter to `true` on Client-Server API media download requests.

changelog.d/19278.misc
@@ -0,0 +1 @@
Don't include debug logs in `Clock` unless explicitly enabled.

changelog.d/19279.feature
@@ -0,0 +1 @@
Implemented pagination for the [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) mutual rooms endpoint. Contributed by @tulir @ Beeper.

changelog.d/19281.feature
@@ -0,0 +1 @@
Admin API: add worker support to `GET /_synapse/admin/v2/users/<user_id>`.

changelog.d/19289.misc
@@ -0,0 +1 @@
Use `uv` to test olddeps to ensure all transitive dependencies use minimum versions.

changelog.d/19297.misc
@@ -0,0 +1 @@
Log the original bind exception when encountering `Failed to listen on 0.0.0.0, continuing because listening on [::]`.

changelog.d/19300.feature
@@ -0,0 +1 @@
Improve proxy support for the `federation_client.py` dev script. Contributed by Denis Kasak (@dkasak).

changelog.d/19302.misc
@@ -0,0 +1 @@
Unpin the version of Rust we use to build Synapse wheels (was 1.82.0) now that macOS support has been dropped.

@@ -73,6 +73,33 @@ Response:
}
```

## Listing all quarantined media

This API returns a list of all quarantined media on the server. It is paginated, and can be scoped to either local or
remote media. Note that the pagination values are scoped to the other request parameters: changing those parameters
while reusing the same pagination values may produce unexpected results.

Request:

```http
GET /_synapse/admin/v1/media/quarantined?from=0&limit=100&kind=local
```

`from` and `limit` are optional parameters, and default to `0` and `100` respectively. They are the row index and number
of rows to return - they are not timestamps.

`kind` *MUST* be either `local` or `remote`.

The API returns a JSON body containing MXC URIs for the quarantined media, like the following:

```json
{
  "media": [
    "mxc://localhost/xwvutsrqponmlkjihgfedcba",
    "mxc://localhost/abcdefghijklmnopqrstuvwx"
  ]
}
```
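
For example, paging through local quarantined media might look like the sketch below. This is illustrative only: the server name and the `$ADMIN_TOKEN` variable are placeholders, not values from this repository.

```sh
# Illustrative only: fetch the second page of local quarantined media.
# "from" advances by the page size ("limit") on each request.
curl -s -H "Authorization: Bearer $ADMIN_TOKEN" \
  "https://example.com/_synapse/admin/v1/media/quarantined?from=100&limit=100&kind=local"
```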

# Quarantine media

Quarantining media means that it is marked as inaccessible by users. It applies
@@ -88,6 +115,20 @@ is quarantined, Synapse will:
- Quarantine any existing cached remote media.
- Quarantine any future remote media.

## Downloading quarantined media

Normally, when media is quarantined, it will return a 404 error when downloaded.
Admins can bypass this by adding `?admin_unsafely_bypass_quarantine=true`
to the [normal download URL](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1mediadownloadservernamemediaid).

Bypassing the quarantine check is not recommended. Media is typically quarantined
to prevent harmful content from being served to users, which includes admins. Only
set the bypass parameter if you intentionally want to access potentially harmful
content.

Non-admin users cannot bypass quarantine checks, even when specifying the above
query parameter.
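
For illustration, a bypassing admin download might look like the following sketch (the server name, media ID, and token are placeholders):

```sh
# Illustrative only: download quarantined media as a server admin.
# Requires an access token for an admin account.
curl -s -H "Authorization: Bearer $ADMIN_TOKEN" \
  -o media.bin \
  "https://example.com/_matrix/client/v1/media/download/example.com/abcdefghijklmnopqrstuvwx?admin_unsafely_bypass_quarantine=true"
```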

## Quarantining media by ID

This API quarantines a single piece of local or remote media.

@@ -505,6 +505,55 @@ with a body of:
}
```

## List room memberships of a user

Gets a list of room memberships for a specific `user_id`. This
endpoint differs from
[`GET /_synapse/admin/v1/users/<user_id>/joined_rooms`](#list-joined-rooms-of-a-user)
in that it returns rooms with memberships other than "join".

The API is:

```
GET /_synapse/admin/v1/users/<user_id>/memberships
```

A response body like the following is returned:

```json
{
  "memberships": {
    "!DuGcnbhHGaSZQoNQR:matrix.org": "join",
    "!ZtSaPCawyWtxfWiIy:matrix.org": "leave"
  }
}
```

which is a map of room membership states for the given user. This endpoint can
be used with both local and remote users, with the caveat that the homeserver will
only be aware of the memberships for rooms that one of its local users has joined.

Remote user memberships may also be out of date if all local users have since left
a room. The homeserver will thus no longer receive membership updates about it.

The list includes rooms that the user has since left; other membership states (knock,
invite, etc.) are also possible.

Note that rooms will only disappear from this list if they are
[purged](./rooms.md#delete-room-api) from the homeserver.

**Parameters**

The following parameters should be set in the URL:

- `user_id` - fully qualified: for example, `@user:server.com`.

**Response**

The following fields are returned in the JSON response body:

- `memberships` - A map of `room_id` (string) to `membership` state (string).
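
As a usage sketch (server name and token are placeholders):

```sh
# Illustrative only: list all known memberships for a user.
curl -s -H "Authorization: Bearer $ADMIN_TOKEN" \
  "https://example.com/_synapse/admin/v1/users/@user:server.com/memberships"
```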

## List joined rooms of a user

Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).

@@ -50,6 +50,11 @@ setting in your configuration file.
See the [configuration manual](usage/configuration/config_documentation.md#oidc_providers) for some sample settings, as well as
the text below for example configurations for specific providers.

For setups using [`.well-known` delegation](delegate.md), make sure
[`public_baseurl`](usage/configuration/config_documentation.md#public_baseurl) is set
appropriately. If unset, Synapse defaults to `https://<server_name>/`, which is used in
the OIDC callback URL.
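
As a quick sanity check (a sketch; `example.com` is a placeholder), you can confirm the base URL that clients discover via `.well-known` delegation, which should line up with `public_baseurl`:

```sh
# Illustrative only: the "m.homeserver".base_url value returned here
# should match public_baseurl in homeserver.yaml.
curl -s https://example.com/.well-known/matrix/client
```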

## OIDC Back-Channel Logout

Synapse supports receiving [OpenID Connect Back-Channel Logout](https://openid.net/specs/openid-connect-backchannel-1_0.html) notifications.

@@ -255,6 +255,8 @@ information.
    ^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
    ^/_matrix/client/(r0|v3|unstable)/capabilities$
    ^/_matrix/client/(r0|v3|unstable)/notifications$

    # Admin API requests
    ^/_synapse/admin/v1/rooms/[^/]+$

    # Encryption requests
@@ -300,6 +302,9 @@ Additionally, the following REST endpoints can be handled for GET requests:
    # Presence requests
    ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/

    # Admin API requests
    ^/_synapse/admin/v2/users/[^/]+$

Pagination requests can also be handled, but all requests for a given
room must be routed to the same instance. Additionally, care must be taken to
ensure that the purge history admin API is not used while pagination requests

poetry.lock
@@ -31,7 +31,7 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"all\" or extra == \"jwt\" or extra == \"oidc\""
markers = "extra == \"oidc\" or extra == \"jwt\" or extra == \"all\""
files = [
    {file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"},
    {file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"},
@@ -134,14 +134,14 @@ typecheck = ["mypy"]

[[package]]
name = "bleach"
version = "6.2.0"
version = "6.3.0"
description = "An easy safelist-based HTML-sanitizing tool."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
    {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"},
    {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"},
    {file = "bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6"},
    {file = "bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22"},
]

[package.dependencies]
@@ -176,83 +176,100 @@ files = [

[[package]]
name = "cffi"
version = "1.17.1"
version = "2.0.0"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
|
||||
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
|
||||
{file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
|
||||
{file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
|
||||
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
|
||||
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
{file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
{file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
{file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
{file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
{file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
{file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
{file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
{file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
{file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
{file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
{file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
{file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
{file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
{file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
{file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
{file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
{file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
{file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
{file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
{file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
{file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
{file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
{file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
{file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
{file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
{file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
{file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
{file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
{file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
{file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
{file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
{file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
{file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
{file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
{file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
{file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
]
[package.dependencies]
pycparser = "*"
pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
[[package]]
name = "charset-normalizer"
@@ -381,62 +398,80 @@ files = [
[[package]]
name = "cryptography"
version = "45.0.7"
version = "46.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
groups = ["main", "dev"]
files = [
{file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"},
{file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"},
{file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"},
{file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"},
{file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"},
{file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"},
{file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"},
{file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"},
{file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"},
{file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"},
{file = "cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"},
{file = "cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e"},
{file = "cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926"},
{file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71"},
{file = "cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac"},
{file = "cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018"},
{file = "cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb"},
{file = "cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c"},
{file = "cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665"},
{file = "cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3"},
{file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20"},
{file = "cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de"},
{file = "cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914"},
{file = "cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db"},
{file = "cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21"},
{file = "cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04"},
{file = "cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506"},
{file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963"},
{file = "cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4"},
{file = "cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df"},
{file = "cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f"},
{file = "cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372"},
{file = "cryptography-46.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a23582810fedb8c0bc47524558fb6c56aac3fc252cb306072fd2815da2a47c32"},
{file = "cryptography-46.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7aec276d68421f9574040c26e2a7c3771060bc0cff408bae1dcb19d3ab1e63c"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9"},
{file = "cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c"},
{file = "cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1"},
]
[package.dependencies]
cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.3)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -446,7 +481,7 @@ description = "XML bomb protection for Python stdlib modules"
optional = true
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -471,7 +506,7 @@ description = "XPath 1.0/2.0/3.0/3.1 parsers and selectors for ElementTree and l
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"saml2\""
markers = "extra == \"saml2\" or extra == \"all\""
files = [
{file = "elementpath-4.1.5-py3-none-any.whl", hash = "sha256:2ac1a2fb31eb22bbbf817f8cf6752f844513216263f0e3892c8e79782fe4bb55"},
{file = "elementpath-4.1.5.tar.gz", hash = "sha256:c2d6dc524b29ef751ecfc416b0627668119d8812441c555d7471da41d4bacb8d"},
@@ -521,7 +556,7 @@ description = "Python wrapper for hiredis"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"redis\""
markers = "extra == \"redis\" or extra == \"all\""
files = [
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:9937d9b69321b393fbace69f55423480f098120bc55a3316e1ca3508c4dbbd6f"},
{file = "hiredis-3.3.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:50351b77f89ba6a22aff430b993653847f36b71d444509036baa0f2d79d1ebf4"},
@@ -844,7 +879,7 @@ description = "Jaeger Python OpenTracing Tracer implementation"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"opentracing\""
markers = "extra == \"opentracing\" or extra == \"all\""
files = [
{file = "jaeger-client-4.8.0.tar.gz", hash = "sha256:3157836edab8e2c209bd2d6ae61113db36f7ee399e66b1dcbb715d87ab49bfe0"},
]
@@ -982,7 +1017,7 @@ description = "A strictly RFC 4510 conforming LDAP V3 pure Python client library
optional = true
python-versions = "*"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "ldap3-2.9.1-py2.py3-none-any.whl", hash = "sha256:5869596fc4948797020d3f03b7939da938778a0f9e2009f7a072ccf92b8e8d70"},
{file = "ldap3-2.9.1.tar.gz", hash = "sha256:f3e7fc4718e3f09dda568b57100095e0ce58633bcabbed8667ce3f8fbaa4229f"},
@@ -991,6 +1026,92 @@ files = [
[package.dependencies]
pyasn1 = ">=0.4.6"
[[package]]
name = "librt"
version = "0.6.3"
description = "Mypyc runtime library"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "librt-0.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:45660d26569cc22ed30adf583389d8a0d1b468f8b5e518fcf9bfe2cd298f9dd1"},
{file = "librt-0.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54f3b2177fb892d47f8016f1087d21654b44f7fc4cf6571c1c6b3ea531ab0fcf"},
{file = "librt-0.6.3-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c5b31bed2c2f2fa1fcb4815b75f931121ae210dc89a3d607fb1725f5907f1437"},
{file = "librt-0.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f8ed5053ef9fb08d34f1fd80ff093ccbd1f67f147633a84cf4a7d9b09c0f089"},
{file = "librt-0.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3f0e4bd9bcb0ee34fa3dbedb05570da50b285f49e52c07a241da967840432513"},
{file = "librt-0.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8f89c8d20dfa648a3f0a56861946eb00e5b00d6b00eea14bc5532b2fcfa8ef1"},
{file = "librt-0.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ecc2c526547eacd20cb9fbba19a5268611dbc70c346499656d6cf30fae328977"},
{file = "librt-0.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fbedeb9b48614d662822ee514567d2d49a8012037fc7b4cd63f282642c2f4b7d"},
{file = "librt-0.6.3-cp310-cp310-win32.whl", hash = "sha256:0765b0fe0927d189ee14b087cd595ae636bef04992e03fe6dfdaa383866c8a46"},
{file = "librt-0.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:8c659f9fb8a2f16dc4131b803fa0144c1dadcb3ab24bb7914d01a6da58ae2457"},
{file = "librt-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:61348cc488b18d1b1ff9f3e5fcd5ac43ed22d3e13e862489d2267c2337285c08"},
{file = "librt-0.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64645b757d617ad5f98c08e07620bc488d4bced9ced91c6279cec418f16056fa"},
{file = "librt-0.6.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:26b8026393920320bb9a811b691d73c5981385d537ffc5b6e22e53f7b65d4122"},
{file = "librt-0.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d998b432ed9ffccc49b820e913c8f327a82026349e9c34fa3690116f6b70770f"},
{file = "librt-0.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e18875e17ef69ba7dfa9623f2f95f3eda6f70b536079ee6d5763ecdfe6cc9040"},
{file = "librt-0.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a218f85081fc3f70cddaed694323a1ad7db5ca028c379c214e3a7c11c0850523"},
{file = "librt-0.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ef42ff4edd369e84433ce9b188a64df0837f4f69e3d34d3b34d4955c599d03f"},
{file = "librt-0.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e0f2b79993fec23a685b3e8107ba5f8675eeae286675a216da0b09574fa1e47"},
{file = "librt-0.6.3-cp311-cp311-win32.whl", hash = "sha256:fd98cacf4e0fabcd4005c452cb8a31750258a85cab9a59fb3559e8078da408d7"},
{file = "librt-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:e17b5b42c8045867ca9d1f54af00cc2275198d38de18545edaa7833d7e9e4ac8"},
{file = "librt-0.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:87597e3d57ec0120a3e1d857a708f80c02c42ea6b00227c728efbc860f067c45"},
{file = "librt-0.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74418f718083009108dc9a42c21bf2e4802d49638a1249e13677585fcc9ca176"},
{file = "librt-0.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:514f3f363d1ebc423357d36222c37e5c8e6674b6eae8d7195ac9a64903722057"},
{file = "librt-0.6.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cf1115207a5049d1f4b7b4b72de0e52f228d6c696803d94843907111cbf80610"},
{file = "librt-0.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad8ba80cdcea04bea7b78fcd4925bfbf408961e9d8397d2ee5d3ec121e20c08c"},
{file = "librt-0.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4018904c83eab49c814e2494b4e22501a93cdb6c9f9425533fe693c3117126f9"},
{file = "librt-0.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8983c5c06ac9c990eac5eb97a9f03fe41dc7e9d7993df74d9e8682a1056f596c"},
{file = "librt-0.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7769c579663a6f8dbf34878969ac71befa42067ce6bf78e6370bf0d1194997c"},
{file = "librt-0.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3c9a07eafdc70556f8c220da4a538e715668c0c63cabcc436a026e4e89950bf"},
{file = "librt-0.6.3-cp312-cp312-win32.whl", hash = "sha256:38320386a48a15033da295df276aea93a92dfa94a862e06893f75ea1d8bbe89d"},
{file = "librt-0.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:c0ecf4786ad0404b072196b5df774b1bb23c8aacdcacb6c10b4128bc7b00bd01"},
{file = "librt-0.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:9f2a6623057989ebc469cd9cc8fe436c40117a0147627568d03f84aef7854c55"},
{file = "librt-0.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9e716f9012148a81f02f46a04fc4c663420c6fbfeacfac0b5e128cf43b4413d3"},
{file = "librt-0.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:669ff2495728009a96339c5ad2612569c6d8be4474e68f3f3ac85d7c3261f5f5"},
{file = "librt-0.6.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:349b6873ebccfc24c9efd244e49da9f8a5c10f60f07575e248921aae2123fc42"},
{file = "librt-0.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c74c26736008481c9f6d0adf1aedb5a52aff7361fea98276d1f965c0256ee70"},
{file = "librt-0.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:408a36ddc75e91918cb15b03460bdc8a015885025d67e68c6f78f08c3a88f522"},
{file = "librt-0.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e61ab234624c9ffca0248a707feffe6fac2343758a36725d8eb8a6efef0f8c30"},
{file = "librt-0.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:324462fe7e3896d592b967196512491ec60ca6e49c446fe59f40743d08c97917"},
{file = "librt-0.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36b2ec8c15030002c7f688b4863e7be42820d7c62d9c6eece3db54a2400f0530"},
{file = "librt-0.6.3-cp313-cp313-win32.whl", hash = "sha256:25b1b60cb059471c0c0c803e07d0dfdc79e41a0a122f288b819219ed162672a3"},
{file = "librt-0.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:10a95ad074e2a98c9e4abc7f5b7d40e5ecbfa84c04c6ab8a70fabf59bd429b88"},
{file = "librt-0.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:17000df14f552e86877d67e4ab7966912224efc9368e998c96a6974a8d609bf9"},
{file = "librt-0.6.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8e695f25d1a425ad7a272902af8ab8c8d66c1998b177e4b5f5e7b4e215d0c88a"},
{file = "librt-0.6.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e84a4121a7ae360ca4da436548a9c1ca8ca134a5ced76c893cc5944426164bd"},
{file = "librt-0.6.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:05f385a414de3f950886ea0aad8f109650d4b712cf9cc14cc17f5f62a9ab240b"},
{file = "librt-0.6.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36a8e337461150b05ca2c7bdedb9e591dfc262c5230422cea398e89d0c746cdc"},
{file = "librt-0.6.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcbe48f6a03979384f27086484dc2a14959be1613cb173458bd58f714f2c48f3"},
{file = "librt-0.6.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4bca9e4c260233fba37b15c4ec2f78aa99c1a79fbf902d19dd4a763c5c3fb751"},
{file = "librt-0.6.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:760c25ed6ac968e24803eb5f7deb17ce026902d39865e83036bacbf5cf242aa8"},
{file = "librt-0.6.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4a93a353ccff20df6e34fa855ae8fd788832c88f40a9070e3ddd3356a9f0e"},
{file = "librt-0.6.3-cp314-cp314-win32.whl", hash = "sha256:cb92741c2b4ea63c09609b064b26f7f5d9032b61ae222558c55832ec3ad0bcaf"},
{file = "librt-0.6.3-cp314-cp314-win_amd64.whl", hash = "sha256:fdcd095b1b812d756fa5452aca93b962cf620694c0cadb192cec2bb77dcca9a2"},
{file = "librt-0.6.3-cp314-cp314-win_arm64.whl", hash = "sha256:822ca79e28720a76a935c228d37da6579edef048a17cd98d406a2484d10eda78"},
{file = "librt-0.6.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:078cd77064d1640cb7b0650871a772956066174d92c8aeda188a489b58495179"},
{file = "librt-0.6.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5cc22f7f5c0cc50ed69f4b15b9c51d602aabc4500b433aaa2ddd29e578f452f7"},
{file = "librt-0.6.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:14b345eb7afb61b9fdcdfda6738946bd11b8e0f6be258666b0646af3b9bb5916"},
{file = "librt-0.6.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d46aa46aa29b067f0b8b84f448fd9719aaf5f4c621cc279164d76a9dc9ab3e8"},
{file = "librt-0.6.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b51ba7d9d5d9001494769eca8c0988adce25d0a970c3ba3f2eb9df9d08036fc"},
{file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ced0925a18fddcff289ef54386b2fc230c5af3c83b11558571124bfc485b8c07"},
{file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6bac97e51f66da2ca012adddbe9fd656b17f7368d439de30898f24b39512f40f"},
{file = "librt-0.6.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b2922a0e8fa97395553c304edc3bd36168d8eeec26b92478e292e5d4445c1ef0"},
{file = "librt-0.6.3-cp314-cp314t-win32.whl", hash = "sha256:f33462b19503ba68d80dac8a1354402675849259fb3ebf53b67de86421735a3a"},
{file = "librt-0.6.3-cp314-cp314t-win_amd64.whl", hash = "sha256:04f8ce401d4f6380cfc42af0f4e67342bf34c820dae01343f58f472dbac75dcf"},
{file = "librt-0.6.3-cp314-cp314t-win_arm64.whl", hash = "sha256:afb39550205cc5e5c935762c6bf6a2bb34f7d21a68eadb25e2db7bf3593fecc0"},
{file = "librt-0.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09262cb2445b6f15d09141af20b95bb7030c6f13b00e876ad8fdd1a9045d6aa5"},
{file = "librt-0.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57705e8eec76c5b77130d729c0f70190a9773366c555c5457c51eace80afd873"},
{file = "librt-0.6.3-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3ac2a7835434b31def8ed5355dd9b895bbf41642d61967522646d1d8b9681106"},
{file = "librt-0.6.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71f0a5918aebbea1e7db2179a8fe87e8a8732340d9e8b8107401fb407eda446e"},
{file = "librt-0.6.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa346e202e6e1ebc01fe1c69509cffe486425884b96cb9ce155c99da1ecbe0e9"},
{file = "librt-0.6.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:92267f865c7bbd12327a0d394666948b9bf4b51308b52947c0cc453bfa812f5d"},
{file = "librt-0.6.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:86605d5bac340beb030cbc35859325982a79047ebdfba1e553719c7126a2389d"},
{file = "librt-0.6.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:98e4bbecbef8d2a60ecf731d735602feee5ac0b32117dbbc765e28b054bac912"},
{file = "librt-0.6.3-cp39-cp39-win32.whl", hash = "sha256:3caa0634c02d5ff0b2ae4a28052e0d8c5f20d497623dc13f629bd4a9e2a6efad"},
{file = "librt-0.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:b47395091e7e0ece1e6ebac9b98bf0c9084d1e3d3b2739aa566be7e56e3f7bf2"},
{file = "librt-0.6.3.tar.gz", hash = "sha256:c724a884e642aa2bbad52bb0203ea40406ad742368a5f90da1b220e970384aae"},
]
[[package]]
name = "lxml"
version = "6.0.2"
@@ -998,7 +1119,7 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\" or extra == \"url-preview\""
markers = "extra == \"url-preview\" or extra == \"all\""
files = [
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388"},
{file = "lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153"},
@@ -1284,7 +1405,7 @@ description = "An LDAP3 auth provider for Synapse"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"all\" or extra == \"matrix-synapse-ldap3\""
markers = "extra == \"matrix-synapse-ldap3\" or extra == \"all\""
files = [
{file = "matrix-synapse-ldap3-0.3.0.tar.gz", hash = "sha256:8bb6517173164d4b9cc44f49de411d8cebdb2e705d5dd1ea1f38733c4a009e1d"},
{file = "matrix_synapse_ldap3-0.3.0-py3-none-any.whl", hash = "sha256:8b4d701f8702551e98cc1d8c20dbed532de5613584c08d0df22de376ba99159d"},
@@ -1413,53 +1534,54 @@ docs = ["sphinx (>=8,<9)", "sphinx-autobuild"]
[[package]]
name = "mypy"
version = "1.17.1"
version = "1.19.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972"},
{file = "mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7"},
{file = "mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df"},
{file = "mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390"},
{file = "mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94"},
{file = "mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b"},
{file = "mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58"},
{file = "mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5"},
{file = "mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd"},
{file = "mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b"},
{file = "mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5"},
{file = "mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b"},
{file = "mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb"},
{file = "mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403"},
{file = "mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056"},
{file = "mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341"},
{file = "mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb"},
{file = "mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19"},
{file = "mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7"},
{file = "mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81"},
{file = "mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6"},
{file = "mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849"},
{file = "mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14"},
{file = "mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a"},
{file = "mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733"},
{file = "mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd"},
{file = "mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0"},
{file = "mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a"},
{file = "mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91"},
{file = "mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed"},
{file = "mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9"},
{file = "mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99"},
{file = "mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8"},
{file = "mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8"},
{file = "mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259"},
{file = "mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d"},
{file = "mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9"},
{file = "mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01"},
{file = "mypy-1.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6148ede033982a8c5ca1143de34c71836a09f105068aaa8b7d5edab2b053e6c8"},
{file = "mypy-1.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a9ac09e52bb0f7fb912f5d2a783345c72441a08ef56ce3e17c1752af36340a39"},
{file = "mypy-1.19.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11f7254c15ab3f8ed68f8e8f5cbe88757848df793e31c36aaa4d4f9783fd08ab"},
{file = "mypy-1.19.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318ba74f75899b0e78b847d8c50821e4c9637c79d9a59680fc1259f29338cb3e"},
{file = "mypy-1.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf7d84f497f78b682edd407f14a7b6e1a2212b433eedb054e2081380b7395aa3"},
{file = "mypy-1.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:c3385246593ac2b97f155a0e9639be906e73534630f663747c71908dfbf26134"},
{file = "mypy-1.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106"},
{file = "mypy-1.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7"},
{file = "mypy-1.19.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7"},
{file = "mypy-1.19.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b"},
{file = "mypy-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7"},
{file = "mypy-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e"},
{file = "mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d"},
{file = "mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760"},
{file = "mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6"},
{file = "mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2"},
{file = "mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431"},
{file = "mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018"},
{file = "mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e"},
{file = "mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d"},
{file = "mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba"},
{file = "mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364"},
{file = "mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee"},
{file = "mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53"},
{file = "mypy-1.19.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d"},
{file = "mypy-1.19.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18"},
{file = "mypy-1.19.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7"},
{file = "mypy-1.19.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f"},
{file = "mypy-1.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835"},
{file = "mypy-1.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1"},
{file = "mypy-1.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0dde5cb375cb94deff0d4b548b993bec52859d1651e073d63a1386d392a95495"},
{file = "mypy-1.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1cf9c59398db1c68a134b0b5354a09a1e124523f00bacd68e553b8bd16ff3299"},
{file = "mypy-1.19.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3210d87b30e6af9c8faed61be2642fcbe60ef77cec64fa1ef810a630a4cf671c"},
{file = "mypy-1.19.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2c1101ab41d01303103ab6ef82cbbfedb81c1a060c868fa7cc013d573d37ab5"},
|
||||
{file = "mypy-1.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ea4fd21bb48f0da49e6d3b37ef6bd7e8228b9fe41bbf4d80d9364d11adbd43c"},
|
||||
{file = "mypy-1.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:16f76ff3f3fd8137aadf593cb4607d82634fca675e8211ad75c43d86033ee6c6"},
|
||||
{file = "mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9"},
|
||||
{file = "mypy-1.19.0.tar.gz", hash = "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
librt = ">=0.6.2"
|
||||
mypy_extensions = ">=1.0.0"
|
||||
pathspec = ">=0.9.0"
|
||||
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
|
||||
@@ -1486,18 +1608,18 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "mypy-zope"
|
||||
version = "1.0.13"
|
||||
version = "1.0.14"
|
||||
description = "Plugin for mypy to support zope interfaces"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "mypy_zope-1.0.13-py3-none-any.whl", hash = "sha256:13740c4cbc910cca2c143c6709e1c483c991abeeeb7b629ad6f73d8ac1edad15"},
|
||||
{file = "mypy_zope-1.0.13.tar.gz", hash = "sha256:63fb4d035ea874baf280dc69e714dcde4bd2a4a4837a0fd8d90ce91bea510f99"},
|
||||
{file = "mypy_zope-1.0.14-py3-none-any.whl", hash = "sha256:8842ade93630421dbec0c9906d6515f6e65c6407ef8b9b2eb7f4f73ae1e8a42a"},
|
||||
{file = "mypy_zope-1.0.14.tar.gz", hash = "sha256:42555ad4703f2e50c912de3ebe0c7197619c3f71864817fabc5385ecea0f8449"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
mypy = ">=1.0.0,<1.18.0"
|
||||
mypy = ">=1.0.0,<1.20.0"
|
||||
"zope.interface" = "*"
|
||||
"zope.schema" = "*"
|
||||
|
||||
@@ -1526,7 +1648,7 @@ description = "OpenTracing API for Python. See documentation at http://opentraci
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"opentracing\""
|
||||
markers = "extra == \"opentracing\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "opentracing-2.4.0.tar.gz", hash = "sha256:a173117e6ef580d55874734d1fa7ecb6f3655160b8b8974a2a1e98e5ec9c840d"},
|
||||
]
|
||||
@@ -1575,14 +1697,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "phonenumbers"
|
||||
version = "9.0.18"
|
||||
version = "9.0.19"
|
||||
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "phonenumbers-9.0.18-py2.py3-none-any.whl", hash = "sha256:d3354454ac31c97f8a08121df97a7145b8dca641f734c6f1518a41c2f60c5764"},
|
||||
{file = "phonenumbers-9.0.18.tar.gz", hash = "sha256:5537c61ba95b11b992c95e804da6e49193cc06b1224f632ade64631518a48ed1"},
|
||||
{file = "phonenumbers-9.0.19-py2.py3-none-any.whl", hash = "sha256:004abdfe2010518c2383f148515664a742e8a5d5540e07c049735c139d7e8b09"},
|
||||
{file = "phonenumbers-9.0.19.tar.gz", hash = "sha256:e0674e31554362f4d95383558f7aefde738ef2e7bf96d28a10afd3e87d63a65c"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1716,7 +1838,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"postgres\""
|
||||
markers = "extra == \"postgres\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "psycopg2-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:103e857f46bb76908768ead4e2d0ba1d1a130e7b8ed77d3ae91e8b33481813e8"},
|
||||
{file = "psycopg2-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:210daed32e18f35e3140a1ebe059ac29209dd96468f2f7559aa59f75ee82a5cb"},
|
||||
@@ -1734,7 +1856,7 @@ description = ".. image:: https://travis-ci.org/chtd/psycopg2cffi.svg?branch=mas
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
|
||||
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
|
||||
files = [
|
||||
{file = "psycopg2cffi-2.9.0.tar.gz", hash = "sha256:7e272edcd837de3a1d12b62185eb85c45a19feda9e62fa1b120c54f9e8d35c52"},
|
||||
]
|
||||
@@ -1750,7 +1872,7 @@ description = "A Simple library to enable psycopg2 compatability"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "platform_python_implementation == \"PyPy\" and (extra == \"all\" or extra == \"postgres\")"
|
||||
markers = "platform_python_implementation == \"PyPy\" and (extra == \"postgres\" or extra == \"all\")"
|
||||
files = [
|
||||
{file = "psycopg2cffi-compat-1.1.tar.gz", hash = "sha256:d25e921748475522b33d13420aad5c2831c743227dc1f1f2585e0fdb5c914e05"},
|
||||
]
|
||||
@@ -1792,6 +1914,7 @@ description = "C parser in Python"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
groups = ["main", "dev"]
|
||||
markers = "implementation_name != \"PyPy\""
|
||||
files = [
|
||||
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
|
||||
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
|
||||
@@ -2031,7 +2154,7 @@ description = "A development tool to measure, monitor and analyze the memory beh
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"cache-memory\""
|
||||
markers = "extra == \"cache-memory\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "Pympler-1.0.1-py3-none-any.whl", hash = "sha256:d260dda9ae781e1eab6ea15bacb84015849833ba5555f141d2d9b7b7473b307d"},
|
||||
{file = "Pympler-1.0.1.tar.gz", hash = "sha256:993f1a3599ca3f4fcd7160c7545ad06310c9e12f70174ae7ae8d4e25f6c5d3fa"},
|
||||
@@ -2084,6 +2207,63 @@ typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and
|
||||
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
|
||||
test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]
|
||||
|
||||
[[package]]
|
||||
name = "pyparsing"
|
||||
version = "3.2.5"
|
||||
description = "pyparsing - Classes and methods to define and execute parsing grammars"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e"},
|
||||
{file = "pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
diagrams = ["jinja2", "railroad-diagrams"]
|
||||
|
||||
[[package]]
|
||||
name = "pyrsistent"
|
||||
version = "0.20.0"
|
||||
description = "Persistent/Functional/Immutable data structures"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"},
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"},
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"},
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"},
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"},
|
||||
{file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"},
|
||||
{file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"},
|
||||
{file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"},
|
||||
{file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"},
|
||||
{file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"},
|
||||
{file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"},
|
||||
{file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pysaml2"
|
||||
version = "7.5.0"
|
||||
@@ -2091,7 +2271,7 @@ description = "Python implementation of SAML Version 2 Standard"
|
||||
optional = true
|
||||
python-versions = ">=3.9,<4.0"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"saml2\""
|
||||
markers = "extra == \"saml2\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "pysaml2-7.5.0-py3-none-any.whl", hash = "sha256:bc6627cc344476a83c757f440a73fda1369f13b6fda1b4e16bca63ffbabb5318"},
|
||||
{file = "pysaml2-7.5.0.tar.gz", hash = "sha256:f36871d4e5ee857c6b85532e942550d2cf90ea4ee943d75eb681044bbc4f54f7"},
|
||||
@@ -2116,7 +2296,7 @@ description = "Extensions to the standard Python datetime module"
|
||||
optional = true
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"saml2\""
|
||||
markers = "extra == \"saml2\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
|
||||
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
|
||||
@@ -2139,15 +2319,15 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2022.7.1"
|
||||
version = "2025.2"
|
||||
description = "World timezone definitions, modern and historical"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"saml2\""
|
||||
markers = "extra == \"saml2\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"},
|
||||
{file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"},
|
||||
{file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"},
|
||||
{file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2481,31 +2661,31 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.5"
|
||||
version = "0.14.6"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "ruff-0.14.5-py3-none-linux_armv6l.whl", hash = "sha256:f3b8248123b586de44a8018bcc9fefe31d23dda57a34e6f0e1e53bd51fd63594"},
|
||||
{file = "ruff-0.14.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f7a75236570318c7a30edd7f5491945f0169de738d945ca8784500b517163a72"},
|
||||
{file = "ruff-0.14.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6d146132d1ee115f8802356a2dc9a634dbf58184c51bff21f313e8cd1c74899a"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2380596653dcd20b057794d55681571a257a42327da8894b93bbd6111aa801f"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d1fa985a42b1f075a098fa1ab9d472b712bdb17ad87a8ec86e45e7fa6273e68"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88f0770d42b7fa02bbefddde15d235ca3aa24e2f0137388cc15b2dcbb1f7c7a7"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3676cb02b9061fee7294661071c4709fa21419ea9176087cb77e64410926eb78"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b595bedf6bc9cab647c4a173a61acf4f1ac5f2b545203ba82f30fcb10b0318fb"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f55382725ad0bdb2e8ee2babcbbfb16f124f5a59496a2f6a46f1d9d99d93e6e2"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7497d19dce23976bdaca24345ae131a1d38dcfe1b0850ad8e9e6e4fa321a6e19"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:410e781f1122d6be4f446981dd479470af86537fb0b8857f27a6e872f65a38e4"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c01be527ef4c91a6d55e53b337bfe2c0f82af024cc1a33c44792d6844e2331e1"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f66e9bb762e68d66e48550b59c74314168ebb46199886c5c5aa0b0fbcc81b151"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d93be8f1fa01022337f1f8f3bcaa7ffee2d0b03f00922c45c2207954f351f465"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c135d4b681f7401fe0e7312017e41aba9b3160861105726b76cfa14bc25aa367"},
|
||||
{file = "ruff-0.14.5-py3-none-win32.whl", hash = "sha256:c83642e6fccfb6dea8b785eb9f456800dcd6a63f362238af5fc0c83d027dd08b"},
|
||||
{file = "ruff-0.14.5-py3-none-win_amd64.whl", hash = "sha256:9d55d7af7166f143c94eae1db3312f9ea8f95a4defef1979ed516dbb38c27621"},
|
||||
{file = "ruff-0.14.5-py3-none-win_arm64.whl", hash = "sha256:4b700459d4649e2594b31f20a9de33bc7c19976d4746d8d0798ad959621d64a4"},
|
||||
{file = "ruff-0.14.5.tar.gz", hash = "sha256:8d3b48d7d8aad423d3137af7ab6c8b1e38e4de104800f0d596990f6ada1a9fc1"},
|
||||
{file = "ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3"},
|
||||
{file = "ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004"},
|
||||
{file = "ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105"},
|
||||
{file = "ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821"},
|
||||
{file = "ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55"},
|
||||
{file = "ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71"},
|
||||
{file = "ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b"},
|
||||
{file = "ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185"},
|
||||
{file = "ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85"},
|
||||
{file = "ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9"},
|
||||
{file = "ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2"},
|
||||
{file = "ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2548,7 +2728,7 @@ description = "Python client for Sentry (https://sentry.io)"
|
||||
optional = true
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"sentry\""
|
||||
markers = "extra == \"sentry\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "sentry_sdk-2.46.0-py2.py3-none-any.whl", hash = "sha256:4eeeb60198074dff8d066ea153fa6f241fef1668c10900ea53a4200abc8da9b1"},
|
||||
{file = "sentry_sdk-2.46.0.tar.gz", hash = "sha256:91821a23460725734b7741523021601593f35731808afc0bb2ba46c27b8acd91"},
|
||||
@@ -2758,7 +2938,7 @@ description = "Tornado IOLoop Backed Concurrent Futures"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"opentracing\""
|
||||
markers = "extra == \"opentracing\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "threadloop-1.0.2-py2-none-any.whl", hash = "sha256:5c90dbefab6ffbdba26afb4829d2a9df8275d13ac7dc58dccb0e279992679599"},
|
||||
{file = "threadloop-1.0.2.tar.gz", hash = "sha256:8b180aac31013de13c2ad5c834819771992d350267bddb854613ae77ef571944"},
|
||||
@@ -2774,7 +2954,7 @@ description = "Python bindings for the Apache Thrift RPC system"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"opentracing\""
|
||||
markers = "extra == \"opentracing\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "thrift-0.16.0.tar.gz", hash = "sha256:2b5b6488fcded21f9d312aa23c9ff6a0195d0f6ae26ddbd5ad9e3e25dfc14408"},
|
||||
]
|
||||
@@ -2847,7 +3027,7 @@ description = "Tornado is a Python web framework and asynchronous networking lib
|
||||
optional = true
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"opentracing\""
|
||||
markers = "extra == \"opentracing\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "tornado-6.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:f81067dad2e4443b015368b24e802d0083fecada4f0a4572fdb72fc06e54a9a6"},
|
||||
{file = "tornado-6.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9ac1cbe1db860b3cbb251e795c701c41d343f06a96049d6274e7c77559117e41"},
|
||||
@@ -2981,7 +3161,7 @@ description = "non-blocking redis client for python"
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"redis\""
|
||||
markers = "extra == \"redis\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "txredisapi-1.4.11-py3-none-any.whl", hash = "sha256:ac64d7a9342b58edca13ef267d4fa7637c1aa63f8595e066801c1e8b56b22d0b"},
|
||||
{file = "txredisapi-1.4.11.tar.gz", hash = "sha256:3eb1af99aefdefb59eb877b1dd08861efad60915e30ad5bf3d5bf6c5cedcdbc6"},
|
||||
@@ -3192,21 +3372,21 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.5.0"
|
||||
version = "2.6.0"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"},
|
||||
{file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"},
|
||||
{file = "urllib3-2.6.0-py3-none-any.whl", hash = "sha256:c90f7a39f716c572c4e3e58509581ebd83f9b59cced005b7db7ad2d22b0db99f"},
|
||||
{file = "urllib3-2.6.0.tar.gz", hash = "sha256:cb9bcef5a4b345d5da5d145dc3e30834f58e8018828cbc724d30b4cb7d4d49f1"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
|
||||
brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""]
|
||||
h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
|
||||
|
||||
[[package]]
|
||||
name = "webencodings"
|
||||
@@ -3227,7 +3407,7 @@ description = "An XML Schema validator and decoder"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"all\" or extra == \"saml2\""
|
||||
markers = "extra == \"saml2\" or extra == \"all\""
|
||||
files = [
|
||||
{file = "xmlschema-2.4.0-py3-none-any.whl", hash = "sha256:dc87be0caaa61f42649899189aab2fd8e0d567f2cf548433ba7b79278d231a4a"},
|
||||
{file = "xmlschema-2.4.0.tar.gz", hash = "sha256:d74cd0c10866ac609e1ef94a5a69b018ad16e39077bc6393408b40c6babee793"},
|
||||
@@ -3345,15 +3525,15 @@ docs = ["Sphinx", "repoze.sphinx.autointerface"]
|
||||
test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"]
|
||||
|
||||
[extras]
|
||||
all = ["authlib", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pympler", "pysaml2", "sentry-sdk", "txredisapi"]
|
||||
all = ["authlib", "defusedxml", "hiredis", "jaeger-client", "lxml", "matrix-synapse-ldap3", "opentracing", "psycopg2", "psycopg2cffi", "psycopg2cffi-compat", "pympler", "pysaml2", "pytz", "sentry-sdk", "thrift", "tornado", "txredisapi"]
|
||||
cache-memory = ["pympler"]
|
||||
jwt = ["authlib"]
|
||||
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
|
||||
oidc = ["authlib"]
|
||||
opentracing = ["jaeger-client", "opentracing"]
|
||||
opentracing = ["jaeger-client", "opentracing", "thrift", "tornado"]
|
||||
postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"]
|
||||
redis = ["hiredis", "txredisapi"]
|
||||
saml2 = ["pysaml2"]
|
||||
saml2 = ["defusedxml", "pysaml2", "pytz"]
|
||||
sentry = ["sentry-sdk"]
|
||||
systemd = ["systemd-python"]
|
||||
test = ["idna", "parameterized"]
|
||||
@@ -3362,4 +3542,4 @@ url-preview = ["lxml"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10.0,<4.0.0"
|
||||
content-hash = "98b9062f48205a3bcc99b43ae665083d360a15d4a208927fa978df9c36fd5315"
|
||||
content-hash = "1caa5072f6304122c89377420f993a54f54587f3618ccc8094ec31642264592c"
|
||||
|
||||
pyproject.toml
@@ -42,7 +42,8 @@ dependencies = [
"Twisted[tls]>=21.2.0",
"treq>=21.5.0",
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
"pyOpenSSL>=16.0.0",
# pyOpenSSL 16.2.0 fixes compatibility with OpenSSL 1.1.0.
"pyOpenSSL>=16.2.0",
"PyYAML>=5.3",
"pyasn1>=0.1.9",
"pyasn1-modules>=0.0.7",
@@ -95,6 +96,25 @@ dependencies = [

# This is used for parsing multipart responses
"python-multipart>=0.0.9",

# Transitive dependency constraints
# These dependencies aren't directly required by Synapse.
# However, in order for Synapse to build, Synapse requires a higher minimum version
# for these dependencies than the minimum specified by the direct dependency.
# We should periodically check to see if these dependencies are still necessary and
# remove any that are no longer required.
"cffi>=1.15", # via cryptography
"pynacl>=1.3", # via signedjson
"pyparsing>=2.4", # via packaging
"pyrsistent>=0.18.0", # via jsonschema
"requests>=2.16.0", # 2.16.0+ no longer vendors urllib3, avoiding Python 3.10+ incompatibility
"urllib3>=1.26.5", # via treq; 1.26.5 fixes Python 3.10+ collections.abc compatibility
# 5.2 is the current version in Debian oldstable. If we don't care to support that, then 5.4 is
# the minimum version from Ubuntu 22.04 and RHEL 9. (as of 2025-12)
# When bumping this version to 6.2 or above, refer to https://github.com/element-hq/synapse/pull/19274
# for details of Synapse improvements that may be unlocked. Particularly around the use of `|`
# syntax with zope interface types.
"zope-interface>=5.2", # via twisted
]

[project.optional-dependencies]
@@ -104,7 +124,16 @@ postgres = [
"psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
"psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
]
saml2 = ["pysaml2>=4.5.0"]
saml2 = [
"pysaml2>=4.5.0",

# Transitive dependencies from pysaml2
# These dependencies aren't directly required by Synapse.
# However, in order for Synapse to build, Synapse requires a higher minimum version
# for these dependencies than the minimum specified by the direct dependency.
"defusedxml>=0.7.1", # via pysaml2
"pytz>=2018.3", # via pysaml2
]
oidc = ["authlib>=0.15.1"]
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
@@ -112,15 +141,25 @@ oidc = ["authlib>=0.15.1"]
systemd = ["systemd-python>=231"]
url-preview = ["lxml>=4.6.3"]
sentry = ["sentry-sdk>=0.7.2"]
opentracing = ["jaeger-client>=4.2.0", "opentracing>=2.2.0"]
opentracing = [
"jaeger-client>=4.2.0",
"opentracing>=2.2.0",

# Transitive dependencies from jaeger-client
# These dependencies aren't directly required by Synapse.
# However, in order for Synapse to build, Synapse requires a higher minimum version
# for these dependencies than the minimum specified by the direct dependency.
"thrift>=0.10", # via jaeger-client
"tornado>=6.0", # via jaeger-client
]
jwt = ["authlib"]
# hiredis is not a *strict* dependency, but it makes things much faster.
# (if it is not installed, we fall back to slow code.)
redis = ["txredisapi>=1.4.7", "hiredis"]
redis = ["txredisapi>=1.4.7", "hiredis>=0.3"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
cache-memory = ["pympler>=1.0"]
# If this is updated, don't forget to update the equivalent lines in
# tool.poetry.group.dev.dependencies.
# `dependency-groups.dev` below.
test = ["parameterized>=0.9.0", "idna>=3.3"]

# The duplication here is awful.
@@ -149,12 +188,22 @@ all = [
# opentracing
"jaeger-client>=4.2.0", "opentracing>=2.2.0",
# redis
"txredisapi>=1.4.7", "hiredis",
"txredisapi>=1.4.7", "hiredis>=0.3",
# cache-memory
"pympler",
# 1.0 added support for python 3.10, our current minimum supported python version
"pympler>=1.0",
# omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job
# - systemd: this is a system-based requirement

# Transitive dependencies
# These dependencies aren't directly required by Synapse.
# However, in order for Synapse to build, Synapse requires a higher minimum version
# for these dependencies than the minimum specified by the direct dependency.
"defusedxml>=0.7.1", # via pysaml2
"pytz>=2018.3", # via pysaml2
"thrift>=0.10", # via jaeger-client
"tornado>=6.0", # via jaeger-client
]

[project.urls]
@@ -177,6 +226,85 @@ synapse_port_db = "synapse._scripts.synapse_port_db:main"
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"

[tool.poetry]
packages = [{ include = "synapse" }]

[tool.poetry.build]
# Compile our rust module when using `poetry install`. This is still required
# while using `poetry` as the build frontend. Saves the developer from needing
# to run both:
#
# $ poetry install
# $ maturin develop
script = "build_rust.py"
# Create a `setup.py` file which will call the `build` method in our build
# script.
#
# Our build script currently uses the "old" build method, where we define a
# `build` method and `setup.py` calls it. Poetry developers have mentioned that
# this will eventually be removed:
# https://github.com/matrix-org/synapse/pull/14949#issuecomment-1418001859
#
# The new build method is defined here:
# https://python-poetry.org/docs/building-extension-modules/#maturin-build-script
# but is still marked as "unstable" at the time of writing. This would also
# bump our minimum `poetry-core` version to 1.5.0.
#
# We can just drop this work-around entirely if migrating away from
# Poetry, thus there's little motivation to update the build script.
generate-setup-file = true

# Dependencies used for developing Synapse itself.
#
# Hold off on migrating these to `dev-dependencies` (PEP 735) for now until
# Poetry 2.2.0+, pip 25.1+ are more widely available.
[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.14.6"
|
||||
|
||||
# Typechecking
|
||||
lxml-stubs = ">=0.4.0"
|
||||
mypy = "*"
|
||||
mypy-zope = "*"
|
||||
types-bleach = ">=4.1.0"
|
||||
types-jsonschema = ">=3.2.0"
|
||||
types-netaddr = ">=0.8.0.6"
|
||||
types-opentracing = ">=2.4.2"
|
||||
types-Pillow = ">=8.3.4"
|
||||
types-psycopg2 = ">=2.9.9"
|
||||
types-pyOpenSSL = ">=20.0.7"
|
||||
types-PyYAML = ">=5.4.10"
|
||||
types-requests = ">=2.26.0"
|
||||
types-setuptools = ">=57.4.0"
|
||||
|
||||
# Dependencies which are exclusively required by unit test code. This is
|
||||
# NOT a list of all modules that are necessary to run the unit tests.
|
||||
# Tests assume that all optional dependencies are installed.
|
||||
#
|
||||
# If this is updated, don't forget to update the equivalent lines in
|
||||
# project.optional-dependencies.test.
|
||||
parameterized = ">=0.9.0"
|
||||
idna = ">=3.3"
|
||||
|
||||
# The following are used by the release script
|
||||
click = ">=8.1.3"
|
||||
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
|
||||
GitPython = ">=3.1.20"
|
||||
markdown-it-py = ">=3.0.0"
|
||||
pygithub = ">=1.59"
|
||||
# The following are executed as commands by the release script.
|
||||
twine = "*"
|
||||
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
|
||||
towncrier = ">=18.6.0rc1"
|
||||
|
||||
# Used for checking the Poetry lockfile
|
||||
tomli = ">=1.2.3"
|
||||
|
||||
# Used for checking the schema delta files
|
||||
sqlglot = ">=28.0.0"
|
||||
|
||||
[tool.towncrier]
|
||||
package = "synapse"
|
||||
@@ -260,15 +388,10 @@ select = [
|
||||
"G",
|
||||
# pyupgrade
|
||||
"UP006",
|
||||
"UP007",
|
||||
"UP045",
|
||||
]
|
||||
extend-safe-fixes = [
|
||||
# pyupgrade rules compatible with Python >= 3.9
|
||||
"UP006",
|
||||
"UP007",
|
||||
# pyupgrade rules compatible with Python >= 3.10
|
||||
"UP045",
|
||||
# Allow ruff to automatically fix trailing spaces within a multi-line string/comment.
|
||||
"W293"
|
||||
]
|
||||
@@ -291,88 +414,29 @@ line-ending = "auto"
|
||||
[tool.maturin]
|
||||
manifest-path = "rust/Cargo.toml"
|
||||
module-name = "synapse.synapse_rust"
|
||||
|
||||
[tool.poetry]
|
||||
packages = [
|
||||
{ include = "synapse" },
|
||||
python-source = "."
|
||||
sdist-include = [
|
||||
"AUTHORS.rst",
|
||||
"book.toml",
|
||||
"changelog.d",
|
||||
"CHANGES.md",
|
||||
"CONTRIBUTING.md",
|
||||
"demo",
|
||||
"docs",
|
||||
"INSTALL.md",
|
||||
"mypy.ini",
|
||||
"scripts-dev",
|
||||
"synmark",
|
||||
"sytest-blacklist",
|
||||
"tests",
|
||||
"UPGRADE.rst",
|
||||
"Cargo.toml",
|
||||
"Cargo.lock",
|
||||
"rust/Cargo.toml",
|
||||
"rust/build.rs",
|
||||
"rust/src/**",
|
||||
]
|
||||
include = [
|
||||
{ path = "AUTHORS.rst", format = "sdist" },
|
||||
{ path = "book.toml", format = "sdist" },
|
||||
{ path = "changelog.d", format = "sdist" },
|
||||
{ path = "CHANGES.md", format = "sdist" },
|
||||
{ path = "CONTRIBUTING.md", format = "sdist" },
|
||||
{ path = "demo", format = "sdist" },
|
||||
{ path = "docs", format = "sdist" },
|
||||
{ path = "INSTALL.md", format = "sdist" },
|
||||
{ path = "mypy.ini", format = "sdist" },
|
||||
{ path = "scripts-dev", format = "sdist" },
|
||||
{ path = "synmark", format="sdist" },
|
||||
{ path = "sytest-blacklist", format = "sdist" },
|
||||
{ path = "tests", format = "sdist" },
|
||||
{ path = "UPGRADE.rst", format = "sdist" },
|
||||
{ path = "Cargo.toml", format = "sdist" },
|
||||
{ path = "Cargo.lock", format = "sdist" },
|
||||
{ path = "rust/Cargo.toml", format = "sdist" },
|
||||
{ path = "rust/build.rs", format = "sdist" },
|
||||
{ path = "rust/src/**", format = "sdist" },
|
||||
]
|
||||
exclude = [
|
||||
{ path = "synapse/*.so", format = "sdist"}
|
||||
]
|
||||
|
||||
[tool.poetry.build]
|
||||
script = "build_rust.py"
|
||||
generate-setup-file = true
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
# We pin development dependencies in poetry.lock so that our tests don't start
|
||||
# failing on new releases. Keeping lower bounds loose here means that dependabot
|
||||
# can bump versions without having to update the content-hash in the lockfile.
|
||||
# This helps prevent merge conflicts when running a batch of dependabot updates.
ruff = "0.14.5"
|
||||
|
||||
# Typechecking
|
||||
lxml-stubs = ">=0.4.0"
|
||||
mypy = "*"
|
||||
mypy-zope = "*"
|
||||
types-bleach = ">=4.1.0"
|
||||
types-jsonschema = ">=3.2.0"
|
||||
types-netaddr = ">=0.8.0.6"
|
||||
types-opentracing = ">=2.4.2"
|
||||
types-Pillow = ">=8.3.4"
|
||||
types-psycopg2 = ">=2.9.9"
|
||||
types-pyOpenSSL = ">=20.0.7"
|
||||
types-PyYAML = ">=5.4.10"
|
||||
types-requests = ">=2.26.0"
|
||||
types-setuptools = ">=57.4.0"
|
||||
|
||||
# Dependencies which are exclusively required by unit test code. This is
|
||||
# NOT a list of all modules that are necessary to run the unit tests.
|
||||
# Tests assume that all optional dependencies are installed.
|
||||
#
|
||||
# If this is updated, don't forget to update the equivalent lines in
|
||||
# project.optional-dependencies.test.
|
||||
parameterized = ">=0.9.0"
|
||||
idna = ">=3.3"
|
||||
|
||||
# The following are used by the release script
|
||||
click = ">=8.1.3"
|
||||
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
|
||||
GitPython = ">=3.1.20"
|
||||
markdown-it-py = ">=3.0.0"
|
||||
pygithub = ">=1.59"
|
||||
# The following are executed as commands by the release script.
|
||||
twine = "*"
|
||||
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
|
||||
towncrier = ">=18.6.0rc1"
|
||||
|
||||
# Used for checking the Poetry lockfile
|
||||
tomli = ">=1.2.3"
|
||||
|
||||
# Used for checking the schema delta files
|
||||
sqlglot = ">=28.0.0"
|
||||
|
||||
sdist-exclude = ["synapse/*.so"]
|
||||
|
||||
[build-system]
|
||||
# The upper bounds here are defensive, intended to prevent situations like
|
||||
@@ -381,8 +445,8 @@ sqlglot = ">=28.0.0"
|
||||
# runtime errors caused by build system changes.
|
||||
# We are happy to raise these upper bounds upon request,
|
||||
# provided we check that it's safe to do so (i.e. that CI passes).
|
||||
requires = ["poetry-core>=2.0.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["maturin>=1.0,<2.0"]
|
||||
build-backend = "maturin"
|
||||
|
||||
|
||||
[tool.cibuildwheel]
|
||||
@@ -407,9 +471,6 @@ skip = "cp3??t-* *i686* *macosx*"
|
||||
enable = "pypy"
|
||||
|
||||
# We need a rust compiler.
|
||||
#
|
||||
# We temporarily pin Rust to 1.82.0 to work around
|
||||
# https://github.com/element-hq/synapse/issues/17988
|
||||
before-all = "sh .ci/before_build_wheel.sh"
|
||||
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
|
||||
|
||||
@@ -419,8 +480,3 @@ environment= { PATH = "$PATH:$HOME/.cargo/bin" }
|
||||
before-build = "rm -rf {project}/build"
|
||||
build-frontend = "build"
|
||||
test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print(sum_as_string(1, 2))'"
|
||||
|
||||
|
||||
[tool.cibuildwheel.linux]
|
||||
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
|
||||
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py -w {dest_dir} {wheel}"
|
||||
|
||||
@@ -14,7 +14,6 @@ import sqlglot.expressions
|
||||
|
||||
SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
|
||||
|
||||
|
||||
# The base branch we want to check against. We use the main development branch
|
||||
# on the assumption that this is what we are developing against.
DEVELOP_BRANCH = "develop"

@@ -145,7 +145,7 @@ def request(
print("Requesting %s" % dest, file=sys.stderr)

s = requests.Session()
s.mount("matrix-federation://", MatrixConnectionAdapter())
s.mount("matrix-federation://", MatrixConnectionAdapter(verify_tls=verify_tls))

headers: dict[str, str] = {
"Authorization": authorization_headers[0],
@@ -267,6 +267,17 @@ def read_args_from_config(args: argparse.Namespace) -> None:


class MatrixConnectionAdapter(HTTPAdapter):
"""
A Matrix federation-aware HTTP Adapter.
"""

verify_tls: bool
"""whether to verify the remote server's TLS certificate."""

def __init__(self, verify_tls: bool = True) -> None:
self.verify_tls = verify_tls
super().__init__()

def send(
self,
request: PreparedRequest,
@@ -280,7 +291,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
assert isinstance(request.url, str)
parsed = urlparse.urlsplit(request.url)
server_name = parsed.netloc
well_known = self._get_well_known(parsed.netloc)
well_known = self._get_well_known(parsed.netloc, verify_tls=self.verify_tls)

if well_known:
server_name = well_known
@@ -318,6 +329,21 @@ class MatrixConnectionAdapter(HTTPAdapter):
print(
f"Connecting to {host}:{port} with SNI {ssl_server_name}", file=sys.stderr
)

if proxies:
scheme = parsed.scheme
if isinstance(scheme, bytes):
scheme = scheme.decode("utf-8")

proxy_for_scheme = proxies.get(scheme)
if proxy_for_scheme:
return self.proxy_manager_for(proxy_for_scheme).connection_from_host(
host,
port=port,
scheme="https",
pool_kwargs={"server_hostname": ssl_server_name},
)

return self.poolmanager.connection_from_host(
host,
port=port,
@@ -368,7 +394,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
return server_name, 8448, server_name

@staticmethod
def _get_well_known(server_name: str) -> str | None:
def _get_well_known(server_name: str, verify_tls: bool = True) -> str | None:
if ":" in server_name:
# explicit port, or ipv6 literal. Either way, no .well-known
return None
@@ -379,7 +405,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
print(f"fetching {uri}", file=sys.stderr)

try:
resp = requests.get(uri)
resp = requests.get(uri, verify=verify_tls)
if resp.status_code != 200:
print("%s gave %i" % (uri, resp.status_code), file=sys.stderr)
return None
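For illustration, this is how the new flag is meant to be driven end to end: a minimal sketch, not part of the diff, which assumes MatrixConnectionAdapter from this script is in scope and uses a placeholder server name.

import requests

# Mount the adapter so matrix-federation:// URLs get .well-known resolution
# and SNI handling; verify_tls=False now also disables certificate checks
# for the .well-known lookup, not just the federation request itself.
s = requests.Session()
s.mount("matrix-federation://", MatrixConnectionAdapter(verify_tls=False))
resp = s.get("matrix-federation://example.org/_matrix/federation/v1/version")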
@@ -32,7 +32,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, Match
from typing import Any

import attr
import click
@@ -968,10 +968,6 @@ def generate_and_write_changelog(
new_changes = new_changes.replace(
"No significant changes.", f"No significant changes since {current_version}."
)
new_changes += build_dependabot_changelog(
repo,
current_version,
)

# Prepend changes to changelog
with open("CHANGES.md", "r+") as f:
@@ -986,49 +982,5 @@
os.remove(filename)


def build_dependabot_changelog(repo: Repo, current_version: version.Version) -> str:
"""Summarise dependabot commits between `current_version` and `release_branch`.

Returns an empty string if there have been no such commits; otherwise outputs a
third-level markdown header followed by an unordered list."""
last_release_commit = repo.tag("v" + str(current_version)).commit
rev_spec = f"{last_release_commit.hexsha}.."
commits = list(git.objects.Commit.iter_items(repo, rev_spec))
messages = []
for commit in reversed(commits):
if commit.author.name == "dependabot[bot]":
message: str | bytes = commit.message
if isinstance(message, bytes):
message = message.decode("utf-8")
messages.append(message.split("\n", maxsplit=1)[0])

if not messages:
print(f"No dependabot commits in range {rev_spec}", file=sys.stderr)
return ""

messages.sort()

def replacer(match: Match[str]) -> str:
desc = match.group(1)
number = match.group(2)
return f"* {desc}. ([\\#{number}](https://github.com/element-hq/synapse/issues/{number}))"

for i, message in enumerate(messages):
messages[i] = re.sub(r"(.*) \(#(\d+)\)$", replacer, message)
messages.insert(0, "### Updates to locked dependencies\n")
# Add an extra blank line to the bottom of the section
messages.append("")
return "\n".join(messages)


@cli.command()
@click.argument("since")
def test_dependabot_changelog(since: str) -> None:
"""Test building the dependabot changelog.

Summarises all dependabot commits between the SINCE tag and the current git HEAD."""
print(build_dependabot_changelog(git.Repo("."), version.Version(since)))


if __name__ == "__main__":
cli()
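For reference, the replacer/re.sub pair deleted above rewrote a dependabot commit subject into a changelog bullet. A standalone sketch of that behaviour (the commit subject and issue number below are made up):

import re

def replacer(match: "re.Match[str]") -> str:
    desc = match.group(1)
    number = match.group(2)
    return f"* {desc}. ([\\#{number}](https://github.com/element-hq/synapse/issues/{number}))"

subject = "Bump serde from 1.0.0 to 1.0.1 (#12345)"  # hypothetical dependabot subject
print(re.sub(r"(.*) \(#(\d+)\)$", replacer, subject))
# * Bump serde from 1.0.0 to 1.0.1. ([\#12345](https://github.com/element-hq/synapse/issues/12345))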
@@ -29,6 +29,19 @@ from typing import Final
# the max size of a (canonical-json-encoded) event
MAX_PDU_SIZE = 65536

# The maximum allowed size of an HTTP request.
# Other than media uploads, the biggest request we expect to see is a fully-loaded
# /federation/v1/send request.
#
# The main thing in such a request is up to 50 PDUs, and up to 100 EDUs. PDUs are
# limited to 65536 bytes (possibly slightly more if the sender didn't use canonical
# json encoding); there is no specced limit to EDUs (see
# https://github.com/matrix-org/matrix-doc/issues/3121).
#
# in short, we somewhat arbitrarily limit requests to 200 * 64K (about 12.5M)
#
MAX_REQUEST_SIZE = 200 * MAX_PDU_SIZE

# Max/min size of ints in canonical JSON
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
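A quick, standalone sanity check of the arithmetic behind the new constant:

MAX_PDU_SIZE = 65536  # as defined above: 64 KiB per canonical-json-encoded PDU
MAX_REQUEST_SIZE = 200 * MAX_PDU_SIZE
assert MAX_REQUEST_SIZE == 13_107_200            # bytes
assert MAX_REQUEST_SIZE / (1024 * 1024) == 12.5  # MiB, matching "about 12.5M"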
@@ -856,6 +856,12 @@ class HttpResponseException(CodeMessageException):
return ProxiedRequestError(self.code, errmsg, errcode, j)


class HomeServerNotSetupException(Exception):
"""
Raised when an operation is attempted on the HomeServer before setup() has been called.
"""


class ShadowBanError(Exception):
"""
Raised when a shadow-banned user attempts to perform an action.

@@ -54,7 +54,9 @@ def check_bind_error(
"""
if address == "0.0.0.0" and "::" in bind_addresses:
logger.warning(
"Failed to listen on 0.0.0.0, continuing because listening on [::]"
"Failed to listen on 0.0.0.0, continuing because listening on [::]. Original exception: %s: %s",
type(e).__name__,
str(e),
)
else:
raise e
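The expanded warning now records why the 0.0.0.0 bind failed. A self-contained sketch of what the new log call produces (the OSError here is illustrative):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("synapse.app")

e = OSError(98, "Address already in use")  # hypothetical bind failure (EADDRINUSE)
logger.warning(
    "Failed to listen on 0.0.0.0, continuing because listening on [::]. Original exception: %s: %s",
    type(e).__name__,
    str(e),
)
# -> ... Original exception: OSError: [Errno 98] Address already in use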
@@ -36,12 +36,13 @@ from typing import (
Awaitable,
Callable,
NoReturn,
Optional,
cast,
)
from wsgiref.simple_server import WSGIServer

from cryptography.utils import CryptographyDeprecationWarning
from typing_extensions import ParamSpec
from typing_extensions import ParamSpec, assert_never

import twisted
from twisted.internet import defer, error, reactor as _reactor
@@ -59,12 +60,17 @@ from twisted.python.threadpool import ThreadPool
from twisted.web.resource import Resource

import synapse.util.caches
from synapse.api.constants import MAX_PDU_SIZE
from synapse.api.constants import MAX_REQUEST_SIZE
from synapse.app import check_bind_error
from synapse.config import ConfigError
from synapse.config._base import format_config_error
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import ListenerConfig, ManholeConfig, TCPListenerConfig
from synapse.config.server import (
ListenerConfig,
ManholeConfig,
TCPListenerConfig,
UnixListenerConfig,
)
from synapse.crypto import context_factory
from synapse.events.auto_accept_invites import InviteAutoAccepter
from synapse.events.presence_router import load_legacy_presence_router
@@ -413,13 +419,44 @@ def listen_unix(
]


class ListenerException(RuntimeError):
"""
An exception raised when we fail to listen with the given `ListenerConfig`.

Attributes:
listener_config: The listener config that caused the exception.
"""

def __init__(
self,
listener_config: ListenerConfig,
):
listener_human_name = ""
port = ""
if isinstance(listener_config, TCPListenerConfig):
listener_human_name = "TCP port"
port = str(listener_config.port)
elif isinstance(listener_config, UnixListenerConfig):
listener_human_name = "unix socket"
port = listener_config.path
else:
assert_never(listener_config)

super().__init__(
"Failed to listen on %s (%s) with the given listener config: %s"
% (listener_human_name, port, listener_config)
)

self.listener_config = listener_config


def listen_http(
hs: "HomeServer",
listener_config: ListenerConfig,
root_resource: Resource,
version_string: str,
max_request_body_size: int,
context_factory: IOpenSSLContextFactory | None,
context_factory: Optional[IOpenSSLContextFactory],
reactor: ISynapseReactor = reactor,
) -> list[Port]:
"""
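Since ListenerException chains the original failure via raise ... from, a caller can log both the offending config and the root cause. A self-contained sketch of that pattern (the stand-in class and error below are hypothetical):

class ListenerError(RuntimeError):  # stand-in for the ListenerException above
    pass

try:
    try:
        raise OSError(98, "Address already in use")  # e.g. the port is taken
    except Exception as exc:
        raise ListenerError("Failed to listen on TCP port (8008)") from exc
except ListenerError as e:
    print(e, "| caused by:", e.__cause__)  # the OSError survives as __cause__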
@@ -447,39 +484,55 @@ def listen_http(
hs=hs,
)

if isinstance(listener_config, TCPListenerConfig):
if listener_config.is_tls():
# refresh_certificate should have been called before this.
assert context_factory is not None
ports = listen_ssl(
listener_config.bind_addresses,
listener_config.port,
site,
context_factory,
reactor=reactor,
try:
if isinstance(listener_config, TCPListenerConfig):
if listener_config.is_tls():
# refresh_certificate should have been called before this.
assert context_factory is not None
ports = listen_ssl(
listener_config.bind_addresses,
listener_config.port,
site,
context_factory,
reactor=reactor,
)
logger.info(
"Synapse now listening on TCP port %d (TLS)", listener_config.port
)
else:
ports = listen_tcp(
listener_config.bind_addresses,
listener_config.port,
site,
reactor=reactor,
)
logger.info(
"Synapse now listening on TCP port %d", listener_config.port
)

elif isinstance(listener_config, UnixListenerConfig):
ports = listen_unix(
listener_config.path, listener_config.mode, site, reactor=reactor
)
# getHost() returns a UNIXAddress which contains an instance variable of 'name'
# encoded as a byte string. Decode as utf-8 so pretty.
logger.info(
"Synapse now listening on TCP port %d (TLS)", listener_config.port
"Synapse now listening on Unix Socket at: %s",
ports[0].getHost().name.decode("utf-8"),
)
else:
ports = listen_tcp(
listener_config.bind_addresses,
listener_config.port,
site,
reactor=reactor,
)
logger.info("Synapse now listening on TCP port %d", listener_config.port)

else:
ports = listen_unix(
listener_config.path, listener_config.mode, site, reactor=reactor
)
# getHost() returns a UNIXAddress which contains an instance variable of 'name'
# encoded as a byte string. Decode as utf-8 so pretty.
logger.info(
"Synapse now listening on Unix Socket at: %s",
ports[0].getHost().name.decode("utf-8"),
)
assert_never(listener_config)
except Exception as exc:
# The Twisted interface says that "Users should not call this function
# themselves!" but this appears to be the correct/only way to handle proper cleanup
# of the site when things go wrong. In the normal case, a `Port` is created
# which we can call `Port.stopListening()` on to do the same thing (but no
# `Port` is created when an error occurs).
#
# We use `site.stopFactory()` instead of `site.doStop()` as the latter assumes
# that `site.doStart()` was called (which won't be the case if an error occurs).
site.stopFactory()
raise ListenerException(listener_config) from exc

return ports

@@ -843,17 +896,8 @@ def sdnotify(state: bytes) -> None:
def max_request_body_size(config: HomeServerConfig) -> int:
"""Get a suitable maximum size for incoming HTTP requests"""

# Other than media uploads, the biggest request we expect to see is a fully-loaded
# /federation/v1/send request.
#
# The main thing in such a request is up to 50 PDUs, and up to 100 EDUs. PDUs are
# limited to 65536 bytes (possibly slightly more if the sender didn't use canonical
# json encoding); there is no specced limit to EDUs (see
# https://github.com/matrix-org/matrix-doc/issues/3121).
#
# in short, we somewhat arbitrarily limit requests to 200 * 64K (about 12.5M)
#
max_request_size = 200 * MAX_PDU_SIZE
# Baseline default for any request that isn't configured in the homeserver config
max_request_size = MAX_REQUEST_SIZE

# if we have a media repo enabled, we may need to allow larger uploads than that
if config.media.can_load_media_repo:
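The rest of max_request_body_size is not shown in this hunk; under the new constant it presumably reduces to taking the larger of the baseline and whatever the media repo needs. A hedged sketch (max_upload_size is an assumption here, not confirmed by the diff):

def max_request_body_size_sketch(
    can_load_media_repo: bool, max_upload_size: int
) -> int:
    # Baseline for any request not otherwise configured.
    max_request_size = 200 * 65536  # MAX_REQUEST_SIZE
    if can_load_media_repo:
        # Allow uploads larger than the federation baseline if configured.
        max_request_size = max(max_request_size, max_upload_size)
    return max_request_size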
@@ -24,7 +24,7 @@ import logging
import os
import sys
import tempfile
from typing import Mapping, Sequence
from typing import Mapping, Optional, Sequence

from twisted.internet import defer, task

@@ -291,7 +291,7 @@ def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Nam

def create_homeserver(
config: HomeServerConfig,
reactor: ISynapseReactor | None = None,
reactor: Optional[ISynapseReactor] = None,
) -> AdminCmdServer:
"""
Create a homeserver instance for the Synapse admin command process.

@@ -21,6 +21,7 @@
#
import logging
import sys
from typing import Optional

from twisted.web.resource import Resource

@@ -335,7 +336,7 @@ def load_config(argv_options: list[str]) -> HomeServerConfig:

def create_homeserver(
config: HomeServerConfig,
reactor: ISynapseReactor | None = None,
reactor: Optional[ISynapseReactor] = None,
) -> GenericWorkerServer:
"""
Create a homeserver instance for the Synapse worker process.

@@ -22,7 +22,7 @@
import logging
import os
import sys
from typing import Iterable
from typing import Iterable, Optional

from twisted.internet.tcp import Port
from twisted.web.resource import EncodingResourceWrapper, Resource
@@ -350,7 +350,7 @@ def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig:

def create_homeserver(
config: HomeServerConfig,
reactor: ISynapseReactor | None = None,
reactor: Optional[ISynapseReactor] = None,
) -> SynapseHomeServer:
"""
Create a homeserver instance for the Synapse main process.

@@ -21,6 +21,7 @@

import abc
import logging
from contextlib import ExitStack
from typing import TYPE_CHECKING, Callable, Iterable

import attr
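The recurring change in these hunks swaps PEP 604 `X | None` annotations for `typing.Optional[X]`. A minimal sketch of why the two spellings differ in practice: they are equivalent to a type checker, but `X | None` only evaluates at runtime on Python 3.10+ (or under deferred annotations), so `Optional[X]` is the safer spelling for older interpreters.

from typing import Optional

class Reactor:  # stand-in for e.g. ISynapseReactor
    ...

# Equivalent for a type checker; the `Reactor | None` spelling would
# additionally require Python >= 3.10 to evaluate at runtime.
def create(reactor: Optional[Reactor] = None) -> None:
    ...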
@@ -150,57 +151,81 @@ class Keyring:
"""

def __init__(
self, hs: "HomeServer", key_fetchers: "Iterable[KeyFetcher] | None" = None
self,
hs: "HomeServer",
test_only_key_fetchers: "list[KeyFetcher] | None" = None,
):
self.server_name = hs.hostname
"""
Args:
hs: The HomeServer instance
test_only_key_fetchers: Dependency injection for tests only. If provided,
these key fetchers will be used instead of the default ones.
"""
# Clean-up to avoid partial initialization leaving behind references.
with ExitStack() as exit:
self.server_name = hs.hostname

if key_fetchers is None:
# Always fetch keys from the database.
mutable_key_fetchers: list[KeyFetcher] = [StoreKeyFetcher(hs)]
# Fetch keys from configured trusted key servers, if any exist.
key_servers = hs.config.key.key_servers
if key_servers:
mutable_key_fetchers.append(PerspectivesKeyFetcher(hs))
# Finally, fetch keys from the origin server directly.
mutable_key_fetchers.append(ServerKeyFetcher(hs))
self._key_fetchers: list[KeyFetcher] = []
if test_only_key_fetchers is None:
# Always fetch keys from the database.
store_key_fetcher = StoreKeyFetcher(hs)
exit.callback(store_key_fetcher.shutdown)
self._key_fetchers.append(store_key_fetcher)

self._key_fetchers: Iterable[KeyFetcher] = tuple(mutable_key_fetchers)
else:
self._key_fetchers = key_fetchers
# Fetch keys from configured trusted key servers, if any exist.
key_servers = hs.config.key.key_servers
if key_servers:
perspectives_key_fetcher = PerspectivesKeyFetcher(hs)
exit.callback(perspectives_key_fetcher.shutdown)
self._key_fetchers.append(perspectives_key_fetcher)

self._fetch_keys_queue: BatchingQueue[
_FetchKeyRequest, dict[str, dict[str, FetchKeyResult]]
] = BatchingQueue(
name="keyring_server",
hs=hs,
clock=hs.get_clock(),
# The method called to fetch each key
process_batch_callback=self._inner_fetch_key_requests,
)
# Finally, fetch keys from the origin server directly.
server_key_fetcher = ServerKeyFetcher(hs)
exit.callback(server_key_fetcher.shutdown)
self._key_fetchers.append(server_key_fetcher)
else:
self._key_fetchers = test_only_key_fetchers

self._is_mine_server_name = hs.is_mine_server_name
self._fetch_keys_queue: BatchingQueue[
_FetchKeyRequest, dict[str, dict[str, FetchKeyResult]]
] = BatchingQueue(
name="keyring_server",
hs=hs,
clock=hs.get_clock(),
# The method called to fetch each key
process_batch_callback=self._inner_fetch_key_requests,
)
exit.callback(self._fetch_keys_queue.shutdown)

# build a FetchKeyResult for each of our own keys, to shortcircuit the
# fetcher.
self._local_verify_keys: dict[str, FetchKeyResult] = {}
for key_id, key in hs.config.key.old_signing_keys.items():
self._local_verify_keys[key_id] = FetchKeyResult(
verify_key=key, valid_until_ts=key.expired
self._is_mine_server_name = hs.is_mine_server_name

# build a FetchKeyResult for each of our own keys, to shortcircuit the
# fetcher.
self._local_verify_keys: dict[str, FetchKeyResult] = {}
for key_id, key in hs.config.key.old_signing_keys.items():
self._local_verify_keys[key_id] = FetchKeyResult(
verify_key=key, valid_until_ts=key.expired
)

vk = get_verify_key(hs.signing_key)
self._local_verify_keys[f"{vk.alg}:{vk.version}"] = FetchKeyResult(
verify_key=vk,
valid_until_ts=2**63, # fake future timestamp
)

vk = get_verify_key(hs.signing_key)
self._local_verify_keys[f"{vk.alg}:{vk.version}"] = FetchKeyResult(
verify_key=vk,
valid_until_ts=2**63, # fake future timestamp
)
# We reached the end of the block which means everything was successful, so
# no exit handlers are needed (remove them all).
exit.pop_all()

def shutdown(self) -> None:
"""
Prepares the KeyRing for garbage collection by shutting down its queues.
"""
self._fetch_keys_queue.shutdown()

for key_fetcher in self._key_fetchers:
key_fetcher.shutdown()
self._key_fetchers.clear()

async def verify_json_for_server(
self,
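A minimal runnable sketch of the `ExitStack` idiom used in this hunk (hypothetical `Resource`/`Component` names): each partially-constructed resource registers an undo callback, and `pop_all()` disarms them once construction has fully succeeded.

from contextlib import ExitStack

class Resource:
    def shutdown(self) -> None:
        print("cleaned up")

class Component:
    def __init__(self, fail: bool = False) -> None:
        with ExitStack() as undo:
            self.queue = Resource()
            # Runs only if a later construction step raises.
            undo.callback(self.queue.shutdown)

            if fail:
                raise RuntimeError("second resource failed to initialise")
            self.fetcher = Resource()
            undo.callback(self.fetcher.shutdown)

            # Success: transfer the callbacks away so none of them fire.
            undo.pop_all()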
@@ -521,9 +546,21 @@ class StoreKeyFetcher(KeyFetcher):
"""KeyFetcher impl which fetches keys from our data store"""

def __init__(self, hs: "HomeServer"):
super().__init__(hs)
# Clean-up to avoid partial initialization leaving behind references.
with ExitStack() as exit:
super().__init__(hs)
# `KeyFetcher` keeps a reference to `hs` which we need to clean up if
# something goes wrong so we can cleanly shutdown the homeserver.
exit.callback(super().shutdown)

self.store = hs.get_datastores().main
# An error can be raised here if someone tried to create a `StoreKeyFetcher`
# before the homeserver is fully set up (`HomeServerNotSetupException:
# HomeServer.setup must be called before getting datastores`).
self.store = hs.get_datastores().main

# We reached the end of the block which means everything was successful, so
# no exit handlers are needed (remove them all).
exit.pop_all()

async def _fetch_keys(
self, keys_to_fetch: list[_FetchKeyRequest]
@@ -543,9 +580,21 @@ class StoreKeyFetcher(KeyFetcher):

class BaseV2KeyFetcher(KeyFetcher):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
# Clean-up to avoid partial initialization leaving behind references.
with ExitStack() as exit:
super().__init__(hs)
# `KeyFetcher` keeps a reference to `hs` which we need to clean up if
# something goes wrong so we can cleanly shutdown the homeserver.
exit.callback(super().shutdown)

self.store = hs.get_datastores().main
# An error can be raised here if someone tried to create a `StoreKeyFetcher`
# before the homeserver is fully set up (`HomeServerNotSetupException:
# HomeServer.setup must be called before getting datastores`).
self.store = hs.get_datastores().main

# We reached the end of the block which means everything was successful, so
# no exit handlers are needed (remove them all).
exit.pop_all()

async def process_v2_response(
self, from_server: str, response_json: JsonDict, time_added_ms: int

@@ -13,7 +13,7 @@
#

import logging
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Optional

from twisted.internet.interfaces import IDelayedCall

@@ -74,7 +74,7 @@ class DelayedEventsHandler:
cfg=self._config.ratelimiting.rc_delayed_event_mgmt,
)

self._next_delayed_event_call: IDelayedCall | None = None
self._next_delayed_event_call: Optional[IDelayedCall] = None

# The current position in the current_state_delta stream
self._event_pos: int | None = None

@@ -22,7 +22,7 @@
import logging
import random
from http import HTTPStatus
from typing import TYPE_CHECKING, Any, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Mapping, Optional, Sequence

from canonicaljson import encode_canonical_json

@@ -111,7 +111,7 @@ class MessageHandler:

# The scheduled call to self._expire_event. None if no call is currently
# scheduled.
self._scheduled_expiry: IDelayedCall | None = None
self._scheduled_expiry: Optional[IDelayedCall] = None

if not hs.config.worker.worker_app:
self.hs.run_as_background_process(

@@ -874,7 +874,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
if target_id == self._server_notices_mxid:
raise SynapseError(HTTPStatus.FORBIDDEN, "Cannot invite this user")

block_invite_result = None
block_invite_result: tuple[Codes, dict] | None = None

if (
self._server_notices_mxid is not None

@@ -17,6 +17,7 @@ import logging
from itertools import chain
from typing import TYPE_CHECKING, AbstractSet, Mapping

import attr
from prometheus_client import Histogram
from typing_extensions import assert_never

@@ -62,6 +63,7 @@ from synapse.types.handlers.sliding_sync import (
HaveSentRoomFlag,
MutablePerConnectionState,
PerConnectionState,
RoomLazyMembershipChanges,
RoomSyncConfig,
SlidingSyncConfig,
SlidingSyncResult,
@@ -106,7 +108,7 @@ class SlidingSyncHandler:
self.rooms_to_exclude_globally = hs.config.server.rooms_to_exclude_from_sync
self.is_mine_id = hs.is_mine_id

self.connection_store = SlidingSyncConnectionStore(self.store)
self.connection_store = SlidingSyncConnectionStore(self.clock, self.store)
self.extensions = SlidingSyncExtensionHandler(hs)
self.room_lists = SlidingSyncRoomLists(hs)

@@ -981,14 +983,15 @@ class SlidingSyncHandler:
#
# Calculate the `StateFilter` based on the `required_state` for the room
required_state_filter = StateFilter.none()
# The requested `required_state_map` with the lazy membership expanded and
# `$ME` replaced with the user's ID. This allows us to see what membership we've
# sent down to the client in the next request.
#
# Make a copy so we can modify it. Still need to be careful to make a copy of
# the state key sets if we want to add/remove from them. We could make a deep
# copy but this saves us some work.
expanded_required_state_map = dict(room_sync_config.required_state_map)

# Keep track of which users' state we may need to fetch. We split this
# into explicit users and lazy loaded users.
explicit_user_state = set()
lazy_load_user_ids = set()

# Whether lazy-loading of room members is enabled.
lazy_load_room_members = False

if room_membership_for_user_at_to_token.membership not in (
Membership.INVITE,
Membership.KNOCK,
@@ -1036,7 +1039,6 @@ class SlidingSyncHandler:
else:
required_state_types: list[tuple[str, str | None]] = []
num_wild_state_keys = 0
lazy_load_room_members = False
num_others = 0
for (
state_type,
@@ -1068,43 +1070,60 @@ class SlidingSyncHandler:
timeline_event.state_key
)

# The client needs to know the membership of everyone in
# the timeline we're returning.
lazy_load_user_ids.update(timeline_membership)

# Update the required state filter so we pick up the new
# membership
for user_id in timeline_membership:
required_state_types.append(
(EventTypes.Member, user_id)
)
if limited or initial:
# If the timeline is limited, we only need to
# return the membership changes for people in
# the timeline.
for user_id in timeline_membership:
required_state_types.append(
(EventTypes.Member, user_id)
)
else:
# For non-limited timelines we always return all
# membership changes. This is so that clients
# who have fetched the full membership list
# already can continue to maintain it for
# non-limited syncs.
#
# This assumes that for non-limited syncs there
# won't be many membership changes that wouldn't
# have been included already (this can only
# happen if membership state was rolled back due
# to state resolution anyway).
#
# `None` is a wildcard in the `StateFilter`
required_state_types.append((EventTypes.Member, None))

# Add an explicit entry for each user in the timeline
#
# Make a new set or copy of the state key set so we can
# modify it without affecting the original
# `required_state_map`
expanded_required_state_map[EventTypes.Member] = (
expanded_required_state_map.get(
EventTypes.Member, set()
# Record the extra members we're returning.
lazy_load_user_ids.update(
state_key
for event_type, state_key in room_state_delta_id_map
if event_type == EventTypes.Member
)
| timeline_membership
)
elif state_key == StateValues.ME:
else:
num_others += 1
required_state_types.append((state_type, user.to_string()))

# Replace `$ME` with the user's ID so we can deduplicate
# when someone requests the same state with `$ME` or with
# their user ID.
#
# Make a new set or copy of the state key set so we can
# modify it without affecting the original
# `required_state_map`
expanded_required_state_map[EventTypes.Member] = (
expanded_required_state_map.get(
EventTypes.Member, set()
)
| {user.to_string()}
normalized_state_key = state_key
if state_key == StateValues.ME:
normalized_state_key = user.to_string()

if state_type == EventTypes.Member:
# Also track explicitly requested member state for
# lazy membership tracking.
explicit_user_state.add(normalized_state_key)

required_state_types.append(
(state_type, normalized_state_key)
)
else:
num_others += 1
required_state_types.append((state_type, state_key))

set_tag(
SynapseTags.FUNC_ARG_PREFIX
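A condensed sketch of the limited/non-limited branch above (hypothetical standalone function; "m.room.member" is the literal value of `EventTypes.Member`): limited or initial syncs request only the members seen in the timeline, while non-limited syncs fall back to a `(type, None)` wildcard so clients that already hold the full member list can keep it current.

def member_state_types(
    timeline_senders: set[str], limited: bool, initial: bool
) -> list[tuple[str, str | None]]:
    if limited or initial:
        # Only the membership of people who appear in the timeline.
        return [("m.room.member", user_id) for user_id in timeline_senders]
    # `None` acts as a state-key wildcard: return all membership changes.
    return [("m.room.member", None)]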
@@ -1122,6 +1141,10 @@ class SlidingSyncHandler:

required_state_filter = StateFilter.from_types(required_state_types)

# Remove any explicitly requested user state from the lazy-loaded set,
# as we track them separately.
lazy_load_user_ids -= explicit_user_state

# We need this base set of info for the response so let's just fetch it along
# with the `required_state` for the room
hero_room_state = [
@@ -1149,6 +1172,22 @@ class SlidingSyncHandler:
# We can return all of the state that was requested if this was the first
# time we've sent the room down this connection.
room_state: StateMap[EventBase] = {}

# Includes the state for the heroes if we need them (may contain other
# state as well).
hero_membership_state: StateMap[EventBase] = {}

# By default, we mark all `lazy_load_user_ids` as being sent down
# for the first time in this sync. We later check if we sent any of them
# down previously and update `returned_user_id_to_last_seen_ts_map` if
# we have.
returned_user_id_to_last_seen_ts_map = {}
if lazy_load_room_members:
returned_user_id_to_last_seen_ts_map = dict.fromkeys(lazy_load_user_ids)
new_connection_state.room_lazy_membership[room_id] = RoomLazyMembershipChanges(
returned_user_id_to_last_seen_ts_map=returned_user_id_to_last_seen_ts_map
)

if initial:
room_state = await self.get_current_state_at(
room_id=room_id,
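The `dict.fromkeys` bookkeeping in the hunk above, in isolation: every lazy-loaded user starts out mapped to `None`, the marker for "first sent down in this sync"; real timestamps are filled in later for users we turn out to have sent before.

lazy_load_user_ids = {"@alice:example.org", "@bob:example.org"}
returned_user_id_to_last_seen_ts_map = dict.fromkeys(lazy_load_user_ids)
assert returned_user_id_to_last_seen_ts_map == {
    "@alice:example.org": None,
    "@bob:example.org": None,
}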
@@ -1156,28 +1195,97 @@ class SlidingSyncHandler:
state_filter=state_filter,
to_token=to_token,
)

# The `room_state` includes the hero membership state if needed.
# We'll later filter this down so we don't need to do so here.
hero_membership_state = room_state
else:
assert from_token is not None
assert from_bound is not None

if prev_room_sync_config is not None:
# Define `all_required_user_state` as all user state we want, which
# is the explicitly requested members, any needed for lazy
# loading, and users whose membership has changed.
all_required_user_state = explicit_user_state | lazy_load_user_ids
for state_type, state_key in room_state_delta_id_map:
if state_type == EventTypes.Member:
all_required_user_state.add(state_key)

# We need to know what user state we previously sent down the
# connection so we can determine what has changed.
#
# We need to fetch all users whose memberships we may want
# to send down this sync. This includes (and matches
# `all_required_user_state`):
# 1. Explicitly requested user state
# 2. Lazy loaded members, i.e. users who appear in the
# timeline.
# 3. The users whose membership has changed in the room, i.e.
# in the state deltas.
#
# This is to correctly handle the cases where a user was
# previously sent down as a lazy loaded member:
# - and is now explicitly requested (so shouldn't be sent down
# again); or
# - their membership has changed (so we need to invalidate
# their entry in the lazy loaded table if we don't send the
# change down).
if all_required_user_state:
previously_returned_user_to_last_seen = (
await self.store.get_sliding_sync_connection_lazy_members(
connection_position=from_token.connection_position,
room_id=room_id,
user_ids=all_required_user_state,
)
)

# Update the room lazy membership changes to track which
# lazy loaded members were needed for this sync. This is so
# that we can correctly track the last time we sent down
# users' membership (and so can evict old membership state
# from the DB tables).
returned_user_id_to_last_seen_ts_map.update(
(user_id, timestamp)
for user_id, timestamp in previously_returned_user_to_last_seen.items()
if user_id in lazy_load_user_ids
)
else:
previously_returned_user_to_last_seen = {}

# Check if there are any changes to the required state config
# that we need to handle.
changed_required_state_map, added_state_filter = (
_required_state_changes(
user.to_string(),
prev_required_state_map=prev_room_sync_config.required_state_map,
request_required_state_map=expanded_required_state_map,
state_deltas=room_state_delta_id_map,
)
changes_return = _required_state_changes(
user.to_string(),
prev_required_state_map=prev_room_sync_config.required_state_map,
request_required_state_map=room_sync_config.required_state_map,
previously_returned_lazy_user_ids=previously_returned_user_to_last_seen.keys(),
request_lazy_load_user_ids=lazy_load_user_ids,
state_deltas=room_state_delta_id_map,
)
changed_required_state_map = changes_return.changed_required_state_map

if added_state_filter:
new_connection_state.room_lazy_membership[
room_id
].invalidated_user_ids = changes_return.lazy_members_invalidated

# Add any previously returned explicit memberships to the lazy
# loaded table. This happens when a client requested explicit
# members and then converted them to lazy loading.
for user_id in changes_return.extra_users_to_add_to_lazy_cache:
# We don't know the right timestamp to use here, as we don't
# know the last time we would have sent the membership down.
# So we don't overwrite it if we have a timestamp already,
# and fallback to `None` (which means now) if we don't.
returned_user_id_to_last_seen_ts_map.setdefault(user_id, None)

if changes_return.added_state_filter:
# Some state entries got added, so we pull out the current
# state for them. If we don't do this we'd only send down new deltas.
state_ids = await self.get_current_state_ids_at(
room_id=room_id,
room_membership_for_user_at_to_token=room_membership_for_user_at_to_token,
state_filter=added_state_filter,
state_filter=changes_return.added_state_filter,
to_token=to_token,
)
room_state_delta_id_map.update(state_ids)
@@ -1189,6 +1297,7 @@ class SlidingSyncHandler:

# If the membership changed and we have to get heroes, get the remaining
# heroes from the state
hero_membership_state = {}
if hero_user_ids:
hero_membership_state = await self.get_current_state_at(
room_id=room_id,
@@ -1196,7 +1305,6 @@ class SlidingSyncHandler:
state_filter=StateFilter.from_types(hero_room_state),
to_token=to_token,
)
room_state.update(hero_membership_state)

required_room_state: StateMap[EventBase] = {}
if required_state_filter != StateFilter.none():
@@ -1219,7 +1327,7 @@ class SlidingSyncHandler:
# Assemble heroes: extract the info from the state we just fetched
heroes: list[SlidingSyncResult.RoomResult.StrippedHero] = []
for hero_user_id in hero_user_ids:
member_event = room_state.get((EventTypes.Member, hero_user_id))
member_event = hero_membership_state.get((EventTypes.Member, hero_user_id))
if member_event is not None:
heroes.append(
SlidingSyncResult.RoomResult.StrippedHero(
@@ -1281,7 +1389,7 @@ class SlidingSyncHandler:
bump_stamp = 0

room_sync_required_state_map_to_persist: Mapping[str, AbstractSet[str]] = (
expanded_required_state_map
room_sync_config.required_state_map
)
if changed_required_state_map:
room_sync_required_state_map_to_persist = changed_required_state_map
@@ -1471,13 +1579,37 @@ class SlidingSyncHandler:
return None


@attr.s(auto_attribs=True)
class _RequiredStateChangesReturn:
"""Return type for _required_state_changes."""

changed_required_state_map: Mapping[str, AbstractSet[str]] | None
"""The updated required state map to store in the room config, or None if
there is no change."""

added_state_filter: StateFilter
"""The state filter to use to fetch any additional current state that needs
to be returned to the client."""

extra_users_to_add_to_lazy_cache: AbstractSet[str] = frozenset()
"""The set of user IDs we should add to the lazy members cache that we had
previously returned. Handles the case where a user was previously sent down
explicitly but is now being lazy loaded."""

lazy_members_invalidated: AbstractSet[str] = frozenset()
"""The set of user IDs whose membership has changed but we didn't send down,
so we need to invalidate them from the cache."""


def _required_state_changes(
user_id: str,
*,
prev_required_state_map: Mapping[str, AbstractSet[str]],
request_required_state_map: Mapping[str, AbstractSet[str]],
previously_returned_lazy_user_ids: AbstractSet[str],
request_lazy_load_user_ids: AbstractSet[str],
state_deltas: StateMap[str],
) -> tuple[Mapping[str, AbstractSet[str]] | None, StateFilter]:
) -> _RequiredStateChangesReturn:
"""Calculates the changes between the required state room config from the
previous requests compared with the current request.
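The shape of this refactor in miniature: returning an attrs class instead of a bare tuple lets new fields (here the lazy-membership bookkeeping) be added with defaults without touching every caller. A sketch with hypothetical names:

import attr

@attr.s(auto_attribs=True)
class ChangesReturn:
    changed_map: dict | None
    invalidated: frozenset = frozenset()  # later additions get safe defaults

def compute_changes() -> ChangesReturn:
    return ChangesReturn(changed_map=None)

result = compute_changes()
# Callers read named fields rather than unpacking positionally:
if result.changed_map is None and not result.invalidated:
    print("nothing to persist")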
@@ -1491,14 +1623,62 @@ def _required_state_changes(
added, removed and then added again to the required state. In that case we
only want to re-send that entry down sync if it has changed.

Returns:
A 2-tuple of updated required state config (or None if there is no update)
and the state filter to use to fetch extra current state that we need to
return.
Args:
user_id: The user ID of the user making the request.
prev_required_state_map: The required state map from the previous
request.
request_required_state_map: The required state map from the current
request.
previously_returned_lazy_user_ids: The set of user IDs whose membership
we have previously returned to the client due to lazy loading. This
is filtered to only include users who have either sent events in the
`timeline`, `required_state` or whose membership changed.
request_lazy_load_user_ids: The set of user IDs whose lazy-loaded
membership is required for this request.
state_deltas: The state deltas in the room in the request token range,
considering user membership. See `get_current_state_deltas_for_room`
for more details.
"""

# First we find any lazy members that have been invalidated due to state
# changes that we are not sending down.
lazy_members_invalidated = set()
for event_type, state_key in state_deltas:
if event_type != EventTypes.Member:
continue

if state_key in request_lazy_load_user_ids:
# Because it's part of the `request_lazy_load_user_ids`, we're going to
# send this member change down.
continue

if state_key not in previously_returned_lazy_user_ids:
# We've not previously returned this member so nothing to
# invalidate.
continue

lazy_members_invalidated.add(state_key)

if prev_required_state_map == request_required_state_map:
# There has been no change. Return immediately.
return None, StateFilter.none()
# There has been no change in state, just need to check lazy members.
newly_returned_lazy_members = (
request_lazy_load_user_ids - previously_returned_lazy_user_ids
)
if newly_returned_lazy_members:
# There are some new lazy members we need to fetch.
added_types: list[tuple[str, str | None]] = []
for new_user_id in newly_returned_lazy_members:
added_types.append((EventTypes.Member, new_user_id))

added_state_filter = StateFilter.from_types(added_types)
else:
added_state_filter = StateFilter.none()

return _RequiredStateChangesReturn(
changed_required_state_map=None,
added_state_filter=added_state_filter,
lazy_members_invalidated=lazy_members_invalidated,
)

prev_wildcard = prev_required_state_map.get(StateValues.WILDCARD, set())
request_wildcard = request_required_state_map.get(StateValues.WILDCARD, set())
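The early-exit bookkeeping above reduces to plain set algebra; a small worked example:

request_lazy_load_user_ids = {"@a:hs", "@b:hs", "@c:hs"}
previously_returned_lazy_user_ids = {"@a:hs"}

# Lazy members whose membership must be fetched for the first time.
newly_returned = request_lazy_load_user_ids - previously_returned_lazy_user_ids
assert newly_returned == {"@b:hs", "@c:hs"}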
@@ -1508,17 +1688,29 @@ def _required_state_changes(
# already fetching everything, we don't have to fetch anything now that they've
# narrowed.
if StateValues.WILDCARD in prev_wildcard:
return request_required_state_map, StateFilter.none()
return _RequiredStateChangesReturn(
changed_required_state_map=request_required_state_map,
added_state_filter=StateFilter.none(),
lazy_members_invalidated=lazy_members_invalidated,
)

# If an event type wildcard has been added or removed we don't try and do
# anything fancy, and instead always update the effective room required
# state config to match the request.
if request_wildcard - prev_wildcard:
# Some keys were added, so we need to fetch everything
return request_required_state_map, StateFilter.all()
return _RequiredStateChangesReturn(
changed_required_state_map=request_required_state_map,
added_state_filter=StateFilter.all(),
lazy_members_invalidated=lazy_members_invalidated,
)
if prev_wildcard - request_wildcard:
# Keys were only removed, so we don't have to fetch everything.
return request_required_state_map, StateFilter.none()
return _RequiredStateChangesReturn(
changed_required_state_map=request_required_state_map,
added_state_filter=StateFilter.none(),
lazy_members_invalidated=lazy_members_invalidated,
)

# Contains updates to the required state map compared with the previous room
# config. This has the same format as `RoomSyncConfig.required_state`
@@ -1550,6 +1742,17 @@ def _required_state_changes(
# Nothing *added*, so we skip. Removals happen below.
continue

# Handle the special case of adding `$LAZY` membership, where we want to
# always record the change to be lazy loading, as we immediately start
# using the lazy loading tables so there is no point *not* recording the
# change to lazy load in the effective room config.
if event_type == EventTypes.Member:
old_state_key_lazy = StateValues.LAZY in old_state_keys
request_state_key_lazy = StateValues.LAZY in request_state_keys
if not old_state_key_lazy and request_state_key_lazy:
changes[event_type] = request_state_keys
continue

# We only remove state keys from the effective state if they've been
# removed from the request *and* the state has changed. This ensures
# that if a client removes and then re-adds a state key, we only send
@@ -1620,9 +1823,31 @@ def _required_state_changes(
# LAZY values should also be ignored for event types that are
# not membership.
pass
elif event_type == EventTypes.Member:
if state_key not in previously_returned_lazy_user_ids:
# Only add *explicit* members we haven't previously sent
# down.
added.append((event_type, state_key))
else:
added.append((event_type, state_key))

previously_required_state_members = set(
prev_required_state_map.get(EventTypes.Member, ())
)
if StateValues.ME in previously_required_state_members:
previously_required_state_members.add(user_id)

# We also need to pull out any lazy members that are now required but
# haven't previously been returned.
for required_user_id in (
request_lazy_load_user_ids
# Remove previously returned users
- previously_returned_lazy_user_ids
# Exclude previously explicitly requested members.
- previously_required_state_members
):
added.append((EventTypes.Member, required_user_id))

added_state_filter = StateFilter.from_types(added)

# Figure out what changes we need to apply to the effective required state
@@ -1663,13 +1888,25 @@ def _required_state_changes(
changes[event_type] = request_state_keys
continue

# When handling $LAZY membership, we want to either a) not update the
# state or b) update it to match the request. This is to avoid churn of
# the effective required state for rooms (we deduplicate required state
# between rooms), and because we can store the previously returned
# explicit memberships with the lazy loaded memberships.
if event_type == EventTypes.Member:
old_state_key_lazy = StateValues.LAZY in old_state_keys
request_state_key_lazy = StateValues.LAZY in request_state_keys
has_lazy = old_state_key_lazy or request_state_key_lazy

# If a "$LAZY" has been added or removed we always update to match
# the request.
if old_state_key_lazy != request_state_key_lazy:
# If a "$LAZY" has been added or removed we always update the effective room
# required state config to match the request.
changes[event_type] = request_state_keys
continue

# Or if we have lazy membership and there are invalidated
# explicit memberships.
if has_lazy and invalidated_state_keys:
changes[event_type] = request_state_keys
continue

@@ -1684,6 +1921,28 @@ def _required_state_changes(
if invalidated_state_keys:
changes[event_type] = old_state_keys - invalidated_state_keys

# Check for any explicit membership changes that were removed that we can
# add to the lazy members previously returned. This is so that we don't
# return a user due to lazy loading if they were previously returned as an
# explicit membership.
users_to_add_to_lazy_cache: set[str] = set()

membership_changes = changes.get(EventTypes.Member, set())
if membership_changes and StateValues.LAZY in request_state_keys:
for state_key in prev_required_state_map.get(EventTypes.Member, set()):
if state_key == StateValues.WILDCARD or state_key == StateValues.LAZY:
# Ignore non-user IDs.
continue

if state_key == StateValues.ME:
# Normalize to proper user ID
state_key = user_id

# We remember the user if they haven't been invalidated
if (EventTypes.Member, state_key) not in state_deltas:
users_to_add_to_lazy_cache.add(state_key)

new_required_state_map = None
if changes:
# Update the required state config based on the changes.
new_required_state_map = dict(prev_required_state_map)
@@ -1694,6 +1953,9 @@ def _required_state_changes(
# Remove entries with empty state keys.
new_required_state_map.pop(event_type, None)

return new_required_state_map, added_state_filter
else:
return None, added_state_filter
return _RequiredStateChangesReturn(
changed_required_state_map=new_required_state_map,
added_state_filter=added_state_filter,
lazy_members_invalidated=lazy_members_invalidated,
extra_users_to_add_to_lazy_cache=users_to_add_to_lazy_cache,
)

@@ -13,7 +13,6 @@
#

import logging
from typing import TYPE_CHECKING

import attr

@@ -25,9 +24,7 @@ from synapse.types.handlers.sliding_sync import (
PerConnectionState,
SlidingSyncConfig,
)

if TYPE_CHECKING:
pass
from synapse.util.clock import Clock

logger = logging.getLogger(__name__)

@@ -61,7 +58,8 @@ class SlidingSyncConnectionStore:
to mapping of room ID to `HaveSentRoom`.
"""

store: "DataStore"
clock: Clock
store: DataStore

async def get_and_clear_connection_positions(
self,
@@ -101,7 +99,7 @@ class SlidingSyncConnectionStore:
If there are no changes to the state this may return the same token as
the existing per-connection state.
"""
if not new_connection_state.has_updates():
if not new_connection_state.has_updates(self.clock):
if from_token is not None:
return from_token.connection_position
else:

@@ -21,7 +21,7 @@

import logging
from http import HTTPStatus
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Optional

from twisted.internet.interfaces import IDelayedCall

@@ -125,7 +125,7 @@ class UserDirectoryHandler(StateDeltasHandler):
# Guard to ensure we only have one process for refreshing remote profiles
self._is_refreshing_remote_profiles = False
# Handle to cancel the `call_later` of `kick_off_remote_profile_refresh_process`
self._refresh_remote_profiles_call_later: IDelayedCall | None = None
self._refresh_remote_profiles_call_later: Optional[IDelayedCall] = None

# Guard to ensure we only have one process for refreshing remote profiles
# for the given servers.

@@ -28,6 +28,7 @@ from typing import (
BinaryIO,
Callable,
Mapping,
Optional,
Protocol,
)

@@ -313,7 +314,7 @@ class BlocklistingAgentWrapper(Agent):
method: bytes,
uri: bytes,
headers: Headers | None = None,
bodyProducer: IBodyProducer | None = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> defer.Deferred:
h = urllib.parse.urlparse(uri.decode("ascii"))

@@ -1033,7 +1034,7 @@ class BodyExceededMaxSize(Exception):
class _DiscardBodyWithMaxSizeProtocol(protocol.Protocol):
"""A protocol which immediately errors upon receiving data."""

transport: ITCPTransport | None = None
transport: Optional[ITCPTransport] = None

def __init__(self, deferred: defer.Deferred):
self.deferred = deferred
@@ -1075,7 +1076,7 @@ class _MultipartParserProtocol(protocol.Protocol):
Protocol to read and parse a MSC3916 multipart/mixed response
"""

transport: ITCPTransport | None = None
transport: Optional[ITCPTransport] = None

def __init__(
self,
@@ -1188,7 +1189,7 @@ class _MultipartParserProtocol(protocol.Protocol):
class _ReadBodyWithMaxSizeProtocol(protocol.Protocol):
"""A protocol which reads body to a stream, erroring if the body exceeds a maximum size."""

transport: ITCPTransport | None = None
transport: Optional[ITCPTransport] = None

def __init__(
self, stream: ByteWriteable, deferred: defer.Deferred, max_size: int | None

@@ -19,7 +19,7 @@
#
import logging
import urllib.parse
from typing import Any, Generator
from typing import Any, Generator, Optional
from urllib.request import ( # type: ignore[attr-defined]
proxy_bypass_environment,
)
@@ -173,7 +173,7 @@ class MatrixFederationAgent:
method: bytes,
uri: bytes,
headers: Headers | None = None,
bodyProducer: IBodyProducer | None = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> Generator[defer.Deferred, Any, IResponse]:
"""
Args:

@@ -33,6 +33,7 @@ from typing import (
Callable,
Generic,
Literal,
Optional,
TextIO,
TypeVar,
cast,
@@ -691,7 +692,7 @@ class MatrixFederationHttpClient:
destination_bytes, method_bytes, url_to_sign_bytes, json
)
data = encode_canonical_json(json)
producer: IBodyProducer | None = QuieterFileBodyProducer(
producer: Optional[IBodyProducer] = QuieterFileBodyProducer(
BytesIO(data), cooperator=self._cooperator
)
else:

@@ -22,7 +22,7 @@
import json
import logging
import urllib.parse
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any, Optional, cast

from twisted.internet import protocol
from twisted.internet.interfaces import ITCPTransport
@@ -237,7 +237,7 @@ class _ProxyResponseBody(protocol.Protocol):
request.
"""

transport: ITCPTransport | None = None
transport: Optional[ITCPTransport] = None

def __init__(self, request: "SynapseRequest") -> None:
self._request = request

@@ -21,7 +21,7 @@
import logging
import random
import re
from typing import Any, Collection, Sequence, cast
from typing import Any, Collection, Optional, Sequence, cast
from urllib.parse import urlparse
from urllib.request import ( # type: ignore[attr-defined]
proxy_bypass_environment,
@@ -119,8 +119,8 @@ class ProxyAgent(_AgentBase):
self,
*,
reactor: IReactorCore,
proxy_reactor: IReactorCore | None = None,
contextFactory: IPolicyForHTTPS | None = None,
proxy_reactor: Optional[IReactorCore] = None,
contextFactory: Optional[IPolicyForHTTPS] = None,
connectTimeout: float | None = None,
bindAddress: bytes | None = None,
pool: HTTPConnectionPool | None = None,
@@ -175,7 +175,7 @@ class ProxyAgent(_AgentBase):
self._policy_for_https = contextFactory
self._reactor = cast(IReactorTime, reactor)

self._federation_proxy_endpoint: IStreamClientEndpoint | None = None
self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
self._federation_proxy_credentials: ProxyCredentials | None = None
if federation_proxy_locations:
assert federation_proxy_credentials is not None, (
@@ -221,7 +221,7 @@ class ProxyAgent(_AgentBase):
method: bytes,
uri: bytes,
headers: Headers | None = None,
bodyProducer: IBodyProducer | None = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> "defer.Deferred[IResponse]":
"""
Issue a request to the server indicated by the given uri.
@@ -365,11 +365,11 @@ class ProxyAgent(_AgentBase):
def http_proxy_endpoint(
proxy: bytes | None,
reactor: IReactorCore,
tls_options_factory: IPolicyForHTTPS | None,
tls_options_factory: Optional[IPolicyForHTTPS],
timeout: float = 30,
bindAddress: bytes | str | tuple[bytes | str, int] | None = None,
attemptDelay: float | None = None,
) -> tuple[IStreamClientEndpoint | None, ProxyCredentials | None]:
) -> tuple[Optional[IStreamClientEndpoint], ProxyCredentials | None]:
"""Parses an http proxy setting and returns an endpoint for the proxy

Args:

@@ -20,6 +20,7 @@
#

import logging
from typing import Optional

from zope.interface import implementer

@@ -149,7 +150,7 @@ class ReplicationAgent(_AgentBase):
method: bytes,
uri: bytes,
headers: Headers | None = None,
bodyProducer: IBodyProducer | None = None,
bodyProducer: Optional[IBodyProducer] = None,
) -> "defer.Deferred[IResponse]":
"""
Issue a request to the server indicated by the given uri.

@@ -19,6 +19,7 @@
#
#
import contextlib
import json
import logging
import time
from http import HTTPStatus
@@ -36,6 +37,7 @@ from twisted.web.http import HTTPChannel
from twisted.web.resource import IResource, Resource
from twisted.web.server import Request

from synapse.api.errors import Codes, SynapseError
from synapse.config.server import ListenerConfig
from synapse.http import get_request_user_agent, redact_uri
from synapse.http.proxy import ProxySite
@@ -59,6 +61,10 @@ logger = logging.getLogger(__name__)
_next_request_seq = 0


class ContentLengthError(SynapseError):
"""Raised when content-length validation fails."""


class SynapseRequest(Request):
"""Class which encapsulates an HTTP request to synapse.

@@ -144,36 +150,150 @@ class SynapseRequest(Request):
self.synapse_site.site_tag,
)

def _respond_with_error(self, synapse_error: SynapseError) -> None:
"""Send an error response and close the connection."""
self.setResponseCode(synapse_error.code)
error_response_bytes = json.dumps(synapse_error.error_dict(None)).encode()

self.responseHeaders.setRawHeaders(b"Content-Type", [b"application/json"])
self.responseHeaders.setRawHeaders(
b"Content-Length", [f"{len(error_response_bytes)}"]
)
self.write(error_response_bytes)
self.loseConnection()

def _get_content_length_from_headers(self) -> int | None:
"""Attempts to obtain the `Content-Length` value from the request's headers.

Returns:
Content length as `int` if present. Otherwise `None`.

Raises:
ContentLengthError: if multiple `Content-Length` headers are present or the
value is not an `int`.
"""
content_length_headers = self.requestHeaders.getRawHeaders(b"Content-Length")
if content_length_headers is None:
return None

# If there are multiple `Content-Length` headers return an error.
# We don't want to even try to pick the right one if there are multiple
# as we could run into problems similar to request smuggling vulnerabilities
# which rely on the mismatch of how different systems interpret information.
if len(content_length_headers) != 1:
raise ContentLengthError(
HTTPStatus.BAD_REQUEST,
"Multiple Content-Length headers received",
Codes.UNKNOWN,
)

try:
return int(content_length_headers[0])
except (ValueError, TypeError):
raise ContentLengthError(
HTTPStatus.BAD_REQUEST,
"Content-Length header value is not a valid integer",
Codes.UNKNOWN,
)

def _validate_content_length(self) -> None:
"""Validate Content-Length header and actual content size.

Raises:
ContentLengthError: If validation fails.
"""
# we should have a `content` by now.
assert self.content, "_validate_content_length() called before gotLength()"
content_length = self._get_content_length_from_headers()

if content_length is None:
return

actual_content_length = self.content.tell()

if content_length > self._max_request_body_size:
logger.info(
"Rejecting request from %s because Content-Length %d exceeds maximum size %d: %s %s",
self.client,
content_length,
self._max_request_body_size,
self.get_method(),
self.get_redacted_uri(),
)
raise ContentLengthError(
HTTPStatus.REQUEST_ENTITY_TOO_LARGE,
f"Request content is too large (>{self._max_request_body_size})",
Codes.TOO_LARGE,
)

if content_length != actual_content_length:
comparison = (
"smaller" if content_length < actual_content_length else "larger"
)
logger.info(
"Rejecting request from %s because Content-Length %d is %s than the request content size %d: %s %s",
self.client,
content_length,
comparison,
actual_content_length,
self.get_method(),
self.get_redacted_uri(),
)
raise ContentLengthError(
HTTPStatus.BAD_REQUEST,
f"Rejecting request as the Content-Length header value {content_length} "
f"is {comparison} than the actual request content size {actual_content_length}",
Codes.UNKNOWN,
)

# Twisted machinery: this method is called by the Channel once the full request has
# been received, to dispatch the request to a resource.
#
# We're patching Twisted to bail/abort early when we see someone trying to upload
# `multipart/form-data` so we can avoid Twisted parsing the entire request body into
# in-memory (specific problem of this specific `Content-Type`). This protects us
# from an attacker uploading something bigger than the available RAM and crashing
# the server with a `MemoryError`, or carefully block just enough resources to cause
# all other requests to fail.
#
# FIXME: This can be removed once we Twisted releases a fix and we update to a
# version that is patched
def requestReceived(self, command: bytes, path: bytes, version: bytes) -> None:
# In the case of a Content-Length header being present, and its value being too
# large, throw a proper error to make debugging issues due to overly large requests much
# easier. Currently we handle such cases in `handleContentChunk` and abort the
# connection without providing a proper HTTP response.
#
# Attempting to write an HTTP response from within `handleContentChunk` does not
# work, so the code here has been added to at least provide a response in the
# case of the Content-Length header being present.
self.method, self.uri = command, path
self.clientproto = version

try:
self._validate_content_length()
except ContentLengthError as e:
self._respond_with_error(e)
return

# We're patching Twisted to bail/abort early when we see someone trying to upload
# `multipart/form-data` so we can avoid Twisted parsing the entire request body into
# in-memory (specific problem of this specific `Content-Type`). This protects us
# from an attacker uploading something bigger than the available RAM and crashing
# the server with a `MemoryError`, or carefully block just enough resources to cause
# all other requests to fail.
#
# FIXME: This can be removed once Twisted releases a fix and we update to a
# version that is patched
# See: https://github.com/element-hq/synapse/security/advisories/GHSA-rfq8-j7rh-8hf2
if command == b"POST":
ctype = self.requestHeaders.getRawHeaders(b"content-type")
if ctype and b"multipart/form-data" in ctype[0]:
self.method, self.uri = command, path
self.clientproto = version
logger.warning(
"Aborting connection from %s because `content-type: multipart/form-data` is unsupported: %s %s",
self.client,
self.get_method(),
self.get_redacted_uri(),
)

self.code = HTTPStatus.UNSUPPORTED_MEDIA_TYPE.value
self.code_message = bytes(
HTTPStatus.UNSUPPORTED_MEDIA_TYPE.phrase, "ascii"
)
self.responseHeaders.setRawHeaders(b"content-length", [b"0"])

logger.warning(
"Aborting connection from %s because `content-type: multipart/form-data` is unsupported: %s %s",
self.client,
command,
path,
)
# FIXME: Return a better error response here similar to the
# `error_response_json` returned in other code paths here.
self.responseHeaders.setRawHeaders(b"Content-Length", [b"0"])
self.write(b"")
self.loseConnection()
return
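A standalone sketch of the validation rules introduced above (hypothetical helper, not the `SynapseRequest` implementation): exactly one parseable `Content-Length` header, no larger than the configured maximum, and consistent with the body actually received.

from http import HTTPStatus

def check_content_length(headers: list[bytes] | None, body: bytes, max_size: int) -> None:
    if headers is None:
        return  # no Content-Length header: nothing to validate
    if len(headers) != 1:
        # Refuse to guess between conflicting headers (request-smuggling style).
        raise ValueError(HTTPStatus.BAD_REQUEST)
    length = int(headers[0])  # raises ValueError for a non-integer value
    if length > max_size:
        raise ValueError(HTTPStatus.REQUEST_ENTITY_TOO_LARGE)
    if length != len(body):
        raise ValueError(HTTPStatus.BAD_REQUEST)  # header/body mismatch

check_content_length([b"5"], b"hello", max_size=1024)  # passes silently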
@@ -815,6 +935,13 @@ class SynapseSite(ProxySite):
protocol.transport.loseConnection()
self.connections.clear()

# Replace the resource tree with an empty resource to break circular references
# to the resource tree which holds a bunch of homeserver references. This is
# important if we try to call `hs.shutdown()` after `start` fails. For some
# reason, this doesn't seem to be necessary in the normal case where `start`
# succeeds and we call `hs.shutdown()` later.
self.resource = Resource()

def log(self, request: SynapseRequest) -> None: # type: ignore[override]
pass


@@ -25,7 +25,7 @@ import traceback
from collections import deque
from ipaddress import IPv4Address, IPv6Address, ip_address
from math import floor
from typing import Callable
from typing import Callable, Optional

import attr
from zope.interface import implementer
@@ -113,7 +113,7 @@ class RemoteHandler(logging.Handler):
port: int,
maximum_buffer: int = 1000,
level: int = logging.NOTSET,
_reactor: IReactorTime | None = None,
_reactor: Optional[IReactorTime] = None,
):
super().__init__(level=level)
self.host = host

@@ -89,7 +89,7 @@ class TerseJsonFormatter(JsonFormatter):
"log": record.getMessage(),
"namespace": record.name,
"level": record.levelname,
"time": round(record.created, 2),
"time": record.created,
}

return self._format(record, event)
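The effect of dropping `round(record.created, 2)` in the hunk above: rounding to two decimal places put timestamps into 10ms buckets, so records created close together could serialise identically; full precision preserves their ordering. A tiny demonstration:

ts_a, ts_b = 1700000000.1234, 1700000000.1243  # ~0.9ms apart

assert round(ts_a, 2) == round(ts_b, 2)  # both collapse into the same 10ms bucket
assert ts_a != ts_b                      # unrounded values stay distinguishable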
@@ -3,7 +3,7 @@ import time
|
||||
from logging import Handler, LogRecord
|
||||
from logging.handlers import MemoryHandler
|
||||
from threading import Thread
|
||||
from typing import cast
|
||||
from typing import Optional, cast
|
||||
|
||||
from twisted.internet.interfaces import IReactorCore
|
||||
|
||||
@@ -26,7 +26,7 @@ class PeriodicallyFlushingMemoryHandler(MemoryHandler):
|
||||
target: Handler | None = None,
|
||||
flushOnClose: bool = True,
|
||||
period: float = 5.0,
|
||||
reactor: IReactorCore | None = None,
|
||||
reactor: Optional[IReactorCore] = None,
|
||||
) -> None:
|
||||
"""
|
||||
period: the period between automatic flushes
|
||||
|
||||
@@ -30,6 +30,7 @@ from typing import (
|
||||
Awaitable,
|
||||
BinaryIO,
|
||||
Generator,
|
||||
Optional,
|
||||
)
|
||||
|
||||
import attr
|
||||
@@ -705,7 +706,7 @@ class ThreadedFileSender:
|
||||
|
||||
self.file: BinaryIO | None = None
|
||||
self.deferred: "Deferred[None]" = Deferred()
|
||||
self.consumer: interfaces.IConsumer | None = None
|
||||
self.consumer: Optional[IConsumer] = None
|
||||
|
||||
# Signals if the thread should keep reading/sending data. Set means
|
||||
# continue, clear means pause.

@@ -439,7 +439,11 @@ class MediaRepository:
        return await self.store.get_cached_remote_media(origin, media_id)

    async def get_local_media_info(
-       self, request: SynapseRequest, media_id: str, max_timeout_ms: int
+       self,
+       request: SynapseRequest,
+       media_id: str,
+       max_timeout_ms: int,
+       bypass_quarantine: bool = False,
    ) -> LocalMedia | None:
        """Gets the info dictionary for given local media ID. If the media has
        not been uploaded yet, this function will wait up to ``max_timeout_ms``

@@ -451,6 +455,7 @@ class MediaRepository:
                the file_id for local content.)
            max_timeout_ms: the maximum number of milliseconds to wait for the
                media to be uploaded.
+           bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Either the info dictionary for the given local media ID or

@@ -466,7 +471,7 @@ class MediaRepository:
            respond_404(request)
            return None

-       if media_info.quarantined_by:
+       if media_info.quarantined_by and not bypass_quarantine:
            logger.info("Media %s is quarantined", media_id)
            respond_404(request)
            return None

@@ -500,6 +505,7 @@ class MediaRepository:
        max_timeout_ms: int,
        allow_authenticated: bool = True,
        federation: bool = False,
+       bypass_quarantine: bool = False,
    ) -> None:
        """Responds to requests for local media, if exists, or returns 404.

@@ -513,11 +519,14 @@ class MediaRepository:
                media to be uploaded.
            allow_authenticated: whether media marked as authenticated may be served to this request
            federation: whether the local media being fetched is for a federation request
+           bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Resolves once a response has successfully been written to request
        """
-       media_info = await self.get_local_media_info(request, media_id, max_timeout_ms)
+       media_info = await self.get_local_media_info(
+           request, media_id, max_timeout_ms, bypass_quarantine=bypass_quarantine
+       )
        if not media_info:
            return

@@ -561,6 +570,7 @@ class MediaRepository:
        ip_address: str,
        use_federation_endpoint: bool,
        allow_authenticated: bool = True,
+       bypass_quarantine: bool = False,
    ) -> None:
        """Respond to requests for remote media.

@@ -577,6 +587,7 @@ class MediaRepository:
                federation `/download` endpoint
            allow_authenticated: whether media marked as authenticated may be served to this
                request
+           bypass_quarantine: whether to bypass quarantine checks

        Returns:
            Resolves once a response has successfully been written to request

@@ -609,6 +620,7 @@ class MediaRepository:
                ip_address,
                use_federation_endpoint,
                allow_authenticated,
+               bypass_quarantine=bypass_quarantine,
            )

        # Check if the media is cached on the client, if so return 304. We need

@@ -697,6 +709,7 @@ class MediaRepository:
        ip_address: str,
        use_federation_endpoint: bool,
        allow_authenticated: bool,
+       bypass_quarantine: bool = False,
    ) -> tuple[Responder | None, RemoteMedia]:
        """Looks for media in local cache, if not there then attempt to
        download from remote server.

@@ -712,6 +725,7 @@ class MediaRepository:
            ip_address: the IP address of the requester
            use_federation_endpoint: whether to request the remote media over the new federation
                /download endpoint
+           bypass_quarantine: whether to bypass quarantine checks

        Returns:
            A tuple of responder and the media info of the file.

@@ -732,7 +746,7 @@ class MediaRepository:
        file_id = media_info.filesystem_id
        file_info = FileInfo(server_name, file_id)

-       if media_info.quarantined_by:
+       if media_info.quarantined_by and not bypass_quarantine:
            logger.info("Media is quarantined")
            raise NotFoundError()

@@ -914,6 +928,7 @@ class MediaRepository:
            filesystem_id=file_id,
            last_access_ts=time_now_ms,
            quarantined_by=None,
+           quarantined_ts=None,
            authenticated=authenticated,
            sha256=sha256writer.hexdigest(),
        )

@@ -1047,6 +1062,7 @@ class MediaRepository:
            filesystem_id=file_id,
            last_access_ts=time_now_ms,
            quarantined_by=None,
+           quarantined_ts=None,
            authenticated=authenticated,
            sha256=sha256writer.hexdigest(),
        )

@@ -331,10 +331,16 @@ class UrlPreviewer:
                # response failed or is incomplete.
                og_from_html = parse_html_to_open_graph(tree)

-               # Compile the Open Graph response by using the scraped
-               # information from the HTML and overlaying any information
-               # from the oEmbed response.
-               og = {**og_from_html, **og_from_oembed}
+               # Compile an Open Graph response by combining the oEmbed response
+               # and the information from the HTML, with information in the HTML
+               # preferred.
+               #
+               # The ordering here is intentional: certain websites (especially
+               # SPA JavaScript-based ones), including Mastodon and YouTube, provide
+               # almost complete OpenGraph descriptions but only stubs for oEmbed,
+               # with further oEmbed information being populated with JavaScript
+               # which Synapse won't execute.
+               og = og_from_oembed | og_from_html

                await self._precache_image_url(user, media_info, og)
            else:
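
A quick note on the merge semantics relied on above: with Python's `dict` union operator, keys from the right-hand operand win on conflict, which is exactly why the HTML-derived data now takes precedence over the oEmbed stubs. A self-contained illustration (values invented for the example):

```python
# `|` merges left-to-right: on duplicate keys the right-hand dict wins,
# so HTML-scraped values override the oEmbed stubs.
og_from_oembed = {"og:title": "stub", "og:type": "video"}
og_from_html = {"og:title": "Full title", "og:description": "A description"}

og = og_from_oembed | og_from_html
assert og == {
    "og:title": "Full title",      # HTML wins the conflicting key
    "og:type": "video",            # oEmbed-only key is kept
    "og:description": "A description",
}
```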

@@ -20,7 +20,7 @@
 #

 import logging
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional

 from twisted.internet.error import AlreadyCalled, AlreadyCancelled
 from twisted.internet.interfaces import IDelayedCall

@@ -71,7 +71,7 @@ class EmailPusher(Pusher):
        self.server_name = hs.hostname
        self.store = self.hs.get_datastores().main
        self.email = pusher_config.pushkey
-       self.timed_call: IDelayedCall | None = None
+       self.timed_call: Optional[IDelayedCall] = None
        self.throttle_params: dict[str, ThrottleParams] = {}
        self._inited = False

@@ -21,7 +21,7 @@
 import logging
 import random
 import urllib.parse
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional

 from prometheus_client import Counter

@@ -120,7 +120,7 @@ class HttpPusher(Pusher):
        self.data = pusher_config.data
        self.backoff_delay = HttpPusher.INITIAL_BACKOFF_SEC
        self.failing_since = pusher_config.failing_since
-       self.timed_call: IDelayedCall | None = None
+       self.timed_call: Optional[IDelayedCall] = None
        self._is_processing = False
        self._group_unread_count_by_room = (
            hs.config.push.push_group_unread_count_by_room

@@ -114,10 +114,12 @@ from synapse.rest.admin.users import (
    UserByThreePid,
    UserInvitesCount,
    UserJoinedRoomCount,
-   UserMembershipRestServlet,
+   UserJoinedRoomsRestServlet,
+   UserMembershipsRestServlet,
    UserRegisterServlet,
    UserReplaceMasterCrossSigningKeyRestServlet,
    UserRestServletV2,
+   UserRestServletV2Get,
    UsersRestServletV2,
    UsersRestServletV3,
    UserTokenRestServlet,

@@ -280,6 +282,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    # matrix_authentication_service integration uses the dedicated MAS API.
    if hs.config.experimental.msc3861.enabled:
        register_servlets_for_msc3861_delegation(hs, http_server)
+   else:
+       UserRestServletV2Get(hs).register(http_server)

        return

@@ -297,7 +301,8 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
    VersionServlet(hs).register(http_server)
    if not auth_delegated:
        UserAdminServlet(hs).register(http_server)
-   UserMembershipRestServlet(hs).register(http_server)
+   UserJoinedRoomsRestServlet(hs).register(http_server)
+   UserMembershipsRestServlet(hs).register(http_server)
    if not auth_delegated:
        UserTokenRestServlet(hs).register(http_server)
    UserRestServletV2(hs).register(http_server)

@@ -293,6 +293,38 @@ class ListMediaInRoom(RestServlet):
        return HTTPStatus.OK, {"local": local_mxcs, "remote": remote_mxcs}


+class ListQuarantinedMedia(RestServlet):
+    """Lists all quarantined media on the server."""
+
+    PATTERNS = admin_patterns("/media/quarantined$")
+
+    def __init__(self, hs: "HomeServer"):
+        self.store = hs.get_datastores().main
+        self.auth = hs.get_auth()
+
+    async def on_GET(
+        self,
+        request: SynapseRequest,
+    ) -> tuple[int, JsonDict]:
+        await assert_requester_is_admin(self.auth, request)
+
+        start = parse_integer(request, "from", default=0)
+        limit = parse_integer(request, "limit", default=100)
+        local_or_remote = parse_string(request, "kind", required=True)
+
+        if local_or_remote not in ["local", "remote"]:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Query parameter `kind` must be either 'local' or 'remote'.",
+            )
+
+        mxcs = await self.store.get_quarantined_media_mxcs(
+            start, limit, local_or_remote == "local"
+        )
+
+        return HTTPStatus.OK, {"media": mxcs}
+
+
 class PurgeMediaCacheRestServlet(RestServlet):
    PATTERNS = admin_patterns("/purge_media_cache$")

@@ -532,6 +564,7 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer)
    ProtectMediaByID(hs).register(http_server)
    UnprotectMediaByID(hs).register(http_server)
    ListMediaInRoom(hs).register(http_server)
+   ListQuarantinedMedia(hs).register(http_server)
    # XXX DeleteMediaByDateSize must be registered before DeleteMediaByID as
    # their URL routes overlap.
    DeleteMediaByDateSize(hs).register(http_server)

@@ -210,7 +210,7 @@ class UsersRestServletV3(UsersRestServletV2):
        return parse_boolean(request, "deactivated")


-class UserRestServletV2(RestServlet):
+class UserRestServletV2Get(RestServlet):
    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)$", "v2")

    """Get request to list user details.

@@ -220,22 +220,6 @@ class UserRestServletV2(RestServlet):

    returns:
        200 OK with user details if success otherwise an error.
-
-   Put request to allow an administrator to add or modify a user.
-   This needs user to have administrator access in Synapse.
-   We use PUT instead of POST since we already know the id of the user
-   object to create. POST could be used to create guests.
-
-   PUT /_synapse/admin/v2/users/<user_id>
-   {
-       "password": "secret",
-       "displayname": "User"
-   }
-
-   returns:
-       201 OK with new user object if user was created or
-       200 OK with modified user object if user was modified
-       otherwise an error.
    """

    def __init__(self, hs: "HomeServer"):

@@ -267,6 +251,28 @@ class UserRestServletV2(RestServlet):

        return HTTPStatus.OK, user_info_dict


+class UserRestServletV2(UserRestServletV2Get):
+    """
+    Put request to allow an administrator to add or modify a user.
+    This needs user to have administrator access in Synapse.
+    We use PUT instead of POST since we already know the id of the user
+    object to create. POST could be used to create guests.
+
+    Note: This inherits from `UserRestServletV2Get`, so also supports the `GET` route.
+
+    PUT /_synapse/admin/v2/users/<user_id>
+    {
+        "password": "secret",
+        "displayname": "User"
+    }
+
+    returns:
+        201 OK with new user object if user was created or
+        200 OK with modified user object if user was modified
+        otherwise an error.
+    """
+
    async def on_PUT(
        self, request: SynapseRequest, user_id: str
    ) -> tuple[int, JsonMapping]:

@@ -1031,7 +1037,7 @@ class UserAdminServlet(RestServlet):
        return HTTPStatus.OK, {}


-class UserMembershipRestServlet(RestServlet):
+class UserJoinedRoomsRestServlet(RestServlet):
    """
    Get list of joined room ID's for a user.
    """

@@ -1054,6 +1060,28 @@ class UserMembershipRestServlet(RestServlet):
        return HTTPStatus.OK, rooms_response


+class UserMembershipsRestServlet(RestServlet):
+    """
+    Get list of room memberships for a user.
+    """
+
+    PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/memberships$")
+
+    def __init__(self, hs: "HomeServer"):
+        self.is_mine = hs.is_mine
+        self.auth = hs.get_auth()
+        self.store = hs.get_datastores().main
+
+    async def on_GET(
+        self, request: SynapseRequest, user_id: str
+    ) -> tuple[int, JsonDict]:
+        await assert_requester_is_admin(self.auth, request)
+
+        memberships = await self.store.get_memberships_for_user(user_id)
+
+        return HTTPStatus.OK, {"memberships": memberships}
+
+
 class PushersRestServlet(RestServlet):
    """
    Gets information about all pushers for a specific `user_id`.
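
The new memberships servlet can be exercised in the same way as the other admin user endpoints. A hypothetical request (the `/_synapse/admin/v1` prefix is an assumption, since the `admin_patterns` default version is not shown in this diff; the path suffix and response shape come from the servlet above):

```python
# Hypothetical call to the new per-user memberships endpoint.
import requests

resp = requests.get(
    "https://homeserver.example/_synapse/admin/v1"
    "/users/@alice:example.com/memberships",
    headers={"Authorization": "Bearer <admin_access_token>"},
)
# Expected shape, a room_id -> membership mapping:
# {"memberships": {"!abc:example.com": "join", "!def:example.com": "leave"}}
print(resp.json())
```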

@@ -23,6 +23,7 @@
 import logging
 import re

+from synapse.api.errors import Codes, cs_error
 from synapse.http.server import (
    HttpServer,
    respond_with_json,

@@ -235,7 +236,23 @@ class DownloadResource(RestServlet):
        # Validate the server name, raising if invalid
        parse_and_validate_server_name(server_name)

-       await self.auth.get_user_by_req(request, allow_guest=True)
+       requester = await self.auth.get_user_by_req(request, allow_guest=True)
+       is_admin = await self.auth.is_server_admin(requester)
+       bypass_quarantine = False
+       if parse_string(request, "admin_unsafely_bypass_quarantine") == "true":
+           if is_admin:
+               logger.info("Admin bypassing quarantine for media download")
+               bypass_quarantine = True
+           else:
+               respond_with_json(
+                   request,
+                   400,
+                   cs_error(
+                       "Must be a server admin to bypass quarantine",
+                       code=Codes.UNKNOWN,
+                   ),
+                   send_cors=True,
+               )

        set_cors_headers(request)
        set_corp_headers(request)
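
To illustrate the escape hatch this hunk adds: a server admin appends `admin_unsafely_bypass_quarantine=true` to a media download URL. The exact `/_matrix/...` path below is an assumption for illustration; only the query parameter name and the 400 behaviour for non-admins come from the diff:

```python
# Build a download URL that requests the quarantine bypass (admins only).
from urllib.parse import urlencode

base = (
    "https://homeserver.example/_matrix/client/v1/media"
    "/download/example.com/abc123"  # hypothetical media path
)
url = f"{base}?{urlencode({'admin_unsafely_bypass_quarantine': 'true'})}"
# Non-admin callers receive a 400 "Must be a server admin to bypass quarantine".
```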

@@ -259,7 +276,11 @@ class DownloadResource(RestServlet):

        if self._is_mine_server_name(server_name):
            await self.media_repo.get_local_media(
-               request, media_id, file_name, max_timeout_ms
+               request,
+               media_id,
+               file_name,
+               max_timeout_ms,
+               bypass_quarantine=bypass_quarantine,
            )
        else:
            ip_address = request.getClientAddress().host

@@ -271,6 +292,7 @@ class DownloadResource(RestServlet):
                max_timeout_ms,
                ip_address,
                True,
+               bypass_quarantine=bypass_quarantine,
            )

@@ -19,9 +19,12 @@
 #

 import logging
+from bisect import bisect
+from http import HTTPStatus
 from typing import TYPE_CHECKING

+from unpaddedbase64 import decode_base64, encode_base64
+
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.server import HttpServer
 from synapse.http.servlet import RestServlet, parse_strings_from_args

@@ -35,10 +38,34 @@ if TYPE_CHECKING:

 logger = logging.getLogger(__name__)

+MUTUAL_ROOMS_BATCH_LIMIT = 100
+
+
+def _parse_mutual_rooms_batch_token_args(args: dict[bytes, list[bytes]]) -> str | None:
+    from_batches = parse_strings_from_args(args, "from")
+    if not from_batches:
+        return None
+    if len(from_batches) > 1:
+        raise SynapseError(
+            HTTPStatus.BAD_REQUEST,
+            "Duplicate from query parameter",
+            errcode=Codes.INVALID_PARAM,
+        )
+    if from_batches[0]:
+        try:
+            return decode_base64(from_batches[0]).decode("utf-8")
+        except Exception:
+            raise SynapseError(
+                HTTPStatus.BAD_REQUEST,
+                "Malformed from token",
+                errcode=Codes.INVALID_PARAM,
+            )
+    return None


 class UserMutualRoomsServlet(RestServlet):
    """
-   GET /uk.half-shot.msc2666/user/mutual_rooms?user_id={user_id} HTTP/1.1
+   GET /uk.half-shot.msc2666/user/mutual_rooms?user_id={user_id}&from={token} HTTP/1.1
    """

    PATTERNS = client_patterns(

@@ -56,6 +83,7 @@ class UserMutualRoomsServlet(RestServlet):
        args: dict[bytes, list[bytes]] = request.args  # type: ignore

        user_ids = parse_strings_from_args(args, "user_id", required=True)
+       from_batch = _parse_mutual_rooms_batch_token_args(args)

        if len(user_ids) > 1:
            raise SynapseError(

@@ -64,29 +92,52 @@ class UserMutualRoomsServlet(RestServlet):
                errcode=Codes.INVALID_PARAM,
            )

-       # We don't do batching, so a batch token is illegal by default
-       if b"batch_token" in args:
-           raise SynapseError(
-               HTTPStatus.BAD_REQUEST,
-               "Unknown batch_token",
-               errcode=Codes.INVALID_PARAM,
-           )
-
        user_id = user_ids[0]

        requester = await self.auth.get_user_by_req(request)
        if user_id == requester.user.to_string():
            raise SynapseError(
-               HTTPStatus.UNPROCESSABLE_ENTITY,
+               HTTPStatus.BAD_REQUEST,
                "You cannot request a list of shared rooms with yourself",
-               errcode=Codes.INVALID_PARAM,
+               errcode=Codes.UNKNOWN,
            )

-       rooms = await self.store.get_mutual_rooms_between_users(
-           frozenset((requester.user.to_string(), user_id))
+       # Sort here instead of the database function, so that we don't expose
+       # clients to any unrelated changes to the sorting algorithm.
+       rooms = sorted(
+           await self.store.get_mutual_rooms_between_users(
+               frozenset((requester.user.to_string(), user_id))
+           )
        )

-       return 200, {"joined": list(rooms)}
+       if from_batch:
+           # A from_batch token was provided, so cut off any rooms where the ID is
+           # lower than or equal to the token. This method doesn't care whether the
+           # provided token room still exists, nor whether it's even a real room ID.
+           #
+           # However, if rooms with a lower ID are added after the token was issued,
+           # they will not be included until the client makes a new request without a
+           # from token. This is considered acceptable, as clients generally won't
+           # persist these results for long periods.
+           rooms = rooms[bisect(rooms, from_batch) :]
+
+       if len(rooms) <= MUTUAL_ROOMS_BATCH_LIMIT:
+           # We've reached the end of the list, don't return a batch token
+           return 200, {"joined": rooms}
+
+       rooms = rooms[:MUTUAL_ROOMS_BATCH_LIMIT]
+       # We use urlsafe unpadded base64 encoding for the batch token in order to
+       # handle funny room IDs in old pre-v12 rooms properly. We also truncate it
+       # to stay within the 255-character limit of opaque tokens.
+       next_batch = encode_base64(rooms[-1].encode("utf-8"), urlsafe=True)[:255]
+       # Due to the truncation, it is technically possible to have conflicting next
+       # batches by creating hundreds of rooms with the same 191 character prefix
+       # in the room ID. In the event that some silly user does that, don't let
+       # them paginate further.
+       if next_batch == from_batch:
+           return 200, {"joined": rooms}
+
+       return 200, {"joined": list(rooms), "next_batch": next_batch}


 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
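
For reference, the batching scheme the handler implements condenses to a few lines: sort the room IDs, resume after the room ID encoded in the `from` token, and return an opaque urlsafe-base64 `next_batch` token. The sketch below is an illustration with simplified names, not the servlet itself; only the scheme comes from the diff:

```python
# Condensed model of the mutual-rooms pagination scheme.
from bisect import bisect

from unpaddedbase64 import decode_base64, encode_base64

BATCH_LIMIT = 100


def page_of_rooms(all_rooms: set[str], from_token: str | None) -> dict:
    rooms = sorted(all_rooms)
    if from_token:
        # The token decodes to the last room ID of the previous page;
        # skip everything up to and including it.
        last_seen = decode_base64(from_token).decode("utf-8")
        rooms = rooms[bisect(rooms, last_seen):]
    if len(rooms) <= BATCH_LIMIT:
        # End of the list: no continuation token.
        return {"joined": rooms}
    rooms = rooms[:BATCH_LIMIT]
    next_batch = encode_base64(rooms[-1].encode("utf-8"), urlsafe=True)[:255]
    return {"joined": rooms, "next_batch": next_batch}
```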

@@ -34,6 +34,7 @@ from typing import (
    Any,
    Awaitable,
    Callable,
+   Optional,
    TypeVar,
    cast,
)

@@ -54,6 +55,7 @@ from synapse.api.auth import Auth
 from synapse.api.auth.internal import InternalAuth
 from synapse.api.auth.mas import MasDelegatedAuth
 from synapse.api.auth_blocking import AuthBlocking
+from synapse.api.errors import HomeServerNotSetupException
 from synapse.api.filtering import Filtering
 from synapse.api.ratelimiting import Ratelimiter, RequestRatelimiter
 from synapse.app._base import unregister_sighups

@@ -319,7 +321,7 @@ class HomeServer(metaclass=abc.ABCMeta):
        self,
        hostname: str,
        config: HomeServerConfig,
-       reactor: ISynapseReactor | None = None,
+       reactor: Optional[ISynapseReactor] = None,
    ):
        """
        Args:

@@ -352,7 +354,7 @@ class HomeServer(metaclass=abc.ABCMeta):
        self._module_web_resources_consumed = False

        # This attribute is set by the free function `refresh_certificate`.
-       self.tls_server_context_factory: IOpenSSLContextFactory | None = None
+       self.tls_server_context_factory: Optional[IOpenSSLContextFactory] = None

        self._is_shutdown = False
        self._async_shutdown_handlers: list[ShutdownInfo] = []

@@ -399,7 +401,7 @@ class HomeServer(metaclass=abc.ABCMeta):
        """
        if self._is_shutdown:
            raise Exception(
-               f"Cannot start background process. HomeServer has been shutdown {len(self._background_processes)} {len(self.get_clock()._looping_calls)} {len(self.get_clock()._call_id_to_delayed_call)}"
+               "Cannot start background process. HomeServer has been shutdown"
            )

        # Ignore linter error as this is the one location this should be called.

@@ -466,7 +468,17 @@ class HomeServer(metaclass=abc.ABCMeta):

        # TODO: Cleanup replication pieces

-       self.get_keyring().shutdown()
+       keyring: Keyring | None = None
+       try:
+           keyring = self.get_keyring()
+       except HomeServerNotSetupException:
+           # If the homeserver wasn't fully set up, the keyring won't have
+           # existed before this point and will fail to be initialized, but it
+           # cleans itself up after any partial initialization problem.
+           pass
+
+       if keyring:
+           keyring.shutdown()

        # Cleanup metrics associated with the homeserver
        for later_gauge in all_later_gauges_to_clean_up_on_shutdown.values():

@@ -478,8 +490,12 @@ class HomeServer(metaclass=abc.ABCMeta):
            self.config.server.server_name
        )

-       for db in self.get_datastores().databases:
-           db.stop_background_updates()
+       try:
+           for db in self.get_datastores().databases:
+               db.stop_background_updates()
+       except HomeServerNotSetupException:
+           # If the homeserver wasn't fully set up, the datastores won't exist
+           pass

        if self.should_send_federation():
            try:

@@ -513,8 +529,12 @@ class HomeServer(metaclass=abc.ABCMeta):
                pass
        self._background_processes.clear()

-       for db in self.get_datastores().databases:
-           db._db_pool.close()
+       try:
+           for db in self.get_datastores().databases:
+               db._db_pool.close()
+       except HomeServerNotSetupException:
+           # If the homeserver wasn't fully set up, the datastores won't exist
+           pass

    def register_async_shutdown_handler(
        self,

@@ -677,7 +697,9 @@ class HomeServer(metaclass=abc.ABCMeta):

    def get_datastores(self) -> Databases:
        if not self.datastores:
-           raise Exception("HomeServer.setup must be called before getting datastores")
+           raise HomeServerNotSetupException(
+               "HomeServer.setup must be called before getting datastores"
+           )

        return self.datastores

@@ -61,6 +61,7 @@ class LocalMedia:
    url_cache: str | None
    last_access_ts: int
    quarantined_by: str | None
+   quarantined_ts: int | None
    safe_from_quarantine: bool
    user_id: str | None
    authenticated: bool | None

@@ -78,6 +79,7 @@ class RemoteMedia:
    created_ts: int
    last_access_ts: int
    quarantined_by: str | None
+   quarantined_ts: int | None
    authenticated: bool | None
    sha256: str | None

@@ -243,6 +245,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                "user_id",
                "authenticated",
                "sha256",
+               "quarantined_ts",
            ),
            allow_none=True,
            desc="get_local_media",

@@ -262,6 +265,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
            user_id=row[8],
            authenticated=row[9],
            sha256=row[10],
+           quarantined_ts=row[11],
        )

    async def get_local_media_by_user_paginate(

@@ -319,7 +323,8 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                safe_from_quarantine,
                user_id,
                authenticated,
-               sha256
+               sha256,
+               quarantined_ts
            FROM local_media_repository
            WHERE user_id = ?
            ORDER BY {order_by_column} {order}, media_id ASC

@@ -345,6 +350,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                user_id=row[9],
                authenticated=row[10],
                sha256=row[11],
+               quarantined_ts=row[12],
            )
            for row in txn
        ]

@@ -695,6 +701,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
                "quarantined_by",
                "authenticated",
                "sha256",
+               "quarantined_ts",
            ),
            allow_none=True,
            desc="get_cached_remote_media",

@@ -713,6 +720,7 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
            quarantined_by=row[6],
            authenticated=row[7],
            sha256=row[8],
+           quarantined_ts=row[9],
        )

    async def store_cached_remote_media(

@@ -945,6 +945,50 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            max_lifetime=max_lifetime,
        )

+    async def get_quarantined_media_mxcs(
+        self, index_start: int, index_limit: int, local: bool
+    ) -> list[str]:
+        """Retrieves all the quarantined media MXC URIs starting from the given position,
+        ordered from oldest quarantined timestamp, then alphabetically by media ID
+        (including origin).
+
+        Note that on established servers the "quarantined timestamp" may be zero for
+        media quarantined before the quarantine timestamp field was introduced.
+
+        Args:
+            index_start: The position to start from.
+            index_limit: The maximum number of results to return.
+            local: When true, only local media will be returned. When false, only remote media will be returned.
+
+        Returns:
+            The quarantined media as a list of MXC URIs.
+        """
+
+        def _get_quarantined_media_mxcs_txn(
+            txn: LoggingTransaction,
+        ) -> list[str]:
+            # We order by quarantined timestamp *and* media ID (including origin, when
+            # known) to ensure the ordering is stable for established servers.
+            if local:
+                sql = "SELECT '' as media_origin, media_id FROM local_media_repository WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_id ASC LIMIT ? OFFSET ?"
+            else:
+                sql = "SELECT media_origin, media_id FROM remote_media_cache WHERE quarantined_by IS NOT NULL ORDER BY quarantined_ts, media_origin, media_id ASC LIMIT ? OFFSET ?"
+            txn.execute(sql, (index_limit, index_start))
+
+            mxcs = []
+
+            for media_origin, media_id in txn:
+                if local:
+                    media_origin = self.hs.hostname
+                mxcs.append(f"mxc://{media_origin}/{media_id}")
+
+            return mxcs
+
+        return await self.db_pool.runInteraction(
+            "get_quarantined_media_mxcs",
+            _get_quarantined_media_mxcs_txn,
+        )
+
    async def get_media_mxcs_in_room(self, room_id: str) -> tuple[list[str], list[str]]:
        """Retrieves all the local and remote media MXC URIs in a given room

@@ -952,7 +996,7 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            room_id

        Returns:
-           The local and remote media as a lists of the media IDs.
+           The local and remote media as lists of the media IDs.
        """

        def _get_media_mxcs_in_room_txn(

@@ -1147,6 +1191,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            The total number of media items quarantined
        """
        total_media_quarantined = 0
+       now_ts: int | None = self.clock.time_msec()
+
+       if quarantined_by is None:
+           now_ts = None

        # Effectively a legacy path, update any media that was explicitly named.
        if media_ids:

@@ -1155,13 +1203,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE local_media_repository
-               SET quarantined_by = ?
+               SET quarantined_by = ?, quarantined_ts = ?
                WHERE {sql_many_clause_sql}"""

            if quarantined_by is not None:
                sql += " AND safe_from_quarantine = FALSE"

-           txn.execute(sql, [quarantined_by] + sql_many_clause_args)
+           txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            # Note that a rowcount of -1 can be used to indicate no rows were affected.
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

@@ -1172,13 +1220,13 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE local_media_repository
-               SET quarantined_by = ?
+               SET quarantined_by = ?, quarantined_ts = ?
                WHERE {sql_many_clause_sql}"""

            if quarantined_by is not None:
                sql += " AND safe_from_quarantine = FALSE"

-           txn.execute(sql, [quarantined_by] + sql_many_clause_args)
+           txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

            return total_media_quarantined

@@ -1202,6 +1250,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            The total number of media items quarantined
        """
        total_media_quarantined = 0
+       now_ts: int | None = self.clock.time_msec()
+
+       if quarantined_by is None:
+           now_ts = None

        if media:
            sql_in_list_clause, sql_args = make_tuple_in_list_sql_clause(

@@ -1211,10 +1263,10 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE remote_media_cache
-               SET quarantined_by = ?
+               SET quarantined_by = ?, quarantined_ts = ?
                WHERE {sql_in_list_clause}"""

-           txn.execute(sql, [quarantined_by] + sql_args)
+           txn.execute(sql, [quarantined_by, now_ts] + sql_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

            total_media_quarantined = 0

@@ -1224,9 +1276,9 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
            )
            sql = f"""
                UPDATE remote_media_cache
-               SET quarantined_by = ?
+               SET quarantined_by = ?, quarantined_ts = ?
                WHERE {sql_many_clause_sql}"""
-           txn.execute(sql, [quarantined_by] + sql_many_clause_args)
+           txn.execute(sql, [quarantined_by, now_ts] + sql_many_clause_args)
            total_media_quarantined += txn.rowcount if txn.rowcount > 0 else 0

            return total_media_quarantined

@@ -747,6 +747,27 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):

        return frozenset(room_ids)

+    async def get_memberships_for_user(self, user_id: str) -> dict[str, str]:
+        """Returns a dict of room_id to membership state for a given user.
+
+        For a remote user, this only returns rooms this server is currently
+        participating in.
+        """
+
+        rows = cast(
+            list[tuple[str, str]],
+            await self.db_pool.simple_select_list(
+                "current_state_events",
+                keyvalues={
+                    "type": EventTypes.Member,
+                    "state_key": user_id,
+                },
+                retcols=["room_id", "membership"],
+                desc="get_memberships_for_user",
+            ),
+        )
+        return dict(rows)
+
    @cached(max_entries=500000, iterable=True)
    async def get_rooms_for_user(self, user_id: str) -> frozenset[str]:
        """Returns a set of room_ids the user is currently joined to.

@@ -14,7 +14,7 @@


 import logging
-from typing import TYPE_CHECKING, Mapping, cast
+from typing import TYPE_CHECKING, AbstractSet, Mapping, cast

 import attr

@@ -26,13 +26,16 @@ from synapse.storage.database import (
    DatabasePool,
    LoggingDatabaseConnection,
    LoggingTransaction,
+   make_in_list_sql_clause,
 )
+from synapse.storage.engines import PostgresEngine
 from synapse.types import MultiWriterStreamToken, RoomStreamToken
 from synapse.types.handlers.sliding_sync import (
    HaveSentRoom,
    HaveSentRoomFlag,
    MutablePerConnectionState,
    PerConnectionState,
+   RoomLazyMembershipChanges,
    RoomStatusMap,
    RoomSyncConfig,
 )

@@ -373,6 +376,13 @@ class SlidingSyncStore(SQLBaseStore):
            value_values=values,
        )

+       self._persist_sliding_sync_connection_lazy_members_txn(
+           txn,
+           connection_key,
+           connection_position,
+           per_connection_state.room_lazy_membership,
+       )
+
        return connection_position

    @cached(iterable=True, max_entries=100000)

@@ -446,6 +456,23 @@ class SlidingSyncStore(SQLBaseStore):
        """
        txn.execute(sql, (connection_key, connection_position))

+       # Move any lazy membership entries for this connection position to have
+       # `NULL` connection position, indicating that it applies to all future
+       # positions on this connection. This is safe because we have deleted all
+       # other (potentially forked) connection positions, and so all future
+       # positions in this connection will be a continuation of the current
+       # position. Thus any lazy membership entries we have sent down will still
+       # be valid.
+       self.db_pool.simple_update_txn(
+           txn,
+           table="sliding_sync_connection_lazy_members",
+           keyvalues={
+               "connection_key": connection_key,
+               "connection_position": connection_position,
+           },
+           updatevalues={"connection_position": None},
+       )
+
        # Fetch and create a mapping from required state ID to the actual
        # required state for the connection.
        rows = self.db_pool.simple_select_list_txn(

@@ -525,8 +552,153 @@ class SlidingSyncStore(SQLBaseStore):
            receipts=RoomStatusMap(receipts),
            account_data=RoomStatusMap(account_data),
            room_configs=room_configs,
+           room_lazy_membership={},
        )

+    async def get_sliding_sync_connection_lazy_members(
+        self,
+        connection_position: int,
+        room_id: str,
+        user_ids: AbstractSet[str],
+    ) -> Mapping[str, int]:
+        """Get which user IDs in the room we have previously sent lazy
+        membership for.
+
+        Args:
+            connection_position: The sliding sync connection position.
+            room_id: The room ID to get lazy members for.
+            user_ids: The user IDs to check whether we've previously sent their
+                membership because of lazy loading.
+
+        Returns:
+            The mapping of user IDs to the last seen timestamp for those user
+            IDs. Only includes user IDs that we have previously sent lazy
+            membership for, and so may be a subset of the `user_ids` passed in.
+        """
+
+        def get_sliding_sync_connection_lazy_members_txn(
+            txn: LoggingTransaction,
+        ) -> Mapping[str, int]:
+            user_clause, user_args = make_in_list_sql_clause(
+                txn.database_engine, "user_id", user_ids
+            )
+
+            # Fetch all the lazy membership entries for the given connection,
+            # room and user IDs. We don't have the `connection_key` here, so we
+            # join against `sliding_sync_connection_positions` to get it.
+            #
+            # Beware that there are two `connection_position` columns in the
+            # query which are different: the one in
+            # `sliding_sync_connection_positions` is the one we match to get the
+            # connection_key, whereas the one in
+            # `sliding_sync_connection_lazy_members` is what we filter against
+            # (it may be null or the same as the one passed in).
+            #
+            # FIXME: We should pass in `connection_key` here to avoid the join.
+            # We don't do this currently as the caller doesn't have it handy.
+            sql = f"""
+                SELECT user_id, members.connection_position, last_seen_ts
+                FROM sliding_sync_connection_lazy_members AS members
+                INNER JOIN sliding_sync_connection_positions AS pos USING (connection_key)
+                WHERE pos.connection_position = ? AND room_id = ? AND {user_clause}
+            """
+
+            txn.execute(sql, (connection_position, room_id, *user_args))
+
+            # Filter out any cache entries that only apply to forked connection
+            # positions. Entries with `NULL` `connection_position` apply to all
+            # positions on the connection.
+            return {
+                user_id: last_seen_ts
+                for user_id, db_connection_position, last_seen_ts in txn
+                if db_connection_position == connection_position
+                or db_connection_position is None
+            }
+
+        return await self.db_pool.runInteraction(
+            "get_sliding_sync_connection_lazy_members",
+            get_sliding_sync_connection_lazy_members_txn,
+            db_autocommit=True,  # Avoid transaction for single read
+        )
+
+    def _persist_sliding_sync_connection_lazy_members_txn(
+        self,
+        txn: LoggingTransaction,
+        connection_key: int,
+        new_connection_position: int,
+        all_changes: dict[str, RoomLazyMembershipChanges],
+    ) -> None:
+        """Persist that we have sent lazy membership for the given user IDs."""
+
+        now = self.clock.time_msec()
+
+        # Figure out which cache entries to add or update.
+        #
+        # These are either a) new entries we've never sent before (i.e. with a
+        # None last_seen_ts), or b) where the `last_seen_ts` is old enough that
+        # we want to update it.
+        #
+        # We don't update the timestamp every time to avoid hammering the DB
+        # with writes, and we don't need the timestamp to be precise. It is used
+        # to evict old entries that haven't been used in a while.
+        to_update: list[tuple[str, str]] = []
+        for room_id, room_changes in all_changes.items():
+            user_ids_to_update = room_changes.get_returned_user_ids_to_update(
+                self.clock
+            )
+            to_update.extend((room_id, user_id) for user_id in user_ids_to_update)
+
+        if to_update:
+            # Upsert the new/updated entries.
+            #
+            # Ignore conflicts where the existing entry has a different
+            # connection position (i.e. from a forked connection position). This
+            # may mean that we lose some updates, but that's acceptable as this
+            # is a cache and it's fine for it to *not* include rows. (Downstream
+            # this will cause us to maybe send a few extra lazy members down
+            # sync, but we're allowed to send extra members.)
+            sql = """
+                INSERT INTO sliding_sync_connection_lazy_members
+                    (connection_key, connection_position, room_id, user_id, last_seen_ts)
+                VALUES {value_placeholder}
+                ON CONFLICT (connection_key, room_id, user_id)
+                DO UPDATE SET last_seen_ts = EXCLUDED.last_seen_ts
+                WHERE sliding_sync_connection_lazy_members.connection_position IS NULL
+                    OR sliding_sync_connection_lazy_members.connection_position = EXCLUDED.connection_position
+            """
+
+            args = [
+                (connection_key, new_connection_position, room_id, user_id, now)
+                for room_id, user_id in to_update
+            ]
+
+            if isinstance(self.database_engine, PostgresEngine):
+                sql = sql.format(value_placeholder="?")
+                txn.execute_values(sql, args, fetch=False)
+            else:
+                sql = sql.format(value_placeholder="(?, ?, ?, ?, ?)")
+                txn.execute_batch(sql, args)

+        # Remove any invalidated entries.
+        to_remove: list[tuple[str, str]] = []
+        for room_id, room_changes in all_changes.items():
+            for user_id in room_changes.invalidated_user_ids:
+                to_remove.append((room_id, user_id))
+
+        if to_remove:
+            # We don't try and match on connection position here: it's fine to
+            # remove it from all forks. This is a cache so it's fine to expire
+            # arbitrary entries, the worst that happens is we send a few extra
+            # lazy members down sync.
+            self.db_pool.simple_delete_many_batch_txn(
+                txn,
+                table="sliding_sync_connection_lazy_members",
+                keys=("connection_key", "room_id", "user_id"),
+                values=[
+                    (connection_key, room_id, user_id) for room_id, user_id in to_remove
+                ],
+            )
+
    @wrap_as_background_process("delete_old_sliding_sync_connections")
    async def delete_old_sliding_sync_connections(self) -> None:
        """Delete sliding sync connections that have not been used for a long time."""

@@ -564,6 +736,10 @@ class PerConnectionStateDB:

    room_configs: Mapping[str, "RoomSyncConfig"]

+    room_lazy_membership: dict[str, RoomLazyMembershipChanges]
+    """Lazy membership changes to persist alongside this state. Only used
+    when persisting."""
+
    @staticmethod
    async def from_state(
        per_connection_state: "MutablePerConnectionState", store: "DataStore"

@@ -618,6 +794,7 @@ class PerConnectionStateDB:
            receipts=RoomStatusMap(receipts),
            account_data=RoomStatusMap(account_data),
            room_configs=per_connection_state.room_configs.maps[0],
+           room_lazy_membership=per_connection_state.room_lazy_membership,
        )

    async def to_state(self, store: "DataStore") -> "PerConnectionState":

@@ -983,7 +983,7 @@ class StateMapWrapper(dict[StateKey, str]):
            raise Exception("State map was filtered and doesn't include: %s", key)
        return super().__getitem__(key)

-   @overload  # type: ignore[override]
+   @overload
    def get(self, key: StateKey, default: None = None, /) -> str | None: ...
    @overload
    def get(self, key: StateKey, default: str, /) -> str: ...

@@ -0,0 +1,60 @@
+--
+-- This file is licensed under the Affero General Public License (AGPL) version 3.
+--
+-- Copyright (C) 2025 Element Creations Ltd
+--
+-- This program is free software: you can redistribute it and/or modify
+-- it under the terms of the GNU Affero General Public License as
+-- published by the Free Software Foundation, either version 3 of the
+-- License, or (at your option) any later version.
+--
+-- See the GNU Affero General Public License for more details:
+-- <https://www.gnu.org/licenses/agpl-3.0.html>.
+
+
+-- Tracks which member states have been sent to the client for lazy-loaded
+-- members in sliding sync. This is a *cache* as it doesn't matter if we send
+-- down members we've previously sent down, i.e. it's safe to delete any rows.
+--
+-- We could have tracked these as part of the
+-- `sliding_sync_connection_required_state` table, but that would bloat that
+-- table significantly as most rooms will have lazy-loaded members. We want to
+-- keep that table small as we always pull out all rows for the connection for
+-- every request, so storing lots of data there would be bad for performance. To
+-- keep that table small we also deduplicate the requested state across
+-- different rooms, which storing lazy members there would prevent.
+--
+-- We track a *rough* `last_seen_ts` for each user in each room which indicates
+-- when we last would've sent their member state to the client. `last_seen_ts`
+-- is used so that we can remove members which haven't been seen for a while to
+-- save space. This is a *rough* timestamp as we don't want to update the
+-- timestamp every time, to avoid hammering the DB with writes, and we don't need
+-- the timestamp to be precise (as it is used to evict old entries that haven't
+-- been used in a while).
+--
+-- Care must be taken when handling "forked" positions, i.e. we have responded
+-- to a request with a position and then get another different request using the
+-- previous position as a base. We track this by including a
+-- `connection_position` for newly inserted rows. When we advance the position
+-- we set this to NULL for all rows which were present at that position, and
+-- delete all other rows. When reading rows we can then filter out any rows
+-- which have a non-NULL `connection_position` which is not the current
+-- position.
+--
+-- I.e. `connection_position` is NULL for rows which are valid for *all*
+-- positions on the connection, and is non-NULL for rows which are only valid
+-- for a specific position.
+--
+-- When invalidating rows, we can just delete them. Technically this could
+-- invalidate for a forked position, but this is acceptable as equivalent to a
+-- cache eviction.
+CREATE TABLE sliding_sync_connection_lazy_members (
+    connection_key BIGINT NOT NULL REFERENCES sliding_sync_connections(connection_key) ON DELETE CASCADE,
+    connection_position BIGINT REFERENCES sliding_sync_connection_positions(connection_position) ON DELETE CASCADE,
+    room_id TEXT NOT NULL,
+    user_id TEXT NOT NULL,
+    last_seen_ts BIGINT NOT NULL
+);
+
+CREATE UNIQUE INDEX sliding_sync_connection_lazy_members_idx ON sliding_sync_connection_lazy_members (connection_key, room_id, user_id);
+CREATE INDEX sliding_sync_connection_lazy_members_pos_idx ON sliding_sync_connection_lazy_members (connection_key, connection_position) WHERE connection_position IS NOT NULL;
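
The validity rule described in the comments above (NULL position means "valid for all positions on the connection") reduces to a one-line predicate. A tiny model in plain Python, with illustrative data rather than real Synapse rows:

```python
# Each row: (user_id, connection_position, last_seen_ts).
rows = [
    ("@a:x", None, 100),  # promoted on advance: valid for all positions
    ("@b:x", 7, 200),     # only valid while position 7 is current
    ("@c:x", 9, 300),     # belongs to a forked position; must be ignored
]
current_position = 7
visible = {
    user: ts
    for user, pos, ts in rows
    if pos is None or pos == current_position
}
assert visible == {"@a:x": 100, "@b:x": 200}
```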

@@ -0,0 +1,27 @@
+--
+-- This file is licensed under the Affero General Public License (AGPL) version 3.
+--
+-- Copyright (C) 2025 Element Creations, Ltd
+--
+-- This program is free software: you can redistribute it and/or modify
+-- it under the terms of the GNU Affero General Public License as
+-- published by the Free Software Foundation, either version 3 of the
+-- License, or (at your option) any later version.
+--
+-- See the GNU Affero General Public License for more details:
+-- <https://www.gnu.org/licenses/agpl-3.0.html>.
+
+-- Add a timestamp recording when media was quarantined.
+--
+-- Existing quarantined rows are backfilled with 0 below, since their real
+-- quarantine time is unknown.
+ALTER TABLE local_media_repository ADD COLUMN quarantined_ts BIGINT;
+ALTER TABLE remote_media_cache ADD COLUMN quarantined_ts BIGINT;
+
+UPDATE local_media_repository SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;
+UPDATE remote_media_cache SET quarantined_ts = 0 WHERE quarantined_by IS NOT NULL;
+
+-- Note: We *probably* should have an index on quarantined_ts, but we're going
+-- to try to defer that to a future migration after seeing the performance impact.

@@ -49,12 +49,21 @@ from synapse.types import (
    UserID,
 )
 from synapse.types.rest.client import SlidingSyncBody
+from synapse.util.clock import Clock
+from synapse.util.duration import Duration

 if TYPE_CHECKING:
    from synapse.handlers.relations import BundledAggregations

 logger = logging.getLogger(__name__)

+# How often to update the last seen timestamp for lazy members.
+#
+# We don't update the timestamp every time to avoid hammering the DB with
+# writes, and we don't need the timestamp to be precise (as it is used to evict
+# old entries that haven't been used in a while).
+LAZY_MEMBERS_UPDATE_INTERVAL = Duration(hours=1)
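
The constant above feeds a simple throttle rule: refresh a member's `last_seen_ts` only when it is missing or stale. A dependency-free sketch, with plain ints standing in for Synapse's `Clock`/`Duration` helpers:

```python
UPDATE_INTERVAL_MS = 60 * 60 * 1000  # 1 hour, mirroring Duration(hours=1)


def needs_timestamp_refresh(last_seen_ts: int | None, now_ms: int) -> bool:
    # None means we have never recorded the member; otherwise refresh only
    # when the recorded timestamp is at least one interval old.
    return last_seen_ts is None or now_ms - last_seen_ts >= UPDATE_INTERVAL_MS


assert needs_timestamp_refresh(None, now_ms=10_000)           # never recorded
assert not needs_timestamp_refresh(9_000, now_ms=10_000)      # recorded recently
assert needs_timestamp_refresh(0, now_ms=UPDATE_INTERVAL_MS)  # stale
```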


 class SlidingSyncConfig(SlidingSyncBody):
    """

@@ -891,6 +900,69 @@ class PerConnectionState:
        return len(self.rooms) + len(self.receipts) + len(self.room_configs)


+@attr.s(auto_attribs=True)
+class RoomLazyMembershipChanges:
+    """Changes to lazily-loaded room memberships for a given room."""
+
+    returned_user_id_to_last_seen_ts_map: Mapping[str, int | None] = attr.Factory(dict)
+    """Map from user ID to timestamp for users whose membership we have lazily
+    loaded in this room in this request. The timestamp indicates the time we
+    previously needed the membership, or None if we sent it down for the first
+    time in this request.
+
+    We track a *rough* `last_seen_ts` for each user in each room which indicates
+    when we last would've sent their member state to the client. This is used so
+    that we can remove members which haven't been seen for a while to save
+    space.
+
+    Note: this will include users whose membership we would have sent down but
+    didn't due to us having previously sent them.
+    """
+
+    invalidated_user_ids: AbstractSet[str] = attr.Factory(set)
+    """Set of user IDs whose latest membership we have *not* sent down"""
+
+    def get_returned_user_ids_to_update(self, clock: Clock) -> StrCollection:
+        """Get the user IDs whose last seen timestamp we need to update in the
+        database.
+
+        This is a subset of user IDs in `returned_user_id_to_last_seen_ts_map`,
+        whose timestamp is either None (first time we've sent them) or older
+        than `LAZY_MEMBERS_UPDATE_INTERVAL`.
+
+        We only update the timestamp in the database every so often to avoid
+        hammering the DB with writes. We don't need the timestamp to be precise,
+        as the timestamp is used to evict old entries that haven't been used in
+        a while.
+        """
+
+        now_ms = clock.time_msec()
+        return [
+            user_id
+            for user_id, last_seen_ts in self.returned_user_id_to_last_seen_ts_map.items()
+            if last_seen_ts is None
+            or now_ms - last_seen_ts >= LAZY_MEMBERS_UPDATE_INTERVAL.as_millis()
+        ]
+
+    def has_updates(self, clock: Clock) -> bool:
+        """Check if there are any updates to the lazy membership changes.
+
+        Called to check if we need to persist changes to the lazy membership
+        state for the room. We want to avoid persisting the state if there are
+        no changes, to avoid unnecessary writes (and cache misses due to a new
+        connection position).
+        """
+
+        # We consider there to be updates if there are any invalidated user
+        # IDs...
+        if self.invalidated_user_ids:
+            return True
+
+        # ...or if any of the returned user IDs need their last seen timestamp
+        # updating in the database.
+        return bool(self.get_returned_user_ids_to_update(clock))
+
+
 @attr.s(auto_attribs=True)
 class MutablePerConnectionState(PerConnectionState):
    """A mutable version of `PerConnectionState`"""

@@ -903,12 +975,28 @@ class MutablePerConnectionState(PerConnectionState):

    room_configs: typing.ChainMap[str, RoomSyncConfig]

-   def has_updates(self) -> bool:
+   # A map from room ID to the lazily-loaded memberships needed for the
+   # request in that room.
+   room_lazy_membership: dict[str, RoomLazyMembershipChanges] = attr.Factory(dict)
+
+   def has_updates(self, clock: Clock) -> bool:
+       """Check if there are any updates to the per-connection state that need
+       persisting.
+
+       It is important that we don't spuriously do persistence, as that will
+       always generate a new connection position which will invalidate some of
+       the caches. It doesn't need to be perfect, but we should avoid always
+       generating new connection positions when doing lazy loading.
+       """
        return (
            bool(self.rooms.get_updates())
            or bool(self.receipts.get_updates())
            or bool(self.account_data.get_updates())
            or bool(self.get_room_config_updates())
+           or any(
+               change.has_updates(clock)
+               for change in self.room_lazy_membership.values()
+           )
        )

    def get_room_config_updates(self) -> Mapping[str, RoomSyncConfig]:

@@ -29,6 +29,7 @@ from twisted.internet.interfaces import IDelayedCall
 from twisted.internet.task import LoopingCall

 from synapse.logging import context
+from synapse.logging.loggers import ExplicitlyConfiguredLogger
 from synapse.types import ISynapseThreadlessReactor
 from synapse.util import log_failure
 from synapse.util.duration import Duration

@@ -39,6 +40,25 @@ P = ParamSpec("P")

 logger = logging.getLogger(__name__)

+original_logger_class = logging.getLoggerClass()
+logging.setLoggerClass(ExplicitlyConfiguredLogger)
+clock_debug_logger = logging.getLogger("synapse.util.clock.debug")
+"""
+A logger for debugging what is scheduling calls.
+
+Ideally, these wouldn't be gated behind an `ExplicitlyConfiguredLogger`, as including logs
+from this logger would be helpful to track when things are being scheduled. However, for
+these logs to be meaningful, they need to include a stack trace to show what initiated the
+call in the first place.
+
+Since the stack traces can create a lot of noise and make the logs hard to read (unless you're
+specifically debugging scheduling issues), we want users to opt in to seeing these logs. To enable
+this, they must explicitly set `synapse.util.clock.debug` in the logging configuration. Note that
+this logger won't inherit the log level from the parent logger.
+"""
+# Restore the original logger class
+logging.setLoggerClass(original_logger_class)
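
Since the logger will not inherit a level from its parent, enabling it requires an explicit entry in the logging configuration. A hypothetical opt-in using the standard library's `dictConfig`; the logger name comes from the diff, while the surrounding config layout is illustrative rather than Synapse's shipped logging config:

```python
import logging.config

logging.config.dictConfig(
    {
        "version": 1,
        "incremental": True,  # leave any existing logging setup untouched
        "loggers": {
            # Must be named explicitly; the level is not inherited.
            "synapse.util.clock.debug": {"level": "DEBUG"},
        },
    }
)
```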
|
||||
|
||||
|
||||
class Clock:
|
||||
"""
|
||||
@@ -174,7 +194,7 @@ class Clock:
|
||||
looping_call_context_string = "looping_call_now"
|
||||
|
||||
def wrapped_f(*args: P.args, **kwargs: P.kwargs) -> Deferred:
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"%s(%s): Executing callback", looping_call_context_string, instance_id
|
||||
)
|
||||
|
||||
@@ -222,7 +242,7 @@ class Clock:
|
||||
d.addErrback(log_failure, "Looping call died", consumeErrors=False)
|
||||
self._looping_calls.append(call)
|
||||
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"%s(%s): Scheduled looping call every %sms later",
|
||||
looping_call_context_string,
|
||||
instance_id,
|
||||
@@ -283,7 +303,7 @@ class Clock:
|
||||
raise Exception("Cannot start delayed call. Clock has been shutdown")
|
||||
|
||||
def wrapped_callback(*args: Any, **kwargs: Any) -> None:
|
||||
logger.debug("call_later(%s): Executing callback", call_id)
|
||||
clock_debug_logger.debug("call_later(%s): Executing callback", call_id)
|
||||
|
||||
assert context.current_context() is context.SENTINEL_CONTEXT, (
|
||||
"Expected `call_later` callback from the reactor to start with the sentinel logcontext "
|
||||
@@ -327,7 +347,7 @@ class Clock:
|
||||
delay.as_secs(), wrapped_callback, *args, **kwargs
|
||||
) # type: ignore[call-later-not-tracked]
|
||||
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"call_later(%s): Scheduled call for %ss later (tracked for shutdown: %s)",
|
||||
call_id,
|
||||
delay,
|
||||
@@ -347,7 +367,7 @@ class Clock:
|
||||
self, wrapped_call: "DelayedCallWrapper", ignore_errs: bool = False
|
||||
) -> None:
|
||||
try:
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"cancel_call_later: cancelling scheduled call %s", wrapped_call.call_id
|
||||
)
|
||||
wrapped_call.delayed_call.cancel()
|
||||
@@ -367,7 +387,7 @@ class Clock:
|
||||
# will result in the call removing itself from the map mid-iteration.
|
||||
for call_id, call in list(self._call_id_to_delayed_call.items()):
|
||||
try:
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"cancel_all_delayed_calls: cancelling scheduled call %s", call_id
|
||||
)
|
||||
call.cancel()
|
||||
@@ -396,7 +416,9 @@ class Clock:
|
||||
instance_id = random_string_insecure_fast(5)
|
||||
|
||||
def wrapped_callback(*args: Any, **kwargs: Any) -> None:
|
||||
logger.debug("call_when_running(%s): Executing callback", instance_id)
|
||||
clock_debug_logger.debug(
|
||||
"call_when_running(%s): Executing callback", instance_id
|
||||
)
|
||||
|
||||
# Since this callback can be invoked immediately if the reactor is already
|
||||
# running, we can't always assume that we're running in the sentinel
|
||||
@@ -436,7 +458,7 @@ class Clock:
|
||||
# callWhenRunning should be called.
|
||||
self._reactor.callWhenRunning(wrapped_callback, *args, **kwargs) # type: ignore[prefer-synapse-clock-call-when-running]
|
||||
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"call_when_running(%s): Scheduled call",
|
||||
instance_id,
|
||||
# Find out who is scheduling the call which makes it easy to follow in the
|
||||
@@ -472,7 +494,7 @@ class Clock:
|
||||
instance_id = random_string_insecure_fast(5)
|
||||
|
||||
def wrapped_callback(*args: Any, **kwargs: Any) -> None:
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"add_system_event_trigger(%s): Executing %s %s callback",
|
||||
instance_id,
|
||||
phase,
|
||||
@@ -509,7 +531,7 @@ class Clock:
|
||||
# logcontext to the reactor
|
||||
context.run_in_background(callback, *args, **kwargs)
|
||||
|
||||
logger.debug(
|
||||
clock_debug_logger.debug(
|
||||
"add_system_event_trigger(%s) for %s %s",
|
||||
instance_id,
|
||||
phase,
|
||||
|
||||
@@ -19,7 +19,7 @@
#

import queue
from typing import Any, BinaryIO, cast
from typing import Any, BinaryIO, Optional, Union, cast

from twisted.internet import threads
from twisted.internet.defer import Deferred
@@ -50,7 +50,7 @@ class BackgroundFileConsumer:
        self._reactor: ISynapseReactor = reactor

        # Producer we're registered with
        self._producer: IPushProducer | IPullProducer | None = None
        self._producer: Optional[Union[IPushProducer, IPullProducer]] = None

        # True if PushProducer, false if PullProducer
        self.streaming = False
@@ -72,7 +72,7 @@ class BackgroundFileConsumer:
        self._write_exception: Exception | None = None

    def registerProducer(
        self, producer: IPushProducer | IPullProducer, streaming: bool
        self, producer: Union[IPushProducer, IPullProducer], streaming: bool
    ) -> None:
        """Part of IConsumer interface

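This hunk trades PEP 604 union syntax (`X | Y`) for the `typing` equivalents. Both spellings are identical to a type checker, but `X | Y` in an evaluated annotation raises `TypeError` at runtime on Python older than 3.10 unless evaluation is deferred. A self-contained illustration, with placeholder classes standing in for twisted's producer interfaces:

from typing import Optional, Union

class PushProducer: ...  # stand-in for IPushProducer
class PullProducer: ...  # stand-in for IPullProducer

# PEP 604 spelling; only evaluates at runtime on Python 3.10+ (or behind
# `from __future__ import annotations`, which defers evaluation):
#     producer: PushProducer | PullProducer | None = None

# typing-module spelling; valid at runtime on older interpreters too:
producer: Optional[Union[PushProducer, PullProducer]] = None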
@@ -22,6 +22,7 @@
import heapq
from itertools import islice
from typing import (
    Any,
    Callable,
    Collection,
    Generator,
@@ -33,7 +34,7 @@ from typing import (
    TypeVar,
)

T = TypeVar("T")
T = TypeVar("T", bound=Any)
S = TypeVar("S", bound="_SelfSlice")

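`S = TypeVar("S", bound="_SelfSlice")` is the classic pre-PEP-673 self-type pattern: annotating `self` with a bound TypeVar lets a method declare that it returns an instance of whatever subclass it was invoked on. A small sketch with invented class names:

from typing import TypeVar

S = TypeVar("S", bound="Sliceable")

class Sliceable:
    def __init__(self, items: list) -> None:
        self.items = items

    def head(self: S, n: int) -> S:
        # Construct via type(self) so subclasses come back as themselves.
        return type(self)(self.items[:n])

class SortedSliceable(Sliceable):
    pass

# The checker infers SortedSliceable here, not merely Sliceable:
first_two = SortedSliceable([3, 1, 2]).head(2)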
@@ -19,7 +19,10 @@
#

import gc
import sys
import weakref
from typing import Any
from unittest.mock import patch

from synapse.app.homeserver import SynapseHomeServer
from synapse.logging.context import LoggingContext
@@ -81,45 +84,12 @@ class HomeserverCleanShutdownTestCase(HomeserverTestCase):

        # Ensure the `HomeServer` has been garbage collected by attempting to use the
        # weakref to it.
        if hs_ref() is not None:
            self.fail("HomeServer reference should not be valid at this point")

        # To help debug this test when it fails, it is useful to leverage the
        # `objgraph` module.
        # The following code serves as an example of what I have found to be useful
        # when tracking down references holding the `SynapseHomeServer` in memory:
        #
        # all_objects = gc.get_objects()
        # for obj in all_objects:
        #     try:
        #         # These are a subset of types that are typically involved with
        #         # holding the `HomeServer` in memory. You may want to inspect
        #         # other types as well.
        #         if isinstance(obj, DataStore):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 db_obj = obj
        #         if isinstance(obj, SynapseHomeServer):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 synapse_hs = obj
        #         if isinstance(obj, SynapseSite):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 sysite = obj
        #         if isinstance(obj, DatabasePool):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 dbpool = obj
        #     except Exception:
        #         pass
        #
        # print(sys.getrefcount(hs_ref()), "refs to", hs_ref())
        #
        # # The following values for `max_depth` and `too_many` have been found to
        # # render a useful amount of information without taking an overly long time
        # # to generate the result.
        # objgraph.show_backrefs(synapse_hs, max_depth=10, too_many=10)
        hs_after_shutdown = hs_ref()
        if hs_after_shutdown is not None:
            self.fail(
                "HomeServer reference should not be valid at this point "
                f"{get_memory_debug_info_for_object(hs_after_shutdown)}",
            )

    @logcontext_clean
    def test_clean_homeserver_shutdown_mid_background_updates(self) -> None:
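The comment block removed above (it reappears below, centralised in a helper) describes an `objgraph` workflow for finding what keeps an object alive. For reference, a compact version of that workflow; `objgraph` is a third-party package, and the object names here are invented:

import objgraph  # third-party: pip install objgraph

class Suspect:
    pass

leaked = Suspect()
keeper = {"ref": leaked}  # the reference we want objgraph to reveal

# Render the chain of back-references keeping `leaked` alive. Modest
# `max_depth`/`too_many` limits keep the graph readable and quick to build.
objgraph.show_backrefs(leaked, max_depth=10, too_many=10, filename="backrefs.dot")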
@@ -165,42 +135,137 @@ class HomeserverCleanShutdownTestCase(HomeserverTestCase):

        # Ensure the `HomeServer` has been garbage collected by attempting to use the
        # weakref to it.
        if hs_ref() is not None:
            self.fail("HomeServer reference should not be valid at this point")
        hs_after_shutdown = hs_ref()
        if hs_after_shutdown is not None:
            self.fail(
                "HomeServer reference should not be valid at this point "
                f"{get_memory_debug_info_for_object(hs_after_shutdown)}",
            )

        # To help debug this test when it fails, it is useful to leverage the
        # `objgraph` module.
        # The following code serves as an example of what I have found to be useful
        # when tracking down references holding the `SynapseHomeServer` in memory:
        #
        # all_objects = gc.get_objects()
        # for obj in all_objects:
        #     try:
        #         # These are a subset of types that are typically involved with
        #         # holding the `HomeServer` in memory. You may want to inspect
        #         # other types as well.
        #         if isinstance(obj, DataStore):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 db_obj = obj
        #         if isinstance(obj, SynapseHomeServer):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 synapse_hs = obj
        #         if isinstance(obj, SynapseSite):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 sysite = obj
        #         if isinstance(obj, DatabasePool):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 dbpool = obj
        #     except Exception:
        #         pass
        #
        # print(sys.getrefcount(hs_ref()), "refs to", hs_ref())
        #
        # # The following values for `max_depth` and `too_many` have been found to
        # # render a useful amount of information without taking an overly long time
        # # to generate the result.
        # objgraph.show_backrefs(synapse_hs, max_depth=10, too_many=10)
    @logcontext_clean
    def test_clean_homeserver_shutdown_when_failed_to_setup(self) -> None:
        """
        Ensure the `SynapseHomeServer` can be fully shutdown and garbage collected if it
        fails to be `setup`.
        """
        self.reactor, self.clock = get_clock()

        # Patch `hs.setup()` to do nothing, so that the homeserver is not fully setup.
        with patch.object(SynapseHomeServer, "setup", return_value=None) as mock_setup:
            # Patch out the call to `start_test_homeserver` since we want access to the
            # homeserver even before the server is setup (let alone started)
            with patch("tests.server.start_test_homeserver", return_value=None):
                self.hs = setup_test_homeserver(
                    cleanup_func=self.addCleanup,
                    reactor=self.reactor,
                    homeserver_to_use=SynapseHomeServer,
                    clock=self.clock,
                )
            # Sanity check that we patched the correct method (make sure it was the
            # thing that was called)
            mock_setup.assert_called_once_with()

        hs_ref = weakref.ref(self.hs)

        # Run the reactor so any `callWhenRunning` functions can be cleared out.
        self.reactor.run()
        # This would normally happen as part of `HomeServer.shutdown` but the `MemoryReactor`
        # we use in tests doesn't handle this properly (see doc comment)
        cleanup_test_reactor_system_event_triggers(self.reactor)

        async def shutdown() -> None:
            # Use a logcontext just to double-check that we don't mangle the logcontext
            # during shutdown.
            with LoggingContext(name="hs_shutdown", server_name=self.hs.hostname):
                await self.hs.shutdown()

        self.get_success(shutdown())

        # Cleanup the internal reference in our test case
        del self.hs

        # Force garbage collection.
        gc.collect()

        # Ensure the `HomeServer` has been garbage collected by attempting to use the
        # weakref to it.
        hs_after_shutdown = hs_ref()
        if hs_after_shutdown is not None:
            self.fail(
                "HomeServer reference should not be valid at this point "
                f"{get_memory_debug_info_for_object(hs_after_shutdown)}",
            )

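All of these tests share the same weakref-based leak check: take a weak reference, drop every strong reference, force a collection, and fail if the weakref still resolves. A generic, self-contained sketch of the pattern:

import gc
import weakref

class Server:
    """Stand-in for the object whose lifetime we want to verify."""

def assert_collected() -> None:
    server = Server()
    server_ref = weakref.ref(server)

    # Drop the only strong reference, then force a collection so that even
    # objects caught in reference cycles are reclaimed.
    del server
    gc.collect()

    # If the weakref still resolves, something is keeping the object alive.
    if server_ref() is not None:
        raise AssertionError("Server was not garbage collected")

assert_collected()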
def get_memory_debug_info_for_object(object: Any) -> dict[str, Any]:
    """
    Gathers some useful information to make it easier to figure out why the `object` is
    still in memory.

    Args:
        object: The object to gather debug information for.
    """
    debug: dict[str, Any] = {}
    if object is not None:
        # The simplest tracing we can do is show the reference count for the object.
        debug["reference_count"] = sys.getrefcount(object)

        # Find the list of objects that directly refer to the object.
        #
        # Note: The `ref_count` can be >0 but `referrers` can be empty because
        # all of the objects were frozen. Look at the
        # `frozen_object_count` to detect this scenario.
        referrers = gc.get_referrers(object)
        debug["gc_referrer_count"] = len(referrers)
        debug["gc_referrers"] = referrers

        # We don't expect to see frozen objects in normal operation of the
        # `multi_synapse` shard.
        #
        # We can see frozen objects if you forget to pass `freeze=False` when
        # starting the `SynapseHomeServer`. Frozen objects are
        # never considered for garbage collection. If the
        # `SynapseHomeServer` (or anything that references the homeserver)
        # is frozen, the homeserver can never be garbage collected and will
        # linger in memory forever.
        freeze_count = gc.get_freeze_count()
        debug["gc_global_frozen_object_count"] = freeze_count

        # To help debug this test when it fails, it is useful to leverage the
        # `objgraph` module.
        # The following code serves as an example of what I have found to be useful
        # when tracking down references holding the `SynapseHomeServer` in memory:
        #
        # all_objects = gc.get_objects()
        # for obj in all_objects:
        #     try:
        #         # These are a subset of types that are typically involved with
        #         # holding the `HomeServer` in memory. You may want to inspect
        #         # other types as well.
        #         if isinstance(obj, DataStore):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 db_obj = obj
        #         if isinstance(obj, SynapseHomeServer):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 synapse_hs = obj
        #         if isinstance(obj, SynapseSite):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 sysite = obj
        #         if isinstance(obj, DatabasePool):
        #             print(sys.getrefcount(obj), "refs to", obj)
        #             if not isinstance(obj, weakref.ProxyType):
        #                 dbpool = obj
        #     except Exception:
        #         pass
        #
        # print(sys.getrefcount(hs_ref()), "refs to", hs_ref())
        #
        # # The following values for `max_depth` and `too_many` have been found to
        # # render a useful amount of information without taking an overly long time
        # # to generate the result.
        # objgraph.show_backrefs(synapse_hs, max_depth=10, too_many=10)

    return debug

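The helper relies on three standard-library introspection hooks: `sys.getrefcount`, `gc.get_referrers`, and the `gc.freeze` family. A short demonstration of what each one reports (object names invented):

import gc
import sys

class Leaky:
    pass

holder = []
obj = Leaky()
holder.append(obj)

# getrefcount includes the temporary reference created by the call itself,
# so this typically prints 3: the `obj` binding, the list slot, and the
# call argument.
print(sys.getrefcount(obj))

# Containers that directly refer to `obj`; both `holder` and the module's
# globals dict show up here.
for referrer in gc.get_referrers(obj):
    if referrer is holder:
        print("held by the module-level list")

# gc.freeze() moves every currently tracked object into a permanent
# generation that the collector never scans; get_freeze_count() reports
# how many objects are frozen that way.
gc.freeze()
print(gc.get_freeze_count())
gc.unfreeze()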
@@ -95,7 +95,12 @@ class KeyringTestCase(unittest.HomeserverTestCase):
    def test_verify_json_objects_for_server_awaits_previous_requests(self) -> None:
        mock_fetcher = Mock()
        mock_fetcher.get_keys = Mock()
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
        kr = keyring.Keyring(
            self.hs,
            test_only_key_fetchers=[
                mock_fetcher,
            ],
        )

        # a signed object that we are going to try to validate
        key1 = signedjson.key.generate_signing_key("1")
@@ -286,7 +291,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
        mock_fetcher = Mock()
        mock_fetcher.get_keys = Mock(side_effect=get_keys)
        kr = keyring.Keyring(
            self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
            self.hs, test_only_key_fetchers=[StoreKeyFetcher(self.hs), mock_fetcher]
        )

        # sign the json
@@ -313,7 +318,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):

        mock_fetcher = Mock()
        mock_fetcher.get_keys = Mock(side_effect=get_keys)
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))
        kr = keyring.Keyring(self.hs, test_only_key_fetchers=[mock_fetcher])

        json1: JsonDict = {}
        signedjson.sign.sign_json(json1, "server1", key1)
@@ -363,7 +368,9 @@ class KeyringTestCase(unittest.HomeserverTestCase):
        mock_fetcher1.get_keys = Mock(side_effect=get_keys1)
        mock_fetcher2 = Mock()
        mock_fetcher2.get_keys = Mock(side_effect=get_keys2)
        kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher1, mock_fetcher2))
        kr = keyring.Keyring(
            self.hs, test_only_key_fetchers=[mock_fetcher1, mock_fetcher2]
        )

        json1: JsonDict = {}
        signedjson.sign.sign_json(json1, "server1", key1)

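The rename from `key_fetchers` to `test_only_key_fetchers` makes the test seam explicit at every call site: production code lets the Keyring build its own fetchers, and only tests inject substitutes. A generic sketch of the convention (class and parameter shapes are illustrative, not Synapse's actual signature):

from typing import Any, Optional

class Keyring:
    def __init__(
        self,
        server_name: str,
        *,
        # The `test_only_` prefix signals that production code must never
        # pass this argument; it exists purely as an injection point.
        test_only_key_fetchers: Optional[list[Any]] = None,
    ) -> None:
        self._fetchers = (
            test_only_key_fetchers
            if test_only_key_fetchers is not None
            else self._build_default_fetchers()
        )

    def _build_default_fetchers(self) -> list[Any]:
        return []  # real code would construct its store/server fetchers here

# In a test:
kr = Keyring("example.com", test_only_key_fetchers=[object()])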
Some files were not shown because too many files have changed in this diff.