Compare commits
2 Commits
master
...
anoa/print
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ab2122b364 | ||
|
|
23c0c475da |
@@ -25,6 +25,7 @@
|
||||
import argparse
|
||||
import os
|
||||
import subprocess
|
||||
from typing import Optional
|
||||
from zipfile import ZipFile
|
||||
|
||||
from packaging.tags import Tag
|
||||
@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
|
||||
return new_wheel_file
|
||||
|
||||
|
||||
def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
|
||||
def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
|
||||
"""Entry point"""
|
||||
|
||||
# Parse the wheel file name into its parts. Note that `parse_wheel_filename`
|
||||
|
||||
@@ -35,55 +35,46 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
|
||||
|
||||
# First calculate the various trial jobs.
|
||||
#
|
||||
# For PRs, we only run each type of test with the oldest and newest Python
|
||||
# version that's supported. The oldest version ensures we don't accidentally
|
||||
# introduce syntax or code that's too new, and the newest ensures we don't use
|
||||
# code that's been dropped in the latest supported Python version.
|
||||
# For PRs, we only run each type of test with the oldest Python version supported (which
|
||||
# is Python 3.10 right now)
|
||||
|
||||
trial_sqlite_tests = [
|
||||
{
|
||||
"python-version": "3.10",
|
||||
"database": "sqlite",
|
||||
"extras": "all",
|
||||
},
|
||||
{
|
||||
"python-version": "3.14",
|
||||
"database": "sqlite",
|
||||
"extras": "all",
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
if not IS_PR:
|
||||
# Otherwise, check all supported Python versions.
|
||||
#
|
||||
# Avoiding running all of these versions on every PR saves on CI time.
|
||||
trial_sqlite_tests.extend(
|
||||
{
|
||||
"python-version": version,
|
||||
"database": "sqlite",
|
||||
"extras": "all",
|
||||
}
|
||||
for version in ("3.11", "3.12", "3.13")
|
||||
for version in ("3.11", "3.12", "3.13", "3.14")
|
||||
)
|
||||
|
||||
# Only test postgres against the earliest and latest Python versions that we
|
||||
# support in order to save on CI time.
|
||||
trial_postgres_tests = [
|
||||
{
|
||||
"python-version": "3.10",
|
||||
"database": "postgres",
|
||||
"postgres-version": "14",
|
||||
"postgres-version": "13",
|
||||
"extras": "all",
|
||||
},
|
||||
{
|
||||
"python-version": "3.14",
|
||||
"database": "postgres",
|
||||
"postgres-version": "17",
|
||||
"extras": "all",
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
# Ensure that Synapse passes unit tests even with no extra dependencies installed.
|
||||
if not IS_PR:
|
||||
trial_postgres_tests.append(
|
||||
{
|
||||
"python-version": "3.14",
|
||||
"database": "postgres",
|
||||
"postgres-version": "17",
|
||||
"extras": "all",
|
||||
}
|
||||
)
|
||||
|
||||
trial_no_extra_tests = [
|
||||
{
|
||||
"python-version": "3.10",
|
||||
|
||||
@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
|
||||
# to select the lowest possible versions, rather than resorting to this sed script.
|
||||
|
||||
# Patch the project definitions in-place:
|
||||
# - `-E` use extended regex syntax.
|
||||
# - Don't modify the line that defines required Python versions.
|
||||
# - Replace all lower and tilde bounds with exact bounds.
|
||||
# - Replace all caret bounds with exact bounds.
|
||||
# - Delete all lines referring to psycopg2 - so no testing of postgres support.
|
||||
# - Replace all lower and tilde bounds with exact bounds
|
||||
# - Replace all caret bounds---but not the one that defines the supported Python version!
|
||||
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
|
||||
# - Use pyopenssl 17.0, which is the oldest version that works with
|
||||
# a `cryptography` compiled against OpenSSL 1.1.
|
||||
# - Omit systemd: we're not logging to journal here.
|
||||
|
||||
sed -i -E '
|
||||
/^\s*requires-python\s*=/b
|
||||
s/[~>]=/==/g
|
||||
s/\^/==/g
|
||||
/psycopg2/d
|
||||
s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
|
||||
/systemd/d
|
||||
' pyproject.toml
|
||||
sed -i \
|
||||
-e "s/[~>]=/==/g" \
|
||||
-e '/^python = "^/!s/\^/==/g' \
|
||||
-e "/psycopg2/d" \
|
||||
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
|
||||
-e '/systemd/d' \
|
||||
pyproject.toml
|
||||
|
||||
echo "::group::Patched pyproject.toml"
|
||||
cat pyproject.toml
|
||||
|
||||
@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
|
||||
# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
|
||||
9bb2eac71962970d02842bca441f4bcdbbf93a11
|
||||
|
||||
# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
|
||||
fc244bb592aa481faf28214a2e2ce3bb4e95d990
|
||||
|
||||
# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
|
||||
fcac7e0282b074d4bd3414d1c9c181e9701875d9
|
||||
|
||||
4
.github/workflows/docker.yml
vendored
4
.github/workflows/docker.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Extract version from pyproject.toml
|
||||
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
|
||||
@@ -123,7 +123,7 @@ jobs:
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
|
||||
- name: Calculate docker image tag
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
with:
|
||||
images: ${{ matrix.repository }}
|
||||
flavor: |
|
||||
|
||||
6
.github/workflows/docs-pr.yaml
vendored
6
.github/workflows/docs-pr.yaml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
name: GitHub Pages
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Fetch all history so that the schema_versions script works.
|
||||
fetch-depth: 0
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
mdbook-version: '0.4.17'
|
||||
|
||||
- name: Setup python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
name: Check links in documentation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup mdbook
|
||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
||||
|
||||
4
.github/workflows/docs.yaml
vendored
4
.github/workflows/docs.yaml
vendored
@@ -50,7 +50,7 @@ jobs:
|
||||
needs:
|
||||
- pre
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Fetch all history so that the schema_versions script works.
|
||||
fetch-depth: 0
|
||||
@@ -64,7 +64,7 @@ jobs:
|
||||
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
|
||||
|
||||
- name: Setup python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
|
||||
4
.github/workflows/fix_lint.yaml
vendored
4
.github/workflows/fix_lint.yaml
vendored
@@ -18,14 +18,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
components: clippy, rustfmt
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
|
||||
20
.github/workflows/latest_deps.yml
vendored
20
.github/workflows/latest_deps.yml
vendored
@@ -42,12 +42,12 @@ jobs:
|
||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||
# poetry-core versions), so we install with poetry.
|
||||
@@ -77,13 +77,13 @@ jobs:
|
||||
postgres-version: "14"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
@@ -93,7 +93,7 @@ jobs:
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install .[all,test]
|
||||
@@ -152,13 +152,13 @@ jobs:
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Ensure sytest runs `pip install`
|
||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
||||
@@ -202,14 +202,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out synapse codebase
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
with:
|
||||
cache-dependency-path: complement/go.sum
|
||||
go-version-file: complement/go.mod
|
||||
@@ -234,7 +234,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
4
.github/workflows/poetry_lockfile.yaml
vendored
4
.github/workflows/poetry_lockfile.yaml
vendored
@@ -16,8 +16,8 @@ jobs:
|
||||
name: "Check locked dependencies have sdists"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- run: pip install tomli
|
||||
|
||||
8
.github/workflows/push_complement_image.yml
vendored
8
.github/workflows/push_complement_image.yml
vendored
@@ -33,17 +33,17 @@ jobs:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout specific branch (debug build)
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
with:
|
||||
ref: ${{ inputs.branch }}
|
||||
- name: Checkout clean copy of develop (scheduled build)
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
if: github.event_name == 'schedule'
|
||||
with:
|
||||
ref: develop
|
||||
- name: Checkout clean copy of master (on-push)
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
if: github.event_name == 'push'
|
||||
with:
|
||||
ref: master
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Work out labels for complement image
|
||||
id: meta
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
with:
|
||||
images: ghcr.io/${{ github.repository }}/complement-synapse
|
||||
tags: |
|
||||
|
||||
35
.github/workflows/release-artifacts.yml
vendored
35
.github/workflows/release-artifacts.yml
vendored
@@ -27,8 +27,8 @@ jobs:
|
||||
name: "Calculate list of debian distros"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- id: set-distros
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: src
|
||||
|
||||
@@ -74,7 +74,7 @@ jobs:
|
||||
${{ runner.os }}-buildx-
|
||||
|
||||
- name: Set up python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
@@ -114,20 +114,27 @@ jobs:
|
||||
os:
|
||||
- ubuntu-24.04
|
||||
- ubuntu-24.04-arm
|
||||
- macos-14 # This uses arm64
|
||||
- macos-15-intel # This uses x86-64
|
||||
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
||||
# It is not read by the rest of the workflow.
|
||||
is_pr:
|
||||
- ${{ startsWith(github.ref, 'refs/pull/') }}
|
||||
|
||||
exclude:
|
||||
# Don't build macos wheels on PR CI.
|
||||
- is_pr: true
|
||||
os: "macos-15-intel"
|
||||
- is_pr: true
|
||||
os: "macos-14"
|
||||
# Don't build aarch64 wheels on PR CI.
|
||||
- is_pr: true
|
||||
os: "ubuntu-24.04-arm"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
||||
# here, because `python` on osx points to Python 2.7.
|
||||
@@ -143,14 +150,12 @@ jobs:
|
||||
- name: Build wheels
|
||||
run: python -m cibuildwheel --output-dir wheelhouse
|
||||
env:
|
||||
# The platforms that we build for are determined by the
|
||||
# `tool.cibuildwheel.skip` option in `pyproject.toml`.
|
||||
|
||||
# We skip testing wheels for the following platforms in CI:
|
||||
# Skip testing for platforms which various libraries don't have wheels
|
||||
# for, and so need extra build deps.
|
||||
#
|
||||
# pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
|
||||
# musl: (TODO: investigate).
|
||||
CIBW_TEST_SKIP: pp3*-* *musl*
|
||||
# cp39-*: Python 3.9 is EOL.
|
||||
# cp3??t-*: Free-threaded builds are not currently supported.
|
||||
CIBW_TEST_SKIP: pp3*-* cp39-* cp3??t-* *i686* *musl*
|
||||
|
||||
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
|
||||
with:
|
||||
@@ -163,8 +168,8 @@ jobs:
|
||||
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.10"
|
||||
|
||||
|
||||
8
.github/workflows/schema.yaml
vendored
8
.github/workflows/schema.yaml
vendored
@@ -14,8 +14,8 @@ jobs:
|
||||
name: Ensure Synapse config schema is valid
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- name: Install check-jsonschema
|
||||
@@ -40,8 +40,8 @@ jobs:
|
||||
name: Ensure generated documentation is up-to-date
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- name: Install PyYAML
|
||||
|
||||
89
.github/workflows/tests.yml
vendored
89
.github/workflows/tests.yml
vendored
@@ -86,12 +86,12 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
@@ -106,18 +106,18 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
|
||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||
- run: scripts-dev/check_schema_delta.py --force-colors
|
||||
|
||||
check-lockfile:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: .ci/scripts/check_lockfile.py
|
||||
@@ -129,7 +129,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
@@ -151,13 +151,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
@@ -187,20 +187,19 @@ jobs:
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Check line endings
|
||||
run: scripts-dev/check_line_terminators.sh
|
||||
|
||||
lint-newsfile:
|
||||
# Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
|
||||
if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
|
||||
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||
@@ -214,14 +213,14 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
components: clippy
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: cargo clippy -- -D warnings
|
||||
|
||||
@@ -233,14 +232,14 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: nightly-2025-04-23
|
||||
components: clippy
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: cargo clippy --all-features -- -D warnings
|
||||
|
||||
@@ -251,13 +250,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Setup Poetry
|
||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
@@ -287,7 +286,7 @@ jobs:
|
||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
@@ -296,7 +295,7 @@ jobs:
|
||||
# `.rustfmt.toml`.
|
||||
toolchain: nightly-2025-04-23
|
||||
components: rustfmt
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: cargo fmt --check
|
||||
|
||||
@@ -307,8 +306,8 @@ jobs:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: "pip install rstcheck"
|
||||
@@ -355,8 +354,8 @@ jobs:
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- id: get-matrix
|
||||
@@ -376,7 +375,7 @@ jobs:
|
||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||
if: ${{ matrix.job.postgres-version }}
|
||||
@@ -394,7 +393,7 @@ jobs:
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
@@ -432,13 +431,13 @@ jobs:
|
||||
- changes
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
# There aren't wheels for some of the older deps, so we need to install
|
||||
# their build dependencies
|
||||
@@ -447,7 +446,7 @@ jobs:
|
||||
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||
|
||||
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
@@ -497,7 +496,7 @@ jobs:
|
||||
extras: ["all"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
@@ -547,7 +546,7 @@ jobs:
|
||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
|
||||
@@ -555,7 +554,7 @@ jobs:
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
@@ -594,7 +593,7 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
@@ -618,7 +617,7 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- python-version: "3.10"
|
||||
postgres-version: "14"
|
||||
postgres-version: "13"
|
||||
|
||||
- python-version: "3.14"
|
||||
postgres-version: "17"
|
||||
@@ -638,7 +637,7 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Add PostgreSQL apt repository
|
||||
# We need a version of pg_dump that can handle the version of
|
||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
||||
@@ -693,7 +692,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout synapse codebase
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
@@ -701,12 +700,12 @@ jobs:
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
with:
|
||||
cache-dependency-path: complement/go.sum
|
||||
go-version-file: complement/go.mod
|
||||
@@ -729,13 +728,13 @@ jobs:
|
||||
- changes
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: cargo test
|
||||
|
||||
@@ -749,13 +748,13 @@ jobs:
|
||||
- changes
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: nightly-2022-12-01
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- run: cargo bench --no-run
|
||||
|
||||
|
||||
2
.github/workflows/triage_labelled.yml
vendored
2
.github/workflows/triage_labelled.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
# This field is case-sensitive.
|
||||
TARGET_STATUS: Needs info
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
# Only clone the script file we care about, instead of the whole repo.
|
||||
sparse-checkout: .ci/scripts/triage_labelled_issue.sh
|
||||
|
||||
18
.github/workflows/twisted_trunk.yml
vendored
18
.github/workflows/twisted_trunk.yml
vendored
@@ -43,13 +43,13 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
@@ -70,14 +70,14 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
||||
with:
|
||||
@@ -117,13 +117,13 @@ jobs:
|
||||
- ${{ github.workspace }}:/src
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
|
||||
with:
|
||||
toolchain: ${{ env.RUST_VERSION }}
|
||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
|
||||
- name: Patch dependencies
|
||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||
@@ -175,14 +175,14 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Run actions/checkout@v4 for synapse
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
- name: Prepare Complement's Prerequisites
|
||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||
|
||||
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
|
||||
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
|
||||
with:
|
||||
cache-dependency-path: complement/go.sum
|
||||
go-version-file: complement/go.mod
|
||||
@@ -217,7 +217,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
195
CHANGES.md
195
CHANGES.md
@@ -1,180 +1,4 @@
|
||||
# Synapse 1.144.0 (2025-12-09)
|
||||
|
||||
## Deprecation of MacOS Python wheels
|
||||
|
||||
The team has decided to deprecate and stop publishing python wheels for MacOS.
|
||||
Synapse docker images will continue to work on MacOS, as will building Synapse
|
||||
from source (though note this requires a Rust compiler).
|
||||
|
||||
## Unstable mutual rooms endpoint is now behind an experimental feature flag
|
||||
|
||||
Admins using the unstable [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) endpoint (`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`),
|
||||
please check [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11440) as this release contains changes
|
||||
that disable that endpoint by default.
|
||||
|
||||
|
||||
|
||||
No significant changes since 1.144.0rc1.
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.144.0rc1 (2025-12-02)
|
||||
|
||||
Admins using the unstable [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) endpoint (`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`), please check [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11440) as this release contains changes that disable that endpoint by default.
|
||||
|
||||
## Features
|
||||
|
||||
- Add experimentatal implememntation of [MSC4380](https://github.com/matrix-org/matrix-spec-proposals/pull/4380) (invite blocking). ([\#19203](https://github.com/element-hq/synapse/issues/19203))
|
||||
- Allow restarting delayed event timeouts on workers. ([\#19207](https://github.com/element-hq/synapse/issues/19207))
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix a bug in the database function for fetching state deltas that could result in unnecessarily long query times. ([\#18960](https://github.com/element-hq/synapse/issues/18960))
|
||||
- Fix v12 rooms when running with `use_frozen_dicts: True`. ([\#19235](https://github.com/element-hq/synapse/issues/19235))
|
||||
- Fix bug where invalid `canonical_alias` content would return 500 instead of 400. ([\#19240](https://github.com/element-hq/synapse/issues/19240))
|
||||
- Fix bug where `Duration` was logged incorrectly. ([\#19267](https://github.com/element-hq/synapse/issues/19267))
|
||||
|
||||
## Improved Documentation
|
||||
|
||||
- Document in the `--config-path` help how multiple files are merged - by merging them shallowly. ([\#19243](https://github.com/element-hq/synapse/issues/19243))
|
||||
|
||||
## Deprecations and Removals
|
||||
|
||||
- Stop building release wheels for MacOS. ([\#19225](https://github.com/element-hq/synapse/issues/19225))
|
||||
|
||||
## Internal Changes
|
||||
|
||||
- Improve event filtering for Simplified Sliding Sync. ([\#17782](https://github.com/element-hq/synapse/issues/17782))
|
||||
- Export `SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES` environment variable from `scripts-dev/complement.sh`. ([\#19208](https://github.com/element-hq/synapse/issues/19208))
|
||||
- Refactor `scripts-dev/complement.sh` logic to avoid `exit` to facilitate being able to source it from other scripts (composable). ([\#19209](https://github.com/element-hq/synapse/issues/19209))
|
||||
- Expire sliding sync connections that are too old or have too much pending data. ([\#19211](https://github.com/element-hq/synapse/issues/19211))
|
||||
- Require an experimental feature flag to be enabled in order for the unstable [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) endpoint (`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`) to be available. ([\#19219](https://github.com/element-hq/synapse/issues/19219))
|
||||
- Prevent changelog check CI running on @dependabot's PRs even when a human has modified the branch. ([\#19220](https://github.com/element-hq/synapse/issues/19220))
|
||||
- Auto-fix trailing spaces in multi-line strings and comments when running the lint script. ([\#19221](https://github.com/element-hq/synapse/issues/19221))
|
||||
- Move towards using a dedicated `Duration` type. ([\#19223](https://github.com/element-hq/synapse/issues/19223), [\#19229](https://github.com/element-hq/synapse/issues/19229))
|
||||
- Improve robustness of the SQL schema linting in CI. ([\#19224](https://github.com/element-hq/synapse/issues/19224))
|
||||
- Add log to determine whether clients are using `/messages` as expected. ([\#19226](https://github.com/element-hq/synapse/issues/19226))
|
||||
- Simplify README and add ESS Getting started section. ([\#19228](https://github.com/element-hq/synapse/issues/19228), [\#19259](https://github.com/element-hq/synapse/issues/19259))
|
||||
- Add a unit test for ensuring associated refresh tokens are erased when a device is deleted. ([\#19230](https://github.com/element-hq/synapse/issues/19230))
|
||||
- Prompt user to consider adding future deprecations to the changelog in release script. ([\#19239](https://github.com/element-hq/synapse/issues/19239))
|
||||
- Fix check of the Rust compiled code being outdated when using source checkout and `.egg-info`. ([\#19251](https://github.com/element-hq/synapse/issues/19251))
|
||||
- Stop building macos wheels in CI pipeline. ([\#19263](https://github.com/element-hq/synapse/issues/19263))
|
||||
|
||||
|
||||
|
||||
### Updates to locked dependencies
|
||||
|
||||
* Bump Swatinem/rust-cache from 2.8.1 to 2.8.2. ([\#19244](https://github.com/element-hq/synapse/issues/19244))
|
||||
* Bump actions/checkout from 5.0.0 to 6.0.0. ([\#19213](https://github.com/element-hq/synapse/issues/19213))
|
||||
* Bump actions/setup-go from 6.0.0 to 6.1.0. ([\#19214](https://github.com/element-hq/synapse/issues/19214))
|
||||
* Bump actions/setup-python from 6.0.0 to 6.1.0. ([\#19245](https://github.com/element-hq/synapse/issues/19245))
|
||||
* Bump attrs from 25.3.0 to 25.4.0. ([\#19215](https://github.com/element-hq/synapse/issues/19215))
|
||||
* Bump docker/metadata-action from 5.9.0 to 5.10.0. ([\#19246](https://github.com/element-hq/synapse/issues/19246))
|
||||
* Bump http from 1.3.1 to 1.4.0. ([\#19249](https://github.com/element-hq/synapse/issues/19249))
|
||||
* Bump pydantic from 2.12.4 to 2.12.5. ([\#19250](https://github.com/element-hq/synapse/issues/19250))
|
||||
* Bump pyopenssl from 25.1.0 to 25.3.0. ([\#19248](https://github.com/element-hq/synapse/issues/19248))
|
||||
* Bump rpds-py from 0.28.0 to 0.29.0. ([\#19216](https://github.com/element-hq/synapse/issues/19216))
|
||||
* Bump rpds-py from 0.29.0 to 0.30.0. ([\#19247](https://github.com/element-hq/synapse/issues/19247))
|
||||
* Bump sentry-sdk from 2.44.0 to 2.46.0. ([\#19218](https://github.com/element-hq/synapse/issues/19218))
|
||||
* Bump types-bleach from 6.2.0.20250809 to 6.3.0.20251115. ([\#19217](https://github.com/element-hq/synapse/issues/19217))
|
||||
* Bump types-jsonschema from 4.25.1.20250822 to 4.25.1.20251009. ([\#19252](https://github.com/element-hq/synapse/issues/19252))
|
||||
|
||||
# Synapse 1.143.0 (2025-11-25)
|
||||
|
||||
## Dropping support for PostgreSQL 13
|
||||
|
||||
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
|
||||
support for PostgreSQL 13, as it is no longer supported upstream.
|
||||
This release of Synapse requires PostgreSQL 14+.
|
||||
|
||||
No significant changes since 1.143.0rc2.
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.143.0rc2 (2025-11-18)
|
||||
|
||||
## Dropping support for PostgreSQL 13
|
||||
|
||||
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
|
||||
support for PostgreSQL 13, as it is no longer supported upstream.
|
||||
This release of Synapse requires PostgreSQL 14+.
|
||||
|
||||
|
||||
## Internal Changes
|
||||
|
||||
- Fixes docker image creation in the release workflow.
|
||||
|
||||
|
||||
|
||||
# Synapse 1.143.0rc1 (2025-11-18)
|
||||
|
||||
## Features
|
||||
|
||||
- Support multiple config files in `register_new_matrix_user`. ([\#18784](https://github.com/element-hq/synapse/issues/18784))
|
||||
- Remove authentication from `POST /_matrix/client/v1/delayed_events`, and allow calling this endpoint with the update action to take (`send`/`cancel`/`restart`) in the request path instead of the body. ([\#19152](https://github.com/element-hq/synapse/issues/19152))
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fixed a longstanding bug where background updates were only run on the `main` database. ([\#19181](https://github.com/element-hq/synapse/issues/19181))
|
||||
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
|
||||
- Fix the SQLite-to-PostgreSQL migration script to correctly migrate a boolean column in the `delayed_events` table. ([\#19155](https://github.com/element-hq/synapse/issues/19155))
|
||||
|
||||
## Improved Documentation
|
||||
|
||||
- Improve documentation around streams, particularly ID generators and adding new streams. ([\#18943](https://github.com/element-hq/synapse/issues/18943))
|
||||
|
||||
## Deprecations and Removals
|
||||
|
||||
- Remove support for PostgreSQL 13. ([\#19170](https://github.com/element-hq/synapse/issues/19170))
|
||||
|
||||
## Internal Changes
|
||||
|
||||
- Provide additional servers with federation room directory results. ([\#18970](https://github.com/element-hq/synapse/issues/18970))
|
||||
- Add a shortcut return when there are no events to purge. ([\#19093](https://github.com/element-hq/synapse/issues/19093))
|
||||
- Write union types as `X | Y` where possible, as per PEP 604, added in Python 3.10. ([\#19111](https://github.com/element-hq/synapse/issues/19111))
|
||||
- Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`. ([\#19133](https://github.com/element-hq/synapse/issues/19133), [\#19168](https://github.com/element-hq/synapse/issues/19168))
|
||||
- Run trial tests on Python 3.14 for PRs. ([\#19135](https://github.com/element-hq/synapse/issues/19135))
|
||||
- Update `pyproject.toml` project metadata to be compatible with standard Python packaging tooling. ([\#19137](https://github.com/element-hq/synapse/issues/19137))
|
||||
- Minor speed up of processing of inbound replication. ([\#19138](https://github.com/element-hq/synapse/issues/19138), [\#19145](https://github.com/element-hq/synapse/issues/19145), [\#19146](https://github.com/element-hq/synapse/issues/19146))
|
||||
- Ignore recent Python language refactors from git blame (`.git-blame-ignore-revs`). ([\#19150](https://github.com/element-hq/synapse/issues/19150))
|
||||
- Bump lower bounds of dependencies `parameterized` to `0.9.0` and `idna` to `3.3` as those are the first to advertise support for Python 3.10. ([\#19167](https://github.com/element-hq/synapse/issues/19167))
|
||||
- Point out which event caused the exception when checking [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) redactions. ([\#19169](https://github.com/element-hq/synapse/issues/19169))
|
||||
- Restore printing `sentinel` for the log record `request` when no logcontext is active. ([\#19172](https://github.com/element-hq/synapse/issues/19172))
|
||||
- Add debug logs to track `Clock` utilities. ([\#19173](https://github.com/element-hq/synapse/issues/19173))
|
||||
- Remove explicit python version skips in `cibuildwheel` config as it's no longer required after [#19137](https://github.com/element-hq/synapse/pull/19137). ([\#19177](https://github.com/element-hq/synapse/issues/19177))
|
||||
- Fix potential lost logcontext when `PerDestinationQueue.shutdown(...)` is called. ([\#19178](https://github.com/element-hq/synapse/issues/19178))
|
||||
- Fix bad deferred logcontext handling across the codebase. ([\#19180](https://github.com/element-hq/synapse/issues/19180))
|
||||
|
||||
|
||||
|
||||
### Updates to locked dependencies
|
||||
|
||||
* Bump bytes from 1.10.1 to 1.11.0. ([\#19193](https://github.com/element-hq/synapse/issues/19193))
|
||||
* Bump click from 8.1.8 to 8.3.1. ([\#19195](https://github.com/element-hq/synapse/issues/19195))
|
||||
* Bump cryptography from 43.0.3 to 45.0.7. ([\#19159](https://github.com/element-hq/synapse/issues/19159))
|
||||
* Bump docker/metadata-action from 5.8.0 to 5.9.0. ([\#19161](https://github.com/element-hq/synapse/issues/19161))
|
||||
* Bump pydantic from 2.12.3 to 2.12.4. ([\#19158](https://github.com/element-hq/synapse/issues/19158))
|
||||
* Bump pyo3-log from 0.13.1 to 0.13.2. ([\#19156](https://github.com/element-hq/synapse/issues/19156))
|
||||
* Bump ruff from 0.14.3 to 0.14.5. ([\#19196](https://github.com/element-hq/synapse/issues/19196))
|
||||
* Bump sentry-sdk from 2.34.1 to 2.43.0. ([\#19157](https://github.com/element-hq/synapse/issues/19157))
|
||||
* Bump sentry-sdk from 2.43.0 to 2.44.0. ([\#19197](https://github.com/element-hq/synapse/issues/19197))
|
||||
* Bump tomli from 2.2.1 to 2.3.0. ([\#19194](https://github.com/element-hq/synapse/issues/19194))
|
||||
* Bump types-netaddr from 1.3.0.20240530 to 1.3.0.20251108. ([\#19160](https://github.com/element-hq/synapse/issues/19160))
|
||||
|
||||
|
||||
|
||||
# Synapse 1.142.1 (2025-11-18)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.142.0 (2025-11-11)
|
||||
# Synapse 1.142.0rc3 (2025-11-04)
|
||||
|
||||
## Dropped support for Python 3.9
|
||||
|
||||
@@ -205,23 +29,6 @@ of these wheels downstream, please reach out to us in
|
||||
[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
|
||||
love to hear from you!
|
||||
|
||||
## Internal Changes
|
||||
|
||||
- Properly stop building wheels for Python 3.9 and free-threaded CPython. ([\#19154](https://github.com/element-hq/synapse/issues/19154))
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.142.0rc4 (2025-11-07)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144))
|
||||
|
||||
|
||||
|
||||
|
||||
# Synapse 1.142.0rc3 (2025-11-04)
|
||||
|
||||
## Internal Changes
|
||||
|
||||
|
||||
13
Cargo.lock
generated
13
Cargo.lock
generated
@@ -73,9 +73,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.11.0"
|
||||
version = "1.10.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
|
||||
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
@@ -374,11 +374,12 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||
|
||||
[[package]]
|
||||
name = "http"
|
||||
version = "1.4.0"
|
||||
version = "1.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
|
||||
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"fnv",
|
||||
"itoa",
|
||||
]
|
||||
|
||||
@@ -850,9 +851,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pyo3-log"
|
||||
version = "0.13.2"
|
||||
version = "0.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f8bae9ad5ba08b0b0ed2bb9c2bdbaeccc69cafca96d78cf0fbcea0d45d122bb"
|
||||
checksum = "d359e20231345f21a3b5b6aea7e73f4dc97e1712ef3bfe2d88997ac6a308d784"
|
||||
dependencies = [
|
||||
"arc-swap",
|
||||
"log",
|
||||
|
||||
242
README.rst
242
README.rst
@@ -7,48 +7,170 @@

Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for secure and
interoperable real-time communications. You can directly run and manage the
source code in this repository, available under an AGPL license (or
alternatively under a commercial license from Element).
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real-time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element).
There is no support provided by Element unless you have a
subscription from Element.

There is no support provided by Element unless you have a subscription from
Element.
Subscription
============

🚀 Getting started
==================
For those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long-Term Support and
SLAs. ESS supports any Matrix-compatible client.

This component is developed and maintained by `Element <https://element.io>`_.
It gets shipped as part of the **Element Server Suite (ESS)** which provides the
official means of deployment.
.. contents::

ESS is a Matrix distribution from Element with focus on quality and ease of use.
It ships a full Matrix stack tailored to the respective use case.
🛠️ Installation and configuration
==================================

There are three editions of ESS:
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
<https://element-hq.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.

- `ESS Community <https://github.com/element-hq/ess-helm>`_ - the free Matrix
  distribution from Element tailored to small-/mid-scale, non-commercial
  community use cases
- `ESS Pro <https://element.io/server-suite>`_ - the commercial Matrix
  distribution from Element for professional use
- `ESS TI-M <https://element.io/server-suite/ti-messenger>`_ - a special version
  of ESS Pro focused on the requirements of TI-Messenger Pro and ePA as
  specified by the German National Digital Health Agency Gematik
.. _federation:

Synapse has a variety of `config options
<https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
which can be used to customise its behaviour after installation.
There are additional details on how to `configure Synapse for federation here
<https://element-hq.github.io/synapse/latest/federate.html>`_.
.. _reverse-proxy:

Using a reverse proxy with Synapse
----------------------------------

It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
For information on configuring one, see `the reverse proxy docs
<https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_.

Upgrading an existing Synapse
-----------------------------

The instructions for upgrading Synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of Synapse.

.. _the upgrade notes: https://element-hq.github.io/synapse/develop/upgrade.html


🛠️ Standalone installation and configuration
============================================
Platform dependencies
---------------------

The Synapse documentation describes `options for installing Synapse standalone
<https://element-hq.github.io/synapse/latest/setup/installation.html>`_. See
below for more useful documentation links.
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`deprecation policy <https://element-hq.github.io/synapse/latest/deprecation_policy.html>`_
for more details.

- `Synapse configuration options <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
- `Synapse configuration for federation <https://element-hq.github.io/synapse/latest/federate.html>`_
- `Using a reverse proxy with Synapse <https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_
- `Upgrading Synapse <https://element-hq.github.io/synapse/develop/upgrade.html>`_
Security note
-------------

Matrix serves raw, user-supplied data in some APIs -- specifically the `content
repository endpoints`_.

.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid

Whilst we make a reasonable effort to mitigate against XSS attacks (for
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
domain hosting other web applications. This especially applies to sharing
the domain with Matrix web clients and other sensitive applications like
webmail. See
https://developer.github.com/changes/2014-04-25-user-content-security for more
information.

.. _CSP: https://github.com/matrix-org/synapse/pull/1021

Ideally, the homeserver should not simply be on a different subdomain, but on
a completely different `registered domain`_ (also known as top-level site or
eTLD+1). This is because `some attacks`_ are still possible as long as the two
applications share the same registered domain.

.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3

.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie

To illustrate this with an example, if your Element Web or other sensitive web
application is hosted on ``A.example1.com``, you should ideally host Synapse on
``example2.com``. Some amount of protection is offered by hosting on
``B.example1.com`` instead, so this is also acceptable in some scenarios.
However, you should *not* host your Synapse on ``A.example1.com``.

Note that all of the above refers exclusively to the domain used in Synapse's
``public_baseurl`` setting. In particular, it has no bearing on the domain
mentioned in MXIDs hosted on that server.

Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.
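The "different registered domain" rule above can be sanity-checked mechanically. The sketch below is a toy illustration of the comparison the text describes; the hostnames come from the README's own example, the helper names are made up, and a real check should consult the Public Suffix List (for instance via the third-party ``tldextract`` package) rather than the deliberately tiny suffix set used here.

```python
# Toy check of the "different registered domain" advice above.
# Real code should use the Public Suffix List; this suffix set only
# covers the README's own example hostnames.
PUBLIC_SUFFIXES = {"com"}

def registered_domain(hostname: str) -> str:
    """Return the eTLD+1 ("registered domain") of a hostname."""
    labels = hostname.lower().split(".")
    # Walk from the right until we step past the public suffix.
    for i in range(len(labels) - 1, -1, -1):
        if ".".join(labels[i:]) not in PUBLIC_SUFFIXES:
            return ".".join(labels[i:])
    return hostname

def is_safe_pairing(client_host: str, synapse_host: str) -> bool:
    """True if the two hosts do not share a registered domain."""
    return registered_domain(client_host) != registered_domain(synapse_host)

assert not is_safe_pairing("A.example1.com", "B.example1.com")  # same eTLD+1
assert is_safe_pairing("A.example1.com", "example2.com")        # different eTLD+1
```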
🧪 Testing a new installation
=============================

The easiest way to try out your new Synapse installation is by connecting to it
from a web client.

Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.

An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.

If all goes well you should at least be able to log in, create a room, and
start sending messages.
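Besides logging in from a web client, you can also check that the server is reachable at all from the command line. A minimal sketch, assuming the stock ``https://<server_name>:8448`` listener mentioned above (adjust the URL for your deployment) and using only the standard library, queries the unauthenticated client versions endpoint:

```python
# Minimal reachability check for a new Synapse install: fetch the
# unauthenticated /_matrix/client/versions endpoint and print the spec
# versions the server advertises. Adjust BASE_URL for your deployment.
import json
import urllib.request

BASE_URL = "https://example.com:8448"  # or https://example.com behind a reverse proxy

with urllib.request.urlopen(f"{BASE_URL}/_matrix/client/versions") as resp:
    body = json.load(resp)

print("Server is up; supported spec versions:", body.get("versions"))
```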
.. _`client-user-reg`:

Registering a new user from a client
------------------------------------

By default, registration of new users via Matrix clients is disabled. To enable
it:

1. In the
   `registration config section <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
   set ``enable_registration: true`` in ``homeserver.yaml``.
2. Then **either**:

   a. set up a `CAPTCHA <https://element-hq.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
   b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.

We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.

Your new Matrix ID will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account in the form of::

  @localpart:my.domain.name

(pronounced "at localpart on my dot domain dot name").

As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'Username' box.
🎯 Troubleshooting and support
==============================

@@ -60,7 +182,7 @@ Enterprise quality support for Synapse including SLAs is available as part of an
`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.

If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
and access the `Element product documentation <https://docs.element.io>`_.
and access the `knowledge base <https://ems-docs.element.io>`_.

🤝 Community support
--------------------

@@ -79,6 +201,35 @@ issues for support requests, only for bug reports and feature requests.
.. |docs| replace:: ``docs``
.. _docs: docs

🪪 Identity Servers
===================

Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.

**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**

This process is highly security-sensitive, as there is an obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
the role of managing trusted identity in the Matrix ecosystem is farmed out to
a cluster of known trusted ecosystem partners, who run 'Matrix Identity
Servers' such as `Sydent <https://github.com/matrix-org/sydent>`_, whose role
is purely to authenticate and track 3PID logins and publish end-user public
keys.

You can host your own copy of Sydent, but this will prevent you reaching other
users in the Matrix ecosystem via their email address, and prevent them finding
you. We therefore recommend that you use one of the centralised identity servers
at ``https://matrix.org`` or ``https://vector.im`` for now.

To reiterate: the Identity server will only be used if you choose to associate
an email address with your account, or send an invite to another user via their
email address.


🛠️ Development
==============

@@ -101,29 +252,20 @@ Alongside all that, join our developer community on Matrix:
Copyright and Licensing
=======================

| Copyright 2014–2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017–2025 New Vector Ltd
| Copyright 2025 Element Creations Ltd
| Copyright 2014-2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017-2025 New Vector Ltd

This software is dual-licensed by Element Creations Ltd (Element). It can be
used either:
This software is dual-licensed by New Vector Ltd (Element). It can be used either:

(1) for free under the terms of the GNU Affero General Public License (as
    published by the Free Software Foundation, either version 3 of the License,
    or (at your option) any later version); OR
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR

(2) under the terms of a paid-for Element Commercial License agreement between
    you and Element (the terms of which may vary depending on what you and
    Element have agreed to).
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).

Unless required by applicable law or agreed to in writing, software distributed
under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the
specific language governing permissions and limitations under the Licenses.
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.

Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase
an Element commercial license for this software.
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.


.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
||||
@@ -33,6 +33,7 @@ import sys
|
||||
import time
|
||||
import urllib
|
||||
from http import TwistedHttpClient
|
||||
from typing import Optional
|
||||
|
||||
import urlparse
|
||||
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
|
||||
@@ -725,7 +726,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
method,
|
||||
path,
|
||||
data=None,
|
||||
query_params: dict | None = None,
|
||||
query_params: Optional[dict] = None,
|
||||
alt_text=None,
|
||||
):
|
||||
"""Runs an HTTP request and pretty prints the output.
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
import json
|
||||
import urllib
|
||||
from pprint import pformat
|
||||
from typing import Optional
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.web.client import Agent, readBody
|
||||
@@ -89,7 +90,7 @@ class TwistedHttpClient(HttpClient):
|
||||
body = yield readBody(response)
|
||||
return json.loads(body)
|
||||
|
||||
def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
|
||||
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
||||
"""Wrapper of _create_request to issue a PUT request"""
|
||||
headers_dict = headers_dict or {}
|
||||
|
||||
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
|
||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||
)
|
||||
|
||||
def _create_get_request(self, url, headers_dict: dict | None = None):
|
||||
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
|
||||
"""Wrapper of _create_request to issue a GET request"""
|
||||
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
||||
|
||||
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
|
||||
data=None,
|
||||
qparams=None,
|
||||
jsonreq=True,
|
||||
headers: dict | None = None,
|
||||
headers: Optional[dict] = None,
|
||||
):
|
||||
headers = headers or {}
|
||||
|
||||
@@ -137,7 +138,7 @@ class TwistedHttpClient(HttpClient):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _create_request(
|
||||
self, method, url, producer=None, headers_dict: dict | None = None
|
||||
self, method, url, producer=None, headers_dict: Optional[dict] = None
|
||||
):
|
||||
"""Creates and sends a request to the given url"""
|
||||
headers_dict = headers_dict or {}
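All of the hunks above (and many of the documentation hunks below) replace `X | None` annotations with `typing.Optional[X]`. As an aside, the two spellings denote the same type; the practical difference is that the `X | None` syntax only parses on Python 3.10+, while `Optional` works everywhere. A small self-contained sketch:

```python
# `Optional[X]` and `X | None` denote the same type; Optional is simply the
# spelling that also parses on interpreters older than Python 3.10.
from typing import Optional

def greet_old(name: Optional[str] = None) -> str:
    return f"hello {name or 'world'}"

def greet_new(name: str | None = None) -> str:  # needs Python 3.10+ syntax
    return f"hello {name or 'world'}"

assert greet_old() == greet_new() == "hello world"
assert Optional[str] == (str | None)  # per PEP 604, the two compare equal
```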
|
||||
|
||||
@@ -2166,10 +2166,10 @@
|
||||
"datasource": {
|
||||
"uid": "${DS_PROMETHEUS}"
|
||||
},
|
||||
"expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{origin_type}}",
|
||||
"legendFormat": "{{type}}",
|
||||
"refId": "D"
|
||||
}
|
||||
],
|
||||
@@ -2254,7 +2254,7 @@
|
||||
"datasource": {
|
||||
"uid": "${DS_PROMETHEUS}"
|
||||
},
|
||||
"expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
|
||||
"expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"format": "time_series",
|
||||
"instant": false,
|
||||
"intervalFactor": 2,
|
||||
@@ -2294,6 +2294,99 @@
|
||||
"align": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {
|
||||
"irc-freenode (local)": "#EAB839"
|
||||
},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": {
|
||||
"uid": "${DS_PROMETHEUS}"
|
||||
},
|
||||
"decimals": 1,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 44
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 44,
|
||||
"legend": {
|
||||
"alignAsTable": true,
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": true,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"alertThreshold": true
|
||||
},
|
||||
"percentage": false,
|
||||
"pluginVersion": "9.2.2",
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"uid": "${DS_PROMETHEUS}"
|
||||
},
|
||||
"expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 2,
|
||||
"legendFormat": "{{origin_entity}} ({{origin_type}})",
|
||||
"refId": "A",
|
||||
"step": 20
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeRegions": [],
|
||||
"title": "Events/s by Origin",
|
||||
"tooltip": {
|
||||
"shared": false,
|
||||
"sort": 2,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"mode": "time",
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "hertz",
|
||||
"logBase": 1,
|
||||
"min": "0",
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
|
||||
@@ -44,3 +44,31 @@ groups:
|
||||
###
|
||||
### End of 'Prometheus Console Only' rules block
|
||||
###
|
||||
|
||||
|
||||
###
|
||||
### Grafana Only
|
||||
### The following rules are only needed if you use the Grafana dashboard
|
||||
### in contrib/grafana/synapse.json
|
||||
###
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
|
||||
labels:
|
||||
type: remote
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: local
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: bridges
|
||||
|
||||
- record: synapse_storage_events_persisted_by_event_type
|
||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
|
||||
|
||||
- record: synapse_storage_events_persisted_by_origin
|
||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
|
||||
###
|
||||
### End of 'Grafana Only' rules block
|
||||
###
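The recording rules above all aggregate away labels of a single underlying counter, `synapse_storage_events_persisted_events_sep_total`. For readers unfamiliar with how such a metric is produced, here is a hedged sketch using the `prometheus_client` library; the metric name and label names are taken from the rules above, but the helper and call site are hypothetical rather than Synapse's actual code.

```python
# Hypothetical sketch of the counter the recording rules above aggregate.
# prometheus_client appends the "_total" suffix to counters on exposition,
# so this is exported as synapse_storage_events_persisted_events_sep_total.
from prometheus_client import Counter

persisted_events = Counter(
    "synapse_storage_events_persisted_events_sep",
    "Number of events persisted, split by type and origin",
    ["type", "origin_type", "origin_entity"],
)

def record_persisted_event(event_type: str, origin_type: str, origin_entity: str) -> None:
    # e.g. record_persisted_event("m.room.message", "local", "*client*")
    persisted_events.labels(event_type, origin_type, origin_entity).inc()
```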
|
||||
|
||||
48 debian/changelog vendored
@@ -1,51 +1,3 @@
|
||||
matrix-synapse-py3 (1.144.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.144.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 09 Dec 2025 08:30:40 -0700
|
||||
|
||||
matrix-synapse-py3 (1.144.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.144.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Dec 2025 09:11:19 -0700
|
||||
|
||||
matrix-synapse-py3 (1.143.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.143.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Nov 2025 08:44:56 -0700
|
||||
|
||||
matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.143.0rc2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 17:36:08 -0700
|
||||
|
||||
matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.143.0rc1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 13:08:39 -0700
|
||||
|
||||
matrix-synapse-py3 (1.142.1) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.142.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 12:25:23 -0700
|
||||
|
||||
matrix-synapse-py3 (1.142.0) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.142.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Nov 2025 09:45:51 +0000
|
||||
|
||||
matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.142.0rc4.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Nov 2025 10:54:42 +0000
|
||||
|
||||
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
|
||||
|
||||
* New Synapse release 1.142.0rc3.
|
||||
|
||||
@@ -11,7 +11,7 @@ ARG SYNAPSE_VERSION=latest
|
||||
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
||||
ARG DEBIAN_VERSION=trixie
|
||||
|
||||
FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
|
||||
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
|
||||
|
||||
FROM $FROM
|
||||
# First of all, we copy postgres server from the official postgres image,
|
||||
@@ -26,7 +26,7 @@ RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
||||
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
|
||||
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
|
||||
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
|
||||
ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
|
||||
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
||||
ENV PGDATA=/var/lib/postgresql/data
|
||||
|
||||
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
|
||||
|
||||
@@ -68,6 +68,7 @@ from typing import (
|
||||
Mapping,
|
||||
MutableMapping,
|
||||
NoReturn,
|
||||
Optional,
|
||||
SupportsIndex,
|
||||
)
|
||||
|
||||
@@ -196,7 +197,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
|
||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
|
||||
"^/_matrix/client/unstable/org.matrix.msc4140/delayed_events(/.*/restart)?$",
|
||||
],
|
||||
"shared_extra_conf": {},
|
||||
"worker_extra_conf": "",
|
||||
@@ -468,7 +468,7 @@ def add_worker_roles_to_shared_config(
|
||||
|
||||
|
||||
def merge_worker_template_configs(
|
||||
existing_dict: dict[str, Any] | None,
|
||||
existing_dict: Optional[dict[str, Any]],
|
||||
to_be_merged_dict: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
"""When given an existing dict of worker template configuration consisting with both
|
||||
@@ -1026,7 +1026,7 @@ def generate_worker_log_config(
|
||||
Returns: the path to the generated file
|
||||
"""
|
||||
# Check whether we should write worker logs to disk, in addition to the console
|
||||
extra_log_template_args: dict[str, str | None] = {}
|
||||
extra_log_template_args: dict[str, Optional[str]] = {}
|
||||
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
|
||||
extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any, Mapping, MutableMapping, NoReturn
|
||||
from typing import Any, Mapping, MutableMapping, NoReturn, Optional
|
||||
|
||||
import jinja2
|
||||
|
||||
@@ -50,7 +50,7 @@ def generate_config_from_template(
|
||||
config_dir: str,
|
||||
config_path: str,
|
||||
os_environ: Mapping[str, str],
|
||||
ownership: str | None,
|
||||
ownership: Optional[str],
|
||||
) -> None:
|
||||
"""Generate a homeserver.yaml from environment variables
|
||||
|
||||
@@ -147,7 +147,7 @@ def generate_config_from_template(
|
||||
subprocess.run(args, check=True)
|
||||
|
||||
|
||||
def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
|
||||
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
|
||||
"""Run synapse with a --generate-config param to generate a template config file
|
||||
|
||||
Args:
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
|
||||
# Setup
|
||||
- [Installation](setup/installation.md)
|
||||
- [Security](setup/security.md)
|
||||
- [Using Postgres](postgres.md)
|
||||
- [Configuring a Reverse Proxy](reverse_proxy.md)
|
||||
- [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md)
|
||||
|
||||
@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
|
||||
|
||||
**Bad**:
|
||||
```python
|
||||
cache: ObservableDeferred[None] | None = None
|
||||
cache: Optional[ObservableDeferred[None]] = None
|
||||
|
||||
async def do_something_else(
|
||||
to_resolve: Deferred[None]
|
||||
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
|
||||
|
||||
**Good**:
|
||||
```python
|
||||
cache: ObservableDeferred[None] | None = None
|
||||
cache: Optional[ObservableDeferred[None]] = None
|
||||
|
||||
async def do_something_else(
|
||||
to_resolve: Deferred[None]
|
||||
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
|
||||
|
||||
**OK**:
|
||||
```python
|
||||
cache: ObservableDeferred[None] | None = None
|
||||
cache: Optional[ObservableDeferred[None]] = None
|
||||
|
||||
async def do_something_else(
|
||||
to_resolve: Deferred[None]
|
||||
|
||||
@@ -1,4 +1,4 @@
# Streams
## Streams

Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
).

## Definition
### Definition

A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
Only "writers" can add facts to a stream, and there may be multiple writers.

@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
Once completed, the rows written with that stream ID are fixed, and no new rows
will be inserted with that ID.

## Current stream ID
### Current stream ID

For any given stream reader (including writers themselves), we may define a per-writer current stream ID:

@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
| Complete 6 | 6 | |

## Multi-writer streams
### Multi-writer streams

There are two ways to view a multi-writer stream.

@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
We only ever treat this as a multiple single-writer streams as there is no important ordering between cache invalidations.
(Invalidations are self-contained facts; and the invalidations commute/are idempotent).

## Writing to streams
### Writing to streams

Writers need to track:
- track their current position (i.e. its own per-writer stream ID).

@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
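The bookkeeping described in that section is small enough to sketch. The class below is an illustrative toy (not Synapse's `id_generators.py` implementation): it hands out monotonically increasing stream IDs, tracks facts awaiting completion, and only advances the writer's current position once no earlier fact is still pending, which is exactly when `RDATA` could be emitted.

```python
# Toy single-writer stream following the rules described above (this is
# not the real id_generators.py implementation).
class ToyStreamWriter:
    def __init__(self, start: int = 1) -> None:
        self.current = start             # highest ID known to be fully persisted
        self._next = start + 1           # next ID to hand out
        self._pending: set[int] = set()  # allocated but not yet completed

    def allocate(self) -> int:
        """Begin writing a new fact and return its stream ID."""
        stream_id = self._next
        self._next += 1
        self._pending.add(stream_id)
        return stream_id

    def complete(self, stream_id: int) -> list[int]:
        """Mark a fact completed; return the IDs the current position advanced over."""
        self._pending.discard(stream_id)
        advanced = []
        # Advance while the next ID is no longer awaiting completion.
        while self.current + 1 < self._next and self.current + 1 not in self._pending:
            self.current += 1
            advanced.append(self.current)  # one RDATA per newly visible fact
        return advanced

writer = ToyStreamWriter()
a, b = writer.allocate(), writer.allocate()  # IDs 2 and 3
assert writer.complete(b) == []              # 2 still pending, cannot advance
assert writer.complete(a) == [2, 3]          # now both facts are visible
```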
## Subscribing to streams
### Subscribing to streams

Readers need to track the current position of every writer.

@@ -146,44 +146,10 @@ The `RDATA` itself is not a self-contained representation of the fact;
readers will have to query the stream tables for the full details.
Readers must also advance their record of the writer's current position for that stream.

## Summary
# Summary

In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.

---
|
||||
|
||||
## Cheatsheet for creating a new stream
|
||||
|
||||
These rough notes and links may help you to create a new stream and add all the
|
||||
necessary registration and event handling.
|
||||
|
||||
**Create your stream:**
|
||||
- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
|
||||
- will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
|
||||
- may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
|
||||
- if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
|
||||
- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
|
||||
- consider whether it may make sense to introduce a handler
|
||||
|
||||
**Register your stream in:**
|
||||
- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
|
||||
|
||||
**Advance your stream in:**
|
||||
- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
|
||||
- don't forget the super call
|
||||
|
||||
**If you're going to do any caching that needs invalidation from new rows:**
|
||||
- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
|
||||
- don't forget the super call
|
||||
- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
|
||||
|
||||
**For streams to be used in sync:**
|
||||
- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
|
||||
- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
|
||||
- add appropriate wake-up rules
|
||||
- in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
|
||||
- locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
|
||||
- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ _First introduced in Synapse v1.57.0_
|
||||
```python
|
||||
async def on_account_data_updated(
|
||||
user_id: str,
|
||||
room_id: str | None,
|
||||
room_id: Optional[str],
|
||||
account_data_type: str,
|
||||
content: "synapse.module_api.JsonDict",
|
||||
) -> None:
|
||||
@@ -82,7 +82,7 @@ class CustomAccountDataModule:
|
||||
async def log_new_account_data(
|
||||
self,
|
||||
user_id: str,
|
||||
room_id: str | None,
|
||||
room_id: Optional[str],
|
||||
account_data_type: str,
|
||||
content: JsonDict,
|
||||
) -> None:
|
||||
|
||||
@@ -12,7 +12,7 @@ The available account validity callbacks are:
|
||||
_First introduced in Synapse v1.39.0_
|
||||
|
||||
```python
|
||||
async def is_user_expired(user: str) -> bool | None
|
||||
async def is_user_expired(user: str) -> Optional[bool]
|
||||
```
|
||||
|
||||
Called when processing any authenticated request (except for logout requests). The module
|
||||
|
||||
@@ -11,7 +11,7 @@ The available media repository callbacks are:
|
||||
_First introduced in Synapse v1.132.0_
|
||||
|
||||
```python
|
||||
async def get_media_config_for_user(user_id: str) -> JsonDict | None
|
||||
async def get_media_config_for_user(user_id: str) -> Optional[JsonDict]
|
||||
```
|
||||
|
||||
**<span style="color:red">
|
||||
@@ -70,7 +70,7 @@ implementations of this callback.
|
||||
_First introduced in Synapse v1.139.0_
|
||||
|
||||
```python
|
||||
async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
|
||||
async def get_media_upload_limits_for_user(user_id: str, size: int) -> Optional[List[synapse.module_api.MediaUploadLimit]]
|
||||
```
|
||||
|
||||
**<span style="color:red">
|
||||
|
||||
@@ -23,7 +23,12 @@ async def check_auth(
|
||||
user: str,
|
||||
login_type: str,
|
||||
login_dict: "synapse.module_api.JsonDict",
|
||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
|
||||
) -> Optional[
|
||||
Tuple[
|
||||
str,
|
||||
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
The login type and field names should be provided by the user in the
|
||||
@@ -62,7 +67,12 @@ async def check_3pid_auth(
|
||||
medium: str,
|
||||
address: str,
|
||||
password: str,
|
||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None]
|
||||
) -> Optional[
|
||||
Tuple[
|
||||
str,
|
||||
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
Called when a user attempts to register or log in with a third party identifier,
|
||||
@@ -88,7 +98,7 @@ _First introduced in Synapse v1.46.0_
|
||||
```python
|
||||
async def on_logged_out(
|
||||
user_id: str,
|
||||
device_id: str | None,
|
||||
device_id: Optional[str],
|
||||
access_token: str
|
||||
) -> None
|
||||
```
|
||||
@@ -109,7 +119,7 @@ _First introduced in Synapse v1.52.0_
|
||||
async def get_username_for_registration(
|
||||
uia_results: Dict[str, Any],
|
||||
params: Dict[str, Any],
|
||||
) -> str | None
|
||||
) -> Optional[str]
|
||||
```
|
||||
|
||||
Called when registering a new user. The module can return a username to set for the user
|
||||
@@ -170,7 +180,7 @@ _First introduced in Synapse v1.54.0_
|
||||
async def get_displayname_for_registration(
|
||||
uia_results: Dict[str, Any],
|
||||
params: Dict[str, Any],
|
||||
) -> str | None
|
||||
) -> Optional[str]
|
||||
```
|
||||
|
||||
Called when registering a new user. The module can return a display name to set for the
|
||||
@@ -249,7 +259,12 @@ class MyAuthProvider:
|
||||
username: str,
|
||||
login_type: str,
|
||||
login_dict: "synapse.module_api.JsonDict",
|
||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
|
||||
) -> Optional[
|
||||
Tuple[
|
||||
str,
|
||||
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
|
||||
]
|
||||
]:
|
||||
if login_type != "my.login_type":
|
||||
return None
|
||||
|
||||
@@ -261,7 +276,12 @@ class MyAuthProvider:
|
||||
username: str,
|
||||
login_type: str,
|
||||
login_dict: "synapse.module_api.JsonDict",
|
||||
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
|
||||
) -> Optional[
|
||||
Tuple[
|
||||
str,
|
||||
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
|
||||
]
|
||||
]:
|
||||
if login_type != "m.login.password":
|
||||
return None
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ _First introduced in Synapse v1.42.0_
|
||||
```python
|
||||
async def get_users_for_states(
|
||||
state_updates: Iterable["synapse.api.UserPresenceState"],
|
||||
) -> dict[str, set["synapse.api.UserPresenceState"]]
|
||||
) -> Dict[str, Set["synapse.api.UserPresenceState"]]
|
||||
```
|
||||
**Requires** `get_interested_users` to also be registered
|
||||
|
||||
@@ -45,7 +45,7 @@ _First introduced in Synapse v1.42.0_
|
||||
```python
|
||||
async def get_interested_users(
|
||||
user_id: str
|
||||
) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS"
|
||||
) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
|
||||
```
|
||||
**Requires** `get_users_for_states` to also be registered
|
||||
|
||||
@@ -73,7 +73,7 @@ that `@alice:example.org` receives all presence updates from `@bob:example.com`
|
||||
`@charlie:somewhere.org`, regardless of whether Alice shares a room with any of them.
|
||||
|
||||
```python
|
||||
from typing import Iterable
|
||||
from typing import Dict, Iterable, Set, Union
|
||||
|
||||
from synapse.module_api import ModuleApi
|
||||
|
||||
@@ -90,7 +90,7 @@ class CustomPresenceRouter:
|
||||
async def get_users_for_states(
|
||||
self,
|
||||
state_updates: Iterable["synapse.api.UserPresenceState"],
|
||||
) -> dict[str, set["synapse.api.UserPresenceState"]]:
|
||||
) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
|
||||
res = {}
|
||||
for update in state_updates:
|
||||
if (
|
||||
@@ -104,7 +104,7 @@ class CustomPresenceRouter:
|
||||
async def get_interested_users(
|
||||
self,
|
||||
user_id: str,
|
||||
) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS":
|
||||
) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
|
||||
if user_id == "@alice:example.com":
|
||||
return {"@bob:example.com", "@charlie:somewhere.org"}
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ The available ratelimit callbacks are:
|
||||
_First introduced in Synapse v1.132.0_
|
||||
|
||||
```python
|
||||
async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> synapse.module_api.RatelimitOverride | None
|
||||
async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> Optional[synapse.module_api.RatelimitOverride]
|
||||
```
|
||||
|
||||
**<span style="color:red">
|
||||
|
||||
@@ -331,9 +331,9 @@ search results; otherwise return `False`.
|
||||
The profile is represented as a dictionary with the following keys:
|
||||
|
||||
* `user_id: str`. The Matrix ID for this user.
|
||||
* `display_name: str | None`. The user's display name, or `None` if this user
|
||||
* `display_name: Optional[str]`. The user's display name, or `None` if this user
|
||||
has not set a display name.
|
||||
* `avatar_url: str | None`. The `mxc://` URL to the user's avatar, or `None`
|
||||
* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
|
||||
if this user has not set an avatar.
|
||||
|
||||
The module is given a copy of the original dictionary, so modifying it from within the
|
||||
@@ -352,10 +352,10 @@ _First introduced in Synapse v1.37.0_
|
||||
|
||||
```python
|
||||
async def check_registration_for_spam(
|
||||
email_threepid: dict | None,
|
||||
username: str | None,
|
||||
email_threepid: Optional[dict],
|
||||
username: Optional[str],
|
||||
request_info: Collection[Tuple[str, str]],
|
||||
auth_provider_id: str | None = None,
|
||||
auth_provider_id: Optional[str] = None,
|
||||
) -> "synapse.spam_checker_api.RegistrationBehaviour"
|
||||
```
|
||||
|
||||
@@ -438,10 +438,10 @@ _First introduced in Synapse v1.87.0_
|
||||
```python
|
||||
async def check_login_for_spam(
|
||||
user_id: str,
|
||||
device_id: str | None,
|
||||
initial_display_name: str | None,
|
||||
request_info: Collection[tuple[str | None, str]],
|
||||
auth_provider_id: str | None = None,
|
||||
device_id: Optional[str],
|
||||
initial_display_name: Optional[str],
|
||||
request_info: Collection[Tuple[Optional[str], str]],
|
||||
auth_provider_id: Optional[str] = None,
|
||||
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
|
||||
```
|
||||
|
||||
@@ -509,7 +509,7 @@ class ListSpamChecker:
|
||||
resource=IsUserEvilResource(config),
|
||||
)
|
||||
|
||||
async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Literal["NOT_SPAM"] | Codes:
|
||||
async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Union[Literal["NOT_SPAM"], Codes]:
|
||||
if event.sender in self.evil_users:
|
||||
return Codes.FORBIDDEN
|
||||
else:
|
||||
|
||||
@@ -16,7 +16,7 @@ _First introduced in Synapse v1.39.0_
|
||||
async def check_event_allowed(
|
||||
event: "synapse.events.EventBase",
|
||||
state_events: "synapse.types.StateMap",
|
||||
) -> tuple[bool, dict | None]
|
||||
) -> Tuple[bool, Optional[dict]]
|
||||
```
|
||||
|
||||
**<span style="color:red">
|
||||
@@ -340,7 +340,7 @@ class EventCensorer:
|
||||
self,
|
||||
event: "synapse.events.EventBase",
|
||||
state_events: "synapse.types.StateMap",
|
||||
) -> Tuple[bool, dict | None]:
|
||||
) -> Tuple[bool, Optional[dict]]:
|
||||
event_dict = event.get_dict()
|
||||
new_event_content = await self.api.http_client.post_json_get_json(
|
||||
uri=self._endpoint, post_json=event_dict,
|
||||
|
||||
@@ -76,7 +76,7 @@ possible.
|
||||
#### `get_interested_users`
|
||||
|
||||
```python
|
||||
async def get_interested_users(self, user_id: str) -> set[str] | str
|
||||
async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
|
||||
```
|
||||
|
||||
**Required.** An asynchronous method that is passed a single Matrix User ID. This
|
||||
@@ -182,7 +182,7 @@ class ExamplePresenceRouter:
|
||||
async def get_interested_users(
|
||||
self,
|
||||
user_id: str,
|
||||
) -> set[str] | PresenceRouter.ALL_USERS:
|
||||
) -> Union[Set[str], PresenceRouter.ALL_USERS]:
|
||||
"""
|
||||
Retrieve a list of users that `user_id` is interested in receiving the
|
||||
presence of. This will be in addition to those they share a room with.
|
||||
|
||||
@@ -86,45 +86,6 @@ server {
|
||||
}
|
||||
```
|
||||
|
||||
### Nginx Proxy Manager or NPMPlus
|
||||
|
||||
```nginx
|
||||
Add New Proxy-Host
|
||||
- Tab Details
|
||||
- Domain Names: matrix.example.com
|
||||
- Scheme: http
|
||||
- Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
|
||||
- Forward Port: 8008
|
||||
|
||||
- Tab Custom locations
|
||||
- Add Location
|
||||
- Define Location: /_matrix
|
||||
- Scheme: http
|
||||
- Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
|
||||
- Forward Port: 8008
|
||||
- Click on the gear icon to display a custom configuration field. Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
|
||||
- Enter this in the Custom Field: client_max_body_size 50M;
|
||||
|
||||
- Tab SSL/TLS
|
||||
- Choose your SSL/TLS certificate and preferred settings.
|
||||
|
||||
- Tab Advanced
|
||||
- Enter this in the Custom Field. This means that port 8448 no longer needs to be opened in your Firewall.
|
||||
The Federation communication use now Port 443.
|
||||
|
||||
location /.well-known/matrix/server {
|
||||
return 200 '{"m.server": "matrix.example.com:443"}';
|
||||
add_header Content-Type application/json;
|
||||
}
|
||||
|
||||
location /.well-known/matrix/client {
|
||||
return 200 '{"m.homeserver": {"base_url": "https://matrix.example.com"}}';
|
||||
add_header Content-Type application/json;
|
||||
add_header "Access-Control-Allow-Origin" *;
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
### Caddy v2
|
||||
|
||||
```
|
||||
|
||||
@@ -16,15 +16,8 @@ that your email address is probably `user@example.com` rather than
|
||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||
[Setting up Federation](../federate.md).
|
||||
|
||||
⚠️ Before setting up Synapse please consult the [security page](security.md) for
|
||||
best practices. ⚠️
|
||||
|
||||
## Installing Synapse
|
||||
|
||||
Note: Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||
and aims to follow supported upstream versions. See the [deprecation
|
||||
policy](../deprecation_policy.md) for more details.
|
||||
|
||||
### Prebuilt packages
|
||||
|
||||
Prebuilt packages are available for a number of platforms. These are recommended
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
# Security
|
||||
|
||||
This page lays out security best-practices when running Synapse.
|
||||
|
||||
If you believe you have encountered a security issue, see our [Security
|
||||
Disclosure Policy](https://element.io/en/security/security-disclosure-policy).
|
||||
|
||||
## Content repository
|
||||
|
||||
Matrix serves raw, user-supplied data in some APIs — specifically the [content
|
||||
repository endpoints](https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid).
|
||||
|
||||
Whilst we make a reasonable effort to mitigate against XSS attacks (for
|
||||
instance, by using [CSP](https://github.com/matrix-org/synapse/pull/1021)), a
|
||||
Matrix homeserver should not be hosted on a domain hosting other web
|
||||
applications. This especially applies to sharing the domain with Matrix web
|
||||
clients and other sensitive applications like webmail. See
|
||||
https://developer.github.com/changes/2014-04-25-user-content-security for more
|
||||
information.
|
||||
|
||||
Ideally, the homeserver should not simply be on a different subdomain, but on a
|
||||
completely different [registered
|
||||
domain](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3)
|
||||
(also known as top-level site or eTLD+1). This is because [some
|
||||
attacks](https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie)
|
||||
are still possible as long as the two applications share the same registered
|
||||
domain.
|
||||
|
||||
|
||||
To illustrate this with an example, if your Element Web or other sensitive web
|
||||
application is hosted on `A.example1.com`, you should ideally host Synapse on
|
||||
`example2.com`. Some amount of protection is offered by hosting on
|
||||
`B.example1.com` instead, so this is also acceptable in some scenarios.
|
||||
However, you should *not* host your Synapse on `A.example1.com`.
|
||||
|
||||
Note that all of the above refers exclusively to the domain used in Synapse's
|
||||
`public_baseurl` setting. In particular, it has no bearing on the domain
|
||||
mentioned in MXIDs hosted on that server.
|
||||
|
||||
Following this advice ensures that even if an XSS is found in Synapse, the
|
||||
impact to other applications will be minimal.
|
||||
@@ -117,33 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.144.0

## Worker support for unstable MSC4140 `/restart` endpoint

The following unstable endpoint pattern may now be routed to worker processes:

```
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events/.*/restart$
```
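For the avoidance of doubt about what that pattern matches, here is a quick check with the standard `re` module; the delayed-event ID in the example path is made up purely for illustration.

```python
# Quick check of the worker-routable pattern quoted above.
import re

pattern = re.compile(r"^/_matrix/client/unstable/org.matrix.msc4140/delayed_events/.*/restart$")

# A made-up delayed-event ID, purely for illustration.
assert pattern.match("/_matrix/client/unstable/org.matrix.msc4140/delayed_events/abc123/restart")
assert not pattern.match("/_matrix/client/unstable/org.matrix.msc4140/delayed_events")
```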
## Unstable mutual rooms endpoint is now behind an experimental feature flag

The unstable mutual rooms endpoint from
[MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666)
(`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`) is now
disabled by default. If you rely on this unstable endpoint, you must now set
`experimental_features.msc2666_enabled: true` in your configuration to keep
using it.

# Upgrading to v1.143.0

## Dropping support for PostgreSQL 13

In line with our [deprecation policy](deprecation_policy.md), we've dropped
support for PostgreSQL 13, as it is no longer supported upstream.
This release of Synapse requires PostgreSQL 14+.

# Upgrading to v1.142.0

## Python 3.10+ is now required
@@ -285,13 +285,10 @@ information.
|
||||
# User directory search requests
|
||||
^/_matrix/client/(r0|v3|unstable)/user_directory/search$
|
||||
|
||||
# Unstable MSC4140 support
|
||||
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events(/.*/restart)?$
|
||||
|
||||
Additionally, the following REST endpoints can be handled for GET requests:
|
||||
|
||||
# Push rules requests
|
||||
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
|
||||
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
|
||||
|
||||
# Account data requests
|
||||
^/_matrix/client/(r0|v3|unstable)/.*/tags
|
||||
|
||||
1006 poetry.lock generated
File diff suppressed because it is too large

428 pyproject.toml
@@ -1,183 +1,3 @@
|
||||
[project]
|
||||
name = "matrix-synapse"
|
||||
version = "1.144.0"
|
||||
description = "Homeserver for the Matrix decentralised comms protocol"
|
||||
readme = "README.rst"
|
||||
authors = [
|
||||
{ name = "Matrix.org Team and Contributors", email = "packages@matrix.org" }
|
||||
]
|
||||
requires-python = ">=3.10.0,<4.0.0"
|
||||
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Topic :: Communications :: Chat",
|
||||
]
|
||||
|
||||
# Mandatory Dependencies
|
||||
dependencies = [
|
||||
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
|
||||
"jsonschema>=3.0.0",
|
||||
# 0.25.0 is the first version to support Python 3.14.
|
||||
# We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
|
||||
# and included in a release.
|
||||
"rpds-py>=0.25.0",
|
||||
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
|
||||
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
|
||||
"immutabledict>=2.0",
|
||||
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
|
||||
"unpaddedbase64>=2.1.0",
|
||||
# We require 2.0.0 for immutabledict support.
|
||||
"canonicaljson>=2.0.0,<3.0.0",
|
||||
# we use the type definitions added in signedjson 1.1.
|
||||
"signedjson>=1.1.0,<2.0.0",
|
||||
# validating SSL certs for IP addresses requires service_identity 18.1.
|
||||
"service-identity>=18.1.0",
|
||||
# Twisted 18.9 introduces some logger improvements that the structured
|
||||
# logger utilises
|
||||
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
|
||||
# Twisted 21.2.0 introduces contextvar support.
|
||||
# We could likely bump this to 22.1 without making distro packagers'
|
||||
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
|
||||
# RHEL 9: 22.10)
|
||||
"Twisted[tls]>=21.2.0",
|
||||
"treq>=21.5.0",
|
||||
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
|
||||
"pyOpenSSL>=16.0.0",
|
||||
"PyYAML>=5.3",
|
||||
"pyasn1>=0.1.9",
|
||||
"pyasn1-modules>=0.0.7",
|
||||
"bcrypt>=3.1.7",
|
||||
# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
|
||||
# Packagers that already took care of libwebp can lower that down to 5.4.0.
|
||||
"Pillow>=10.0.1",
|
||||
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
|
||||
# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
|
||||
"sortedcontainers>=2.0.5",
|
||||
"pymacaroons>=0.13.0",
|
||||
"msgpack>=0.5.2",
|
||||
"phonenumbers>=8.2.0",
|
||||
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
|
||||
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
|
||||
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
|
||||
"prometheus-client>=0.6.0",
|
||||
# we use `order`, which arrived in attrs 19.2.0.
|
||||
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
|
||||
"attrs>=19.2.0,!=21.1.0",
|
||||
"netaddr>=0.7.18",
|
||||
# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
|
||||
# end up with a broken installation, with recent MarkupSafe but old Jinja, we
|
||||
# add a lower bound to the Jinja2 dependency.
|
||||
"Jinja2>=3.0",
|
||||
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
|
||||
"bleach>=3.2.0",
|
||||
# pydantic 2.12 depends on typing-extensions>=4.14.1
|
||||
"typing-extensions>=4.14.1",
|
||||
# We enforce that we have a `cryptography` version that bundles an `openssl`
|
||||
# with the latest security patches.
|
||||
"cryptography>=3.4.7",
|
||||
# ijson 3.1.4 fixes a bug with "." in property names
|
||||
"ijson>=3.1.4",
|
||||
"matrix-common>=1.3.0,<2.0.0",
|
||||
# We need packaging.verison.Version(...).major added in 20.0.
|
||||
"packaging>=20.0",
|
||||
"pydantic>=2.8;python_version < '3.14'",
|
||||
"pydantic>=2.12;python_version >= '3.14'",
|
||||
|
||||
# This is for building the rust components during "poetry install", which
|
||||
# currently ignores the `build-system.requires` directive (c.f.
|
||||
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
|
||||
# `poetry build` do the right thing without this explicit dependency.
|
||||
#
|
||||
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
|
||||
# but the alternative is to add it to the main list of deps where it isn't
|
||||
# needed.
|
||||
"setuptools_rust>=1.3",
|
||||
|
||||
# This is used for parsing multipart responses
|
||||
"python-multipart>=0.0.9",
|
||||
]
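Several of the pins in the dependency list above use PEP 508 environment markers, for example the two `pydantic` lines, which select a different floor per Python version. If you want to see how such a marker evaluates on the running interpreter, the `packaging` library that already appears in this list can do it; a small sketch:

```python
# Evaluate a PEP 508 environment marker, as used by the pydantic pins above,
# against the running interpreter. `packaging` is already a Synapse dependency.
from packaging.markers import Marker
from packaging.requirements import Requirement

req = Requirement("pydantic>=2.12; python_version >= '3.14'")
print(req.name, req.specifier, "applies here:", req.marker.evaluate())

# Markers can also be evaluated standalone:
print(Marker("python_version < '3.14'").evaluate())
```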
|
||||
|
||||
[project.optional-dependencies]
|
||||
matrix-synapse-ldap3 = ["matrix-synapse-ldap3>=0.1"]
|
||||
postgres = [
|
||||
"psycopg2>=2.8;platform_python_implementation != 'PyPy'",
|
||||
"psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
|
||||
"psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
|
||||
]
|
||||
saml2 = ["pysaml2>=4.5.0"]
|
||||
oidc = ["authlib>=0.15.1"]
|
||||
# systemd-python is necessary for logging to the systemd journal via
|
||||
# `systemd.journal.JournalHandler`, as is documented in
|
||||
# `contrib/systemd/log_config.yaml`.
|
||||
systemd = ["systemd-python>=231"]
|
||||
url-preview = ["lxml>=4.6.3"]
|
||||
sentry = ["sentry-sdk>=0.7.2"]
|
||||
opentracing = ["jaeger-client>=4.2.0", "opentracing>=2.2.0"]
|
||||
jwt = ["authlib"]
|
||||
# hiredis is not a *strict* dependency, but it makes things much faster.
|
||||
# (if it is not installed, we fall back to slow code.)
|
||||
redis = ["txredisapi>=1.4.7", "hiredis"]
|
||||
# Required to use experimental `caches.track_memory_usage` config option.
|
||||
cache-memory = ["pympler"]
|
||||
# If this is updated, don't forget to update the equivalent lines in
|
||||
# tool.poetry.group.dev.dependencies.
|
||||
test = ["parameterized>=0.9.0", "idna>=3.3"]
|
||||
|
||||
# The duplication here is awful.
|
||||
#
|
||||
# TODO: This can be resolved via PEP 735 dependency groups, which poetry supports
|
||||
# since 2.2.0. However, switching to that would require updating the command
|
||||
# developers use to install the `all` group. This would require some coordination.
|
||||
#
|
||||
# NB: the strings in this list must be *package* names, not extra names.
|
||||
# Some of our extra names _are_ package names, which can lead to great confusion.
|
||||
all = [
|
||||
# matrix-synapse-ldap3
|
||||
"matrix-synapse-ldap3>=0.1",
|
||||
# postgres
|
||||
"psycopg2>=2.8;platform_python_implementation != 'PyPy'",
|
||||
"psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
|
||||
"psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
|
||||
# saml2
|
||||
"pysaml2>=4.5.0",
|
||||
# oidc and jwt
|
||||
"authlib>=0.15.1",
|
||||
# url-preview
|
||||
"lxml>=4.6.3",
|
||||
# sentry
|
||||
"sentry-sdk>=0.7.2",
|
||||
# opentracing
|
||||
"jaeger-client>=4.2.0", "opentracing>=2.2.0",
|
||||
# redis
|
||||
"txredisapi>=1.4.7", "hiredis",
|
||||
# cache-memory
|
||||
"pympler",
|
||||
# omitted:
|
||||
# - test: it's useful to have this separate from dev deps in the olddeps job
|
||||
# - systemd: this is a system-based requirement
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
repository = "https://github.com/element-hq/synapse"
|
||||
documentation = "https://element-hq.github.io/synapse/latest"
|
||||
"Issue Tracker" = "https://github.com/element-hq/synapse/issues"
|
||||
|
||||
[project.scripts]
|
||||
synapse_homeserver = "synapse.app.homeserver:main"
|
||||
synapse_worker = "synapse.app.generic_worker:main"
|
||||
synctl = "synapse._scripts.synctl:main"
|
||||
|
||||
export_signing_key = "synapse._scripts.export_signing_key:main"
|
||||
generate_config = "synapse._scripts.generate_config:main"
|
||||
generate_log_config = "synapse._scripts.generate_log_config:main"
|
||||
generate_signing_key = "synapse._scripts.generate_signing_key:main"
|
||||
hash_password = "synapse._scripts.hash_password:main"
|
||||
register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
|
||||
synapse_port_db = "synapse._scripts.synapse_port_db:main"
|
||||
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
|
||||
update_synapse_database = "synapse._scripts.update_synapse_database:main"
|
||||
|
||||
|
||||
[tool.towncrier]
|
||||
package = "synapse"
|
||||
filename = "CHANGES.md"
|
||||
@@ -260,17 +80,10 @@ select = [
"G",
# pyupgrade
"UP006",
"UP007",
"UP045",
]
extend-safe-fixes = [
# pyupgrade rules compatible with Python >= 3.9
"UP006",
"UP007",
# pyupgrade rules compatible with Python >= 3.10
"UP045",
# Allow ruff to automatically fix trailing spaces within a multi-line string/comment.
"W293"
# pyupgrade
"UP006"
]

[tool.ruff.lint.isort]
|
||||
@@ -293,9 +106,20 @@ manifest-path = "rust/Cargo.toml"
|
||||
module-name = "synapse.synapse_rust"
|
||||
|
||||
[tool.poetry]
|
||||
name = "matrix-synapse"
|
||||
version = "1.142.0rc3"
|
||||
description = "Homeserver for the Matrix decentralised comms protocol"
|
||||
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
|
||||
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
|
||||
readme = "README.rst"
|
||||
repository = "https://github.com/element-hq/synapse"
|
||||
packages = [
|
||||
{ include = "synapse" },
|
||||
]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Topic :: Communications :: Chat",
|
||||
]
|
||||
include = [
|
||||
{ path = "AUTHORS.rst", format = "sdist" },
|
||||
{ path = "book.toml", format = "sdist" },
|
||||
@@ -325,12 +149,197 @@ exclude = [
|
||||
script = "build_rust.py"
|
||||
generate-setup-file = true
|
||||
|
||||
[tool.poetry.scripts]
|
||||
synapse_homeserver = "synapse.app.homeserver:main"
|
||||
synapse_worker = "synapse.app.generic_worker:main"
|
||||
synctl = "synapse._scripts.synctl:main"
|
||||
|
||||
export_signing_key = "synapse._scripts.export_signing_key:main"
|
||||
generate_config = "synapse._scripts.generate_config:main"
|
||||
generate_log_config = "synapse._scripts.generate_log_config:main"
|
||||
generate_signing_key = "synapse._scripts.generate_signing_key:main"
|
||||
hash_password = "synapse._scripts.hash_password:main"
|
||||
register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
|
||||
synapse_port_db = "synapse._scripts.synapse_port_db:main"
|
||||
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
|
||||
update_synapse_database = "synapse._scripts.update_synapse_database:main"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10.0"
|
||||
|
||||
# Mandatory Dependencies
|
||||
# ----------------------
|
||||
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
|
||||
jsonschema = ">=3.0.0"
|
||||
# 0.25.0 is the first version to support Python 3.14.
|
||||
# We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
|
||||
# and included in a release.
|
||||
rpds-py = ">=0.25.0"
|
||||
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
|
||||
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
|
||||
immutabledict = ">=2.0"
|
||||
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
|
||||
unpaddedbase64 = ">=2.1.0"
|
||||
# We require 2.0.0 for immutabledict support.
|
||||
canonicaljson = "^2.0.0"
|
||||
# we use the type definitions added in signedjson 1.1.
|
||||
signedjson = "^1.1.0"
|
||||
# validating SSL certs for IP addresses requires service_identity 18.1.
|
||||
service-identity = ">=18.1.0"
|
||||
# Twisted 18.9 introduces some logger improvements that the structured
|
||||
# logger utilises
|
||||
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
|
||||
# Twisted 21.2.0 introduces contextvar support.
|
||||
# We could likely bump this to 22.1 without making distro packagers'
|
||||
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
|
||||
# RHEL 9: 22.10)
|
||||
Twisted = {extras = ["tls"], version = ">=21.2.0"}
|
||||
treq = ">=21.5.0"
|
||||
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
|
||||
pyOpenSSL = ">=16.0.0"
|
||||
PyYAML = ">=5.3"
|
||||
pyasn1 = ">=0.1.9"
|
||||
pyasn1-modules = ">=0.0.7"
|
||||
bcrypt = ">=3.1.7"
|
||||
# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
|
||||
# Packagers that already took care of libwebp can lower that down to 5.4.0.
|
||||
Pillow = ">=10.0.1"
|
||||
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
|
||||
# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
|
||||
sortedcontainers = ">=2.0.5"
|
||||
pymacaroons = ">=0.13.0"
|
||||
msgpack = ">=0.5.2"
|
||||
phonenumbers = ">=8.2.0"
|
||||
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
|
||||
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
|
||||
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
|
||||
prometheus-client = ">=0.6.0"
|
||||
# we use `order`, which arrived in attrs 19.2.0.
|
||||
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
|
||||
attrs = ">=19.2.0,!=21.1.0"
|
||||
netaddr = ">=0.7.18"
|
||||
# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
|
||||
# end up with a broken installation, with recent MarkupSafe but old Jinja, we
|
||||
# add a lower bound to the Jinja2 dependency.
|
||||
Jinja2 = ">=3.0"
|
||||
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
|
||||
bleach = ">=3.2.0"
|
||||
# pydantic 2.12 depends on typing-extensions>=4.14.1
|
||||
typing-extensions = ">=4.14.1"
|
||||
# We enforce that we have a `cryptography` version that bundles an `openssl`
|
||||
# with the latest security patches.
|
||||
cryptography = ">=3.4.7"
|
||||
# ijson 3.1.4 fixes a bug with "." in property names
|
||||
ijson = ">=3.1.4"
|
||||
matrix-common = "^1.3.0"
|
||||
# We need packaging.version.Version(...).major added in 20.0.
|
||||
packaging = ">=20.0"
|
||||
pydantic = [
|
||||
{ version = "~=2.8", python = "<3.14" },
|
||||
{ version = "~=2.12", python = ">=3.14" },
|
||||
]
|
||||
|
||||
# This is for building the rust components during "poetry install", which
|
||||
# currently ignores the `build-system.requires` directive (c.f.
|
||||
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
|
||||
# `poetry build` do the right thing without this explicit dependency.
|
||||
#
|
||||
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
|
||||
# but the alternative is to add it to the main list of deps where it isn't
|
||||
# needed.
|
||||
setuptools_rust = ">=1.3"
|
||||
|
||||
# This is used for parsing multipart responses
|
||||
python-multipart = ">=0.0.9"
|
||||
|
||||
# Optional Dependencies
|
||||
# ---------------------
|
||||
matrix-synapse-ldap3 = { version = ">=0.1", optional = true }
|
||||
psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'PyPy'", optional = true }
|
||||
psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
|
||||
psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
|
||||
pysaml2 = { version = ">=4.5.0", optional = true }
|
||||
authlib = { version = ">=0.15.1", optional = true }
|
||||
# systemd-python is necessary for logging to the systemd journal via
|
||||
# `systemd.journal.JournalHandler`, as is documented in
|
||||
# `contrib/systemd/log_config.yaml`.
|
||||
# Note: systemd-python 231 appears to have been yanked from pypi
|
||||
systemd-python = { version = ">=231", optional = true }
|
||||
# 4.6.3 removes usage of _PyGen_Send which is unavailable in CPython as of Python 3.10.
|
||||
lxml = { version = ">=4.6.3", optional = true }
|
||||
sentry-sdk = { version = ">=0.7.2", optional = true }
|
||||
opentracing = { version = ">=2.2.0", optional = true }
|
||||
# 4.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
|
||||
jaeger-client = { version = ">=4.2.0", optional = true }
|
||||
txredisapi = { version = ">=1.4.7", optional = true }
|
||||
hiredis = { version = "*", optional = true }
|
||||
Pympler = { version = "*", optional = true }
|
||||
parameterized = { version = ">=0.7.4", optional = true }
|
||||
idna = { version = ">=2.5", optional = true }
|
||||
|
||||
[tool.poetry.extras]
|
||||
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
|
||||
# twice: once here, and once in the `all` extra.
|
||||
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
|
||||
postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"]
|
||||
saml2 = ["pysaml2"]
|
||||
oidc = ["authlib"]
|
||||
# systemd-python is necessary for logging to the systemd journal via
|
||||
# `systemd.journal.JournalHandler`, as is documented in
|
||||
# `contrib/systemd/log_config.yaml`.
|
||||
systemd = ["systemd-python"]
|
||||
url-preview = ["lxml"]
|
||||
sentry = ["sentry-sdk"]
|
||||
opentracing = ["jaeger-client", "opentracing"]
|
||||
jwt = ["authlib"]
|
||||
# hiredis is not a *strict* dependency, but it makes things much faster.
|
||||
# (if it is not installed, we fall back to slow code.)
|
||||
redis = ["txredisapi", "hiredis"]
|
||||
# Required to use experimental `caches.track_memory_usage` config option.
|
||||
cache-memory = ["pympler"]
|
||||
test = ["parameterized", "idna"]
|
||||
|
||||
# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
|
||||
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
|
||||
# 1) for new installations, I want instructions in existing documentation and tutorials
|
||||
# out there to still work.
|
||||
# 2) I don't want to hard-code a list of extras into CI if I can help it. The ideal
|
||||
# solution here would be something like https://github.com/python-poetry/poetry/issues/3413
|
||||
# Poetry 1.2's dependency groups might make this easier. But I'm not trying that out
|
||||
# until there's a stable release of 1.2.
|
||||
#
|
||||
# NB: the strings in this list must be *package* names, not extra names.
|
||||
# Some of our extra names _are_ package names, which can lead to great confusion.
|
||||
all = [
|
||||
# matrix-synapse-ldap3
|
||||
"matrix-synapse-ldap3",
|
||||
# postgres
|
||||
"psycopg2", "psycopg2cffi", "psycopg2cffi-compat",
|
||||
# saml2
|
||||
"pysaml2",
|
||||
# oidc and jwt
|
||||
"authlib",
|
||||
# url-preview
|
||||
"lxml",
|
||||
# sentry
|
||||
"sentry-sdk",
|
||||
# opentracing
|
||||
"jaeger-client", "opentracing",
|
||||
# redis
|
||||
"txredisapi", "hiredis",
|
||||
# cache-memory
|
||||
"pympler",
|
||||
# omitted:
|
||||
# - test: it's useful to have this separate from dev deps in the olddeps job
|
||||
# - systemd: this is a system-based requirement
|
||||
]
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
# We pin development dependencies in poetry.lock so that our tests don't start
|
||||
# failing on new releases. Keeping lower bounds loose here means that dependabot
|
||||
# can bump versions without having to update the content-hash in the lockfile.
|
||||
# This helps prevents merge conflicts when running a batch of dependabot updates.
|
||||
ruff = "0.14.5"
|
||||
ruff = "0.14.3"
|
||||
|
||||
# Typechecking
|
||||
lxml-stubs = ">=0.4.0"
|
||||
@@ -350,11 +359,10 @@ types-setuptools = ">=57.4.0"
|
||||
# Dependencies which are exclusively required by unit test code. This is
|
||||
# NOT a list of all modules that are necessary to run the unit tests.
|
||||
# Tests assume that all optional dependencies are installed.
|
||||
#
|
||||
# If this is updated, don't forget to update the equivalent lines in
|
||||
# project.optional-dependencies.test.
|
||||
parameterized = ">=0.9.0"
|
||||
idna = ">=3.3"
|
||||
# parameterized<0.7.4 can create classes with names that would normally be invalid
|
||||
# identifiers. trial really does not like this when running with multiple workers.
|
||||
parameterized = ">=0.7.4"
|
||||
idna = ">=2.5"
|
||||
|
||||
# The following are used by the release script
|
||||
click = ">=8.1.3"
|
||||
@@ -370,9 +378,6 @@ towncrier = ">=18.6.0rc1"
|
||||
# Used for checking the Poetry lockfile
|
||||
tomli = ">=1.2.3"
|
||||
|
||||
# Used for checking the schema delta files
|
||||
sqlglot = ">=28.0.0"
|
||||
|
||||
|
||||
[build-system]
|
||||
# The upper bounds here are defensive, intended to prevent situations like
|
||||
@@ -381,28 +386,19 @@ sqlglot = ">=28.0.0"
|
||||
# runtime errors caused by build system changes.
|
||||
# We are happy to raise these upper bounds upon request,
|
||||
# provided we check that it's safe to do so (i.e. that CI passes).
|
||||
requires = ["poetry-core>=2.0.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
|
||||
requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
||||
|
||||
[tool.cibuildwheel]
|
||||
# Skip unsupported platforms (by us or by Rust).
|
||||
#
|
||||
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the
|
||||
# list of supported build targets.
|
||||
#
|
||||
# Also see `.github/workflows/release-artifacts.yml` for the list of
|
||||
# architectures we build for (based on the runner OS types we use), as well as
|
||||
# the platforms we exclude from testing in CI.
|
||||
#
|
||||
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
|
||||
# We skip:
|
||||
# - free-threaded cpython builds: these are not currently supported.
|
||||
# - i686: We don't support 32-bit platforms.
|
||||
# - *macosx*: we don't support building wheels for MacOS.
|
||||
skip = "cp3??t-* *i686* *macosx*"
|
||||
# Enable non-default builds. See the list of available options:
|
||||
# https://cibuildwheel.pypa.io/en/stable/options#enable
|
||||
#
|
||||
# - CPython 3.8: EOLed
|
||||
# - musllinux i686: excluded to reduce number of wheels we build.
|
||||
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
|
||||
skip = "cp38* *-musllinux_i686"
|
||||
# Enable non-default builds.
|
||||
# "pypy" used to be included by default up until cibuildwheel 3.
|
||||
enable = "pypy"
|
||||
|
||||
@@ -424,3 +420,7 @@ test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print
|
||||
[tool.cibuildwheel.linux]
|
||||
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
|
||||
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py -w {dest_dir} {wheel}"
|
||||
|
||||
[tool.cibuildwheel.macos]
|
||||
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
|
||||
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py --require-archs {delocate_archs} -w {dest_dir} {wheel}"
|
||||
|
||||
@@ -1,56 +0,0 @@
/*
 * This file is licensed under the Affero General Public License (AGPL) version 3.
 *
 * Copyright (C) 2025 Element Creations, Ltd
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * See the GNU Affero General Public License for more details:
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

use once_cell::sync::OnceCell;
use pyo3::{
    types::{IntoPyDict, PyAnyMethods},
    Bound, BoundObject, IntoPyObject, Py, PyAny, PyErr, PyResult, Python,
};

/// A reference to the `synapse.util.duration` module.
static DURATION: OnceCell<Py<PyAny>> = OnceCell::new();

/// Access to the `synapse.util.duration` module.
fn duration_module(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
    Ok(DURATION
        .get_or_try_init(|| py.import("synapse.util.duration").map(Into::into))?
        .bind(py))
}

/// Mirrors the `synapse.util.duration.Duration` Python class.
pub struct SynapseDuration {
    microseconds: u64,
}

impl SynapseDuration {
    /// For now we only need to create durations from milliseconds.
    pub fn from_milliseconds(milliseconds: u64) -> Self {
        Self {
            microseconds: milliseconds * 1_000,
        }
    }
}

impl<'py> IntoPyObject<'py> for &SynapseDuration {
    type Target = PyAny;
    type Output = Bound<'py, Self::Target>;
    type Error = PyErr;

    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        let duration_module = duration_module(py)?;
        let kwargs = [("microseconds", self.microseconds)].into_py_dict(py)?;
        let duration_instance = duration_module.call_method("Duration", (), Some(&kwargs))?;
        Ok(duration_instance.into_bound())
    }
}
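For context, a minimal Python-side sketch of what the Rust helper above produces when handed to Python. It assumes only what the code shows: that `synapse.util.duration` exposes a `Duration` class accepting a `microseconds` keyword argument; the 30-second figure is illustrative.

# Hedged sketch, not part of the diff: the Python equivalent of
# SynapseDuration::from_milliseconds(...) followed by into_pyobject().
from synapse.util.duration import Duration  # import path taken from the module name above


def duration_from_milliseconds(milliseconds: int) -> "Duration":
    # The Rust helper stores microseconds and calls Duration(microseconds=...).
    return Duration(microseconds=milliseconds * 1_000)


eviction_duration = duration_from_milliseconds(30_000)  # e.g. a 30 second eviction interval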
@@ -5,7 +5,6 @@ use pyo3::prelude::*;
use pyo3_log::ResetHandle;

pub mod acl;
pub mod duration;
pub mod errors;
pub mod events;
pub mod http;

@@ -35,7 +35,6 @@ use ulid::Ulid;

use self::session::Session;
use crate::{
duration::SynapseDuration,
errors::{NotFoundError, SynapseError},
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
UnwrapInfallible,
@@ -133,8 +132,6 @@ impl RendezvousHandler {
.unwrap_infallible()
.unbind();

let eviction_duration = SynapseDuration::from_milliseconds(eviction_interval);

// Construct a Python object so that we can get a reference to the
// evict method and schedule it to run.
let self_ = Py::new(
@@ -152,7 +149,7 @@ impl RendezvousHandler {
let evict = self_.getattr(py, "_evict")?;
homeserver.call_method0("get_clock")?.call_method(
"looping_call",
(evict, &eviction_duration),
(evict, eviction_interval),
None,
)?;

@@ -1,5 +1,5 @@
|
||||
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
|
||||
$id: https://element-hq.github.io/synapse/schema/synapse/v1.144/synapse-config.schema.json
|
||||
$id: https://element-hq.github.io/synapse/schema/synapse/v1.142/synapse-config.schema.json
|
||||
type: object
|
||||
properties:
|
||||
modules:
|
||||
|
||||
@@ -18,7 +18,7 @@ import sys
|
||||
import threading
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from types import FrameType
|
||||
from typing import Collection, Sequence
|
||||
from typing import Collection, Optional, Sequence
|
||||
|
||||
# These are expanded inside the dockerfile to be a fully qualified image name.
|
||||
# e.g. docker.io/library/debian:bookworm
|
||||
@@ -49,7 +49,7 @@ class Builder:
|
||||
def __init__(
|
||||
self,
|
||||
redirect_stdout: bool = False,
|
||||
docker_build_args: Sequence[str] | None = None,
|
||||
docker_build_args: Optional[Sequence[str]] = None,
|
||||
):
|
||||
self.redirect_stdout = redirect_stdout
|
||||
self._docker_build_args = tuple(docker_build_args or ())
|
||||
@@ -167,7 +167,7 @@ class Builder:
|
||||
def run_builds(
|
||||
builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
|
||||
) -> None:
|
||||
def sig(signum: int, _frame: FrameType | None) -> None:
|
||||
def sig(signum: int, _frame: Optional[FrameType]) -> None:
|
||||
print("Caught SIGINT")
|
||||
builder.kill_containers()
|
||||
|
||||
|
||||
@@ -9,11 +9,15 @@ from typing import Any
|
||||
|
||||
import click
|
||||
import git
|
||||
import sqlglot
|
||||
import sqlglot.expressions
|
||||
|
||||
SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
|
||||
|
||||
INDEX_CREATION_REGEX = re.compile(
|
||||
r"CREATE .*INDEX .*ON ([a-z_0-9]+)", flags=re.IGNORECASE
|
||||
)
|
||||
INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_0-9]+)", flags=re.IGNORECASE)
|
||||
TABLE_CREATION_REGEX = re.compile(
|
||||
r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE
|
||||
)
|
||||
|
||||
# The base branch we want to check against. We use the main development branch
|
||||
# on the assumption that is what we are developing against.
|
||||
@@ -137,9 +141,6 @@ def main(force_colors: bool) -> None:
|
||||
color=force_colors,
|
||||
)
|
||||
|
||||
# Mark this run as not successful, but continue so that we report *all*
|
||||
# errors.
|
||||
return_code = 1
|
||||
else:
|
||||
click.secho(
|
||||
f"All deltas are in the correct folder: {current_schema_version}!",
|
||||
@@ -152,90 +153,60 @@ def main(force_colors: bool) -> None:
|
||||
# and delta files are also numbered in order.
|
||||
changed_delta_files.sort()
|
||||
|
||||
success = check_schema_delta(changed_delta_files, force_colors)
|
||||
if not success:
|
||||
return_code = 1
|
||||
# Now check that we're not trying to create or drop indices. If we want to
|
||||
# do that they should be in background updates. The exception is when we
|
||||
# create indices on tables we've just created.
|
||||
created_tables = set()
|
||||
for delta_file in changed_delta_files:
|
||||
with open(delta_file) as fd:
|
||||
delta_lines = fd.readlines()
|
||||
|
||||
for line in delta_lines:
|
||||
# Strip SQL comments
|
||||
line = line.split("--", maxsplit=1)[0]
|
||||
|
||||
# Check and track any tables we create
|
||||
match = TABLE_CREATION_REGEX.search(line)
|
||||
if match:
|
||||
table_name = match.group(1)
|
||||
created_tables.add(table_name)
|
||||
|
||||
# Check for dropping indices, these are always banned
|
||||
match = INDEX_DELETION_REGEX.search(line)
|
||||
if match:
|
||||
clause = match.group()
|
||||
|
||||
click.secho(
|
||||
f"Found delta with index deletion: '{clause}' in {delta_file}",
|
||||
fg="red",
|
||||
bold=True,
|
||||
color=force_colors,
|
||||
)
|
||||
click.secho(
|
||||
" ↪ These should be in background updates.",
|
||||
)
|
||||
return_code = 1
|
||||
|
||||
# Check for index creation, which is only allowed for tables we've
|
||||
# created.
|
||||
match = INDEX_CREATION_REGEX.search(line)
|
||||
if match:
|
||||
clause = match.group()
|
||||
table_name = match.group(1)
|
||||
if table_name not in created_tables:
|
||||
click.secho(
|
||||
f"Found delta with index creation for existing table: '{clause}' in {delta_file}",
|
||||
fg="red",
|
||||
bold=True,
|
||||
color=force_colors,
|
||||
)
|
||||
click.secho(
|
||||
" ↪ These should be in background updates (or the table should be created in the same delta).",
|
||||
)
|
||||
return_code = 1
|
||||
|
||||
click.get_current_context().exit(return_code)


def check_schema_delta(delta_files: list[str], force_colors: bool) -> bool:
    """Check that the given schema delta files do not create or drop indices
    inappropriately.

    Index creation is only allowed on tables created in the same set of deltas.

    Index deletion is never allowed and should be done in background updates.

    Returns:
        True if all checks succeeded, False if at least one failed.
    """

    # The tables created in this delta
    created_tables = set[str]()

    # The indices created/dropped in this delta, each a tuple of (table_name, sql)
    created_indices = list[tuple[str, str]]()

    # The indices dropped in this delta, just the sql
    dropped_indices = list[str]()

    for delta_file in delta_files:
        with open(delta_file) as fd:
            delta_contents = fd.read()

        # Assume the SQL dialect from the file extension, defaulting to Postgres.
        sql_lang = "postgres"
        if delta_file.endswith(".sqlite"):
            sql_lang = "sqlite"

        statements = sqlglot.parse(delta_contents, read=sql_lang)

        for statement in statements:
            if isinstance(statement, sqlglot.expressions.Create):
                if statement.kind == "TABLE":
                    assert isinstance(statement.this, sqlglot.expressions.Schema)
                    assert isinstance(statement.this.this, sqlglot.expressions.Table)

                    table_name = statement.this.this.name
                    created_tables.add(table_name)
                elif statement.kind == "INDEX":
                    assert isinstance(statement.this, sqlglot.expressions.Index)

                    table_name = statement.this.args["table"].name
                    created_indices.append((table_name, statement.sql()))
            elif isinstance(statement, sqlglot.expressions.Drop):
                if statement.kind == "INDEX":
                    dropped_indices.append(statement.sql())

    success = True
    for table_name, clause in created_indices:
        if table_name not in created_tables:
            click.secho(
                f"Found delta with index creation for existing table: '{clause}'",
                fg="red",
                bold=True,
                color=force_colors,
            )
            click.secho(
                " ↪ These should be in background updates (or the table should be created in the same delta).",
            )
            success = False

    for clause in dropped_indices:
        click.secho(
            f"Found delta with index deletion: '{clause}'",
            fg="red",
            bold=True,
            color=force_colors,
        )
        click.secho(
            " ↪ These should be in background updates.",
        )
        success = False

    return success


if __name__ == "__main__":
    main()

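The checker above leans on sqlglot's parse tree rather than regexes. Below is a minimal, self-contained sketch of the same detection idea run on an inline SQL string instead of delta files; it assumes only that sqlglot is installed, and the table and index names are illustrative.

import sqlglot
import sqlglot.expressions

# Illustrative delta: a brand-new table with an index on it, plus an index on a
# pre-existing table, which the checker above would flag.
delta_sql = """
CREATE TABLE new_table (id BIGINT PRIMARY KEY);
CREATE INDEX new_table_idx ON new_table (id);
CREATE INDEX existing_table_idx ON existing_table (id);
"""

created_tables = set()
for statement in sqlglot.parse(delta_sql, read="postgres"):
    if isinstance(statement, sqlglot.expressions.Create):
        if statement.kind == "TABLE":
            # CREATE TABLE wraps the table in a Schema node, as asserted above.
            created_tables.add(statement.this.this.name)
        elif statement.kind == "INDEX":
            table_name = statement.this.args["table"].name
            if table_name not in created_tables:
                print(f"Index on an existing table belongs in a background update: {statement.sql()}")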
@@ -72,151 +72,153 @@ For help on arguments to 'go test', run 'go help testflag'.
|
||||
EOF
|
||||
}
|
||||
|
||||
# We use a function to wrap the script logic so that we can use `return` to exit early
|
||||
# if needed. This is particularly useful so that this script can be sourced by other
|
||||
# scripts without exiting the calling subshell (composable). This allows us to share
|
||||
# variables like `SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES` with other scripts.
|
||||
#
|
||||
# Returns an exit code of 0 on success, or 1 on failure.
|
||||
main() {
|
||||
# parse our arguments
|
||||
skip_docker_build=""
|
||||
skip_complement_run=""
|
||||
while [ $# -ge 1 ]; do
|
||||
# parse our arguments
|
||||
skip_docker_build=""
|
||||
skip_complement_run=""
|
||||
while [ $# -ge 1 ]; do
|
||||
arg=$1
|
||||
case "$arg" in
|
||||
"-h")
|
||||
usage
|
||||
return 1
|
||||
;;
|
||||
"-f"|"--fast")
|
||||
skip_docker_build=1
|
||||
;;
|
||||
"--build-only")
|
||||
skip_complement_run=1
|
||||
;;
|
||||
"-e"|"--editable")
|
||||
use_editable_synapse=1
|
||||
;;
|
||||
"--rebuild-editable")
|
||||
rebuild_editable_synapse=1
|
||||
;;
|
||||
*)
|
||||
# unknown arg: presumably an argument to gotest. break the loop.
|
||||
break
|
||||
"-h")
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
"-f"|"--fast")
|
||||
skip_docker_build=1
|
||||
;;
|
||||
"--build-only")
|
||||
skip_complement_run=1
|
||||
;;
|
||||
"-e"|"--editable")
|
||||
use_editable_synapse=1
|
||||
;;
|
||||
"--rebuild-editable")
|
||||
rebuild_editable_synapse=1
|
||||
;;
|
||||
*)
|
||||
# unknown arg: presumably an argument to gotest. break the loop.
|
||||
break
|
||||
esac
|
||||
shift
|
||||
done
|
||||
done
|
||||
|
||||
# enable buildkit for the docker builds
|
||||
export DOCKER_BUILDKIT=1
|
||||
# enable buildkit for the docker builds
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
# Determine whether to use the docker or podman container runtime.
|
||||
if [ -n "$PODMAN" ]; then
|
||||
export CONTAINER_RUNTIME=podman
|
||||
export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
|
||||
export BUILDAH_FORMAT=docker
|
||||
export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
|
||||
else
|
||||
export CONTAINER_RUNTIME=docker
|
||||
fi
|
||||
# Determine whether to use the docker or podman container runtime.
|
||||
if [ -n "$PODMAN" ]; then
|
||||
export CONTAINER_RUNTIME=podman
|
||||
export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
|
||||
export BUILDAH_FORMAT=docker
|
||||
export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
|
||||
else
|
||||
export CONTAINER_RUNTIME=docker
|
||||
fi
|
||||
|
||||
# Change to the repository root
|
||||
cd "$(dirname $0)/.."
|
||||
# Change to the repository root
|
||||
cd "$(dirname $0)/.."
|
||||
|
||||
# Check for a user-specified Complement checkout
|
||||
if [[ -z "$COMPLEMENT_DIR" ]]; then
|
||||
COMPLEMENT_REF=${COMPLEMENT_REF:-main}
|
||||
echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
|
||||
wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
|
||||
tar -xzf ${COMPLEMENT_REF}.tar.gz
|
||||
COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
|
||||
echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
|
||||
fi
|
||||
# Check for a user-specified Complement checkout
|
||||
if [[ -z "$COMPLEMENT_DIR" ]]; then
|
||||
COMPLEMENT_REF=${COMPLEMENT_REF:-main}
|
||||
echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
|
||||
wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
|
||||
tar -xzf ${COMPLEMENT_REF}.tar.gz
|
||||
COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
|
||||
echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
|
||||
fi
|
||||
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
if [[ -e synapse/synapse_rust.abi3.so ]]; then
|
||||
# In an editable install, back up the host's compiled Rust module to prevent
|
||||
# inconvenience; the container will overwrite the module with its own copy.
|
||||
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
|
||||
# And restore it on exit:
|
||||
synapse_pkg=`realpath synapse`
|
||||
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
|
||||
# In an editable install, back up the host's compiled Rust module to prevent
|
||||
# inconvenience; the container will overwrite the module with its own copy.
|
||||
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
|
||||
# And restore it on exit:
|
||||
synapse_pkg=`realpath synapse`
|
||||
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
|
||||
fi
|
||||
|
||||
editable_mount="$(realpath .):/editable-src:z"
|
||||
if [ -n "$rebuild_editable_synapse" ]; then
|
||||
unset skip_docker_build
|
||||
elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
|
||||
# complement-synapse-editable already exists: see if we can still use it:
|
||||
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
|
||||
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
|
||||
|
||||
# First set up the module in the right place for an editable installation.
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
|
||||
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
|
||||
skip_docker_build=1
|
||||
else
|
||||
echo "Editable Synapse image is stale. Will rebuild."
|
||||
unset skip_docker_build
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
|
||||
# complement-synapse-editable already exists: see if we can still use it:
|
||||
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
|
||||
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
|
||||
|
||||
if [ -z "$skip_docker_build" ]; then
|
||||
# First set up the module in the right place for an editable installation.
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
|
||||
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
|
||||
skip_docker_build=1
|
||||
else
|
||||
echo "Editable Synapse image is stale. Will rebuild."
|
||||
unset skip_docker_build
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$skip_docker_build" ]; then
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
|
||||
# Build a special image designed for use in development with editable
|
||||
# installs.
|
||||
$CONTAINER_RUNTIME build -t synapse-editable \
|
||||
-f "docker/editable.Dockerfile" .
|
||||
# Build a special image designed for use in development with editable
|
||||
# installs.
|
||||
$CONTAINER_RUNTIME build -t synapse-editable \
|
||||
-f "docker/editable.Dockerfile" .
|
||||
|
||||
$CONTAINER_RUNTIME build -t synapse-workers-editable \
|
||||
--build-arg FROM=synapse-editable \
|
||||
-f "docker/Dockerfile-workers" .
|
||||
$CONTAINER_RUNTIME build -t synapse-workers-editable \
|
||||
--build-arg FROM=synapse-editable \
|
||||
-f "docker/Dockerfile-workers" .
|
||||
|
||||
$CONTAINER_RUNTIME build -t complement-synapse-editable \
|
||||
--build-arg FROM=synapse-workers-editable \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse-editable \
|
||||
--build-arg FROM=synapse-workers-editable \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
|
||||
# Prepare the Rust module
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
# Prepare the Rust module
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
else
|
||||
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse \
|
||||
`# This is the tag we end up pushing to the registry (see` \
|
||||
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
|
||||
`# so people can reference it by the same name locally.` \
|
||||
-t ghcr.io/element-hq/synapse/complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse \
|
||||
`# This is the tag we end up pushing to the registry (see` \
|
||||
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
|
||||
`# so people can reference it by the same name locally.` \
|
||||
-t ghcr.io/element-hq/synapse/complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
fi
|
||||
|
||||
echo "Docker images built."
|
||||
else
|
||||
echo "Skipping Docker image build as requested."
|
||||
fi
|
||||
fi
|
||||
|
||||
test_packages=(
|
||||
if [ -n "$skip_complement_run" ]; then
|
||||
echo "Skipping Complement run as requested."
|
||||
exit
|
||||
fi
|
||||
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
|
||||
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
|
||||
fi
|
||||
|
||||
extra_test_args=()
|
||||
|
||||
test_packages=(
|
||||
./tests/csapi
|
||||
./tests
|
||||
./tests/msc3874
|
||||
@@ -229,104 +231,71 @@ main() {
|
||||
./tests/msc4140
|
||||
./tests/msc4155
|
||||
./tests/msc4306
|
||||
)
|
||||
)
|
||||
|
||||
# Export the list of test packages as a space-separated environment variable, so other
|
||||
# scripts can use it.
|
||||
export SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES="${test_packages[@]}"
|
||||
# Enable dirty runs, so tests will reuse the same container where possible.
|
||||
# This significantly speeds up tests, but increases the possibility of test pollution.
|
||||
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
|
||||
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
|
||||
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
|
||||
fi
|
||||
# All environment variables starting with PASS_ will be shared.
|
||||
# (The prefix is stripped off before reaching the container.)
|
||||
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
|
||||
|
||||
# Enable dirty runs, so tests will reuse the same container where possible.
|
||||
# This significantly speeds up tests, but increases the possibility of test pollution.
|
||||
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
|
||||
# It takes longer than 10m to run the whole suite.
|
||||
extra_test_args+=("-timeout=60m")
|
||||
|
||||
# All environment variables starting with PASS_ will be shared.
|
||||
# (The prefix is stripped off before reaching the container.)
|
||||
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
|
||||
if [[ -n "$WORKERS" ]]; then
|
||||
# Use workers.
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
|
||||
|
||||
# * -count=1: Only run tests once, and disable caching for tests.
|
||||
# * -v: Output test logs, even if those tests pass.
|
||||
# * -tags=synapse_blacklist: Enable the `synapse_blacklist` build tag, which is
|
||||
# necessary for `runtime.Synapse` checks/skips to work in the tests
|
||||
test_args=(
|
||||
-v
|
||||
-tags="synapse_blacklist"
|
||||
-count=1
|
||||
)
|
||||
# Pass through the workers defined. If none, it will be an empty string
|
||||
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
|
||||
|
||||
# It takes longer than 10m to run the whole suite.
|
||||
test_args+=("-timeout=60m")
|
||||
# Workers can only use Postgres as a database.
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
|
||||
if [[ -n "$WORKERS" ]]; then
|
||||
# Use workers.
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
|
||||
# And provide some more configuration to complement.
|
||||
|
||||
# Pass through the workers defined. If none, it will be an empty string
|
||||
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
|
||||
|
||||
# Workers can only use Postgres as a database.
|
||||
# It can take quite a while to spin up a worker-mode Synapse for the first
|
||||
# time (the main problem is that we start 14 python processes for each test,
|
||||
# and complement likes to do two of them in parallel).
|
||||
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
|
||||
if [[ -n "$POSTGRES" ]]; then
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
|
||||
# And provide some more configuration to complement.
|
||||
|
||||
# It can take quite a while to spin up a worker-mode Synapse for the first
|
||||
# time (the main problem is that we start 14 python processes for each test,
|
||||
# and complement likes to do two of them in parallel).
|
||||
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
|
||||
if [[ -n "$POSTGRES" ]]; then
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
|
||||
fi
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
|
||||
fi
|
||||
|
||||
if [[ -n "$ASYNCIO_REACTOR" ]]; then
|
||||
# Enable the Twisted asyncio reactor
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
|
||||
fi
|
||||
|
||||
if [[ -n "$UNIX_SOCKETS" ]]; then
|
||||
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
|
||||
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
|
||||
fi
|
||||
|
||||
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
|
||||
# Set the log level to what is desired
|
||||
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
|
||||
|
||||
# Allow logging sensitive things (currently SQL queries & parameters).
|
||||
# (This won't have any effect if we're not logging at DEBUG level overall.)
|
||||
# Since this is just a test suite, this is fine and won't reveal anyone's
|
||||
# personal information
|
||||
export PASS_SYNAPSE_LOG_SENSITIVE=1
|
||||
fi
|
||||
|
||||
# Log a few more useful things for a developer attempting to debug something
|
||||
# particularly tricky.
|
||||
export PASS_SYNAPSE_LOG_TESTING=1
|
||||
|
||||
if [ -n "$skip_complement_run" ]; then
|
||||
echo "Skipping Complement run as requested."
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Run the tests!
|
||||
echo "Running Complement with ${test_args[@]} $@ ${test_packages[@]}"
|
||||
cd "$COMPLEMENT_DIR"
|
||||
go test "${test_args[@]}" "$@" "${test_packages[@]}"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
# For any non-zero exit code (indicating some sort of error happened), we want to exit
|
||||
# with that code.
|
||||
exit_code=$?
|
||||
if [ $exit_code -ne 0 ]; then
|
||||
exit $exit_code
|
||||
fi
|
||||
|
||||
if [[ -n "$ASYNCIO_REACTOR" ]]; then
|
||||
# Enable the Twisted asyncio reactor
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
|
||||
fi
|
||||
|
||||
if [[ -n "$UNIX_SOCKETS" ]]; then
|
||||
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
|
||||
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
|
||||
fi
|
||||
|
||||
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
|
||||
# Set the log level to what is desired
|
||||
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
|
||||
|
||||
# Allow logging sensitive things (currently SQL queries & parameters).
|
||||
# (This won't have any effect if we're not logging at DEBUG level overall.)
|
||||
# Since this is just a test suite, this is fine and won't reveal anyone's
|
||||
# personal information
|
||||
export PASS_SYNAPSE_LOG_SENSITIVE=1
|
||||
fi
|
||||
|
||||
# Log a few more useful things for a developer attempting to debug something
|
||||
# particularly tricky.
|
||||
export PASS_SYNAPSE_LOG_TESTING=1
|
||||
|
||||
# Run the tests!
|
||||
echo "Images built; running complement with ${extra_test_args[@]} $@ ${test_packages[@]}"
|
||||
cd "$COMPLEMENT_DIR"
|
||||
|
||||
go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" "${test_packages[@]}"
|
||||
|
||||
@@ -43,7 +43,7 @@ import argparse
|
||||
import base64
|
||||
import json
|
||||
import sys
|
||||
from typing import Any, Mapping
|
||||
from typing import Any, Mapping, Optional, Union
|
||||
from urllib import parse as urlparse
|
||||
|
||||
import requests
|
||||
@@ -103,12 +103,12 @@ def sign_json(
|
||||
|
||||
|
||||
def request(
|
||||
method: str | None,
|
||||
method: Optional[str],
|
||||
origin_name: str,
|
||||
origin_key: signedjson.types.SigningKey,
|
||||
destination: str,
|
||||
path: str,
|
||||
content: str | None,
|
||||
content: Optional[str],
|
||||
verify_tls: bool,
|
||||
) -> requests.Response:
|
||||
if method is None:
|
||||
@@ -301,9 +301,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
|
||||
def get_connection_with_tls_context(
|
||||
self,
|
||||
request: PreparedRequest,
|
||||
verify: bool | str | None,
|
||||
proxies: Mapping[str, str] | None = None,
|
||||
cert: tuple[str, str] | str | None = None,
|
||||
verify: Optional[Union[bool, str]],
|
||||
proxies: Optional[Mapping[str, str]] = None,
|
||||
cert: Optional[Union[tuple[str, str], str]] = None,
|
||||
) -> HTTPConnectionPool:
|
||||
# overrides the get_connection_with_tls_context() method in the base class
|
||||
parsed = urlparse.urlsplit(request.url)
|
||||
@@ -368,7 +368,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
|
||||
return server_name, 8448, server_name
|
||||
|
||||
@staticmethod
|
||||
def _get_well_known(server_name: str) -> str | None:
|
||||
def _get_well_known(server_name: str) -> Optional[str]:
|
||||
if ":" in server_name:
|
||||
# explicit port, or ipv6 literal. Either way, no .well-known
|
||||
return None
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from typing import Any
|
||||
from typing import Any, Optional
|
||||
|
||||
import yaml
|
||||
|
||||
@@ -259,17 +259,17 @@ def indent(text: str, first_line: bool = True) -> str:
|
||||
return text
|
||||
|
||||
|
||||
def em(s: str | None) -> str:
|
||||
def em(s: Optional[str]) -> str:
|
||||
"""Add emphasis to text."""
|
||||
return f"*{s}*" if s else ""
|
||||
|
||||
|
||||
def a(s: str | None, suffix: str = " ") -> str:
|
||||
def a(s: Optional[str], suffix: str = " ") -> str:
|
||||
"""Appends a space if the given string is not empty."""
|
||||
return s + suffix if s else ""
|
||||
|
||||
|
||||
def p(s: str | None, prefix: str = " ") -> str:
|
||||
def p(s: Optional[str], prefix: str = " ") -> str:
|
||||
"""Prepend a space if the given string is not empty."""
|
||||
return prefix + s if s else ""
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
|
||||
"""
|
||||
|
||||
import enum
|
||||
from typing import Callable, Mapping
|
||||
from typing import Callable, Mapping, Optional, Union
|
||||
|
||||
import attr
|
||||
import mypy.types
|
||||
@@ -123,7 +123,7 @@ class ArgLocation:
|
||||
"""
|
||||
|
||||
|
||||
prometheus_metric_fullname_to_label_arg_map: Mapping[str, ArgLocation | None] = {
|
||||
prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
|
||||
# `Collector` subclasses:
|
||||
"prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
|
||||
"prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
|
||||
@@ -211,7 +211,7 @@ class SynapsePlugin(Plugin):
|
||||
|
||||
def get_base_class_hook(
|
||||
self, fullname: str
|
||||
) -> Callable[[ClassDefContext], None] | None:
|
||||
) -> Optional[Callable[[ClassDefContext], None]]:
|
||||
def _get_base_class_hook(ctx: ClassDefContext) -> None:
|
||||
# Run any `get_base_class_hook` checks from other plugins first.
|
||||
#
|
||||
@@ -232,7 +232,7 @@ class SynapsePlugin(Plugin):
|
||||
|
||||
def get_function_signature_hook(
|
||||
self, fullname: str
|
||||
) -> Callable[[FunctionSigContext], FunctionLike] | None:
|
||||
) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
|
||||
# Strip off the unique identifier for classes that are dynamically created inside
|
||||
# functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
|
||||
# number)
|
||||
@@ -262,7 +262,7 @@ class SynapsePlugin(Plugin):
|
||||
|
||||
def get_method_signature_hook(
|
||||
self, fullname: str
|
||||
) -> Callable[[MethodSigContext], CallableType] | None:
|
||||
) -> Optional[Callable[[MethodSigContext], CallableType]]:
|
||||
if fullname.startswith(
|
||||
(
|
||||
"synapse.util.caches.descriptors.CachedFunction.__call__",
|
||||
@@ -721,7 +721,7 @@ def check_is_cacheable_wrapper(ctx: MethodSigContext) -> CallableType:
|
||||
|
||||
def check_is_cacheable(
|
||||
signature: CallableType,
|
||||
ctx: MethodSigContext | FunctionSigContext,
|
||||
ctx: Union[MethodSigContext, FunctionSigContext],
|
||||
) -> None:
|
||||
"""
|
||||
Check if a callable returns a type which can be cached.
|
||||
@@ -795,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(
|
||||
|
||||
def is_cacheable(
|
||||
rt: mypy.types.Type, signature: CallableType, verbose: bool
|
||||
) -> tuple[bool, str | None]:
|
||||
) -> tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Check if a particular type is cachable.
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ import time
|
||||
import urllib.request
|
||||
from os import path
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Any, Match
|
||||
from typing import Any, Match, Optional, Union
|
||||
|
||||
import attr
|
||||
import click
|
||||
@@ -291,12 +291,6 @@ def _prepare() -> None:
|
||||
synapse_repo.git.add("-u")
|
||||
subprocess.run("git diff --cached", shell=True)
|
||||
print(
"Consider any upcoming platform deprecations that should be mentioned in the changelog. (e.g. upcoming Python, PostgreSQL or SQLite deprecations)"
)
print(
"Platform deprecations should be mentioned at least 1 release prior to being unsupported."
)
if click.confirm("Edit changelog?", default=False):
click.edit(filename="CHANGES.md")

@@ -333,11 +327,11 @@ def _prepare() -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: str | None) -> None:
def tag(gh_token: Optional[str]) -> None:
_tag(gh_token)


def _tag(gh_token: str | None) -> None:
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""

# Test that the GH Token is valid before continuing.
@@ -477,11 +471,11 @@ def _publish(gh_token: str) -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def upload(gh_token: str | None) -> None:
def upload(gh_token: Optional[str]) -> None:
_upload(gh_token)


def _upload(gh_token: str | None) -> None:
def _upload(gh_token: Optional[str]) -> None:
"""Upload release to pypi."""

# Test that the GH Token is valid before continuing.
@@ -582,11 +576,11 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def wait_for_actions(gh_token: str | None) -> None:
def wait_for_actions(gh_token: Optional[str]) -> None:
_wait_for_actions(gh_token)


def _wait_for_actions(gh_token: str | None) -> None:
def _wait_for_actions(gh_token: Optional[str]) -> None:
# Test that the GH Token is valid before continuing.
check_valid_gh_token(gh_token)

@@ -664,7 +658,7 @@ def _notify(message: str) -> None:
envvar=["GH_TOKEN", "GITHUB_TOKEN"],
required=False,
)
def merge_back(_gh_token: str | None) -> None:
def merge_back(_gh_token: Optional[str]) -> None:
_merge_back()


@@ -721,7 +715,7 @@ def _merge_back() -> None:
envvar=["GH_TOKEN", "GITHUB_TOKEN"],
required=False,
)
def announce(_gh_token: str | None) -> None:
def announce(_gh_token: Optional[str]) -> None:
_announce()


@@ -857,7 +851,7 @@ def get_repo_and_check_clean_checkout(
return repo


def check_valid_gh_token(gh_token: str | None) -> None:
def check_valid_gh_token(gh_token: Optional[str]) -> None:
"""Check that a github token is valid, if supplied"""

if not gh_token:
@@ -873,7 +867,7 @@ def check_valid_gh_token(gh_token: str | None) -> None:
raise click.ClickException(f"Github credentials are bad: {e}")


def find_ref(repo: git.Repo, ref_name: str) -> git.HEAD | None:
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.references:
return repo.references[ref_name]
@@ -910,7 +904,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:

# These are 0-based.
start_line: int
end_line: int | None = None # Is none if its the last entry
end_line: Optional[int] = None # Is none if its the last entry

headings: list[VersionSection] = []
for i, token in enumerate(tokens):
@@ -997,7 +991,7 @@ def build_dependabot_changelog(repo: Repo, current_version: version.Version) ->
messages = []
for commit in reversed(commits):
if commit.author.name == "dependabot[bot]":
message: str | bytes = commit.message
message: Union[str, bytes] = commit.message
if isinstance(message, bytes):
message = message.decode("utf-8")
messages.append(message.split("\n", maxsplit=1)[0])

@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Iterator
from typing import Any, Iterator, Optional

import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")


def get_schema_versions(tag: git.Tag) -> tuple[int | None, int | None]:
def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
"""Get the schema and schema compat versions for a tag."""
schema_version = None
schema_compat_version = None

@@ -13,8 +13,10 @@ from typing import (
Iterator,
KeysView,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
ValuesView,
overload,
)
@@ -49,7 +51,7 @@ class SortedDict(dict[_KT, _VT]):
self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
) -> None: ...
@property
def key(self) -> _Key[_KT] | None: ...
def key(self) -> Optional[_Key[_KT]]: ...
@property
def iloc(self) -> SortedKeysView[_KT]: ...
def clear(self) -> None: ...
@@ -77,10 +79,10 @@ class SortedDict(dict[_KT, _VT]):
@overload
def pop(self, key: _KT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _T = ...) -> _VT | _T: ...
def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ...
def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
# Mypy now reports the first overload as an error, because typeshed widened the type
# of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
# https://github.com/python/typeshed/pull/6653
@@ -104,8 +106,8 @@ class SortedDict(dict[_KT, _VT]):
def _check(self) -> None: ...
def islice(
self,
start: int | None = ...,
stop: int | None = ...,
start: Optional[int] = ...,
stop: Optional[int] = ...,
reverse: bool = ...,
) -> Iterator[_KT]: ...
def bisect_left(self, value: _KT) -> int: ...
@@ -116,7 +118,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
def __getitem__(self, index: int) -> _KT_co: ...
@overload
def __getitem__(self, index: slice) -> list[_KT_co]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@@ -124,11 +126,11 @@ class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]
def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
@overload
def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...

class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
@overload
def __getitem__(self, index: int) -> _VT_co: ...
@overload
def __getitem__(self, index: slice) -> list[_VT_co]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...

@@ -10,8 +10,10 @@ from typing import (
Iterable,
Iterator,
MutableSequence,
Optional,
Sequence,
TypeVar,
Union,
overload,
)

@@ -27,8 +29,8 @@ class SortedList(MutableSequence[_T]):
DEFAULT_LOAD_FACTOR: int = ...
def __init__(
self,
iterable: Iterable[_T] | None = ...,
key: _Key[_T] | None = ...,
iterable: Optional[Iterable[_T]] = ...,
key: Optional[_Key[_T]] = ...,
): ...
# NB: currently mypy does not honour return type, see mypy #3307
@overload
@@ -40,7 +42,7 @@ class SortedList(MutableSequence[_T]):
@overload
def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
@property
def key(self) -> Callable[[_T], Any] | None: ...
def key(self) -> Optional[Callable[[_T], Any]]: ...
def _reset(self, load: int) -> None: ...
def clear(self) -> None: ...
def _clear(self) -> None: ...
@@ -55,7 +57,7 @@ class SortedList(MutableSequence[_T]):
def _pos(self, idx: int) -> int: ...
def _build_index(self) -> None: ...
def __contains__(self, value: Any) -> bool: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
@overload
def __getitem__(self, index: int) -> _T: ...
@overload
@@ -74,8 +76,8 @@ class SortedList(MutableSequence[_T]):
def reverse(self) -> None: ...
def islice(
self,
start: int | None = ...,
stop: int | None = ...,
start: Optional[int] = ...,
stop: Optional[int] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def _islice(
@@ -88,8 +90,8 @@ class SortedList(MutableSequence[_T]):
) -> Iterator[_T]: ...
def irange(
self,
minimum: int | None = ...,
maximum: int | None = ...,
minimum: Optional[int] = ...,
maximum: Optional[int] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
@@ -105,7 +107,7 @@ class SortedList(MutableSequence[_T]):
def insert(self, index: int, value: _T) -> None: ...
def pop(self, index: int = ...) -> _T: ...
def index(
self, value: _T, start: int | None = ..., stop: int | None = ...
self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
) -> int: ...
def __add__(self: _SL, other: Iterable[_T]) -> _SL: ...
def __radd__(self: _SL, other: Iterable[_T]) -> _SL: ...
@@ -124,10 +126,10 @@ class SortedList(MutableSequence[_T]):

class SortedKeyList(SortedList[_T]):
def __init__(
self, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
self, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
) -> None: ...
def __new__(
cls, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
cls, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
) -> SortedKeyList[_T]: ...
@property
def key(self) -> Callable[[_T], Any]: ...
@@ -144,15 +146,15 @@ class SortedKeyList(SortedList[_T]):
def _delete(self, pos: int, idx: int) -> None: ...
def irange(
self,
minimum: int | None = ...,
maximum: int | None = ...,
minimum: Optional[int] = ...,
maximum: Optional[int] = ...,
inclusive: tuple[bool, bool] = ...,
reverse: bool = ...,
) -> Iterator[_T]: ...
def irange_key(
self,
min_key: Any | None = ...,
max_key: Any | None = ...,
min_key: Optional[Any] = ...,
max_key: Optional[Any] = ...,
inclusive: tuple[bool, bool] = ...,
reserve: bool = ...,
) -> Iterator[_T]: ...
@@ -168,7 +170,7 @@ class SortedKeyList(SortedList[_T]):
def copy(self: _SKL) -> _SKL: ...
def __copy__(self: _SKL) -> _SKL: ...
def index(
self, value: _T, start: int | None = ..., stop: int | None = ...
self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
) -> int: ...
def __add__(self: _SKL, other: Iterable[_T]) -> _SKL: ...
def __radd__(self: _SKL, other: Iterable[_T]) -> _SKL: ...

@@ -11,8 +11,10 @@ from typing import (
|
||||
Iterable,
|
||||
Iterator,
|
||||
MutableSet,
|
||||
Optional,
|
||||
Sequence,
|
||||
TypeVar,
|
||||
Union,
|
||||
overload,
|
||||
)
|
||||
|
||||
@@ -26,19 +28,21 @@ _Key = Callable[[_T], Any]
|
||||
class SortedSet(MutableSet[_T], Sequence[_T]):
|
||||
def __init__(
|
||||
self,
|
||||
iterable: Iterable[_T] | None = ...,
|
||||
key: _Key[_T] | None = ...,
|
||||
iterable: Optional[Iterable[_T]] = ...,
|
||||
key: Optional[_Key[_T]] = ...,
|
||||
) -> None: ...
|
||||
@classmethod
|
||||
def _fromset(cls, values: set[_T], key: _Key[_T] | None = ...) -> SortedSet[_T]: ...
|
||||
def _fromset(
|
||||
cls, values: set[_T], key: Optional[_Key[_T]] = ...
|
||||
) -> SortedSet[_T]: ...
|
||||
@property
|
||||
def key(self) -> _Key[_T] | None: ...
|
||||
def key(self) -> Optional[_Key[_T]]: ...
|
||||
def __contains__(self, value: Any) -> bool: ...
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> _T: ...
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> list[_T]: ...
|
||||
def __delitem__(self, index: int | slice) -> None: ...
|
||||
def __delitem__(self, index: Union[int, slice]) -> None: ...
|
||||
def __eq__(self, other: Any) -> bool: ...
|
||||
def __ne__(self, other: Any) -> bool: ...
|
||||
def __lt__(self, other: Iterable[_T]) -> bool: ...
|
||||
@@ -58,28 +62,32 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
|
||||
def _discard(self, value: _T) -> None: ...
|
||||
def pop(self, index: int = ...) -> _T: ...
|
||||
def remove(self, value: _T) -> None: ...
|
||||
def difference(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def difference_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def intersection(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __and__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def intersection_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __xor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __rxor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def difference(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def difference_update(
|
||||
self, *iterables: Iterable[_S]
|
||||
) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def intersection(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __and__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def intersection_update(
|
||||
self, *iterables: Iterable[_S]
|
||||
) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __xor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __rxor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def symmetric_difference_update(
|
||||
self, other: Iterable[_S]
|
||||
) -> SortedSet[_T | _S]: ...
|
||||
def __ixor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def union(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __or__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
def _update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
|
||||
) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __ixor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def union(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __or__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
|
||||
def __reduce__(
|
||||
self,
|
||||
) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
|
||||
@@ -89,18 +97,18 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
|
||||
def bisect_right(self, value: _T) -> int: ...
|
||||
def islice(
|
||||
self,
|
||||
start: int | None = ...,
|
||||
stop: int | None = ...,
|
||||
start: Optional[int] = ...,
|
||||
stop: Optional[int] = ...,
|
||||
reverse: bool = ...,
|
||||
) -> Iterator[_T]: ...
|
||||
def irange(
|
||||
self,
|
||||
minimum: _T | None = ...,
|
||||
maximum: _T | None = ...,
|
||||
minimum: Optional[_T] = ...,
|
||||
maximum: Optional[_T] = ...,
|
||||
inclusive: tuple[bool, bool] = ...,
|
||||
reverse: bool = ...,
|
||||
) -> Iterator[_T]: ...
|
||||
def index(
|
||||
self, value: _T, start: int | None = ..., stop: int | None = ...
|
||||
self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
|
||||
) -> int: ...
|
||||
def _reset(self, load: int) -> None: ...
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
|
||||
"""Contains *incomplete* type hints for txredisapi."""
|
||||
|
||||
from typing import Any
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from twisted.internet import protocol
|
||||
from twisted.internet.defer import Deferred
|
||||
@@ -29,8 +29,8 @@ class RedisProtocol(protocol.Protocol):
|
||||
self,
|
||||
key: str,
|
||||
value: Any,
|
||||
expire: int | None = None,
|
||||
pexpire: int | None = None,
|
||||
expire: Optional[int] = None,
|
||||
pexpire: Optional[int] = None,
|
||||
only_if_not_exists: bool = False,
|
||||
only_if_exists: bool = False,
|
||||
) -> "Deferred[None]": ...
|
||||
@@ -38,8 +38,8 @@ class RedisProtocol(protocol.Protocol):
|
||||
|
||||
class SubscriberProtocol(RedisProtocol):
|
||||
def __init__(self, *args: object, **kwargs: object): ...
|
||||
password: str | None
|
||||
def subscribe(self, channels: str | list[str]) -> "Deferred[None]": ...
|
||||
password: Optional[str]
|
||||
def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
|
||||
def connectionMade(self) -> None: ...
|
||||
# type-ignore: twisted.internet.protocol.Protocol provides a default argument for
|
||||
# `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
|
||||
@@ -49,12 +49,12 @@ class SubscriberProtocol(RedisProtocol):
|
||||
def lazyConnection(
|
||||
host: str = ...,
|
||||
port: int = ...,
|
||||
dbid: int | None = ...,
|
||||
dbid: Optional[int] = ...,
|
||||
reconnect: bool = ...,
|
||||
charset: str = ...,
|
||||
password: str | None = ...,
|
||||
connectTimeout: int | None = ...,
|
||||
replyTimeout: int | None = ...,
|
||||
password: Optional[str] = ...,
|
||||
connectTimeout: Optional[int] = ...,
|
||||
replyTimeout: Optional[int] = ...,
|
||||
convertNumbers: bool = ...,
|
||||
) -> RedisProtocol: ...
|
||||
|
||||
@@ -70,18 +70,18 @@ class RedisFactory(protocol.ReconnectingClientFactory):
|
||||
continueTrying: bool
|
||||
handler: ConnectionHandler
|
||||
pool: list[RedisProtocol]
|
||||
replyTimeout: int | None
|
||||
replyTimeout: Optional[int]
|
||||
def __init__(
|
||||
self,
|
||||
uuid: str,
|
||||
dbid: int | None,
|
||||
dbid: Optional[int],
|
||||
poolsize: int,
|
||||
isLazy: bool = False,
|
||||
handler: type = ConnectionHandler,
|
||||
charset: str = "utf-8",
|
||||
password: str | None = None,
|
||||
replyTimeout: int | None = None,
|
||||
convertNumbers: int | None = True,
|
||||
password: Optional[str] = None,
|
||||
replyTimeout: Optional[int] = None,
|
||||
convertNumbers: Optional[int] = True,
|
||||
): ...
|
||||
def buildProtocol(self, addr: IAddress) -> RedisProtocol: ...
|
||||
|
||||
|
||||
@@ -22,13 +22,13 @@
|
||||
import argparse
|
||||
import sys
|
||||
import time
|
||||
from typing import NoReturn
|
||||
from typing import NoReturn, Optional
|
||||
|
||||
from signedjson.key import encode_verify_key_base64, get_verify_key, read_signing_keys
|
||||
from signedjson.types import VerifyKey
|
||||
|
||||
|
||||
def exit(status: int = 0, message: str | None = None) -> NoReturn:
|
||||
def exit(status: int = 0, message: Optional[str] = None) -> NoReturn:
|
||||
if message:
|
||||
print(message, file=sys.stderr)
|
||||
sys.exit(status)
|
||||
|
||||
@@ -25,7 +25,7 @@ import logging
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterable, Pattern
|
||||
from typing import Iterable, Optional, Pattern
|
||||
|
||||
import yaml
|
||||
|
||||
@@ -46,7 +46,7 @@ logger = logging.getLogger("generate_workers_map")
|
||||
class MockHomeserver(HomeServer):
|
||||
DATASTORE_CLASS = DataStore
|
||||
|
||||
def __init__(self, config: HomeServerConfig, worker_app: str | None) -> None:
|
||||
def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
|
||||
super().__init__(config.server.server_name, config=config)
|
||||
self.config.worker.worker_app = worker_app
|
||||
|
||||
@@ -65,7 +65,7 @@ class EndpointDescription:
|
||||
|
||||
# The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
|
||||
# class.
|
||||
category: str | None
|
||||
category: Optional[str]
|
||||
|
||||
# TODO:
|
||||
# - does it need to be routed based on a stream writer config?
|
||||
@@ -141,7 +141,7 @@ def get_registered_paths_for_hs(
|
||||
|
||||
|
||||
def get_registered_paths_for_default(
|
||||
worker_app: str | None, base_config: HomeServerConfig
|
||||
worker_app: Optional[str], base_config: HomeServerConfig
|
||||
) -> dict[tuple[str, str], EndpointDescription]:
|
||||
"""
|
||||
Given the name of a worker application and a base homeserver configuration,
|
||||
@@ -271,7 +271,7 @@ def main() -> None:
|
||||
# TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT
|
||||
|
||||
categories_to_methods_and_paths: dict[
|
||||
str | None, dict[tuple[str, str], EndpointDescription]
|
||||
Optional[str], dict[tuple[str, str], EndpointDescription]
|
||||
] = defaultdict(dict)
|
||||
|
||||
for (method, path), desc in elided_worker_paths.items():
|
||||
@@ -282,7 +282,7 @@ def main() -> None:
|
||||
|
||||
|
||||
def print_category(
|
||||
category_name: str | None,
|
||||
category_name: Optional[str],
|
||||
elided_worker_paths: dict[tuple[str, str], EndpointDescription],
|
||||
) -> None:
|
||||
"""
|
||||
|
||||
@@ -26,7 +26,7 @@ import hashlib
|
||||
import hmac
|
||||
import logging
|
||||
import sys
|
||||
from typing import Any, Callable, Iterable, TextIO
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
import requests
|
||||
import yaml
|
||||
@@ -54,7 +54,7 @@ def request_registration(
|
||||
server_location: str,
|
||||
shared_secret: str,
|
||||
admin: bool = False,
|
||||
user_type: str | None = None,
|
||||
user_type: Optional[str] = None,
|
||||
_print: Callable[[str], None] = print,
|
||||
exit: Callable[[int], None] = sys.exit,
|
||||
exists_ok: bool = False,
|
||||
@@ -123,13 +123,13 @@ def register_new_user(
|
||||
password: str,
|
||||
server_location: str,
|
||||
shared_secret: str,
|
||||
admin: bool | None,
|
||||
user_type: str | None,
|
||||
admin: Optional[bool],
|
||||
user_type: Optional[str],
|
||||
exists_ok: bool = False,
|
||||
) -> None:
|
||||
if not user:
|
||||
try:
|
||||
default_user: str | None = getpass.getuser()
|
||||
default_user: Optional[str] = getpass.getuser()
|
||||
except Exception:
|
||||
default_user = None
|
||||
|
||||
@@ -244,7 +244,6 @@ def main() -> None:
|
||||
group.add_argument(
|
||||
"-c",
|
||||
"--config",
|
||||
action="append",
|
||||
type=argparse.FileType("r"),
|
||||
help="Path to server config file. Used to read in shared secret.",
|
||||
)
|
||||
@@ -263,9 +262,9 @@ def main() -> None:
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
config: dict[str, Any] | None = None
|
||||
config: Optional[dict[str, Any]] = None
|
||||
if "config" in args and args.config:
|
||||
config = _read_config_files(args.config)
|
||||
config = yaml.safe_load(args.config)
|
||||
|
||||
if args.shared_secret:
|
||||
secret = args.shared_secret
|
||||
@@ -327,33 +326,6 @@ def main() -> None:
|
||||
)
|
||||
|
||||
|
||||
# Adapted from synapse.config._base.
|
||||
def _read_config_files(config_files: Iterable[TextIO]) -> dict[str, Any]:
|
||||
"""Read the config files and shallowly merge them into a dict.
|
||||
|
||||
Successive configurations are shallowly merged into ones provided earlier,
|
||||
i.e., entirely replacing top-level sections of the configuration.
|
||||
|
||||
Args:
|
||||
config_files: A list of the config files to read
|
||||
|
||||
Returns:
|
||||
The configuration dictionary.
|
||||
"""
|
||||
specified_config = {}
|
||||
for config_file in config_files:
|
||||
yaml_config = yaml.safe_load(config_file)
|
||||
|
||||
if not isinstance(yaml_config, dict):
|
||||
err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
|
||||
print(err % (config_file,))
|
||||
continue
|
||||
|
||||
specified_config.update(yaml_config)
|
||||
|
||||
return specified_config
|
||||
|
||||
|
||||
def _read_file(file_path: Any, config_path: str) -> str:
|
||||
"""Check the given file exists, and read it into a string
|
||||
|
||||
@@ -378,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _find_client_listener(config: dict[str, Any]) -> str | None:
|
||||
def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
|
||||
# try to find a listener in the config. Returns a host:port pair
|
||||
for listener in config.get("listeners", []):
|
||||
if listener.get("type") != "http" or listener.get("tls", False):
|
||||
|
||||
@@ -58,7 +58,6 @@ from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
|
||||
from synapse.storage.databases.main import FilteringWorkerStore
|
||||
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
|
||||
from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.delayed_events import DelayedEventsStore
|
||||
from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
|
||||
@@ -108,7 +107,6 @@ logger = logging.getLogger("synapse_port_db")
|
||||
BOOLEAN_COLUMNS = {
|
||||
"access_tokens": ["used"],
|
||||
"account_validity": ["email_sent"],
|
||||
"delayed_events": ["is_processed"],
|
||||
"device_lists_changes_in_room": ["converted_to_destinations"],
|
||||
"device_lists_outbound_pokes": ["sent"],
|
||||
"devices": ["hidden"],
|
||||
@@ -235,14 +233,14 @@ IGNORED_BACKGROUND_UPDATES = {
|
||||
|
||||
# Error returned by the run function. Used at the top-level part of the script to
|
||||
# handle errors and return codes.
|
||||
end_error: str | None = None
|
||||
end_error: Optional[str] = None
|
||||
# The exec_info for the error, if any. If error is defined but not exec_info the script
|
||||
# will show only the error message without the stacktrace, if exec_info is defined but
|
||||
# not the error then the script will show nothing outside of what's printed in the run
|
||||
# function. If both are defined, the script will print both the error and the stacktrace.
|
||||
end_error_exec_info: tuple[type[BaseException], BaseException, TracebackType] | None = (
|
||||
None
|
||||
)
|
||||
end_error_exec_info: Optional[
|
||||
tuple[type[BaseException], BaseException, TracebackType]
|
||||
] = None
|
||||
|
||||
R = TypeVar("R")
|
||||
|
||||
@@ -274,7 +272,6 @@ class Store(
|
||||
RelationsWorkerStore,
|
||||
EventFederationWorkerStore,
|
||||
SlidingSyncStore,
|
||||
DelayedEventsStore,
|
||||
):
|
||||
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
|
||||
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
||||
@@ -488,7 +485,7 @@ class Porter:
|
||||
|
||||
def r(
|
||||
txn: LoggingTransaction,
|
||||
) -> tuple[list[str] | None, list[tuple], list[tuple]]:
|
||||
) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
|
||||
forward_rows = []
|
||||
backward_rows = []
|
||||
if do_forward[0]:
|
||||
@@ -505,7 +502,7 @@ class Porter:
|
||||
|
||||
if forward_rows or backward_rows:
|
||||
assert txn.description is not None
|
||||
headers: list[str] | None = [
|
||||
headers: Optional[list[str]] = [
|
||||
column[0] for column in txn.description
|
||||
]
|
||||
else:
|
||||
@@ -1155,7 +1152,9 @@ class Porter:
|
||||
return done, remaining + done
|
||||
|
||||
async def _setup_state_group_id_seq(self) -> None:
|
||||
curr_id: int | None = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
curr_id: Optional[
|
||||
int
|
||||
] = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
||||
)
|
||||
|
||||
@@ -1272,10 +1271,10 @@ class Porter:
|
||||
|
||||
await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)
|
||||
|
||||
async def _pg_get_serial_sequence(self, table: str, column: str) -> str | None:
|
||||
async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
|
||||
"""Returns the name of the postgres sequence associated with a column, or NULL."""
|
||||
|
||||
def r(txn: LoggingTransaction) -> str | None:
|
||||
def r(txn: LoggingTransaction) -> Optional[str]:
|
||||
txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
|
||||
result = txn.fetchone()
|
||||
if not result:
|
||||
@@ -1287,9 +1286,9 @@ class Porter:
|
||||
)
|
||||
|
||||
async def _setup_auth_chain_sequence(self) -> None:
|
||||
curr_chain_id: (
|
||||
int | None
|
||||
) = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
curr_chain_id: Optional[
|
||||
int
|
||||
] = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
table="event_auth_chains",
|
||||
keyvalues={},
|
||||
retcol="MAX(chain_id)",
|
||||
|
||||
@@ -30,7 +30,7 @@ import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from typing import Iterable, NoReturn, TextIO
|
||||
from typing import Iterable, NoReturn, Optional, TextIO
|
||||
|
||||
import yaml
|
||||
|
||||
@@ -135,7 +135,7 @@ def start(pidfile: str, app: str, config_files: Iterable[str], daemonize: bool)
|
||||
return False
|
||||
|
||||
|
||||
def stop(pidfile: str, app: str) -> int | None:
|
||||
def stop(pidfile: str, app: str) -> Optional[int]:
|
||||
"""Attempts to kill a synapse worker from the pidfile.
|
||||
Args:
|
||||
pidfile: path to file containing worker's pid
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
# [This file includes modifications made by New Vector Limited]
|
||||
#
|
||||
#
|
||||
from typing import TYPE_CHECKING, Protocol
|
||||
from typing import TYPE_CHECKING, Optional, Protocol
|
||||
|
||||
from prometheus_client import Histogram
|
||||
|
||||
@@ -51,7 +51,7 @@ class Auth(Protocol):
|
||||
room_id: str,
|
||||
requester: Requester,
|
||||
allow_departed_users: bool = False,
|
||||
) -> tuple[str, str | None]:
|
||||
) -> tuple[str, Optional[str]]:
|
||||
"""Check if the user is in the room, or was at some point.
|
||||
Args:
|
||||
room_id: The room to check.
|
||||
@@ -190,7 +190,7 @@ class Auth(Protocol):
|
||||
|
||||
async def check_user_in_room_or_world_readable(
|
||||
self, room_id: str, requester: Requester, allow_departed_users: bool = False
|
||||
) -> tuple[str, str | None]:
|
||||
) -> tuple[str, Optional[str]]:
|
||||
"""Checks that the user is or was in the room or the room is world
|
||||
readable. If it isn't then an exception is raised.
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
#
|
||||
#
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from netaddr import IPAddress
|
||||
|
||||
@@ -64,7 +64,7 @@ class BaseAuth:
|
||||
room_id: str,
|
||||
requester: Requester,
|
||||
allow_departed_users: bool = False,
|
||||
) -> tuple[str, str | None]:
|
||||
) -> tuple[str, Optional[str]]:
|
||||
"""Check if the user is in the room, or was at some point.
|
||||
Args:
|
||||
room_id: The room to check.
|
||||
@@ -114,7 +114,7 @@ class BaseAuth:
|
||||
@trace
|
||||
async def check_user_in_room_or_world_readable(
|
||||
self, room_id: str, requester: Requester, allow_departed_users: bool = False
|
||||
) -> tuple[str, str | None]:
|
||||
) -> tuple[str, Optional[str]]:
|
||||
"""Checks that the user is or was in the room or the room is world
|
||||
readable. If it isn't then an exception is raised.
|
||||
|
||||
@@ -294,7 +294,7 @@ class BaseAuth:
|
||||
@cancellable
|
||||
async def get_appservice_user(
|
||||
self, request: Request, access_token: str
|
||||
) -> Requester | None:
|
||||
) -> Optional[Requester]:
|
||||
"""
|
||||
Given a request, reads the request parameters to determine:
|
||||
- whether it's an application service that's making this request
|
||||
|
||||
@@ -13,10 +13,11 @@
|
||||
#
|
||||
#
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from pydantic import (
|
||||
AnyHttpUrl,
|
||||
BaseModel,
|
||||
ConfigDict,
|
||||
StrictBool,
|
||||
@@ -73,11 +74,11 @@ class ServerMetadata(BaseModel):
|
||||
class IntrospectionResponse(BaseModel):
|
||||
retrieved_at_ms: StrictInt
|
||||
active: StrictBool
|
||||
scope: StrictStr | None = None
|
||||
username: StrictStr | None = None
|
||||
sub: StrictStr | None = None
|
||||
device_id: StrictStr | None = None
|
||||
expires_in: StrictInt | None = None
|
||||
scope: Optional[StrictStr] = None
|
||||
username: Optional[StrictStr] = None
|
||||
sub: Optional[StrictStr] = None
|
||||
device_id: Optional[StrictStr] = None
|
||||
expires_in: Optional[StrictInt] = None
|
||||
model_config = ConfigDict(extra="allow")
|
||||
|
||||
def get_scope_set(self) -> set[str]:
|
||||
@@ -146,13 +147,33 @@ class MasDelegatedAuth(BaseAuth):
|
||||
|
||||
@property
|
||||
def _metadata_url(self) -> str:
|
||||
return (
|
||||
f"{str(self._config.endpoint).rstrip('/')}/.well-known/openid-configuration"
|
||||
return str(
|
||||
AnyHttpUrl.build(
|
||||
scheme=self._config.endpoint.scheme,
|
||||
username=self._config.endpoint.username,
|
||||
password=self._config.endpoint.password,
|
||||
host=self._config.endpoint.host or "",
|
||||
port=self._config.endpoint.port,
|
||||
path=".well-known/openid-configuration",
|
||||
query=None,
|
||||
fragment=None,
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def _introspection_endpoint(self) -> str:
|
||||
return f"{str(self._config.endpoint).rstrip('/')}/oauth2/introspect"
|
||||
return str(
|
||||
AnyHttpUrl.build(
|
||||
scheme=self._config.endpoint.scheme,
|
||||
username=self._config.endpoint.username,
|
||||
password=self._config.endpoint.password,
|
||||
host=self._config.endpoint.host or "",
|
||||
port=self._config.endpoint.port,
|
||||
path="oauth2/introspect",
|
||||
query=None,
|
||||
fragment=None,
|
||||
)
|
||||
)
|
||||
|
||||
async def _load_metadata(self) -> ServerMetadata:
|
||||
response = await self._http_client.get_json(self._metadata_url)
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
#
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any, Callable
|
||||
from typing import TYPE_CHECKING, Any, Callable, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from authlib.oauth2 import ClientAuth
|
||||
@@ -102,25 +102,25 @@ class IntrospectionResult:
|
||||
return []
|
||||
return scope_to_list(value)
|
||||
|
||||
def get_sub(self) -> str | None:
|
||||
def get_sub(self) -> Optional[str]:
|
||||
value = self._inner.get("sub")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_username(self) -> str | None:
|
||||
def get_username(self) -> Optional[str]:
|
||||
value = self._inner.get("username")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_name(self) -> str | None:
|
||||
def get_name(self) -> Optional[str]:
|
||||
value = self._inner.get("name")
|
||||
if not isinstance(value, str):
|
||||
return None
|
||||
return value
|
||||
|
||||
def get_device_id(self) -> str | None:
|
||||
def get_device_id(self) -> Optional[str]:
|
||||
value = self._inner.get("device_id")
|
||||
if value is not None and not isinstance(value, str):
|
||||
raise AuthError(
|
||||
@@ -174,7 +174,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
||||
self._clock = hs.get_clock()
|
||||
self._http_client = hs.get_proxied_http_client()
|
||||
self._hostname = hs.hostname
|
||||
self._admin_token: Callable[[], str | None] = self._config.admin_token
|
||||
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
|
||||
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
|
||||
|
||||
self._rust_http_client = HttpClient(
|
||||
@@ -247,7 +247,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
||||
metadata = await self._issuer_metadata.get()
|
||||
return metadata.issuer or self._config.issuer
|
||||
|
||||
async def account_management_url(self) -> str | None:
|
||||
async def account_management_url(self) -> Optional[str]:
|
||||
"""
|
||||
Get the configured account management URL
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
#
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from synapse.api.constants import LimitBlockingTypes, UserTypes
|
||||
from synapse.api.errors import Codes, ResourceLimitError
|
||||
@@ -51,10 +51,10 @@ class AuthBlocking:
|
||||
|
||||
async def check_auth_blocking(
|
||||
self,
|
||||
user_id: str | None = None,
|
||||
threepid: dict | None = None,
|
||||
user_type: str | None = None,
|
||||
requester: Requester | None = None,
|
||||
user_id: Optional[str] = None,
|
||||
threepid: Optional[dict] = None,
|
||||
user_type: Optional[str] = None,
|
||||
requester: Optional[Requester] = None,
|
||||
) -> None:
|
||||
"""Checks if the user should be rejected for some external reason,
|
||||
such as monthly active user limiting or global disable flag
|
||||
|
||||
@@ -307,10 +307,6 @@ class AccountDataTypes:
|
||||
MSC4155_INVITE_PERMISSION_CONFIG: Final = (
|
||||
"org.matrix.msc4155.invite_permission_config"
|
||||
)
|
||||
# MSC4380: Invite blocking
|
||||
MSC4380_INVITE_PERMISSION_CONFIG: Final = (
|
||||
"org.matrix.msc4380.invite_permission_config"
|
||||
)
|
||||
# Synapse-specific behaviour. See "Client-Server API Extensions" documentation
|
||||
# in Admin API for more information.
|
||||
SYNAPSE_ADMIN_CLIENT_CONFIG: Final = "io.element.synapse.admin_client_config"
|
||||
|
||||
@@ -26,7 +26,7 @@ import math
|
||||
import typing
|
||||
from enum import Enum
|
||||
from http import HTTPStatus
|
||||
from typing import Any, Optional
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
from twisted.web import http
|
||||
|
||||
@@ -137,7 +137,7 @@ class Codes(str, Enum):
|
||||
PROFILE_TOO_LARGE = "M_PROFILE_TOO_LARGE"
|
||||
KEY_TOO_LARGE = "M_KEY_TOO_LARGE"
|
||||
|
||||
# Part of MSC4155/MSC4380
|
||||
# Part of MSC4155
|
||||
INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED"
|
||||
|
||||
# Part of MSC4190
|
||||
@@ -164,9 +164,9 @@ class CodeMessageException(RuntimeError):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
code: int | HTTPStatus,
|
||||
code: Union[int, HTTPStatus],
|
||||
msg: str,
|
||||
headers: dict[str, str] | None = None,
|
||||
headers: Optional[dict[str, str]] = None,
|
||||
):
|
||||
super().__init__("%d: %s" % (code, msg))
|
||||
|
||||
@@ -223,8 +223,8 @@ class SynapseError(CodeMessageException):
|
||||
code: int,
|
||||
msg: str,
|
||||
errcode: str = Codes.UNKNOWN,
|
||||
additional_fields: dict | None = None,
|
||||
headers: dict[str, str] | None = None,
|
||||
additional_fields: Optional[dict] = None,
|
||||
headers: Optional[dict[str, str]] = None,
|
||||
):
|
||||
"""Constructs a synapse error.
|
||||
|
||||
@@ -244,7 +244,7 @@ class SynapseError(CodeMessageException):
|
||||
return cs_error(self.msg, self.errcode, **self._additional_fields)
|
||||
|
||||
@property
|
||||
def debug_context(self) -> str | None:
|
||||
def debug_context(self) -> Optional[str]:
|
||||
"""Override this to add debugging context that shouldn't be sent to clients."""
|
||||
return None
|
||||
|
||||
@@ -276,7 +276,7 @@ class ProxiedRequestError(SynapseError):
|
||||
code: int,
|
||||
msg: str,
|
||||
errcode: str = Codes.UNKNOWN,
|
||||
additional_fields: dict | None = None,
|
||||
additional_fields: Optional[dict] = None,
|
||||
):
|
||||
super().__init__(code, msg, errcode, additional_fields)
|
||||
|
||||
@@ -340,7 +340,7 @@ class FederationDeniedError(SynapseError):
|
||||
destination: The destination which has been denied
|
||||
"""
|
||||
|
||||
def __init__(self, destination: str | None):
|
||||
def __init__(self, destination: Optional[str]):
|
||||
"""Raised by federation client or server to indicate that we are
|
||||
are deliberately not attempting to contact a given server because it is
|
||||
not on our federation whitelist.
|
||||
@@ -399,7 +399,7 @@ class AuthError(SynapseError):
|
||||
code: int,
|
||||
msg: str,
|
||||
errcode: str = Codes.FORBIDDEN,
|
||||
additional_fields: dict | None = None,
|
||||
additional_fields: Optional[dict] = None,
|
||||
):
|
||||
super().__init__(code, msg, errcode, additional_fields)
|
||||
|
||||
@@ -432,7 +432,7 @@ class UnstableSpecAuthError(AuthError):
|
||||
msg: str,
|
||||
errcode: str,
|
||||
previous_errcode: str = Codes.FORBIDDEN,
|
||||
additional_fields: dict | None = None,
|
||||
additional_fields: Optional[dict] = None,
|
||||
):
|
||||
self.previous_errcode = previous_errcode
|
||||
super().__init__(code, msg, errcode, additional_fields)
|
||||
@@ -497,8 +497,8 @@ class ResourceLimitError(SynapseError):
|
||||
code: int,
|
||||
msg: str,
|
||||
errcode: str = Codes.RESOURCE_LIMIT_EXCEEDED,
|
||||
admin_contact: str | None = None,
|
||||
limit_type: str | None = None,
|
||||
admin_contact: Optional[str] = None,
|
||||
limit_type: Optional[str] = None,
|
||||
):
|
||||
self.admin_contact = admin_contact
|
||||
self.limit_type = limit_type
|
||||
@@ -542,7 +542,7 @@ class InvalidCaptchaError(SynapseError):
|
||||
self,
|
||||
code: int = 400,
|
||||
msg: str = "Invalid captcha.",
|
||||
error_url: str | None = None,
|
||||
error_url: Optional[str] = None,
|
||||
errcode: str = Codes.CAPTCHA_INVALID,
|
||||
):
|
||||
super().__init__(code, msg, errcode)
|
||||
@@ -563,9 +563,9 @@ class LimitExceededError(SynapseError):
|
||||
self,
|
||||
limiter_name: str,
|
||||
code: int = 429,
|
||||
retry_after_ms: int | None = None,
|
||||
retry_after_ms: Optional[int] = None,
|
||||
errcode: str = Codes.LIMIT_EXCEEDED,
|
||||
pause: float | None = None,
|
||||
pause: Optional[float] = None,
|
||||
):
|
||||
# Use HTTP header Retry-After to enable library-assisted retry handling.
|
||||
headers = (
|
||||
@@ -582,7 +582,7 @@ class LimitExceededError(SynapseError):
|
||||
return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)
|
||||
|
||||
@property
|
||||
def debug_context(self) -> str | None:
|
||||
def debug_context(self) -> Optional[str]:
|
||||
return self.limiter_name
|
||||
|
||||
|
||||
@@ -675,7 +675,7 @@ class RequestSendFailed(RuntimeError):
|
||||
|
||||
|
||||
class UnredactedContentDeletedError(SynapseError):
|
||||
def __init__(self, content_keep_ms: int | None = None):
|
||||
def __init__(self, content_keep_ms: Optional[int] = None):
|
||||
super().__init__(
|
||||
404,
|
||||
"The content for that event has already been erased from the database",
|
||||
@@ -751,7 +751,7 @@ class FederationError(RuntimeError):
|
||||
code: int,
|
||||
reason: str,
|
||||
affected: str,
|
||||
source: str | None = None,
|
||||
source: Optional[str] = None,
|
||||
):
|
||||
if level not in ["FATAL", "ERROR", "WARN"]:
|
||||
raise ValueError("Level is not valid: %s" % (level,))
|
||||
@@ -786,7 +786,7 @@ class FederationPullAttemptBackoffError(RuntimeError):
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, event_ids: "StrCollection", message: str | None, retry_after_ms: int
|
||||
self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int
|
||||
):
|
||||
event_ids = list(event_ids)
|
||||
|
||||
|
||||
@@ -28,7 +28,9 @@ from typing import (
|
||||
Collection,
|
||||
Iterable,
|
||||
Mapping,
|
||||
Optional,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
import jsonschema
|
||||
@@ -153,7 +155,7 @@ class Filtering:
|
||||
self.DEFAULT_FILTER_COLLECTION = FilterCollection(hs, {})
|
||||
|
||||
async def get_user_filter(
|
||||
self, user_id: UserID, filter_id: int | str
|
||||
self, user_id: UserID, filter_id: Union[int, str]
|
||||
) -> "FilterCollection":
|
||||
result = await self.store.get_user_filter(user_id, filter_id)
|
||||
return FilterCollection(self._hs, result)
|
||||
@@ -529,7 +531,7 @@ class Filter:
|
||||
return newFilter
|
||||
|
||||
|
||||
def _matches_wildcard(actual_value: str | None, filter_value: str) -> bool:
|
||||
def _matches_wildcard(actual_value: Optional[str], filter_value: str) -> bool:
|
||||
if filter_value.endswith("*") and isinstance(actual_value, str):
|
||||
type_prefix = filter_value[:-1]
|
||||
return actual_value.startswith(type_prefix)
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
#
|
||||
#
|
||||
|
||||
from typing import Any
|
||||
from typing import Any, Optional
|
||||
|
||||
import attr
|
||||
|
||||
@@ -41,13 +41,15 @@ class UserDevicePresenceState:
|
||||
"""
|
||||
|
||||
user_id: str
|
||||
device_id: str | None
|
||||
device_id: Optional[str]
|
||||
state: str
|
||||
last_active_ts: int
|
||||
last_sync_ts: int
|
||||
|
||||
@classmethod
|
||||
def default(cls, user_id: str, device_id: str | None) -> "UserDevicePresenceState":
|
||||
def default(
|
||||
cls, user_id: str, device_id: Optional[str]
|
||||
) -> "UserDevicePresenceState":
|
||||
"""Returns a default presence state."""
|
||||
return cls(
|
||||
user_id=user_id,
|
||||
@@ -79,7 +81,7 @@ class UserPresenceState:
|
||||
last_active_ts: int
|
||||
last_federation_update_ts: int
|
||||
last_user_sync_ts: int
|
||||
status_msg: str | None
|
||||
status_msg: Optional[str]
|
||||
currently_active: bool
|
||||
|
||||
def as_dict(self) -> JsonDict:
|
||||
|
||||
@@ -27,7 +27,6 @@ from synapse.config.ratelimiting import RatelimitSettings
|
||||
from synapse.storage.databases.main import DataStore
|
||||
from synapse.types import Requester
|
||||
from synapse.util.clock import Clock
|
||||
from synapse.util.duration import Duration
|
||||
from synapse.util.wheel_timer import WheelTimer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -101,9 +100,11 @@ class Ratelimiter:
|
||||
# and doesn't affect correctness.
|
||||
self._timer: WheelTimer[Hashable] = WheelTimer()
|
||||
|
||||
self.clock.looping_call(self._prune_message_counts, Duration(seconds=15))
|
||||
self.clock.looping_call(self._prune_message_counts, 15 * 1000)
|
||||
|
||||
def _get_key(self, requester: Requester | None, key: Hashable | None) -> Hashable:
|
||||
def _get_key(
|
||||
self, requester: Optional[Requester], key: Optional[Hashable]
|
||||
) -> Hashable:
|
||||
"""Use the requester's MXID as a fallback key if no key is provided."""
|
||||
if key is None:
|
||||
if not requester:
|
||||
@@ -120,13 +121,13 @@ class Ratelimiter:
|
||||
|
||||
async def can_do_action(
|
||||
self,
|
||||
requester: Requester | None,
|
||||
key: Hashable | None = None,
|
||||
rate_hz: float | None = None,
|
||||
burst_count: int | None = None,
|
||||
requester: Optional[Requester],
|
||||
key: Optional[Hashable] = None,
|
||||
rate_hz: Optional[float] = None,
|
||||
burst_count: Optional[int] = None,
|
||||
update: bool = True,
|
||||
n_actions: int = 1,
|
||||
_time_now_s: float | None = None,
|
||||
_time_now_s: Optional[float] = None,
|
||||
) -> tuple[bool, float]:
|
||||
"""Can the entity (e.g. user or IP address) perform the action?
|
||||
|
||||
@@ -246,10 +247,10 @@ class Ratelimiter:
|
||||
|
||||
def record_action(
|
||||
self,
|
||||
requester: Requester | None,
|
||||
key: Hashable | None = None,
|
||||
requester: Optional[Requester],
|
||||
key: Optional[Hashable] = None,
|
||||
n_actions: int = 1,
|
||||
_time_now_s: float | None = None,
|
||||
_time_now_s: Optional[float] = None,
|
||||
) -> None:
|
||||
"""Record that an action(s) took place, even if they violate the rate limit.
|
||||
|
||||
@@ -331,14 +332,14 @@ class Ratelimiter:
|
||||
|
||||
async def ratelimit(
|
||||
self,
|
||||
requester: Requester | None,
|
||||
key: Hashable | None = None,
|
||||
rate_hz: float | None = None,
|
||||
burst_count: int | None = None,
|
||||
requester: Optional[Requester],
|
||||
key: Optional[Hashable] = None,
|
||||
rate_hz: Optional[float] = None,
|
||||
burst_count: Optional[int] = None,
|
||||
update: bool = True,
|
||||
n_actions: int = 1,
|
||||
_time_now_s: float | None = None,
|
||||
pause: float | None = 0.5,
|
||||
_time_now_s: Optional[float] = None,
|
||||
pause: Optional[float] = 0.5,
|
||||
) -> None:
|
||||
"""Checks if an action can be performed. If not, raises a LimitExceededError
|
||||
|
||||
@@ -395,7 +396,7 @@ class RequestRatelimiter:
|
||||
store: DataStore,
|
||||
clock: Clock,
|
||||
rc_message: RatelimitSettings,
|
||||
rc_admin_redaction: RatelimitSettings | None,
|
||||
rc_admin_redaction: Optional[RatelimitSettings],
|
||||
):
|
||||
self.store = store
|
||||
self.clock = clock
|
||||
@@ -411,7 +412,7 @@ class RequestRatelimiter:
|
||||
# Check whether ratelimiting room admin message redaction is enabled
|
||||
# by the presence of rate limits in the config
|
||||
if rc_admin_redaction:
|
||||
self.admin_redaction_ratelimiter: Ratelimiter | None = Ratelimiter(
|
||||
self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
|
||||
store=self.store,
|
||||
clock=self.clock,
|
||||
cfg=rc_admin_redaction,
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
#
|
||||
#
|
||||
|
||||
from typing import Callable
|
||||
from typing import Callable, Optional
|
||||
|
||||
import attr
|
||||
|
||||
@@ -503,7 +503,7 @@ class RoomVersionCapability:
|
||||
"""An object which describes the unique attributes of a room version."""
|
||||
|
||||
identifier: str # the identifier for this capability
|
||||
preferred_version: RoomVersion | None
|
||||
preferred_version: Optional[RoomVersion]
|
||||
support_check_lambda: Callable[[RoomVersion], bool]
|
||||
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
import hmac
|
||||
import urllib.parse
|
||||
from hashlib import sha256
|
||||
from typing import Optional
|
||||
from urllib.parse import urlencode, urljoin
|
||||
|
||||
from synapse.config import ConfigError
|
||||
@@ -74,7 +75,7 @@ class LoginSSORedirectURIBuilder:
|
||||
self._public_baseurl = hs_config.server.public_baseurl
|
||||
|
||||
def build_login_sso_redirect_uri(
|
||||
self, *, idp_id: str | None, client_redirect_url: str
|
||||
self, *, idp_id: Optional[str], client_redirect_url: str
|
||||
) -> str:
|
||||
"""Build a `/login/sso/redirect` URI for the given identity provider.
|
||||
|
||||
|
||||
@@ -36,6 +36,8 @@ from typing import (
|
||||
Awaitable,
|
||||
Callable,
|
||||
NoReturn,
|
||||
Optional,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from wsgiref.simple_server import WSGIServer
|
||||
@@ -178,8 +180,8 @@ def start_worker_reactor(
|
||||
def start_reactor(
|
||||
appname: str,
|
||||
soft_file_limit: int,
|
||||
gc_thresholds: tuple[int, int, int] | None,
|
||||
pid_file: str | None,
|
||||
gc_thresholds: Optional[tuple[int, int, int]],
|
||||
pid_file: Optional[str],
|
||||
daemonize: bool,
|
||||
print_pidfile: bool,
|
||||
logger: logging.Logger,
|
||||
@@ -419,7 +421,7 @@ def listen_http(
|
||||
root_resource: Resource,
|
||||
version_string: str,
|
||||
max_request_body_size: int,
|
||||
context_factory: IOpenSSLContextFactory | None,
|
||||
context_factory: Optional[IOpenSSLContextFactory],
|
||||
reactor: ISynapseReactor = reactor,
|
||||
) -> list[Port]:
|
||||
"""
|
||||
@@ -562,7 +564,9 @@ def setup_sighup_handling() -> None:
|
||||
if _already_setup_sighup_handling:
|
||||
return
|
||||
|
||||
previous_sighup_handler: Callable[[int, FrameType | None], Any] | int | None = None
|
||||
previous_sighup_handler: Union[
|
||||
Callable[[int, Optional[FrameType]], Any], int, None
|
||||
] = None
|
||||
|
||||
# Set up the SIGHUP machinery.
|
||||
if hasattr(signal, "SIGHUP"):
|
||||
|
||||
@@ -24,7 +24,7 @@ import logging
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Mapping, Sequence
|
||||
from typing import Mapping, Optional, Sequence
|
||||
|
||||
from twisted.internet import defer, task
|
||||
|
||||
@@ -136,7 +136,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
|
||||
to a temporary directory.
|
||||
"""
|
||||
|
||||
def __init__(self, user_id: str, directory: str | None = None):
|
||||
def __init__(self, user_id: str, directory: Optional[str] = None):
|
||||
self.user_id = user_id
|
||||
|
||||
if directory:
|
||||
@@ -291,7 +291,7 @@ def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Nam
|
||||
|
||||
def create_homeserver(
|
||||
config: HomeServerConfig,
|
||||
reactor: ISynapseReactor | None = None,
|
||||
reactor: Optional[ISynapseReactor] = None,
|
||||
) -> AdminCmdServer:
|
||||
"""
|
||||
Create a homeserver instance for the Synapse admin command process.
|
||||
|
||||
@@ -26,7 +26,7 @@ import os
|
||||
import signal
|
||||
import sys
|
||||
from types import FrameType
|
||||
from typing import Any, Callable
|
||||
from typing import Any, Callable, Optional
|
||||
|
||||
from twisted.internet.main import installReactor
|
||||
|
||||
@@ -172,7 +172,7 @@ def main() -> None:
|
||||
# Install signal handlers to propagate signals to all our children, so that they
|
||||
# shut down cleanly. This also inhibits our own exit, but that's good: we want to
|
||||
# wait until the children have exited.
|
||||
def handle_signal(signum: int, frame: FrameType | None) -> None:
|
||||
def handle_signal(signum: int, frame: Optional[FrameType]) -> None:
|
||||
print(
|
||||
f"complement_fork_starter: Caught signal {signum}. Stopping children.",
|
||||
file=sys.stderr,
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
#
|
||||
import logging
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
from twisted.web.resource import Resource
|
||||
|
||||
@@ -335,7 +336,7 @@ def load_config(argv_options: list[str]) -> HomeServerConfig:
|
||||
|
||||
def create_homeserver(
|
||||
config: HomeServerConfig,
|
||||
reactor: ISynapseReactor | None = None,
|
||||
reactor: Optional[ISynapseReactor] = None,
|
||||
) -> GenericWorkerServer:
|
||||
"""
|
||||
Create a homeserver instance for the Synapse worker process.
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import Iterable
|
||||
from typing import Iterable, Optional
|
||||
|
||||
from twisted.internet.tcp import Port
|
||||
from twisted.web.resource import EncodingResourceWrapper, Resource
|
||||
@@ -350,7 +350,7 @@ def load_or_generate_config(argv_options: list[str]) -> HomeServerConfig:
|
||||
|
||||
def create_homeserver(
|
||||
config: HomeServerConfig,
|
||||
reactor: ISynapseReactor | None = None,
|
||||
reactor: Optional[ISynapseReactor] = None,
|
||||
) -> SynapseHomeServer:
|
||||
"""
|
||||
Create a homeserver instance for the Synapse main process.
|
||||
@@ -450,8 +450,7 @@ async def start(
|
||||
await _base.start(hs, freeze=freeze)
|
||||
|
||||
# TODO: Feels like this should be moved somewhere else.
|
||||
for db in hs.get_datastores().databases:
|
||||
db.updates.start_doing_background_updates()
|
||||
hs.get_datastores().main.db_pool.updates.start_doing_background_updates()
|
||||
|
||||
|
||||
def start_reactor(
|
||||
|
||||
@@ -30,20 +30,24 @@ from twisted.internet import defer
|
||||
|
||||
from synapse.metrics import SERVER_NAME_LABEL
|
||||
from synapse.types import JsonDict
|
||||
from synapse.util.duration import Duration
|
||||
from synapse.util.constants import (
|
||||
MILLISECONDS_PER_SECOND,
|
||||
ONE_HOUR_SECONDS,
|
||||
ONE_MINUTE_SECONDS,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
logger = logging.getLogger("synapse.app.homeserver")
|
||||
|
||||
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME = Duration(minutes=5)
|
||||
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS = 5 * ONE_MINUTE_SECONDS
|
||||
"""
|
||||
We wait 5 minutes to send the first set of stats as the server can be quite busy the
|
||||
first few minutes
|
||||
"""
|
||||
|
||||
PHONE_HOME_INTERVAL = Duration(hours=3)
|
||||
PHONE_HOME_INTERVAL_SECONDS = 3 * ONE_HOUR_SECONDS
|
||||
"""
|
||||
Phone home stats are sent every 3 hours
|
||||
"""
|
||||
@@ -218,13 +222,13 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
|
||||
# table will decrease
|
||||
clock.looping_call(
|
||||
hs.get_datastores().main.generate_user_daily_visits,
|
||||
Duration(minutes=5),
|
||||
5 * ONE_MINUTE_SECONDS * MILLISECONDS_PER_SECOND,
|
||||
)
|
||||
|
||||
# monthly active user limiting functionality
|
||||
clock.looping_call(
|
||||
hs.get_datastores().main.reap_monthly_active_users,
|
||||
Duration(hours=1),
|
||||
ONE_HOUR_SECONDS * MILLISECONDS_PER_SECOND,
|
||||
)
|
||||
hs.get_datastores().main.reap_monthly_active_users()
|
||||
|
||||
@@ -263,14 +267,14 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
|
||||
|
||||
if hs.config.server.limit_usage_by_mau or hs.config.server.mau_stats_only:
|
||||
generate_monthly_active_users()
|
||||
clock.looping_call(generate_monthly_active_users, Duration(minutes=5))
|
||||
clock.looping_call(generate_monthly_active_users, 5 * 60 * 1000)
|
||||
# End of monthly active user settings
|
||||
|
||||
if hs.config.metrics.report_stats:
|
||||
logger.info("Scheduling stats reporting for 3 hour intervals")
|
||||
clock.looping_call(
|
||||
phone_stats_home,
|
||||
PHONE_HOME_INTERVAL,
|
||||
PHONE_HOME_INTERVAL_SECONDS * MILLISECONDS_PER_SECOND,
|
||||
hs,
|
||||
stats,
|
||||
)
|
||||
@@ -278,14 +282,14 @@ def start_phone_stats_home(hs: "HomeServer") -> None:
|
||||
# We need to defer this init for the cases that we daemonize
|
||||
# otherwise the process ID we get is that of the non-daemon process
|
||||
clock.call_later(
|
||||
Duration(seconds=0),
|
||||
0,
|
||||
performance_stats_init,
|
||||
)
|
||||
|
||||
# We wait 5 minutes to send the first set of stats as the server can
|
||||
# be quite busy the first few minutes
|
||||
clock.call_later(
|
||||
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME,
|
||||
INITIAL_DELAY_BEFORE_FIRST_PHONE_HOME_SECONDS,
|
||||
phone_stats_home,
|
||||
hs,
|
||||
stats,
|
||||
|
||||
@@ -26,6 +26,7 @@ from enum import Enum
from typing import (
TYPE_CHECKING,
Iterable,
Optional,
Pattern,
Sequence,
cast,
@@ -94,12 +95,12 @@ class ApplicationService:
token: str,
id: str,
sender: UserID,
url: str | None = None,
namespaces: JsonDict | None = None,
hs_token: str | None = None,
protocols: Iterable[str] | None = None,
url: Optional[str] = None,
namespaces: Optional[JsonDict] = None,
hs_token: Optional[str] = None,
protocols: Optional[Iterable[str]] = None,
rate_limited: bool = True,
ip_range_whitelist: IPSet | None = None,
ip_range_whitelist: Optional[IPSet] = None,
supports_ephemeral: bool = False,
msc3202_transaction_extensions: bool = False,
msc4190_device_management: bool = False,
@@ -141,7 +142,7 @@ class ApplicationService:
self.rate_limited = rate_limited

def _check_namespaces(
self, namespaces: JsonDict | None
self, namespaces: Optional[JsonDict]
) -> dict[str, list[Namespace]]:
# Sanity check that it is of the form:
# {
@@ -178,7 +179,9 @@ class ApplicationService:

return result

def _matches_regex(self, namespace_key: str, test_string: str) -> Namespace | None:
def _matches_regex(
self, namespace_key: str, test_string: str
) -> Optional[Namespace]:
for namespace in self.namespaces[namespace_key]:
if namespace.regex.match(test_string):
return namespace

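The annotation changes in this file (and in the files that follow) swap the PEP 604 `X | None` spelling for `typing.Optional[X]`. The two are equivalent; `Optional[X]` is simply the spelling that also works on interpreters older than Python 3.10. A small self-contained illustration (not part of the diff):

```python
from typing import Optional, get_type_hints


def lookup(key: str, default: Optional[str] = None) -> Optional[str]:
    # Optional[str] is just Union[str, None]; on Python 3.10+ the same
    # annotation could also be written as `str | None`.
    return default


# Both spellings resolve to the same runtime type on 3.10+:
print(get_type_hints(lookup)["default"])  # typing.Optional[str]
```
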
@@ -25,8 +25,10 @@ from typing import (
TYPE_CHECKING,
Iterable,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
)

from prometheus_client import Counter
@@ -220,7 +222,7 @@ class ApplicationServiceApi(SimpleHttpClient):
assert service.hs_token is not None

try:
args: Mapping[bytes, list[bytes] | str] = fields
args: Mapping[bytes, Union[list[bytes], str]] = fields
if self.config.use_appservice_legacy_authorization:
args = {
**fields,
@@ -256,11 +258,11 @@ class ApplicationServiceApi(SimpleHttpClient):

async def get_3pe_protocol(
self, service: "ApplicationService", protocol: str
) -> JsonDict | None:
) -> Optional[JsonDict]:
if service.url is None:
return {}

async def _get() -> JsonDict | None:
async def _get() -> Optional[JsonDict]:
# This is required by the configuration.
assert service.hs_token is not None
try:
@@ -298,7 +300,7 @@ class ApplicationServiceApi(SimpleHttpClient):
key = (service.id, protocol)
return await self.protocol_meta_cache.wrap(key, _get)

async def ping(self, service: "ApplicationService", txn_id: str | None) -> None:
async def ping(self, service: "ApplicationService", txn_id: Optional[str]) -> None:
# The caller should check that url is set
assert service.url is not None, "ping called without URL being set"

@@ -320,7 +322,7 @@ class ApplicationServiceApi(SimpleHttpClient):
one_time_keys_count: TransactionOneTimeKeysCount,
unused_fallback_keys: TransactionUnusedFallbackKeys,
device_list_summary: DeviceListUpdates,
txn_id: int | None = None,
txn_id: Optional[int] = None,
) -> bool:
"""
Push data to an application service.

@@ -62,9 +62,12 @@ from typing import (
Callable,
Collection,
Iterable,
Optional,
Sequence,
)

from twisted.internet.interfaces import IDelayedCall

from synapse.appservice import (
ApplicationService,
ApplicationServiceState,
@@ -76,8 +79,7 @@ from synapse.events import EventBase
from synapse.logging.context import run_in_background
from synapse.storage.databases.main import DataStore
from synapse.types import DeviceListUpdates, JsonMapping
from synapse.util.clock import Clock, DelayedCallWrapper
from synapse.util.duration import Duration
from synapse.util.clock import Clock

if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -121,10 +123,10 @@ class ApplicationServiceScheduler:
def enqueue_for_appservice(
self,
appservice: ApplicationService,
events: Collection[EventBase] | None = None,
ephemeral: Collection[JsonMapping] | None = None,
to_device_messages: Collection[JsonMapping] | None = None,
device_list_summary: DeviceListUpdates | None = None,
events: Optional[Collection[EventBase]] = None,
ephemeral: Optional[Collection[JsonMapping]] = None,
to_device_messages: Optional[Collection[JsonMapping]] = None,
device_list_summary: Optional[DeviceListUpdates] = None,
) -> None:
"""
Enqueue some data to be sent off to an application service.
@@ -258,8 +260,8 @@ class _ServiceQueuer:
):
return

one_time_keys_count: TransactionOneTimeKeysCount | None = None
unused_fallback_keys: TransactionUnusedFallbackKeys | None = None
one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None
unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None

if (
self._msc3202_transaction_extensions_enabled
@@ -367,11 +369,11 @@ class _TransactionController:
self,
service: ApplicationService,
events: Sequence[EventBase],
ephemeral: list[JsonMapping] | None = None,
to_device_messages: list[JsonMapping] | None = None,
one_time_keys_count: TransactionOneTimeKeysCount | None = None,
unused_fallback_keys: TransactionUnusedFallbackKeys | None = None,
device_list_summary: DeviceListUpdates | None = None,
ephemeral: Optional[list[JsonMapping]] = None,
to_device_messages: Optional[list[JsonMapping]] = None,
one_time_keys_count: Optional[TransactionOneTimeKeysCount] = None,
unused_fallback_keys: Optional[TransactionUnusedFallbackKeys] = None,
device_list_summary: Optional[DeviceListUpdates] = None,
) -> None:
"""
Create a transaction with the given data and send to the provided
@@ -502,11 +504,11 @@ class _Recoverer:
self.service = service
self.callback = callback
self.backoff_counter = 1
self.scheduled_recovery: DelayedCallWrapper | None = None
self.scheduled_recovery: Optional[IDelayedCall] = None

def recover(self) -> None:
delay = Duration(seconds=2**self.backoff_counter)
logger.info("Scheduling retries on %s in %fs", self.service.id, delay.as_secs())
delay = 2**self.backoff_counter
logger.info("Scheduling retries on %s in %fs", self.service.id, delay)
self.scheduled_recovery = self.clock.call_later(
delay,
self.hs.run_as_background_process,

@@ -36,7 +36,9 @@ from typing import (
Iterable,
Iterator,
MutableMapping,
Optional,
TypeVar,
Union,
)

import attr
@@ -58,7 +60,7 @@ class ConfigError(Exception):
the problem lies.
"""

def __init__(self, msg: str, path: StrSequence | None = None):
def __init__(self, msg: str, path: Optional[StrSequence] = None):
self.msg = msg
self.path = path

@@ -173,7 +175,7 @@ class Config:
)

@staticmethod
def parse_size(value: str | int) -> int:
def parse_size(value: Union[str, int]) -> int:
"""Interpret `value` as a number of bytes.

If an integer is provided it is treated as bytes and is unchanged.
@@ -200,7 +202,7 @@ class Config:
raise TypeError(f"Bad byte size {value!r}")

@staticmethod
def parse_duration(value: str | int) -> int:
def parse_duration(value: Union[str, int]) -> int:
"""Convert a duration as a string or integer to a number of milliseconds.

If an integer is provided it is treated as milliseconds and is unchanged.
@@ -268,7 +270,7 @@ class Config:
return path_exists(file_path)

@classmethod
def check_file(cls, file_path: str | None, config_name: str) -> str:
def check_file(cls, file_path: Optional[str], config_name: str) -> str:
if file_path is None:
raise ConfigError("Missing config for %s." % (config_name,))
try:
@@ -316,7 +318,7 @@ class Config:
def read_templates(
self,
filenames: list[str],
custom_template_directories: Iterable[str] | None = None,
custom_template_directories: Optional[Iterable[str]] = None,
) -> list[jinja2.Template]:
"""Load a list of template files from disk using the given variables.

@@ -463,11 +465,11 @@ class RootConfig:
data_dir_path: str,
server_name: str,
generate_secrets: bool = False,
report_stats: bool | None = None,
report_stats: Optional[bool] = None,
open_private_ports: bool = False,
listeners: list[dict] | None = None,
tls_certificate_path: str | None = None,
tls_private_key_path: str | None = None,
listeners: Optional[list[dict]] = None,
tls_certificate_path: Optional[str] = None,
tls_private_key_path: Optional[str] = None,
) -> str:
"""
Build a default configuration file
@@ -653,7 +655,7 @@ class RootConfig:
@classmethod
def load_or_generate_config(
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> TRootConfig | None:
) -> Optional[TRootConfig]:
"""Parse the commandline and config files

Supports generation of config files, so is used for the main homeserver app.
@@ -672,8 +674,7 @@ class RootConfig:
action="append",
metavar="CONFIG_FILE",
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files."
" Top-level keys in later files overwrite ones in earlier files.",
" may specify directories containing *.yaml files.",
)
parser.add_argument(
"--no-secrets-in-config",
@@ -897,7 +898,7 @@ class RootConfig:
:returns: the previous config object, which no longer has a reference to this
RootConfig.
"""
existing_config: Config | None = getattr(self, section_name, None)
existing_config: Optional[Config] = getattr(self, section_name, None)
if existing_config is None:
raise ValueError(f"Unknown config section '{section_name}'")
logger.info("Reloading config section '%s'", section_name)

@@ -6,7 +6,9 @@ from typing import (
Iterator,
Literal,
MutableMapping,
Optional,
TypeVar,
Union,
overload,
)

@@ -62,7 +64,7 @@ from synapse.config import ( # noqa: F401
from synapse.types import StrSequence

class ConfigError(Exception):
def __init__(self, msg: str, path: StrSequence | None = None):
def __init__(self, msg: str, path: Optional[StrSequence] = None):
self.msg = msg
self.path = path

@@ -144,16 +146,16 @@ class RootConfig:
data_dir_path: str,
server_name: str,
generate_secrets: bool = ...,
report_stats: bool | None = ...,
report_stats: Optional[bool] = ...,
open_private_ports: bool = ...,
listeners: Any | None = ...,
tls_certificate_path: str | None = ...,
tls_private_key_path: str | None = ...,
listeners: Optional[Any] = ...,
tls_certificate_path: Optional[str] = ...,
tls_private_key_path: Optional[str] = ...,
) -> str: ...
@classmethod
def load_or_generate_config(
cls: type[TRootConfig], description: str, argv_options: list[str]
) -> TRootConfig | None: ...
) -> Optional[TRootConfig]: ...
@classmethod
def load_config(
cls: type[TRootConfig], description: str, argv_options: list[str]
@@ -181,11 +183,11 @@ class Config:
default_template_dir: str
def __init__(self, root_config: RootConfig = ...) -> None: ...
@staticmethod
def parse_size(value: str | int) -> int: ...
def parse_size(value: Union[str, int]) -> int: ...
@staticmethod
def parse_duration(value: str | int) -> int: ...
def parse_duration(value: Union[str, int]) -> int: ...
@staticmethod
def abspath(file_path: str | None) -> str: ...
def abspath(file_path: Optional[str]) -> str: ...
@classmethod
def path_exists(cls, file_path: str) -> bool: ...
@classmethod
@@ -198,7 +200,7 @@ class Config:
def read_templates(
self,
filenames: list[str],
custom_template_directories: Iterable[str] | None = None,
custom_template_directories: Optional[Iterable[str]] = None,
) -> list[jinja2.Template]: ...

def read_config_files(config_files: Iterable[str]) -> dict[str, Any]: ...

@@ -20,7 +20,7 @@
#

import logging
from typing import Any, Iterable
from typing import Any, Iterable, Optional

from synapse.api.constants import EventTypes
from synapse.config._base import Config, ConfigError
@@ -46,7 +46,7 @@ class ApiConfig(Config):

def _get_prejoin_state_entries(
self, config: JsonDict
) -> Iterable[tuple[str, str | None]]:
) -> Iterable[tuple[str, Optional[str]]]:
"""Get the event types and state keys to include in the prejoin state."""
room_prejoin_state_config = config.get("room_prejoin_state") or {}

@@ -23,7 +23,7 @@ import logging
import os
import re
import threading
from typing import Any, Callable, Mapping
from typing import Any, Callable, Mapping, Optional

import attr

@@ -53,7 +53,7 @@ class CacheProperties:
default_factor_size: float = float(
os.environ.get(_CACHE_PREFIX, _DEFAULT_FACTOR_SIZE)
)
resize_all_caches_func: Callable[[], None] | None = None
resize_all_caches_func: Optional[Callable[[], None]] = None

properties = CacheProperties()
@@ -107,7 +107,7 @@ class CacheConfig(Config):
cache_factors: dict[str, float]
global_factor: float
track_memory_usage: bool
expiry_time_msec: int | None
expiry_time_msec: Optional[int]
sync_response_cache_duration: int

@staticmethod

@@ -20,7 +20,7 @@
#
#

from typing import Any
from typing import Any, Optional

from synapse.config.sso import SsoAttributeRequirement
from synapse.types import JsonDict
@@ -49,7 +49,7 @@ class CasConfig(Config):

# TODO Update this to a _synapse URL.
public_baseurl = self.root.server.public_baseurl
self.cas_service_url: str | None = (
self.cas_service_url: Optional[str] = (
public_baseurl + "_matrix/client/r0/login/cas/ticket"
)

@@ -19,7 +19,7 @@
#

from os import path
from typing import Any
from typing import Any, Optional

from synapse.config import ConfigError
from synapse.types import JsonDict
@@ -33,11 +33,11 @@ class ConsentConfig(Config):
def __init__(self, *args: Any):
super().__init__(*args)

self.user_consent_version: str | None = None
self.user_consent_template_dir: str | None = None
self.user_consent_server_notice_content: JsonDict | None = None
self.user_consent_version: Optional[str] = None
self.user_consent_template_dir: Optional[str] = None
self.user_consent_server_notice_content: Optional[JsonDict] = None
self.user_consent_server_notice_to_guests = False
self.block_events_without_consent_error: str | None = None
self.block_events_without_consent_error: Optional[str] = None
self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy"

@@ -59,7 +59,7 @@ class ClientAuthMethod(enum.Enum):
PRIVATE_KEY_JWT = "private_key_jwt"

def _parse_jwks(jwks: JsonDict | None) -> Optional["JsonWebKey"]:
def _parse_jwks(jwks: Optional[JsonDict]) -> Optional["JsonWebKey"]:
"""A helper function to parse a JWK dict into a JsonWebKey."""

if jwks is None:
@@ -71,7 +71,7 @@ def _parse_jwks(jwks: JsonDict | None) -> Optional["JsonWebKey"]:

def _check_client_secret(
instance: "MSC3861", _attribute: attr.Attribute, _value: str | None
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._client_secret and instance._client_secret_path:
raise ConfigError(
@@ -88,7 +88,7 @@ def _check_client_secret(

def _check_admin_token(
instance: "MSC3861", _attribute: attr.Attribute, _value: str | None
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._admin_token and instance._admin_token_path:
raise ConfigError(
@@ -124,7 +124,7 @@ class MSC3861:
issuer: str = attr.ib(default="", validator=attr.validators.instance_of(str))
"""The URL of the OIDC Provider."""

issuer_metadata: JsonDict | None = attr.ib(default=None)
issuer_metadata: Optional[JsonDict] = attr.ib(default=None)
"""The issuer metadata to use, otherwise discovered from /.well-known/openid-configuration as per MSC2965."""

client_id: str = attr.ib(
@@ -138,7 +138,7 @@ class MSC3861:
)
"""The auth method used when calling the introspection endpoint."""

_client_secret: str | None = attr.ib(
_client_secret: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
@@ -150,7 +150,7 @@ class MSC3861:
when using any of the client_secret_* client auth methods.
"""

_client_secret_path: str | None = attr.ib(
_client_secret_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
@@ -196,19 +196,19 @@ class MSC3861:
("experimental", "msc3861", "client_auth_method"),
)

introspection_endpoint: str | None = attr.ib(
introspection_endpoint: Optional[str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)),
)
"""The URL of the introspection endpoint used to validate access tokens."""

account_management_url: str | None = attr.ib(
account_management_url: Optional[str] = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(str)),
)
"""The URL of the My Account page on the OIDC Provider as per MSC2965."""

_admin_token: str | None = attr.ib(
_admin_token: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
@@ -220,7 +220,7 @@ class MSC3861:
This is used by the OIDC provider, to make admin calls to Synapse.
"""

_admin_token_path: str | None = attr.ib(
_admin_token_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
@@ -232,7 +232,7 @@ class MSC3861:
external file.
"""

def client_secret(self) -> str | None:
def client_secret(self) -> Optional[str]:
"""Returns the secret given via `client_secret` or `client_secret_path`."""
if self._client_secret_path:
return read_secret_from_file_once(
@@ -241,7 +241,7 @@ class MSC3861:
)
return self._client_secret

def admin_token(self) -> str | None:
def admin_token(self) -> Optional[str]:
"""Returns the admin token given via `admin_token` or `admin_token_path`."""
if self._admin_token_path:
return read_secret_from_file_once(
@@ -438,9 +438,6 @@ class ExperimentalConfig(Config):
# previously calculated push actions.
self.msc2654_enabled: bool = experimental.get("msc2654_enabled", False)

# MSC2666: Query mutual rooms between two users.
self.msc2666_enabled: bool = experimental.get("msc2666_enabled", False)

# MSC2815 (allow room moderators to view redacted event content)
self.msc2815_enabled: bool = experimental.get("msc2815_enabled", False)

@@ -529,7 +526,7 @@ class ExperimentalConfig(Config):
# MSC4108: Mechanism to allow OIDC sign in and E2EE set up via QR code
self.msc4108_enabled = experimental.get("msc4108_enabled", False)

self.msc4108_delegation_endpoint: str | None = experimental.get(
self.msc4108_delegation_endpoint: Optional[str] = experimental.get(
"msc4108_delegation_endpoint", None
)

@@ -596,6 +593,3 @@ class ExperimentalConfig(Config):
# MSC4306: Thread Subscriptions
# (and MSC4308: Thread Subscriptions extension to Sliding Sync)
self.msc4306_enabled: bool = experimental.get("msc4306_enabled", False)

# MSC4380: Invite blocking
self.msc4380_enabled: bool = experimental.get("msc4380_enabled", False)

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import Any
from typing import Any, Optional

from synapse.config._base import Config
from synapse.config._util import validate_config
@@ -32,7 +32,7 @@ class FederationConfig(Config):
federation_config = config.setdefault("federation", {})

# FIXME: federation_domain_whitelist needs sytests
self.federation_domain_whitelist: dict | None = None
self.federation_domain_whitelist: Optional[dict] = None
federation_domain_whitelist = config.get("federation_domain_whitelist", None)

if federation_domain_whitelist is not None:

@@ -23,7 +23,7 @@
import hashlib
import logging
import os
from typing import TYPE_CHECKING, Any, Iterator
from typing import TYPE_CHECKING, Any, Iterator, Optional

import attr
import jsonschema
@@ -110,7 +110,7 @@ class TrustedKeyServer:
server_name: str

# map from key id to key object, or None to disable signature verification.
verify_keys: dict[str, VerifyKey] | None = None
verify_keys: Optional[dict[str, VerifyKey]] = None

class KeyConfig(Config):
@@ -219,7 +219,7 @@ class KeyConfig(Config):
if form_secret_path:
if form_secret:
raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
self.form_secret: str | None = read_file(
self.form_secret: Optional[str] = read_file(
form_secret_path, ("form_secret_path",)
).strip()
else:
@@ -279,7 +279,7 @@ class KeyConfig(Config):
raise ConfigError("Error reading %s: %s" % (name, str(e)))

def read_old_signing_keys(
self, old_signing_keys: JsonDict | None
self, old_signing_keys: Optional[JsonDict]
) -> dict[str, "VerifyKeyWithExpiry"]:
if old_signing_keys is None:
return {}
@@ -408,7 +408,7 @@ def _parse_key_servers(
server_name = server["server_name"]
result = TrustedKeyServer(server_name=server_name)

verify_keys: dict[str, str] | None = server.get("verify_keys")
verify_keys: Optional[dict[str, str]] = server.get("verify_keys")
if verify_keys is not None:
result.verify_keys = {}
for key_id, key_base64 in verify_keys.items():

@@ -26,7 +26,7 @@ import os
import sys
import threading
from string import Template
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, Optional

import yaml
from zope.interface import implementer
@@ -280,7 +280,7 @@ def one_time_logging_setup(*, logBeginner: LogBeginner = globalLogBeginner) -> N

def _setup_stdlib_logging(
config: "HomeServerConfig", log_config_path: str | None
config: "HomeServerConfig", log_config_path: Optional[str]
) -> None:
"""
Set up Python standard library logging.
@@ -327,7 +327,7 @@ def _load_logging_config(log_config_path: str) -> None:
reset_logging_config()

def _reload_logging_config(log_config_path: str | None) -> None:
def _reload_logging_config(log_config_path: Optional[str]) -> None:
"""
Reload the log configuration from the file and apply it.
"""

@@ -13,7 +13,7 @@
#
#

from typing import Any
from typing import Any, Optional

from pydantic import (
AnyHttpUrl,
@@ -36,9 +36,9 @@ from ._base import Config, ConfigError, RootConfig
class MasConfigModel(ParseModel):
enabled: StrictBool = False
endpoint: AnyHttpUrl = AnyHttpUrl("http://localhost:8080")
secret: StrictStr | None = Field(default=None)
secret: Optional[StrictStr] = Field(default=None)
# We set `strict=False` to allow `str` instances.
secret_path: FilePath | None = Field(default=None, strict=False)
secret_path: Optional[FilePath] = Field(default=None, strict=False)

@model_validator(mode="after")
def verify_secret(self) -> Self:
@@ -70,7 +70,7 @@ class MasConfig(Config):
parsed = MasConfigModel(**mas_config)
except ValidationError as e:
raise ConfigError(
"Could not validate Matrix Authentication Service configuration",
f"Could not validate Matrix Authentication Service configuration: {e.errors()[0]['msg']}",
path=("matrix_authentication_service",),
) from e

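For the `ConfigError` change in the hunk above: `errors()` on a pydantic `ValidationError` returns a list of dicts whose `msg` field holds the human-readable reason, and the hunk surfaces the first of these in the config error. A rough standalone sketch (the model name here is illustrative, not from Synapse):

```python
from pydantic import BaseModel, StrictStr, ValidationError


class ExampleModel(BaseModel):
    secret: StrictStr


try:
    ExampleModel(secret=123)  # wrong type on purpose
except ValidationError as e:
    # errors() returns a list of dicts; 'msg' is the human-readable reason.
    print(e.errors()[0]["msg"])  # e.g. "Input should be a valid string"
```
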
@@ -15,7 +15,7 @@
#
#

from typing import Any
from typing import Any, Optional

from pydantic import Field, StrictStr, ValidationError, model_validator
from typing_extensions import Self
@@ -29,7 +29,7 @@ from ._base import Config, ConfigError
class TransportConfigModel(ParseModel):
type: StrictStr

livekit_service_url: StrictStr | None = Field(default=None)
livekit_service_url: Optional[StrictStr] = Field(default=None)
"""An optional livekit service URL. Only required if type is "livekit"."""

@model_validator(mode="after")

Some files were not shown because too many files have changed in this diff.