Compare commits
7 Commits
v1.128.0rc
...
anoa/worke
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f4f5a706f8 | ||
|
|
28245e3908 | ||
|
|
b9c50043e0 | ||
|
|
e48479978b | ||
|
|
5caca2acd6 | ||
|
|
d3ed0ebebd | ||
|
|
af2a16370d |
@@ -1,10 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
set -xeu
|
|
||||||
|
|
||||||
# On 32-bit Linux platforms, we need libatomic1 to use rustup
|
|
||||||
if command -v yum &> /dev/null; then
|
|
||||||
yum install -y libatomic
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Install a Rust toolchain
|
|
||||||
curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal
|
|
||||||
@@ -36,11 +36,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
|
|||||||
# First calculate the various trial jobs.
|
# First calculate the various trial jobs.
|
||||||
#
|
#
|
||||||
# For PRs, we only run each type of test with the oldest Python version supported (which
|
# For PRs, we only run each type of test with the oldest Python version supported (which
|
||||||
# is Python 3.9 right now)
|
# is Python 3.8 right now)
|
||||||
|
|
||||||
trial_sqlite_tests = [
|
trial_sqlite_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.9",
|
"python-version": "3.8",
|
||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
@@ -53,14 +53,14 @@ if not IS_PR:
|
|||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
for version in ("3.10", "3.11", "3.12", "3.13")
|
for version in ("3.9", "3.10", "3.11", "3.12")
|
||||||
)
|
)
|
||||||
|
|
||||||
trial_postgres_tests = [
|
trial_postgres_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.9",
|
"python-version": "3.8",
|
||||||
"database": "postgres",
|
"database": "postgres",
|
||||||
"postgres-version": "13",
|
"postgres-version": "11",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
@@ -68,16 +68,16 @@ trial_postgres_tests = [
|
|||||||
if not IS_PR:
|
if not IS_PR:
|
||||||
trial_postgres_tests.append(
|
trial_postgres_tests.append(
|
||||||
{
|
{
|
||||||
"python-version": "3.13",
|
"python-version": "3.12",
|
||||||
"database": "postgres",
|
"database": "postgres",
|
||||||
"postgres-version": "17",
|
"postgres-version": "16",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
trial_no_extra_tests = [
|
trial_no_extra_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.9",
|
"python-version": "3.8",
|
||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "",
|
"extras": "",
|
||||||
}
|
}
|
||||||
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
|
|||||||
|
|
||||||
# First calculate the various sytest jobs.
|
# First calculate the various sytest jobs.
|
||||||
#
|
#
|
||||||
# For each type of test we only run on bullseye on PRs
|
# For each type of test we only run on focal on PRs
|
||||||
|
|
||||||
|
|
||||||
sytest_tests = [
|
sytest_tests = [
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
"postgres": "postgres",
|
"postgres": "postgres",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
"postgres": "multi-postgres",
|
"postgres": "multi-postgres",
|
||||||
"workers": "workers",
|
"workers": "workers",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
"postgres": "multi-postgres",
|
"postgres": "multi-postgres",
|
||||||
"workers": "workers",
|
"workers": "workers",
|
||||||
"reactor": "asyncio",
|
"reactor": "asyncio",
|
||||||
@@ -127,11 +127,11 @@ if not IS_PR:
|
|||||||
sytest_tests.extend(
|
sytest_tests.extend(
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
"reactor": "asyncio",
|
"reactor": "asyncio",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"sytest-tag": "bullseye",
|
"sytest-tag": "focal",
|
||||||
"postgres": "postgres",
|
"postgres": "postgres",
|
||||||
"reactor": "asyncio",
|
"reactor": "asyncio",
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
lock_version = lockfile["metadata"]["lock-version"]
|
lock_version = lockfile["metadata"]["lock-version"]
|
||||||
assert lock_version == "2.1"
|
assert lock_version == "2.0"
|
||||||
except Exception:
|
except Exception:
|
||||||
print(
|
print(
|
||||||
"""\
|
"""\
|
||||||
Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
|
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
|
||||||
and re-run `poetry lock`. See the Poetry cheat sheet at
|
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
|
||||||
https://element-hq.github.io/synapse/develop/development/dependencies.html
|
https://element-hq.github.io/synapse/develop/development/dependencies.html
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
# this script is run by GitHub Actions in a plain `jammy` container; it
|
# this script is run by GitHub Actions in a plain `focal` container; it
|
||||||
# - installs the minimal system requirements, and poetry;
|
# - installs the minimal system requirements, and poetry;
|
||||||
# - patches the project definition file to refer to old versions only;
|
# - patches the project definition file to refer to old versions only;
|
||||||
# - creates a venv with these old versions using poetry; and finally
|
# - creates a venv with these old versions using poetry; and finally
|
||||||
|
|||||||
18
.github/workflows/docker.yml
vendored
18
.github/workflows/docker.yml
vendored
@@ -14,26 +14,26 @@ permissions:
|
|||||||
id-token: write # needed for signing the images with GitHub OIDC Token
|
id-token: write # needed for signing the images with GitHub OIDC Token
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
id: qemu
|
id: qemu
|
||||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
uses: docker/setup-qemu-action@v3
|
||||||
with:
|
with:
|
||||||
platforms: arm64
|
platforms: arm64
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
- name: Inspect builder
|
- name: Inspect builder
|
||||||
run: docker buildx inspect
|
run: docker buildx inspect
|
||||||
|
|
||||||
- name: Install Cosign
|
- name: Install Cosign
|
||||||
uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a # v3.8.1
|
uses: sigstore/cosign-installer@v3.6.0
|
||||||
|
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Extract version from pyproject.toml
|
- name: Extract version from pyproject.toml
|
||||||
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
|
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
|
||||||
@@ -43,13 +43,13 @@ jobs:
|
|||||||
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
|
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Log in to DockerHub
|
- name: Log in to DockerHub
|
||||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Log in to GHCR
|
- name: Log in to GHCR
|
||||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.repository_owner }}
|
username: ${{ github.repository_owner }}
|
||||||
@@ -57,7 +57,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Calculate docker image tag
|
- name: Calculate docker image tag
|
||||||
id: set-tag
|
id: set-tag
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
uses: docker/metadata-action@master
|
||||||
with:
|
with:
|
||||||
images: |
|
images: |
|
||||||
docker.io/matrixdotorg/synapse
|
docker.io/matrixdotorg/synapse
|
||||||
@@ -72,7 +72,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Build and push all platforms
|
- name: Build and push all platforms
|
||||||
id: build-and-push
|
id: build-and-push
|
||||||
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
|
uses: docker/build-push-action@v6
|
||||||
with:
|
with:
|
||||||
push: true
|
push: true
|
||||||
labels: |
|
labels: |
|
||||||
|
|||||||
4
.github/workflows/docs-pr-netlify.yaml
vendored
4
.github/workflows/docs-pr-netlify.yaml
vendored
@@ -14,7 +14,7 @@ jobs:
|
|||||||
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
|
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
|
||||||
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
|
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
|
||||||
- name: 📥 Download artifact
|
- name: 📥 Download artifact
|
||||||
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
|
uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
|
||||||
with:
|
with:
|
||||||
workflow: docs-pr.yaml
|
workflow: docs-pr.yaml
|
||||||
run_id: ${{ github.event.workflow_run.id }}
|
run_id: ${{ github.event.workflow_run.id }}
|
||||||
@@ -22,7 +22,7 @@ jobs:
|
|||||||
path: book
|
path: book
|
||||||
|
|
||||||
- name: 📤 Deploy to Netlify
|
- name: 📤 Deploy to Netlify
|
||||||
uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
|
uses: matrix-org/netlify-pr-preview@v3
|
||||||
with:
|
with:
|
||||||
path: book
|
path: book
|
||||||
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
|
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
|
||||||
|
|||||||
8
.github/workflows/docs-pr.yaml
vendored
8
.github/workflows/docs-pr.yaml
vendored
@@ -13,7 +13,7 @@ jobs:
|
|||||||
name: GitHub Pages
|
name: GitHub Pages
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
# Fetch all history so that the schema_versions script works.
|
# Fetch all history so that the schema_versions script works.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -24,7 +24,7 @@ jobs:
|
|||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
- name: Setup python
|
- name: Setup python
|
||||||
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
|
|
||||||
@@ -39,7 +39,7 @@ jobs:
|
|||||||
cp book/welcome_and_overview.html book/index.html
|
cp book/welcome_and_overview.html book/index.html
|
||||||
|
|
||||||
- name: Upload Artifact
|
- name: Upload Artifact
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: book
|
name: book
|
||||||
path: book
|
path: book
|
||||||
@@ -50,7 +50,7 @@ jobs:
|
|||||||
name: Check links in documentation
|
name: Check links in documentation
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
||||||
|
|||||||
4
.github/workflows/docs.yaml
vendored
4
.github/workflows/docs.yaml
vendored
@@ -50,7 +50,7 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- pre
|
- pre
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
# Fetch all history so that the schema_versions script works.
|
# Fetch all history so that the schema_versions script works.
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
@@ -64,7 +64,7 @@ jobs:
|
|||||||
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
|
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
|
||||||
|
|
||||||
- name: Setup python
|
- name: Setup python
|
||||||
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
|
|
||||||
|
|||||||
13
.github/workflows/fix_lint.yaml
vendored
13
.github/workflows/fix_lint.yaml
vendored
@@ -13,22 +13,21 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
# We use nightly so that `fmt` correctly groups together imports, and
|
# We use nightly so that `fmt` correctly groups together imports, and
|
||||||
# clippy correctly fixes up the benchmarks.
|
# clippy correctly fixes up the benchmarks.
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
components: clippy, rustfmt
|
components: rustfmt
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
install-project: "false"
|
install-project: "false"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
|
|
||||||
- name: Run ruff check
|
- name: Run ruff check
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
@@ -44,6 +43,6 @@ jobs:
|
|||||||
- run: cargo fmt
|
- run: cargo fmt
|
||||||
continue-on-error: true
|
continue-on-error: true
|
||||||
|
|
||||||
- uses: stefanzweifel/git-auto-commit-action@e348103e9026cc0eee72ae06630dbe30c8bf7a79 # v5.1.0
|
- uses: stefanzweifel/git-auto-commit-action@v5
|
||||||
with:
|
with:
|
||||||
commit_message: "Attempt to fix linting"
|
commit_message: "Attempt to fix linting"
|
||||||
|
|||||||
38
.github/workflows/latest_deps.yml
vendored
38
.github/workflows/latest_deps.yml
vendored
@@ -39,17 +39,17 @@ jobs:
|
|||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||||
# poetry-core versions), so we install with poetry.
|
# poetry-core versions), so we install with poetry.
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
# Dump installed versions for debugging.
|
# Dump installed versions for debugging.
|
||||||
- run: poetry run pip list > before.txt
|
- run: poetry run pip list > before.txt
|
||||||
@@ -72,11 +72,11 @@ jobs:
|
|||||||
postgres-version: "14"
|
postgres-version: "14"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||||
@@ -86,7 +86,7 @@ jobs:
|
|||||||
-e POSTGRES_PASSWORD=postgres \
|
-e POSTGRES_PASSWORD=postgres \
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||||
postgres:${{ matrix.postgres-version }}
|
postgres:${{ matrix.postgres-version }}
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: pip install .[all,test]
|
- run: pip install .[all,test]
|
||||||
@@ -132,9 +132,9 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- sytest-tag: bullseye
|
- sytest-tag: focal
|
||||||
|
|
||||||
- sytest-tag: bullseye
|
- sytest-tag: focal
|
||||||
postgres: postgres
|
postgres: postgres
|
||||||
workers: workers
|
workers: workers
|
||||||
redis: redis
|
redis: redis
|
||||||
@@ -145,11 +145,11 @@ jobs:
|
|||||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Ensure sytest runs `pip install`
|
- name: Ensure sytest runs `pip install`
|
||||||
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
|
||||||
@@ -164,7 +164,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
@@ -192,15 +192,15 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out synapse codebase
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -225,7 +225,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
4
.github/workflows/poetry_lockfile.yaml
vendored
4
.github/workflows/poetry_lockfile.yaml
vendored
@@ -16,8 +16,8 @@ jobs:
|
|||||||
name: "Check locked dependencies have sdists"
|
name: "Check locked dependencies have sdists"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
- run: pip install tomli
|
- run: pip install tomli
|
||||||
|
|||||||
10
.github/workflows/push_complement_image.yml
vendored
10
.github/workflows/push_complement_image.yml
vendored
@@ -33,29 +33,29 @@ jobs:
|
|||||||
packages: write
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout specific branch (debug build)
|
- name: Checkout specific branch (debug build)
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
if: github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'workflow_dispatch'
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.branch }}
|
ref: ${{ inputs.branch }}
|
||||||
- name: Checkout clean copy of develop (scheduled build)
|
- name: Checkout clean copy of develop (scheduled build)
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
if: github.event_name == 'schedule'
|
if: github.event_name == 'schedule'
|
||||||
with:
|
with:
|
||||||
ref: develop
|
ref: develop
|
||||||
- name: Checkout clean copy of master (on-push)
|
- name: Checkout clean copy of master (on-push)
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
with:
|
with:
|
||||||
ref: master
|
ref: master
|
||||||
- name: Login to registry
|
- name: Login to registry
|
||||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Work out labels for complement image
|
- name: Work out labels for complement image
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
|
uses: docker/metadata-action@v5
|
||||||
with:
|
with:
|
||||||
images: ghcr.io/${{ github.repository }}/complement-synapse
|
images: ghcr.io/${{ github.repository }}/complement-synapse
|
||||||
tags: |
|
tags: |
|
||||||
|
|||||||
65
.github/workflows/release-artifacts.yml
vendored
65
.github/workflows/release-artifacts.yml
vendored
@@ -27,8 +27,8 @@ jobs:
|
|||||||
name: "Calculate list of debian distros"
|
name: "Calculate list of debian distros"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
- id: set-distros
|
- id: set-distros
|
||||||
@@ -55,18 +55,18 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: src
|
path: src
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
|
uses: docker/setup-buildx-action@v3
|
||||||
with:
|
with:
|
||||||
install: true
|
install: true
|
||||||
|
|
||||||
- name: Set up docker layer caching
|
- name: Set up docker layer caching
|
||||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: /tmp/.buildx-cache
|
path: /tmp/.buildx-cache
|
||||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||||
@@ -74,7 +74,7 @@ jobs:
|
|||||||
${{ runner.os }}-buildx-
|
${{ runner.os }}-buildx-
|
||||||
|
|
||||||
- name: Set up python
|
- name: Set up python
|
||||||
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
|
|
||||||
@@ -91,19 +91,10 @@ jobs:
|
|||||||
rm -rf /tmp/.buildx-cache
|
rm -rf /tmp/.buildx-cache
|
||||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
||||||
|
|
||||||
- name: Artifact name
|
|
||||||
id: artifact-name
|
|
||||||
# We can't have colons in the upload name of the artifact, so we convert
|
|
||||||
# e.g. `debian:sid` to `sid`.
|
|
||||||
env:
|
|
||||||
DISTRO: ${{ matrix.distro }}
|
|
||||||
run: |
|
|
||||||
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
- name: Upload debs as artifacts
|
- name: Upload debs as artifacts
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
with:
|
with:
|
||||||
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
|
name: debs
|
||||||
path: debs/*
|
path: debs/*
|
||||||
|
|
||||||
build-wheels:
|
build-wheels:
|
||||||
@@ -111,7 +102,7 @@ jobs:
|
|||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-22.04, macos-13]
|
os: [ubuntu-20.04, macos-12]
|
||||||
arch: [x86_64, aarch64]
|
arch: [x86_64, aarch64]
|
||||||
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
|
||||||
# It is not read by the rest of the workflow.
|
# It is not read by the rest of the workflow.
|
||||||
@@ -121,29 +112,29 @@ jobs:
|
|||||||
exclude:
|
exclude:
|
||||||
# Don't build macos wheels on PR CI.
|
# Don't build macos wheels on PR CI.
|
||||||
- is_pr: true
|
- is_pr: true
|
||||||
os: "macos-13"
|
os: "macos-12"
|
||||||
# Don't build aarch64 wheels on mac.
|
# Don't build aarch64 wheels on mac.
|
||||||
- os: "macos-13"
|
- os: "macos-12"
|
||||||
arch: aarch64
|
arch: aarch64
|
||||||
# Don't build aarch64 wheels on PR CI.
|
# Don't build aarch64 wheels on PR CI.
|
||||||
- is_pr: true
|
- is_pr: true
|
||||||
arch: aarch64
|
arch: aarch64
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
||||||
# here, because `python` on osx points to Python 2.7.
|
# here, because `python` on osx points to Python 2.7.
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
|
|
||||||
- name: Install cibuildwheel
|
- name: Install cibuildwheel
|
||||||
run: python -m pip install cibuildwheel==2.23.0
|
run: python -m pip install cibuildwheel==2.19.1
|
||||||
|
|
||||||
- name: Set up QEMU to emulate aarch64
|
- name: Set up QEMU to emulate aarch64
|
||||||
if: matrix.arch == 'aarch64'
|
if: matrix.arch == 'aarch64'
|
||||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
|
uses: docker/setup-qemu-action@v3
|
||||||
with:
|
with:
|
||||||
platforms: arm64
|
platforms: arm64
|
||||||
|
|
||||||
@@ -153,7 +144,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Only build a single wheel on PR
|
- name: Only build a single wheel on PR
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Build wheels
|
- name: Build wheels
|
||||||
run: python -m cibuildwheel --output-dir wheelhouse
|
run: python -m cibuildwheel --output-dir wheelhouse
|
||||||
@@ -165,9 +156,9 @@ jobs:
|
|||||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||||
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
with:
|
with:
|
||||||
name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
|
name: Wheel
|
||||||
path: ./wheelhouse/*.whl
|
path: ./wheelhouse/*.whl
|
||||||
|
|
||||||
build-sdist:
|
build-sdist:
|
||||||
@@ -176,8 +167,8 @@ jobs:
|
|||||||
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
|
|
||||||
@@ -186,7 +177,7 @@ jobs:
|
|||||||
- name: Build sdist
|
- name: Build sdist
|
||||||
run: python -m build --sdist
|
run: python -m build --sdist
|
||||||
|
|
||||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
||||||
with:
|
with:
|
||||||
name: Sdist
|
name: Sdist
|
||||||
path: dist/*.tar.gz
|
path: dist/*.tar.gz
|
||||||
@@ -203,23 +194,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Download all workflow run artifacts
|
- name: Download all workflow run artifacts
|
||||||
uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4.2.1
|
uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
|
||||||
- name: Build a tarball for the debs
|
- name: Build a tarball for the debs
|
||||||
# We need to merge all the debs uploads into one folder, then compress
|
run: tar -cvJf debs.tar.xz debs
|
||||||
# that.
|
|
||||||
run: |
|
|
||||||
mkdir debs
|
|
||||||
mv debs*/* debs/
|
|
||||||
tar -cvJf debs.tar.xz debs
|
|
||||||
- name: Attach to release
|
- name: Attach to release
|
||||||
# Pinned to work around https://github.com/softprops/action-gh-release/issues/445
|
uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
|
||||||
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v0.1.15
|
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
Sdist/*
|
Sdist/*
|
||||||
Wheel*/*
|
Wheel/*
|
||||||
debs.tar.xz
|
debs.tar.xz
|
||||||
# if it's not already published, keep the release as a draft.
|
# if it's not already published, keep the release as a draft.
|
||||||
draft: true
|
draft: true
|
||||||
|
|||||||
166
.github/workflows/tests.yml
vendored
166
.github/workflows/tests.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
|||||||
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
||||||
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
|
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
|
||||||
steps:
|
steps:
|
||||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
- uses: dorny/paths-filter@v3
|
||||||
id: filter
|
id: filter
|
||||||
# We only check on PRs
|
# We only check on PRs
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
@@ -83,14 +83,14 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
- run: poetry run scripts-dev/generate_sample_config.sh --check
|
||||||
- run: poetry run scripts-dev/config-lint.sh
|
- run: poetry run scripts-dev/config-lint.sh
|
||||||
@@ -101,8 +101,8 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||||
@@ -111,8 +111,8 @@ jobs:
|
|||||||
check-lockfile:
|
check-lockfile:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: .ci/scripts/check_lockfile.py
|
- run: .ci/scripts/check_lockfile.py
|
||||||
@@ -124,12 +124,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
|
||||||
install-project: "false"
|
install-project: "false"
|
||||||
|
|
||||||
- name: Run ruff check
|
- name: Run ruff check
|
||||||
@@ -146,14 +145,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Setup Poetry
|
- name: Setup Poetry
|
||||||
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
# We want to make use of type hints in optional dependencies too.
|
# We want to make use of type hints in optional dependencies too.
|
||||||
extras: all
|
extras: all
|
||||||
@@ -162,12 +161,11 @@ jobs:
|
|||||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
||||||
# To make CI green, err towards caution and install the project.
|
# To make CI green, err towards caution and install the project.
|
||||||
install-project: "true"
|
install-project: "true"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
|
|
||||||
# Cribbed from
|
# Cribbed from
|
||||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
||||||
- name: Restore/persist mypy's cache
|
- name: Restore/persist mypy's cache
|
||||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
.mypy_cache
|
.mypy_cache
|
||||||
@@ -180,7 +178,7 @@ jobs:
|
|||||||
lint-crlf:
|
lint-crlf:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- name: Check line endings
|
- name: Check line endings
|
||||||
run: scripts-dev/check_line_terminators.sh
|
run: scripts-dev/check_line_terminators.sh
|
||||||
|
|
||||||
@@ -188,11 +186,11 @@ jobs:
|
|||||||
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||||
@@ -206,15 +204,15 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
if: ${{ needs.changes.outputs.linting == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
- run: poetry run scripts-dev/check_pydantic_models.py
|
- run: poetry run scripts-dev/check_pydantic_models.py
|
||||||
|
|
||||||
@@ -224,13 +222,13 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
with:
|
with:
|
||||||
components: clippy
|
components: clippy
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo clippy -- -D warnings
|
- run: cargo clippy -- -D warnings
|
||||||
|
|
||||||
@@ -242,14 +240,14 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
components: clippy
|
components: clippy
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo clippy --all-features -- -D warnings
|
- run: cargo clippy --all-features -- -D warnings
|
||||||
|
|
||||||
@@ -259,15 +257,15 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
# We use nightly so that it correctly groups together imports
|
# We use nightly so that it correctly groups together imports
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
components: rustfmt
|
components: rustfmt
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo fmt --check
|
- run: cargo fmt --check
|
||||||
|
|
||||||
@@ -278,8 +276,8 @@ jobs:
|
|||||||
needs: changes
|
needs: changes
|
||||||
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
|
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install rstcheck"
|
- run: "pip install rstcheck"
|
||||||
@@ -303,7 +301,7 @@ jobs:
|
|||||||
- lint-readme
|
- lint-readme
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
- uses: matrix-org/done-action@v3
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
@@ -326,8 +324,8 @@ jobs:
|
|||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- id: get-matrix
|
- id: get-matrix
|
||||||
@@ -347,7 +345,7 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -362,13 +360,13 @@ jobs:
|
|||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.job.postgres-version }}
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.job.python-version }}
|
python-version: ${{ matrix.job.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.job.extras }}
|
extras: ${{ matrix.job.extras }}
|
||||||
- name: Await PostgreSQL
|
- name: Await PostgreSQL
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -399,24 +397,24 @@ jobs:
|
|||||||
needs:
|
needs:
|
||||||
- linting-done
|
- linting-done
|
||||||
- changes
|
- changes
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
# There aren't wheels for some of the older deps, so we need to install
|
||||||
# their build dependencies
|
# their build dependencies
|
||||||
- run: |
|
- run: |
|
||||||
sudo apt-get -qq update
|
sudo apt-get -qq update
|
||||||
sudo apt-get -qq install build-essential libffi-dev python3-dev \
|
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||||
|
|
||||||
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
python-version: '3.9'
|
python-version: '3.8'
|
||||||
|
|
||||||
- name: Prepare old deps
|
- name: Prepare old deps
|
||||||
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
||||||
@@ -460,17 +458,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["pypy-3.9"]
|
python-version: ["pypy-3.8"]
|
||||||
extras: ["all"]
|
extras: ["all"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: ${{ matrix.extras }}
|
extras: ${{ matrix.extras }}
|
||||||
- run: poetry run trial --jobs=2 tests
|
- run: poetry run trial --jobs=2 tests
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
@@ -514,13 +512,13 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- name: Prepare test blacklist
|
- name: Prepare test blacklist
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
run: /bootstrap.sh synapse
|
run: /bootstrap.sh synapse
|
||||||
@@ -529,7 +527,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
||||||
@@ -559,11 +557,11 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
- run: .ci/scripts/test_export_data_command.sh
|
||||||
env:
|
env:
|
||||||
@@ -582,11 +580,11 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- python-version: "3.9"
|
- python-version: "3.8"
|
||||||
postgres-version: "13"
|
postgres-version: "11"
|
||||||
|
|
||||||
- python-version: "3.13"
|
- python-version: "3.11"
|
||||||
postgres-version: "17"
|
postgres-version: "15"
|
||||||
|
|
||||||
services:
|
services:
|
||||||
postgres:
|
postgres:
|
||||||
@@ -603,7 +601,7 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- name: Add PostgreSQL apt repository
|
- name: Add PostgreSQL apt repository
|
||||||
# We need a version of pg_dump that can handle the version of
|
# We need a version of pg_dump that can handle the version of
|
||||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
# PostgreSQL being tested against. The Ubuntu package repository lags
|
||||||
@@ -614,10 +612,10 @@ jobs:
|
|||||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
poetry-version: "2.1.1"
|
poetry-version: "1.3.2"
|
||||||
extras: "postgres"
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
- run: .ci/scripts/test_synapse_port_db.sh
|
||||||
id: run_tester_script
|
id: run_tester_script
|
||||||
@@ -627,7 +625,7 @@ jobs:
|
|||||||
PGPASSWORD: postgres
|
PGPASSWORD: postgres
|
||||||
PGDATABASE: postgres
|
PGDATABASE: postgres
|
||||||
- name: "Upload schema differences"
|
- name: "Upload schema differences"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
||||||
with:
|
with:
|
||||||
name: Schema dumps
|
name: Schema dumps
|
||||||
@@ -657,19 +655,19 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout synapse codebase
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -692,11 +690,11 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
|
uses: dtolnay/rust-toolchain@1.66.0
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo test
|
- run: cargo test
|
||||||
|
|
||||||
@@ -710,13 +708,13 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@master
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo bench --no-run
|
- run: cargo bench --no-run
|
||||||
|
|
||||||
@@ -735,7 +733,7 @@ jobs:
|
|||||||
- linting-done
|
- linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
|
- uses: matrix-org/done-action@v3
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
|
|||||||
2
.github/workflows/triage-incoming.yml
vendored
2
.github/workflows/triage-incoming.yml
vendored
@@ -6,7 +6,7 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
triage:
|
triage:
|
||||||
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
|
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
|
||||||
with:
|
with:
|
||||||
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
|
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
|
||||||
content_id: ${{ github.event.issue.node_id }}
|
content_id: ${{ github.event.issue.node_id }}
|
||||||
|
|||||||
2
.github/workflows/triage_labelled.yml
vendored
2
.github/workflows/triage_labelled.yml
vendored
@@ -11,7 +11,7 @@ jobs:
|
|||||||
if: >
|
if: >
|
||||||
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
|
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/add-to-project@280af8ae1f83a494cfad2cb10f02f6d13529caa9 # main (v1.0.2 + 10 commits)
|
- uses: actions/add-to-project@main
|
||||||
id: add_project
|
id: add_project
|
||||||
with:
|
with:
|
||||||
project-url: "https://github.com/orgs/matrix-org/projects/67"
|
project-url: "https://github.com/orgs/matrix-org/projects/67"
|
||||||
|
|||||||
40
.github/workflows/twisted_trunk.yml
vendored
40
.github/workflows/twisted_trunk.yml
vendored
@@ -40,17 +40,16 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
extras: "all"
|
extras: "all"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
- run: |
|
- run: |
|
||||||
poetry remove twisted
|
poetry remove twisted
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
|
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
|
||||||
@@ -65,18 +64,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
extras: "all test"
|
extras: "all test"
|
||||||
poetry-version: "2.1.1"
|
|
||||||
- run: |
|
- run: |
|
||||||
poetry remove twisted
|
poetry remove twisted
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||||
@@ -101,20 +99,20 @@ jobs:
|
|||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
# We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
|
# We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
|
||||||
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
|
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
|
||||||
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
|
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
|
||||||
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
|
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
|
||||||
image: matrixdotorg/sytest-synapse:bullseye
|
image: matrixdotorg/sytest-synapse:focal
|
||||||
volumes:
|
volumes:
|
||||||
- ${{ github.workspace }}:/src
|
- ${{ github.workspace }}:/src
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
|
uses: dtolnay/rust-toolchain@stable
|
||||||
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Patch dependencies
|
- name: Patch dependencies
|
||||||
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
|
||||||
@@ -138,7 +136,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@v4
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
@@ -166,14 +164,14 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v4 for synapse
|
- name: Run actions/checkout@v4 for synapse
|
||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
cache-dependency-path: complement/go.sum
|
cache-dependency-path: complement/go.sum
|
||||||
go-version-file: complement/go.mod
|
go-version-file: complement/go.mod
|
||||||
@@ -183,11 +181,11 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
set -x
|
set -x
|
||||||
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
|
||||||
pipx install poetry==2.1.1
|
pipx install poetry==1.3.2
|
||||||
|
|
||||||
poetry remove -n twisted
|
poetry remove -n twisted
|
||||||
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||||
poetry lock
|
poetry lock --no-update
|
||||||
working-directory: synapse
|
working-directory: synapse
|
||||||
|
|
||||||
- run: |
|
- run: |
|
||||||
@@ -208,7 +206,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
- uses: actions/checkout@v4
|
||||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
3609
CHANGES.md
3609
CHANGES.md
File diff suppressed because it is too large
Load Diff
243
Cargo.lock
generated
243
Cargo.lock
generated
@@ -13,9 +13,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyhow"
|
name = "anyhow"
|
||||||
version = "1.0.97"
|
version = "1.0.89"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
|
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arc-swap"
|
name = "arc-swap"
|
||||||
@@ -37,9 +37,9 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "2.8.0"
|
version = "2.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36"
|
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "blake2"
|
name = "blake2"
|
||||||
@@ -67,9 +67,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bytes"
|
name = "bytes"
|
||||||
version = "1.10.1"
|
version = "1.7.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
|
checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cfg-if"
|
name = "cfg-if"
|
||||||
@@ -125,14 +125,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "getrandom"
|
name = "getrandom"
|
||||||
version = "0.3.1"
|
version = "0.2.15"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
|
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
"js-sys",
|
||||||
"libc",
|
"libc",
|
||||||
"wasi",
|
"wasi",
|
||||||
"windows-targets",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -161,9 +162,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "heck"
|
name = "heck"
|
||||||
version = "0.5.0"
|
version = "0.4.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hex"
|
name = "hex"
|
||||||
@@ -173,9 +174,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "http"
|
name = "http"
|
||||||
version = "1.3.1"
|
version = "1.1.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
|
checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes",
|
"bytes",
|
||||||
"fnv",
|
"fnv",
|
||||||
@@ -222,10 +223,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
|
checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "log"
|
name = "lock_api"
|
||||||
version = "0.4.27"
|
version = "0.4.12"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
|
checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
|
||||||
|
dependencies = [
|
||||||
|
"autocfg",
|
||||||
|
"scopeguard",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "log"
|
||||||
|
version = "0.4.22"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "memchr"
|
name = "memchr"
|
||||||
@@ -254,6 +265,29 @@ version = "1.19.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "parking_lot"
|
||||||
|
version = "0.12.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb"
|
||||||
|
dependencies = [
|
||||||
|
"lock_api",
|
||||||
|
"parking_lot_core",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "parking_lot_core"
|
||||||
|
version = "0.9.10"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
|
||||||
|
dependencies = [
|
||||||
|
"cfg-if",
|
||||||
|
"libc",
|
||||||
|
"redox_syscall",
|
||||||
|
"smallvec",
|
||||||
|
"windows-targets",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "portable-atomic"
|
name = "portable-atomic"
|
||||||
version = "1.6.0"
|
version = "1.6.0"
|
||||||
@@ -268,25 +302,25 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.89"
|
version = "1.0.82"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
|
checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3"
|
name = "pyo3"
|
||||||
version = "0.23.5"
|
version = "0.21.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
|
checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"indoc",
|
"indoc",
|
||||||
"libc",
|
"libc",
|
||||||
"memoffset",
|
"memoffset",
|
||||||
"once_cell",
|
"parking_lot",
|
||||||
"portable-atomic",
|
"portable-atomic",
|
||||||
"pyo3-build-config",
|
"pyo3-build-config",
|
||||||
"pyo3-ffi",
|
"pyo3-ffi",
|
||||||
@@ -296,9 +330,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-build-config"
|
name = "pyo3-build-config"
|
||||||
version = "0.23.5"
|
version = "0.21.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
|
checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"target-lexicon",
|
"target-lexicon",
|
||||||
@@ -306,9 +340,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-ffi"
|
name = "pyo3-ffi"
|
||||||
version = "0.23.5"
|
version = "0.21.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
|
checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"pyo3-build-config",
|
"pyo3-build-config",
|
||||||
@@ -316,9 +350,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-log"
|
name = "pyo3-log"
|
||||||
version = "0.12.2"
|
version = "0.10.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4b78e4983ba15bc62833a0e0941d965bc03690163f1127864f1408db25063466"
|
checksum = "2af49834b8d2ecd555177e63b273b708dea75150abc6f5341d0a6e1a9623976c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arc-swap",
|
"arc-swap",
|
||||||
"log",
|
"log",
|
||||||
@@ -327,9 +361,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-macros"
|
name = "pyo3-macros"
|
||||||
version = "0.23.5"
|
version = "0.21.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
|
checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"pyo3-macros-backend",
|
"pyo3-macros-backend",
|
||||||
@@ -339,9 +373,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-macros-backend"
|
name = "pyo3-macros-backend"
|
||||||
version = "0.23.5"
|
version = "0.21.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
|
checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heck",
|
"heck",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
@@ -352,9 +386,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pythonize"
|
name = "pythonize"
|
||||||
version = "0.23.0"
|
version = "0.21.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "91a6ee7a084f913f98d70cdc3ebec07e852b735ae3059a1500db2661265da9ff"
|
checksum = "9d0664248812c38cc55a4ed07f88e4df516ce82604b93b1ffdc041aa77a6cb3c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pyo3",
|
"pyo3",
|
||||||
"serde",
|
"serde",
|
||||||
@@ -371,20 +405,20 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rand"
|
name = "rand"
|
||||||
version = "0.9.0"
|
version = "0.8.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
|
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"libc",
|
||||||
"rand_chacha",
|
"rand_chacha",
|
||||||
"rand_core",
|
"rand_core",
|
||||||
"zerocopy",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rand_chacha"
|
name = "rand_chacha"
|
||||||
version = "0.9.0"
|
version = "0.3.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
|
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ppv-lite86",
|
"ppv-lite86",
|
||||||
"rand_core",
|
"rand_core",
|
||||||
@@ -392,19 +426,27 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "rand_core"
|
name = "rand_core"
|
||||||
version = "0.9.0"
|
version = "0.6.4"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
|
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"getrandom",
|
"getrandom",
|
||||||
"zerocopy",
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "redox_syscall"
|
||||||
|
version = "0.5.1"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e"
|
||||||
|
dependencies = [
|
||||||
|
"bitflags",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.11.1"
|
version = "1.10.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
@@ -414,9 +456,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-automata"
|
name = "regex-automata"
|
||||||
version = "0.4.8"
|
version = "0.4.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
|
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
@@ -425,9 +467,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.8.5"
|
version = "0.8.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ryu"
|
name = "ryu"
|
||||||
@@ -436,19 +478,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
|
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "scopeguard"
|
||||||
version = "1.0.219"
|
version = "1.2.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
|
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde"
|
||||||
|
version = "1.0.210"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_derive"
|
name = "serde_derive"
|
||||||
version = "1.0.219"
|
version = "1.0.210"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
|
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -457,9 +505,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.140"
|
version = "1.0.128"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
|
checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa",
|
||||||
"memchr",
|
"memchr",
|
||||||
@@ -489,6 +537,12 @@ dependencies = [
|
|||||||
"digest",
|
"digest",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "smallvec"
|
||||||
|
version = "1.13.2"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "subtle"
|
name = "subtle"
|
||||||
version = "2.5.0"
|
version = "2.5.0"
|
||||||
@@ -497,9 +551,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.85"
|
version = "2.0.61"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
|
checksum = "c993ed8ccba56ae856363b1845da7266a7cb78e1d146c8a32d54b45a8b831fc9"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -544,10 +598,11 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ulid"
|
name = "ulid"
|
||||||
version = "1.2.1"
|
version = "1.1.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
|
checksum = "04f903f293d11f31c0c29e4148f6dc0d033a7f80cebc0282bea147611667d289"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"getrandom",
|
||||||
"rand",
|
"rand",
|
||||||
"web-time",
|
"web-time",
|
||||||
]
|
]
|
||||||
@@ -572,12 +627,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasi"
|
name = "wasi"
|
||||||
version = "0.13.3+wasi-0.2.2"
|
version = "0.11.0+wasi-snapshot-preview1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
|
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||||
dependencies = [
|
|
||||||
"wit-bindgen-rt",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
@@ -645,9 +697,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-targets"
|
name = "windows-targets"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"windows_aarch64_gnullvm",
|
"windows_aarch64_gnullvm",
|
||||||
"windows_aarch64_msvc",
|
"windows_aarch64_msvc",
|
||||||
@@ -661,77 +713,48 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_aarch64_gnullvm"
|
name = "windows_aarch64_gnullvm"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_aarch64_msvc"
|
name = "windows_aarch64_msvc"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_i686_gnu"
|
name = "windows_i686_gnu"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_i686_gnullvm"
|
name = "windows_i686_gnullvm"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_i686_msvc"
|
name = "windows_i686_msvc"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_gnu"
|
name = "windows_x86_64_gnu"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_gnullvm"
|
name = "windows_x86_64_gnullvm"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows_x86_64_msvc"
|
name = "windows_x86_64_msvc"
|
||||||
version = "0.52.6"
|
version = "0.52.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wit-bindgen-rt"
|
|
||||||
version = "0.33.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
|
|
||||||
dependencies = [
|
|
||||||
"bitflags",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "zerocopy"
|
|
||||||
version = "0.8.17"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713"
|
|
||||||
dependencies = [
|
|
||||||
"zerocopy-derive",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "zerocopy-derive"
|
|
||||||
version = "0.8.17"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
]
|
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
Licensees holding a valid commercial license with Element may use this
|
|
||||||
software in accordance with the terms contained in a written agreement
|
|
||||||
between you and Element.
|
|
||||||
|
|
||||||
To purchase a commercial license please contact our sales team at
|
|
||||||
licensing@element.io
|
|
||||||
27
README.rst
27
README.rst
@@ -10,15 +10,14 @@ implementation, written and maintained by `Element <https://element.io>`_.
|
|||||||
`Matrix <https://github.com/matrix-org>`__ is the open standard for
|
`Matrix <https://github.com/matrix-org>`__ is the open standard for
|
||||||
secure and interoperable real time communications. You can directly run
|
secure and interoperable real time communications. You can directly run
|
||||||
and manage the source code in this repository, available under an AGPL
|
and manage the source code in this repository, available under an AGPL
|
||||||
license (or alternatively under a commercial license from Element).
|
license. There is no support provided from Element unless you have a
|
||||||
There is no support provided by Element unless you have a
|
subscription.
|
||||||
subscription from Element.
|
|
||||||
|
|
||||||
Subscription
|
Subscription alternative
|
||||||
============
|
========================
|
||||||
|
|
||||||
For those that need an enterprise-ready solution, Element
|
Alternatively, for those that need an enterprise-ready solution, Element
|
||||||
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
|
Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
|
||||||
ESS builds on Synapse to offer a complete Matrix-based backend including the full
|
ESS builds on Synapse to offer a complete Matrix-based backend including the full
|
||||||
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
|
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
|
||||||
giving admins the power to easily manage an organization-wide
|
giving admins the power to easily manage an organization-wide
|
||||||
@@ -250,20 +249,6 @@ Developers might be particularly interested in:
|
|||||||
Alongside all that, join our developer community on Matrix:
|
Alongside all that, join our developer community on Matrix:
|
||||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||||
|
|
||||||
Copyright and Licensing
|
|
||||||
=======================
|
|
||||||
|
|
||||||
Copyright 2014-2017 OpenMarket Ltd
|
|
||||||
Copyright 2017 Vector Creations Ltd
|
|
||||||
Copyright 2017-2025 New Vector Ltd
|
|
||||||
|
|
||||||
This software is dual-licensed by New Vector Ltd (Element). It can be used either:
|
|
||||||
|
|
||||||
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
|
|
||||||
|
|
||||||
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
|
|
||||||
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
|
|
||||||
|
|
||||||
|
|
||||||
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
|
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
|
||||||
:alt: (get community support in #synapse:matrix.org)
|
:alt: (get community support in #synapse:matrix.org)
|
||||||
|
|||||||
@@ -1,10 +1,8 @@
|
|||||||
# A build script for poetry that adds the rust extension.
|
# A build script for poetry that adds the rust extension.
|
||||||
|
|
||||||
import itertools
|
|
||||||
import os
|
import os
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
from packaging.specifiers import SpecifierSet
|
|
||||||
from setuptools_rust import Binding, RustExtension
|
from setuptools_rust import Binding, RustExtension
|
||||||
|
|
||||||
|
|
||||||
@@ -16,8 +14,6 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
|
|||||||
target="synapse.synapse_rust",
|
target="synapse.synapse_rust",
|
||||||
path=cargo_toml_path,
|
path=cargo_toml_path,
|
||||||
binding=Binding.PyO3,
|
binding=Binding.PyO3,
|
||||||
# This flag is a no-op in the latest versions. Instead, we need to
|
|
||||||
# specify this in the `bdist_wheel` config below.
|
|
||||||
py_limited_api=True,
|
py_limited_api=True,
|
||||||
# We force always building in release mode, as we can't tell the
|
# We force always building in release mode, as we can't tell the
|
||||||
# difference between using `poetry` in development vs production.
|
# difference between using `poetry` in development vs production.
|
||||||
@@ -25,18 +21,3 @@ def build(setup_kwargs: Dict[str, Any]) -> None:
|
|||||||
)
|
)
|
||||||
setup_kwargs.setdefault("rust_extensions", []).append(extension)
|
setup_kwargs.setdefault("rust_extensions", []).append(extension)
|
||||||
setup_kwargs["zip_safe"] = False
|
setup_kwargs["zip_safe"] = False
|
||||||
|
|
||||||
# We lookup the minimum supported python version by looking at
|
|
||||||
# `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
|
|
||||||
# version that matches. We then convert that into the `py_limited_api` form,
|
|
||||||
# e.g. cp39 for python 3.9.
|
|
||||||
py_limited_api: str
|
|
||||||
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
|
|
||||||
for minor_version in itertools.count(start=8):
|
|
||||||
if f"3.{minor_version}.0" in python_bounds:
|
|
||||||
py_limited_api = f"cp3{minor_version}"
|
|
||||||
break
|
|
||||||
|
|
||||||
setup_kwargs.setdefault("options", {}).setdefault("bdist_wheel", {})[
|
|
||||||
"py_limited_api"
|
|
||||||
] = py_limited_api
|
|
||||||
|
|||||||
1
changelog.d/17749.doc
Normal file
1
changelog.d/17749.doc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Remove spurious "TODO UPDATE ALL THIS" note in the Debian installation docs.
|
||||||
@@ -245,7 +245,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
if "flows" not in json_res:
|
if "flows" not in json_res:
|
||||||
print("Failed to find any login flows.")
|
print("Failed to find any login flows.")
|
||||||
return False
|
defer.returnValue(False)
|
||||||
|
|
||||||
flow = json_res["flows"][0] # assume first is the one we want.
|
flow = json_res["flows"][0] # assume first is the one we want.
|
||||||
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
|
||||||
@@ -254,8 +254,8 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"Unable to login via the command line client. Please visit "
|
"Unable to login via the command line client. Please visit "
|
||||||
"%s to login." % fallback_url
|
"%s to login." % fallback_url
|
||||||
)
|
)
|
||||||
return False
|
defer.returnValue(False)
|
||||||
return True
|
defer.returnValue(True)
|
||||||
|
|
||||||
def do_emailrequest(self, line):
|
def do_emailrequest(self, line):
|
||||||
"""Requests the association of a third party identifier
|
"""Requests the association of a third party identifier
|
||||||
|
|||||||
@@ -78,7 +78,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
url, data, headers_dict={"Content-Type": ["application/json"]}
|
url, data, headers_dict={"Content-Type": ["application/json"]}
|
||||||
)
|
)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return response.code, body
|
defer.returnValue((response.code, body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
@@ -88,7 +88,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
url = "%s?%s" % (url, qs)
|
url = "%s?%s" % (url, qs)
|
||||||
response = yield self._create_get_request(url)
|
response = yield self._create_get_request(url)
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return json.loads(body)
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
||||||
"""Wrapper of _create_request to issue a PUT request"""
|
"""Wrapper of _create_request to issue a PUT request"""
|
||||||
@@ -134,7 +134,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
response = yield self._create_request(method, url)
|
response = yield self._create_request(method, url)
|
||||||
|
|
||||||
body = yield readBody(response)
|
body = yield readBody(response)
|
||||||
return json.loads(body)
|
defer.returnValue(json.loads(body))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_request(
|
def _create_request(
|
||||||
@@ -173,7 +173,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
if self.verbose:
|
if self.verbose:
|
||||||
print("Status %s %s" % (response.code, response.phrase))
|
print("Status %s %s" % (response.code, response.phrase))
|
||||||
print(pformat(list(response.headers.getAllRawHeaders())))
|
print(pformat(list(response.headers.getAllRawHeaders())))
|
||||||
return response
|
defer.returnValue(response)
|
||||||
|
|
||||||
def sleep(self, seconds):
|
def sleep(self, seconds):
|
||||||
d = defer.Deferred()
|
d = defer.Deferred()
|
||||||
|
|||||||
@@ -30,6 +30,3 @@ docker-compose up -d
|
|||||||
### More information
|
### More information
|
||||||
|
|
||||||
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
|
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
|
||||||
|
|
||||||
**For a more comprehensive Docker Compose example showcasing a full Matrix 2.0 stack, please see
|
|
||||||
https://github.com/element-hq/element-docker-demo**
|
|
||||||
@@ -51,7 +51,7 @@ services:
|
|||||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: docker.io/postgres:15-alpine
|
image: docker.io/postgres:12-alpine
|
||||||
# Change that password, of course!
|
# Change that password, of course!
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_USER=synapse
|
- POSTGRES_USER=synapse
|
||||||
|
|||||||
@@ -8,9 +8,6 @@ All examples and snippets assume that your Synapse service is called `synapse` i
|
|||||||
|
|
||||||
An example Docker Compose file can be found [here](docker-compose.yaml).
|
An example Docker Compose file can be found [here](docker-compose.yaml).
|
||||||
|
|
||||||
**For a more comprehensive Docker Compose example, showcasing a full Matrix 2.0 stack (originally based on this
|
|
||||||
docker-compose.yaml), please see https://github.com/element-hq/element-docker-demo**
|
|
||||||
|
|
||||||
## Worker Service Examples in Docker Compose
|
## Worker Service Examples in Docker Compose
|
||||||
|
|
||||||
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
|
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
|
||||||
|
|||||||
2
debian/build_virtualenv
vendored
2
debian/build_virtualenv
vendored
@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
|
|||||||
python3 -m venv "$TEMP_VENV"
|
python3 -m venv "$TEMP_VENV"
|
||||||
source "$TEMP_VENV/bin/activate"
|
source "$TEMP_VENV/bin/activate"
|
||||||
pip install -U pip
|
pip install -U pip
|
||||||
pip install poetry==2.1.1 poetry-plugin-export==1.9.0
|
pip install poetry==1.3.2
|
||||||
poetry export \
|
poetry export \
|
||||||
--extras all \
|
--extras all \
|
||||||
--extras test \
|
--extras test \
|
||||||
|
|||||||
199
debian/changelog
vendored
199
debian/changelog
vendored
@@ -1,202 +1,3 @@
|
|||||||
matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* Update Poetry to 2.1.1.
|
|
||||||
* New synapse release 1.128.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Apr 2025 14:35:33 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Mar 2025 12:04:15 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.127.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Mar 2025 13:30:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Mar 2025 13:11:29 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Mar 2025 15:45:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Mar 2025 14:29:12 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.126.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Mar 2025 13:11:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.125.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.125.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Feb 2025 08:10:07 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.125.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Feb 2025 13:32:49 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Feb 2025 11:55:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Feb 2025 13:42:55 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Feb 2025 16:35:53 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.124.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.123.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.123.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.123.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.122.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.122.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.122.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 18:24:48 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 13:12:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.121.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 04 Dec 2024 14:47:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 15:43:37 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 09:07:57 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.120.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2024 13:10:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.120.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.120.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 20 Nov 2024 15:02:21 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Nov 2024 13:57:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Nov 2024 14:33:02 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.119.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2024 08:59:43 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.118.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.118.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2024 15:29:53 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.118.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.118.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Oct 2024 11:48:14 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.117.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.117.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Oct 2024 10:46:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.117.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.117.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Oct 2024 14:37:11 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.116.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.116.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Oct 2024 11:14:07 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
|
matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
|
||||||
|
|
||||||
* New synapse release 1.116.0rc2.
|
* New synapse release 1.116.0rc2.
|
||||||
|
|||||||
@@ -138,13 +138,6 @@ for port in 8080 8081 8082; do
|
|||||||
per_user:
|
per_user:
|
||||||
per_second: 1000
|
per_second: 1000
|
||||||
burst_count: 1000
|
burst_count: 1000
|
||||||
rc_presence:
|
|
||||||
per_user:
|
|
||||||
per_second: 1000
|
|
||||||
burst_count: 1000
|
|
||||||
rc_delayed_event_mgmt:
|
|
||||||
per_second: 1000
|
|
||||||
burst_count: 1000
|
|
||||||
RC
|
RC
|
||||||
)
|
)
|
||||||
echo "${ratelimiting}" >> "$port.config"
|
echo "${ratelimiting}" >> "$port.config"
|
||||||
|
|||||||
@@ -20,16 +20,45 @@
|
|||||||
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
|
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
|
||||||
# in `poetry export` in the past.
|
# in `poetry export` in the past.
|
||||||
|
|
||||||
ARG DEBIAN_VERSION=bookworm
|
ARG PYTHON_VERSION=3.11
|
||||||
ARG PYTHON_VERSION=3.12
|
|
||||||
ARG POETRY_VERSION=2.1.1
|
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 0: generate requirements.txt
|
### Stage 0: generate requirements.txt
|
||||||
###
|
###
|
||||||
### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
|
# We hardcode the use of Debian bookworm here because this could change upstream
|
||||||
###
|
# and other Dockerfiles used for testing are expecting bookworm.
|
||||||
FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements
|
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
|
||||||
|
|
||||||
|
# RUN --mount is specific to buildkit and is documented at
|
||||||
|
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||||
|
# Here we use it to set up a cache for apt (and below for pip), to improve
|
||||||
|
# rebuild speeds on slow connections.
|
||||||
|
RUN \
|
||||||
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
|
apt-get update -qq && apt-get install -yqq \
|
||||||
|
build-essential curl git libffi-dev libssl-dev pkg-config \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install rust and ensure its in the PATH.
|
||||||
|
# (Rust may be needed to compile `cryptography`---which is one of poetry's
|
||||||
|
# dependencies---on platforms that don't have a `cryptography` wheel.
|
||||||
|
ENV RUSTUP_HOME=/rust
|
||||||
|
ENV CARGO_HOME=/cargo
|
||||||
|
ENV PATH=/cargo/bin:/rust/bin:$PATH
|
||||||
|
RUN mkdir /rust /cargo
|
||||||
|
|
||||||
|
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
||||||
|
|
||||||
|
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
||||||
|
# set to true, so we expose it as a build-arg.
|
||||||
|
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
||||||
|
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
|
||||||
|
|
||||||
|
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
||||||
|
# synapse's dependencies.
|
||||||
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
|
pip install --user "poetry==1.3.2"
|
||||||
|
|
||||||
WORKDIR /synapse
|
WORKDIR /synapse
|
||||||
|
|
||||||
@@ -46,30 +75,41 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
|
|||||||
# Instead, we'll just install what a regular `pip install` would from PyPI.
|
# Instead, we'll just install what a regular `pip install` would from PyPI.
|
||||||
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
|
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
|
||||||
|
|
||||||
# This silences a warning as uv isn't able to do hardlinks between its cache
|
|
||||||
# (mounted as --mount=type=cache) and the target directory.
|
|
||||||
ENV UV_LINK_MODE=copy
|
|
||||||
|
|
||||||
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
|
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
|
||||||
# Otherwise, just create an empty requirements file so that the Dockerfile can
|
# Otherwise, just create an empty requirements file so that the Dockerfile can
|
||||||
# proceed.
|
# proceed.
|
||||||
ARG POETRY_VERSION
|
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
||||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
|
||||||
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
|
||||||
uvx --with poetry-plugin-export==1.9.0 \
|
|
||||||
poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
|
|
||||||
else \
|
else \
|
||||||
touch /synapse/requirements.txt; \
|
touch /synapse/requirements.txt; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 1: builder
|
### Stage 1: builder
|
||||||
###
|
###
|
||||||
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
|
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
|
||||||
|
|
||||||
|
# install the OS build deps
|
||||||
|
RUN \
|
||||||
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
|
apt-get update -qq && apt-get install -yqq \
|
||||||
|
build-essential \
|
||||||
|
libffi-dev \
|
||||||
|
libjpeg-dev \
|
||||||
|
libpq-dev \
|
||||||
|
libssl-dev \
|
||||||
|
libwebp-dev \
|
||||||
|
libxml++2.6-dev \
|
||||||
|
libxslt1-dev \
|
||||||
|
openssl \
|
||||||
|
zlib1g-dev \
|
||||||
|
git \
|
||||||
|
curl \
|
||||||
|
libicu-dev \
|
||||||
|
pkg-config \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# This silences a warning as uv isn't able to do hardlinks between its cache
|
|
||||||
# (mounted as --mount=type=cache) and the target directory.
|
|
||||||
ENV UV_LINK_MODE=copy
|
|
||||||
|
|
||||||
# Install rust and ensure its in the PATH
|
# Install rust and ensure its in the PATH
|
||||||
ENV RUSTUP_HOME=/rust
|
ENV RUSTUP_HOME=/rust
|
||||||
@@ -79,6 +119,7 @@ RUN mkdir /rust /cargo
|
|||||||
|
|
||||||
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
||||||
|
|
||||||
|
|
||||||
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
||||||
# set to true, so we expose it as a build-arg.
|
# set to true, so we expose it as a build-arg.
|
||||||
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
||||||
@@ -90,8 +131,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
|
|||||||
#
|
#
|
||||||
# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
|
# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
|
||||||
COPY --from=requirements /synapse/requirements.txt /synapse/
|
COPY --from=requirements /synapse/requirements.txt /synapse/
|
||||||
RUN --mount=type=cache,target=/root/.cache/uv \
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt
|
pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
|
||||||
|
|
||||||
# Copy over the rest of the synapse source code.
|
# Copy over the rest of the synapse source code.
|
||||||
COPY synapse /synapse/synapse/
|
COPY synapse /synapse/synapse/
|
||||||
@@ -105,85 +146,41 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
|
|||||||
# Install the synapse package itself.
|
# Install the synapse package itself.
|
||||||
# If we have populated requirements.txt, we don't install any dependencies
|
# If we have populated requirements.txt, we don't install any dependencies
|
||||||
# as we should already have those from the previous `pip install` step.
|
# as we should already have those from the previous `pip install` step.
|
||||||
RUN \
|
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
|
||||||
--mount=type=cache,target=/root/.cache/uv \
|
|
||||||
--mount=type=cache,target=/synapse/target,sharing=locked \
|
|
||||||
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
|
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
|
||||||
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
||||||
uv pip install --prefix="/install" --no-deps /synapse[all]; \
|
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
|
||||||
else \
|
else \
|
||||||
uv pip install --prefix="/install" /synapse[all]; \
|
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 2: runtime dependencies download for ARM64 and AMD64
|
### Stage 2: runtime
|
||||||
###
|
|
||||||
FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
|
|
||||||
|
|
||||||
# Tell apt to keep downloaded package files, as we're using cache mounts.
|
|
||||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
|
||||||
|
|
||||||
# Add both target architectures
|
|
||||||
RUN dpkg --add-architecture arm64
|
|
||||||
RUN dpkg --add-architecture amd64
|
|
||||||
|
|
||||||
# Fetch the runtime dependencies debs for both architectures
|
|
||||||
# We do that by building a recursive list of packages we need to download with `apt-cache depends`
|
|
||||||
# and then downloading them with `apt-get download`.
|
|
||||||
RUN \
|
|
||||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
|
||||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
|
||||||
apt-get update -qq && \
|
|
||||||
apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
|
|
||||||
curl \
|
|
||||||
gosu \
|
|
||||||
libjpeg62-turbo \
|
|
||||||
libpq5 \
|
|
||||||
libwebp7 \
|
|
||||||
xmlsec1 \
|
|
||||||
libjemalloc2 \
|
|
||||||
libicu \
|
|
||||||
| grep '^\w' > /tmp/pkg-list && \
|
|
||||||
for arch in arm64 amd64; do \
|
|
||||||
mkdir -p /tmp/debs-${arch} && \
|
|
||||||
cd /tmp/debs-${arch} && \
|
|
||||||
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
|
|
||||||
done
|
|
||||||
|
|
||||||
# Extract the debs for each architecture
|
|
||||||
RUN \
|
|
||||||
for arch in arm64 amd64; do \
|
|
||||||
mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
|
|
||||||
for deb in /tmp/debs-${arch}/*.deb; do \
|
|
||||||
package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
|
|
||||||
echo "Extracting: ${package_name}"; \
|
|
||||||
dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
|
|
||||||
dpkg --extract $deb /install-${arch}; \
|
|
||||||
done; \
|
|
||||||
done
|
|
||||||
|
|
||||||
|
|
||||||
###
|
|
||||||
### Stage 3: runtime
|
|
||||||
###
|
###
|
||||||
|
|
||||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
|
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
|
||||||
|
|
||||||
ARG TARGETARCH
|
|
||||||
|
|
||||||
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
||||||
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
|
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
|
||||||
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
|
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
|
||||||
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
|
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
|
||||||
|
|
||||||
# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
|
RUN \
|
||||||
# libraries to the right place, else the `COPY` won't work.
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
# already present in the runtime image.
|
apt-get update -qq && apt-get install -yqq \
|
||||||
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
|
curl \
|
||||||
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
|
gosu \
|
||||||
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
|
libjpeg62-turbo \
|
||||||
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
|
libpq5 \
|
||||||
|
libwebp7 \
|
||||||
|
xmlsec1 \
|
||||||
|
libjemalloc2 \
|
||||||
|
libicu72 \
|
||||||
|
libssl-dev \
|
||||||
|
openssl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
COPY --from=builder /install /usr/local
|
COPY --from=builder /install /usr/local
|
||||||
COPY ./docker/start.py /start.py
|
COPY ./docker/start.py /start.py
|
||||||
COPY ./docker/conf /conf
|
COPY ./docker/conf /conf
|
||||||
|
|||||||
@@ -2,13 +2,12 @@
|
|||||||
|
|
||||||
ARG SYNAPSE_VERSION=latest
|
ARG SYNAPSE_VERSION=latest
|
||||||
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
|
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
|
||||||
ARG DEBIAN_VERSION=bookworm
|
|
||||||
|
|
||||||
# first of all, we create a base image with an nginx which we can copy into the
|
# first of all, we create a base image with an nginx which we can copy into the
|
||||||
# target image. For repeated rebuilds, this is much faster than apt installing
|
# target image. For repeated rebuilds, this is much faster than apt installing
|
||||||
# each time.
|
# each time.
|
||||||
|
|
||||||
FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base
|
FROM docker.io/library/debian:bookworm-slim AS deps_base
|
||||||
RUN \
|
RUN \
|
||||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
@@ -22,20 +21,15 @@ FROM docker.io/library/debian:${DEBIAN_VERSION}-slim AS deps_base
|
|||||||
# which makes it much easier to copy (but we need to make sure we use an image
|
# which makes it much easier to copy (but we need to make sure we use an image
|
||||||
# based on the same debian version as the synapse image, to make sure we get
|
# based on the same debian version as the synapse image, to make sure we get
|
||||||
# the expected version of libc.
|
# the expected version of libc.
|
||||||
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
|
FROM docker.io/library/redis:7-bookworm AS redis_base
|
||||||
|
|
||||||
# now build the final image, based on the the regular Synapse docker image
|
# now build the final image, based on the the regular Synapse docker image
|
||||||
FROM $FROM
|
FROM $FROM
|
||||||
|
|
||||||
# Install supervisord with uv pip instead of apt, to avoid installing a second
|
# Install supervisord with pip instead of apt, to avoid installing a second
|
||||||
# copy of python.
|
# copy of python.
|
||||||
# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
# (mounted as --mount=type=cache) and the target directory.
|
pip install supervisor~=4.2
|
||||||
RUN \
|
|
||||||
--mount=type=bind,from=ghcr.io/astral-sh/uv:0.6.8,source=/uv,target=/uv \
|
|
||||||
--mount=type=cache,target=/root/.cache/uv \
|
|
||||||
/uv pip install --link-mode=copy --prefix="/usr/local" supervisor~=4.2
|
|
||||||
|
|
||||||
RUN mkdir -p /etc/supervisor/conf.d
|
RUN mkdir -p /etc/supervisor/conf.d
|
||||||
|
|
||||||
# Copy over redis and nginx
|
# Copy over redis and nginx
|
||||||
|
|||||||
@@ -114,9 +114,6 @@ The following environment variables are supported in `run` mode:
|
|||||||
is set via `docker run --user`, defaults to `991`, `991`. Note that this user
|
is set via `docker run --user`, defaults to `991`, `991`. Note that this user
|
||||||
must have permission to read the config files, and write to the data directories.
|
must have permission to read the config files, and write to the data directories.
|
||||||
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
|
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
|
||||||
* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
|
|
||||||
* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
|
|
||||||
* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as `no_proxy` environment variable.
|
|
||||||
|
|
||||||
For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
|
For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
|
||||||
|
|
||||||
|
|||||||
@@ -9,9 +9,6 @@
|
|||||||
ARG SYNAPSE_VERSION=latest
|
ARG SYNAPSE_VERSION=latest
|
||||||
# This is an intermediate image, to be built locally (not pulled from a registry).
|
# This is an intermediate image, to be built locally (not pulled from a registry).
|
||||||
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
||||||
ARG DEBIAN_VERSION=bookworm
|
|
||||||
|
|
||||||
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
|
|
||||||
|
|
||||||
FROM $FROM
|
FROM $FROM
|
||||||
# First of all, we copy postgres server from the official postgres image,
|
# First of all, we copy postgres server from the official postgres image,
|
||||||
@@ -23,8 +20,8 @@ FROM $FROM
|
|||||||
# the same debian version as Synapse's docker image (so the versions of the
|
# the same debian version as Synapse's docker image (so the versions of the
|
||||||
# shared libraries match).
|
# shared libraries match).
|
||||||
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
||||||
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
|
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
|
||||||
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
|
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
|
||||||
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
|
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
|
||||||
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
||||||
ENV PGDATA=/var/lib/postgresql/data
|
ENV PGDATA=/var/lib/postgresql/data
|
||||||
|
|||||||
@@ -5,12 +5,12 @@
|
|||||||
set -e
|
set -e
|
||||||
|
|
||||||
echo "Complement Synapse launcher"
|
echo "Complement Synapse launcher"
|
||||||
echo " Args: $*"
|
echo " Args: $@"
|
||||||
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
|
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
|
||||||
|
|
||||||
function log {
|
function log {
|
||||||
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
|
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
|
||||||
echo "$d $*"
|
echo "$d $@"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Set the server name of the homeserver
|
# Set the server name of the homeserver
|
||||||
@@ -131,4 +131,4 @@ export SYNAPSE_TLS_KEY=/conf/server.tls.key
|
|||||||
|
|
||||||
# Run the script that writes the necessary config files and starts supervisord, which in turn
|
# Run the script that writes the necessary config files and starts supervisord, which in turn
|
||||||
# starts everything else
|
# starts everything else
|
||||||
exec /configure_workers_and_start.py "$@"
|
exec /configure_workers_and_start.py
|
||||||
|
|||||||
@@ -7,7 +7,6 @@
|
|||||||
#}
|
#}
|
||||||
|
|
||||||
## Server ##
|
## Server ##
|
||||||
public_baseurl: http://127.0.0.1:8008/
|
|
||||||
report_stats: False
|
report_stats: False
|
||||||
trusted_key_servers: []
|
trusted_key_servers: []
|
||||||
enable_registration: true
|
enable_registration: true
|
||||||
@@ -85,18 +84,6 @@ rc_invites:
|
|||||||
per_user:
|
per_user:
|
||||||
per_second: 1000
|
per_second: 1000
|
||||||
burst_count: 1000
|
burst_count: 1000
|
||||||
per_issuer:
|
|
||||||
per_second: 1000
|
|
||||||
burst_count: 1000
|
|
||||||
|
|
||||||
rc_presence:
|
|
||||||
per_user:
|
|
||||||
per_second: 9999
|
|
||||||
burst_count: 9999
|
|
||||||
|
|
||||||
rc_delayed_event_mgmt:
|
|
||||||
per_second: 9999
|
|
||||||
burst_count: 9999
|
|
||||||
|
|
||||||
federation_rr_transactions_per_room_per_second: 9999
|
federation_rr_transactions_per_room_per_second: 9999
|
||||||
|
|
||||||
@@ -117,16 +104,6 @@ experimental_features:
|
|||||||
msc3967_enabled: true
|
msc3967_enabled: true
|
||||||
# Expose a room summary for public rooms
|
# Expose a room summary for public rooms
|
||||||
msc3266_enabled: true
|
msc3266_enabled: true
|
||||||
# Send to-device messages to application services
|
|
||||||
msc2409_to_device_messages_enabled: true
|
|
||||||
# Allow application services to masquerade devices
|
|
||||||
msc3202_device_masquerading: true
|
|
||||||
# Sending device list changes, one-time key counts and fallback key usage to application services
|
|
||||||
msc3202_transaction_extensions: true
|
|
||||||
# Proxy OTK claim requests to exclusive ASes
|
|
||||||
msc3983_appservice_otk_claims: true
|
|
||||||
# Proxy key queries to exclusive ASes
|
|
||||||
msc3984_appservice_key_query: true
|
|
||||||
|
|
||||||
server_notices:
|
server_notices:
|
||||||
system_mxid_localpart: _server
|
system_mxid_localpart: _server
|
||||||
@@ -143,9 +120,4 @@ caches:
|
|||||||
sync_response_cache_duration: 0
|
sync_response_cache_duration: 0
|
||||||
|
|
||||||
|
|
||||||
# Complement assumes that it can publish to the room list by default.
|
|
||||||
room_list_publication_rules:
|
|
||||||
- action: allow
|
|
||||||
|
|
||||||
|
|
||||||
{% include "shared-orig.yaml.j2" %}
|
{% include "shared-orig.yaml.j2" %}
|
||||||
|
|||||||
@@ -38,13 +38,10 @@ server {
|
|||||||
{% if using_unix_sockets %}
|
{% if using_unix_sockets %}
|
||||||
proxy_pass http://unix:/run/main_public.sock;
|
proxy_pass http://unix:/run/main_public.sock;
|
||||||
{% else %}
|
{% else %}
|
||||||
# note: do not add a path (even a single /) after the port in `proxy_pass`,
|
|
||||||
# otherwise nginx will canonicalise the URI and cause signature verification
|
|
||||||
# errors.
|
|
||||||
proxy_pass http://localhost:8080;
|
proxy_pass http://localhost:8080;
|
||||||
{% endif %}
|
{% endif %}
|
||||||
proxy_set_header X-Forwarded-For $remote_addr;
|
proxy_set_header X-Forwarded-For $remote_addr;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header Host $host:$server_port;
|
proxy_set_header Host $host;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
{% if use_forking_launcher %}
|
{% if use_forking_launcher %}
|
||||||
[program:synapse_fork]
|
[program:synapse_fork]
|
||||||
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
|
|
||||||
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
|
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
|
||||||
{{ main_config_path }}
|
{{ main_config_path }}
|
||||||
synapse.app.homeserver
|
synapse.app.homeserver
|
||||||
@@ -21,7 +20,6 @@ exitcodes=0
|
|||||||
|
|
||||||
{% else %}
|
{% else %}
|
||||||
[program:synapse_main]
|
[program:synapse_main]
|
||||||
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
|
|
||||||
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
|
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
|
||||||
--config-path="{{ main_config_path }}"
|
--config-path="{{ main_config_path }}"
|
||||||
--config-path=/conf/workers/shared.yaml
|
--config-path=/conf/workers/shared.yaml
|
||||||
@@ -38,7 +36,6 @@ exitcodes=0
|
|||||||
|
|
||||||
{% for worker in workers %}
|
{% for worker in workers %}
|
||||||
[program:synapse_{{ worker.name }}]
|
[program:synapse_{{ worker.name }}]
|
||||||
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
|
|
||||||
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
|
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
|
||||||
--config-path="{{ main_config_path }}"
|
--config-path="{{ main_config_path }}"
|
||||||
--config-path=/conf/workers/shared.yaml
|
--config-path=/conf/workers/shared.yaml
|
||||||
|
|||||||
@@ -24,6 +24,15 @@
|
|||||||
# nginx and supervisord configs depending on the workers requested.
|
# nginx and supervisord configs depending on the workers requested.
|
||||||
#
|
#
|
||||||
# The environment variables it reads are:
|
# The environment variables it reads are:
|
||||||
|
# * SYNAPSE_CONFIG_PATH: The path where the generated `homeserver.yaml` will
|
||||||
|
# be stored.
|
||||||
|
# * SYNAPSE_CONFIG_DIR: The directory where generated config will be stored.
|
||||||
|
# If `SYNAPSE_CONFIG_PATH` is not set, it will default to
|
||||||
|
# SYNAPSE_CONFIG_DIR/homeserver.yaml.
|
||||||
|
# * SYNAPSE_DATA_DIR: Where the generated config will put persistent data
|
||||||
|
# such as the database and media store.
|
||||||
|
# * SYNAPSE_CONFIG_TEMPLATE_DIR: The directory containing jinja2 templates for
|
||||||
|
# configuration that this script will generate config from. Defaults to '/conf'.
|
||||||
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
|
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
|
||||||
# * SYNAPSE_REPORT_STATS: Whether to report stats.
|
# * SYNAPSE_REPORT_STATS: Whether to report stats.
|
||||||
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
|
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
|
||||||
@@ -35,6 +44,8 @@
|
|||||||
# SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
|
# SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
|
||||||
# SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
|
# SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
|
||||||
# SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
|
# SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
|
||||||
|
# * SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK: Whether worker logs should be written to disk,
|
||||||
|
# in addition to stdout.
|
||||||
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
|
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
|
||||||
# will be treated as Application Service registration files.
|
# will be treated as Application Service registration files.
|
||||||
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
|
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
|
||||||
@@ -48,7 +59,9 @@
|
|||||||
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
|
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
|
||||||
# regardless of the SYNAPSE_LOG_LEVEL setting.
|
# regardless of the SYNAPSE_LOG_LEVEL setting.
|
||||||
# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
|
# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
|
||||||
# for testing.
|
# for testing.
|
||||||
|
# * SYNAPSE_USE_UNIX_SOCKET: if set, workers will communicate via unix socket
|
||||||
|
# rather than TCP.
|
||||||
#
|
#
|
||||||
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
|
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
|
||||||
# in the project's README), this script may be run multiple times, and functionality should
|
# in the project's README), this script may be run multiple times, and functionality should
|
||||||
@@ -604,7 +617,9 @@ def generate_base_homeserver_config() -> None:
|
|||||||
# start.py already does this for us, so just call that.
|
# start.py already does this for us, so just call that.
|
||||||
# note that this script is copied in in the official, monolith dockerfile
|
# note that this script is copied in in the official, monolith dockerfile
|
||||||
os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
|
os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
|
||||||
subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
|
|
||||||
|
# This script makes use of the `SYNAPSE_CONFIG_DIR` environment variable to
|
||||||
|
# determine where to place the generated homeserver config.
|
||||||
|
|
||||||
|
|
||||||
def parse_worker_types(
|
def parse_worker_types(
|
||||||
@@ -733,8 +748,10 @@ def parse_worker_types(
|
|||||||
|
|
||||||
def generate_worker_files(
|
def generate_worker_files(
|
||||||
environ: Mapping[str, str],
|
environ: Mapping[str, str],
|
||||||
|
config_dir: str,
|
||||||
config_path: str,
|
config_path: str,
|
||||||
data_dir: str,
|
data_dir: str,
|
||||||
|
template_dir: str,
|
||||||
requested_worker_types: Dict[str, Set[str]],
|
requested_worker_types: Dict[str, Set[str]],
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Read the desired workers(if any) that is passed in and generate shared
|
"""Read the desired workers(if any) that is passed in and generate shared
|
||||||
@@ -742,9 +759,13 @@ def generate_worker_files(
|
|||||||
|
|
||||||
Args:
|
Args:
|
||||||
environ: os.environ instance.
|
environ: os.environ instance.
|
||||||
config_path: The location of the generated Synapse main worker config file.
|
config_dir: The location of the configuration directory, where generated
|
||||||
data_dir: The location of the synapse data directory. Where log and
|
worker config files are written to.
|
||||||
user-facing config files live.
|
config_path: The location of the base Synapse homeserver config file.
|
||||||
|
data_dir: The location of the synapse data directory. Where logs will be
|
||||||
|
stored (if `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK` is set).
|
||||||
|
template_dir: The location of the template directory. Where jinja2
|
||||||
|
templates for config files live.
|
||||||
requested_worker_types: A Dict containing requested workers in the format of
|
requested_worker_types: A Dict containing requested workers in the format of
|
||||||
{'worker_name1': {'worker_type', ...}}
|
{'worker_name1': {'worker_type', ...}}
|
||||||
"""
|
"""
|
||||||
@@ -807,7 +828,8 @@ def generate_worker_files(
|
|||||||
nginx_locations: Dict[str, str] = {}
|
nginx_locations: Dict[str, str] = {}
|
||||||
|
|
||||||
# Create the worker configuration directory if it doesn't already exist
|
# Create the worker configuration directory if it doesn't already exist
|
||||||
os.makedirs("/conf/workers", exist_ok=True)
|
workers_config_dir = os.path.join(config_dir, "workers")
|
||||||
|
os.makedirs(workers_config_dir, exist_ok=True)
|
||||||
|
|
||||||
# Start worker ports from this arbitrary port
|
# Start worker ports from this arbitrary port
|
||||||
worker_port = 18009
|
worker_port = 18009
|
||||||
@@ -854,7 +876,7 @@ def generate_worker_files(
|
|||||||
worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)
|
worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)
|
||||||
|
|
||||||
worker_config.update(
|
worker_config.update(
|
||||||
{"name": worker_name, "port": str(worker_port), "config_path": config_path}
|
{"name": worker_name, "port": str(worker_port)}
|
||||||
)
|
)
|
||||||
|
|
||||||
# Update the shared config with any worker_type specific options. The first of a
|
# Update the shared config with any worker_type specific options. The first of a
|
||||||
@@ -877,12 +899,14 @@ def generate_worker_files(
|
|||||||
worker_descriptors.append(worker_config)
|
worker_descriptors.append(worker_config)
|
||||||
|
|
||||||
# Write out the worker's logging config file
|
# Write out the worker's logging config file
|
||||||
log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
|
log_config_filepath = generate_worker_log_config(
|
||||||
|
environ, worker_name, template_dir, workers_config_dir, data_dir
|
||||||
|
)
|
||||||
|
|
||||||
# Then a worker config file
|
# Then a worker config file
|
||||||
convert(
|
convert(
|
||||||
"/conf/worker.yaml.j2",
|
os.path.join(template_dir, "worker.yaml.j2"),
|
||||||
f"/conf/workers/{worker_name}.yaml",
|
os.path.join(workers_config_dir, f"{worker_name}.yaml"),
|
||||||
**worker_config,
|
**worker_config,
|
||||||
worker_log_config_filepath=log_config_filepath,
|
worker_log_config_filepath=log_config_filepath,
|
||||||
using_unix_sockets=using_unix_sockets,
|
using_unix_sockets=using_unix_sockets,
|
||||||
@@ -923,7 +947,9 @@ def generate_worker_files(
|
|||||||
# Finally, we'll write out the config files.
|
# Finally, we'll write out the config files.
|
||||||
|
|
||||||
# log config for the master process
|
# log config for the master process
|
||||||
master_log_config = generate_worker_log_config(environ, "master", data_dir)
|
master_log_config = generate_worker_log_config(
|
||||||
|
environ, "master", template_dir, workers_config_dir, data_dir
|
||||||
|
)
|
||||||
shared_config["log_config"] = master_log_config
|
shared_config["log_config"] = master_log_config
|
||||||
|
|
||||||
# Find application service registrations
|
# Find application service registrations
|
||||||
@@ -954,8 +980,8 @@ def generate_worker_files(
|
|||||||
|
|
||||||
# Shared homeserver config
|
# Shared homeserver config
|
||||||
convert(
|
convert(
|
||||||
"/conf/shared.yaml.j2",
|
os.path.join(template_dir, "shared.yaml.j2"),
|
||||||
"/conf/workers/shared.yaml",
|
os.path.join(workers_config_dir, "shared.yaml"),
|
||||||
shared_worker_config=yaml.dump(shared_config),
|
shared_worker_config=yaml.dump(shared_config),
|
||||||
appservice_registrations=appservice_registrations,
|
appservice_registrations=appservice_registrations,
|
||||||
enable_redis=workers_in_use,
|
enable_redis=workers_in_use,
|
||||||
@@ -965,7 +991,7 @@ def generate_worker_files(
|
|||||||
|
|
||||||
# Nginx config
|
# Nginx config
|
||||||
convert(
|
convert(
|
||||||
"/conf/nginx.conf.j2",
|
os.path.join(template_dir, "nginx.conf.j2"),
|
||||||
"/etc/nginx/conf.d/matrix-synapse.conf",
|
"/etc/nginx/conf.d/matrix-synapse.conf",
|
||||||
worker_locations=nginx_location_config,
|
worker_locations=nginx_location_config,
|
||||||
upstream_directives=nginx_upstream_config,
|
upstream_directives=nginx_upstream_config,
|
||||||
@@ -977,7 +1003,7 @@ def generate_worker_files(
|
|||||||
# Supervisord config
|
# Supervisord config
|
||||||
os.makedirs("/etc/supervisor", exist_ok=True)
|
os.makedirs("/etc/supervisor", exist_ok=True)
|
||||||
convert(
|
convert(
|
||||||
"/conf/supervisord.conf.j2",
|
os.path.join(template_dir, "supervisord.conf.j2"),
|
||||||
"/etc/supervisor/supervisord.conf",
|
"/etc/supervisor/supervisord.conf",
|
||||||
main_config_path=config_path,
|
main_config_path=config_path,
|
||||||
enable_redis=workers_in_use,
|
enable_redis=workers_in_use,
|
||||||
@@ -985,7 +1011,7 @@ def generate_worker_files(
|
|||||||
)
|
)
|
||||||
|
|
||||||
convert(
|
convert(
|
||||||
"/conf/synapse.supervisord.conf.j2",
|
os.path.join(template_dir, "synapse.supervisord.conf.j2"),
|
||||||
"/etc/supervisor/conf.d/synapse.conf",
|
"/etc/supervisor/conf.d/synapse.conf",
|
||||||
workers=worker_descriptors,
|
workers=worker_descriptors,
|
||||||
main_config_path=config_path,
|
main_config_path=config_path,
|
||||||
@@ -994,7 +1020,7 @@ def generate_worker_files(
|
|||||||
|
|
||||||
# healthcheck config
|
# healthcheck config
|
||||||
convert(
|
convert(
|
||||||
"/conf/healthcheck.sh.j2",
|
os.path.join(template_dir, "healthcheck.sh.j2"),
|
||||||
"/healthcheck.sh",
|
"/healthcheck.sh",
|
||||||
healthcheck_urls=healthcheck_urls,
|
healthcheck_urls=healthcheck_urls,
|
||||||
)
|
)
|
||||||
@@ -1006,10 +1032,24 @@ def generate_worker_files(
|
|||||||
|
|
||||||
|
|
||||||
def generate_worker_log_config(
|
def generate_worker_log_config(
|
||||||
environ: Mapping[str, str], worker_name: str, data_dir: str
|
environ: Mapping[str, str],
|
||||||
|
worker_name: str,
|
||||||
|
workers_config_dir: str,
|
||||||
|
template_dir: str,
|
||||||
|
data_dir: str,
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Generate a log.config file for the given worker.
|
"""Generate a log.config file for the given worker.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
environ: A mapping representing the environment variables that this script
|
||||||
|
is running with.
|
||||||
|
worker_name: The name of the worker. Used in generated file paths.
|
||||||
|
workers_config_dir: The location of the worker configuration directory,
|
||||||
|
where the generated worker log config will be saved.
|
||||||
|
template_dir: The directory containing jinja2 template files.
|
||||||
|
data_dir: The directory where log files will be written (if
|
||||||
|
`SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK` is set).
|
||||||
|
|
||||||
Returns: the path to the generated file
|
Returns: the path to the generated file
|
||||||
"""
|
"""
|
||||||
# Check whether we should write worker logs to disk, in addition to the console
|
# Check whether we should write worker logs to disk, in addition to the console
|
||||||
@@ -1024,9 +1064,9 @@ def generate_worker_log_config(
|
|||||||
extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
|
extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
|
||||||
|
|
||||||
# Render and write the file
|
# Render and write the file
|
||||||
log_config_filepath = f"/conf/workers/{worker_name}.log.config"
|
log_config_filepath = os.path.join(workers_config_dir, f"{worker_name}.log.config")
|
||||||
convert(
|
convert(
|
||||||
"/conf/log.config",
|
os.path.join(template_dir, "log.config"),
|
||||||
log_config_filepath,
|
log_config_filepath,
|
||||||
worker_name=worker_name,
|
worker_name=worker_name,
|
||||||
**extra_log_template_args,
|
**extra_log_template_args,
|
||||||
@@ -1049,6 +1089,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||||
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||||
|
template_dir = environ.get("SYNAPSE_CONFIG_TEMPLATE_DIR", "/conf")
|
||||||
|
|
||||||
# override SYNAPSE_NO_TLS, we don't support TLS in worker mode,
|
# override SYNAPSE_NO_TLS, we don't support TLS in worker mode,
|
||||||
# this needs to be handled by a frontend proxy
|
# this needs to be handled by a frontend proxy
|
||||||
@@ -1060,9 +1101,10 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
generate_base_homeserver_config()
|
generate_base_homeserver_config()
|
||||||
else:
|
else:
|
||||||
log("Base homeserver config exists—not regenerating")
|
log("Base homeserver config exists—not regenerating")
|
||||||
|
|
||||||
# This script may be run multiple times (mostly by Complement, see note at top of
|
# This script may be run multiple times (mostly by Complement, see note at top of
|
||||||
# file). Don't re-configure workers in this instance.
|
# file). Don't re-configure workers in this instance.
|
||||||
mark_filepath = "/conf/workers_have_been_configured"
|
mark_filepath = os.path.join(config_dir, "workers_have_been_configured")
|
||||||
if not os.path.exists(mark_filepath):
|
if not os.path.exists(mark_filepath):
|
||||||
# Collect and validate worker_type requests
|
# Collect and validate worker_type requests
|
||||||
# Read the desired worker configuration from the environment
|
# Read the desired worker configuration from the environment
|
||||||
@@ -1079,7 +1121,9 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
|
|
||||||
# Always regenerate all other config files
|
# Always regenerate all other config files
|
||||||
log("Generating worker config files")
|
log("Generating worker config files")
|
||||||
generate_worker_files(environ, config_path, data_dir, requested_worker_types)
|
generate_worker_files(
|
||||||
|
environ, config_dir, config_path, data_dir, template_dir, requested_worker_types
|
||||||
|
)
|
||||||
|
|
||||||
# Mark workers as being configured
|
# Mark workers as being configured
|
||||||
with open(mark_filepath, "w") as f:
|
with open(mark_filepath, "w") as f:
|
||||||
@@ -1099,13 +1143,6 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
else:
|
else:
|
||||||
log("Could not find %s, will not use" % (jemallocpath,))
|
log("Could not find %s, will not use" % (jemallocpath,))
|
||||||
|
|
||||||
# Empty strings are falsy in Python so this default is fine. We just can't have these
|
|
||||||
# be undefined because supervisord will complain about our
|
|
||||||
# `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
|
|
||||||
environ.setdefault("SYNAPSE_HTTP_PROXY", "")
|
|
||||||
environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
|
|
||||||
environ.setdefault("SYNAPSE_NO_PROXY", "")
|
|
||||||
|
|
||||||
# Start supervisord, which will start Synapse, all of the configured worker
|
# Start supervisord, which will start Synapse, all of the configured worker
|
||||||
# processes, redis, nginx etc. according to the config we created above.
|
# processes, redis, nginx etc. according to the config we created above.
|
||||||
log("Starting supervisord")
|
log("Starting supervisord")
|
||||||
|
|||||||
@@ -10,9 +10,6 @@
|
|||||||
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
|
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
|
||||||
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
|
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
|
||||||
# confusion due to to interleaving of the different processes.
|
# confusion due to to interleaving of the different processes.
|
||||||
prefixer() {
|
exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
|
||||||
mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
|
exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
|
||||||
}
|
|
||||||
exec 1> >(prefixer)
|
|
||||||
exec 2> >(prefixer >&2)
|
|
||||||
exec "$@"
|
exec "$@"
|
||||||
|
|||||||
@@ -42,6 +42,8 @@ def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def generate_config_from_template(
|
def generate_config_from_template(
|
||||||
|
data_dir: str,
|
||||||
|
template_dir: str,
|
||||||
config_dir: str,
|
config_dir: str,
|
||||||
config_path: str,
|
config_path: str,
|
||||||
os_environ: Mapping[str, str],
|
os_environ: Mapping[str, str],
|
||||||
@@ -50,6 +52,9 @@ def generate_config_from_template(
|
|||||||
"""Generate a homeserver.yaml from environment variables
|
"""Generate a homeserver.yaml from environment variables
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
|
data_dir: where persistent data is stored
|
||||||
|
template_dir: The location of the template directory. Where jinja2
|
||||||
|
templates for config files live.
|
||||||
config_dir: where to put generated config files
|
config_dir: where to put generated config files
|
||||||
config_path: where to put the main config file
|
config_path: where to put the main config file
|
||||||
os_environ: environment mapping
|
os_environ: environment mapping
|
||||||
@@ -70,9 +75,10 @@ def generate_config_from_template(
|
|||||||
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
|
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
synapse_server_name = environ["SYNAPSE_SERVER_NAME"]
|
||||||
for name, secret in secrets.items():
|
for name, secret in secrets.items():
|
||||||
if secret not in environ:
|
if secret not in environ:
|
||||||
filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
|
filename = os.path.join(data_dir, f"{synapse_server_name}.{name}.key")
|
||||||
|
|
||||||
# if the file already exists, load in the existing value; otherwise,
|
# if the file already exists, load in the existing value; otherwise,
|
||||||
# generate a new secret and write it to a file
|
# generate a new secret and write it to a file
|
||||||
@@ -88,7 +94,7 @@ def generate_config_from_template(
|
|||||||
handle.write(value)
|
handle.write(value)
|
||||||
environ[secret] = value
|
environ[secret] = value
|
||||||
|
|
||||||
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
|
environ["SYNAPSE_APPSERVICES"] = glob.glob(os.path.join(data_dir, "appservices", "*.yaml"))
|
||||||
if not os.path.exists(config_dir):
|
if not os.path.exists(config_dir):
|
||||||
os.mkdir(config_dir)
|
os.mkdir(config_dir)
|
||||||
|
|
||||||
@@ -111,12 +117,12 @@ def generate_config_from_template(
|
|||||||
environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config"
|
environ["SYNAPSE_LOG_CONFIG"] = config_dir + "/log.config"
|
||||||
|
|
||||||
log("Generating synapse config file " + config_path)
|
log("Generating synapse config file " + config_path)
|
||||||
convert("/conf/homeserver.yaml", config_path, environ)
|
convert(os.path.join(template_dir, "homeserver.yaml"), config_path, environ)
|
||||||
|
|
||||||
log_config_file = environ["SYNAPSE_LOG_CONFIG"]
|
log_config_file = environ["SYNAPSE_LOG_CONFIG"]
|
||||||
log("Generating log config file " + log_config_file)
|
log("Generating log config file " + log_config_file)
|
||||||
convert(
|
convert(
|
||||||
"/conf/log.config",
|
os.path.join(template_dir, "log.config"),
|
||||||
log_config_file,
|
log_config_file,
|
||||||
{**environ, "include_worker_name_in_log_line": False},
|
{**environ, "include_worker_name_in_log_line": False},
|
||||||
)
|
)
|
||||||
@@ -128,15 +134,15 @@ def generate_config_from_template(
|
|||||||
"synapse.app.homeserver",
|
"synapse.app.homeserver",
|
||||||
"--config-path",
|
"--config-path",
|
||||||
config_path,
|
config_path,
|
||||||
# tell synapse to put generated keys in /data rather than /compiled
|
# tell synapse to put generated keys in the data directory rather than /compiled
|
||||||
"--keys-directory",
|
"--keys-directory",
|
||||||
config_dir,
|
config_dir,
|
||||||
"--generate-keys",
|
"--generate-keys",
|
||||||
]
|
]
|
||||||
|
|
||||||
if ownership is not None:
|
if ownership is not None:
|
||||||
log(f"Setting ownership on /data to {ownership}")
|
log(f"Setting ownership on the data dir to {ownership}")
|
||||||
subprocess.run(["chown", "-R", ownership, "/data"], check=True)
|
subprocess.run(["chown", "-R", ownership, data_dir], check=True)
|
||||||
args = ["gosu", ownership] + args
|
args = ["gosu", ownership] + args
|
||||||
|
|
||||||
subprocess.run(args, check=True)
|
subprocess.run(args, check=True)
|
||||||
@@ -159,12 +165,13 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
|
|||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||||
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||||
|
template_dir = environ.get("SYNAPSE_CONFIG_TEMPLATE_DIR", "/conf")
|
||||||
|
|
||||||
# create a suitable log config from our template
|
# create a suitable log config from our template
|
||||||
log_config_file = "%s/%s.log.config" % (config_dir, server_name)
|
log_config_file = "%s/%s.log.config" % (config_dir, server_name)
|
||||||
if not os.path.exists(log_config_file):
|
if not os.path.exists(log_config_file):
|
||||||
log("Creating log config %s" % (log_config_file,))
|
log("Creating log config %s" % (log_config_file,))
|
||||||
convert("/conf/log.config", log_config_file, environ)
|
convert(os.path.join(template_dir, "log.config"), log_config_file, environ)
|
||||||
|
|
||||||
# generate the main config file, and a signing key.
|
# generate the main config file, and a signing key.
|
||||||
args = [
|
args = [
|
||||||
@@ -216,12 +223,14 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
|
|
||||||
if mode == "migrate_config":
|
if mode == "migrate_config":
|
||||||
# generate a config based on environment vars.
|
# generate a config based on environment vars.
|
||||||
|
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get(
|
config_path = environ.get(
|
||||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||||
)
|
)
|
||||||
|
template_dir = environ.get("SYNAPSE_CONFIG_TEMPLATE_DIR", "/conf")
|
||||||
return generate_config_from_template(
|
return generate_config_from_template(
|
||||||
config_dir, config_path, environ, ownership
|
data_dir, template_dir, config_dir, config_path, environ, ownership
|
||||||
)
|
)
|
||||||
|
|
||||||
if mode != "run":
|
if mode != "run":
|
||||||
|
|||||||
@@ -54,7 +54,6 @@
|
|||||||
- [Using `synctl` with Workers](synctl_workers.md)
|
- [Using `synctl` with Workers](synctl_workers.md)
|
||||||
- [Systemd](systemd-with-workers/README.md)
|
- [Systemd](systemd-with-workers/README.md)
|
||||||
- [Administration](usage/administration/README.md)
|
- [Administration](usage/administration/README.md)
|
||||||
- [Backups](usage/administration/backups.md)
|
|
||||||
- [Admin API](usage/administration/admin_api/README.md)
|
- [Admin API](usage/administration/admin_api/README.md)
|
||||||
- [Account Validity](admin_api/account_validity.md)
|
- [Account Validity](admin_api/account_validity.md)
|
||||||
- [Background Updates](usage/administration/admin_api/background_updates.md)
|
- [Background Updates](usage/administration/admin_api/background_updates.md)
|
||||||
|
|||||||
@@ -60,11 +60,10 @@ paginate through.
|
|||||||
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
|
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
|
||||||
* `dir`: string - Direction of event report order. Whether to fetch the most recent
|
* `dir`: string - Direction of event report order. Whether to fetch the most recent
|
||||||
first (`b`) or the oldest first (`f`). Defaults to `b`.
|
first (`b`) or the oldest first (`f`). Defaults to `b`.
|
||||||
* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event
|
* `user_id`: string - Is optional and filters to only return users with user IDs that
|
||||||
and wrote the reason.
|
contain this value. This is the user who reported the event and wrote the reason.
|
||||||
* `room_id`: optional string - Filter by room id.
|
* `room_id`: string - Is optional and filters to only return rooms with room IDs that
|
||||||
* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who
|
contain this value.
|
||||||
the report was made against.
|
|
||||||
|
|
||||||
**Response**
|
**Response**
|
||||||
|
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ basis. The currently supported features are:
|
|||||||
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
|
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
|
||||||
for another client
|
for another client
|
||||||
- [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575): enable experimental sliding sync support
|
- [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575): enable experimental sliding sync support
|
||||||
- [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222): adding `state_after` to sync v2
|
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an `access_token`
|
To use it, you will need to authenticate by providing an `access_token`
|
||||||
for a server admin: see [Admin API](../usage/administration/admin_api/).
|
for a server admin: see [Admin API](../usage/administration/admin_api/).
|
||||||
|
|||||||
@@ -46,14 +46,6 @@ to any local media, and any locally-cached copies of remote media.
|
|||||||
|
|
||||||
The media file itself (and any thumbnails) is not deleted from the server.
|
The media file itself (and any thumbnails) is not deleted from the server.
|
||||||
|
|
||||||
Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media
|
|
||||||
is quarantined, Synapse will:
|
|
||||||
|
|
||||||
- Quarantine any media with a matching hash that has already been uploaded.
|
|
||||||
- Quarantine any future media.
|
|
||||||
- Quarantine any existing cached remote media.
|
|
||||||
- Quarantine any future remote media.
|
|
||||||
|
|
||||||
## Quarantining media by ID
|
## Quarantining media by ID
|
||||||
|
|
||||||
This API quarantines a single piece of local or remote media.
|
This API quarantines a single piece of local or remote media.
|
||||||
|
|||||||
@@ -385,13 +385,6 @@ The API is:
|
|||||||
GET /_synapse/admin/v1/rooms/<room_id>/state
|
GET /_synapse/admin/v1/rooms/<room_id>/state
|
||||||
```
|
```
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
|
|
||||||
The following query parameter is available:
|
|
||||||
|
|
||||||
* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events
|
|
||||||
of this type will be returned (regardless of their `state_key` value).
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
A response body like the following is returned:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
|
|||||||
@@ -40,7 +40,6 @@ It returns a JSON body like the following:
|
|||||||
"erased": false,
|
"erased": false,
|
||||||
"shadow_banned": 0,
|
"shadow_banned": 0,
|
||||||
"creation_ts": 1560432506,
|
"creation_ts": 1560432506,
|
||||||
"last_seen_ts": 1732919539393,
|
|
||||||
"appservice_id": null,
|
"appservice_id": null,
|
||||||
"consent_server_notice_sent": null,
|
"consent_server_notice_sent": null,
|
||||||
"consent_version": null,
|
"consent_version": null,
|
||||||
@@ -56,8 +55,7 @@ It returns a JSON body like the following:
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"user_type": null,
|
"user_type": null,
|
||||||
"locked": false,
|
"locked": false
|
||||||
"suspended": false
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -414,32 +412,6 @@ The following actions are **NOT** performed. The list may be incomplete.
|
|||||||
- Remove from monthly active users
|
- Remove from monthly active users
|
||||||
- Remove user's consent information (consent version and timestamp)
|
- Remove user's consent information (consent version and timestamp)
|
||||||
|
|
||||||
## Suspend/Unsuspend Account
|
|
||||||
|
|
||||||
This API allows an admin to suspend/unsuspend an account. While an account is suspended, the user is
|
|
||||||
prohibited from sending invites, joining or knocking on rooms, sending messages, changing profile data, and redacting messages other than their own.
|
|
||||||
|
|
||||||
The api is:
|
|
||||||
|
|
||||||
```
|
|
||||||
PUT /_synapse/admin/v1/suspend/<user_id>
|
|
||||||
```
|
|
||||||
|
|
||||||
with a body of:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"suspend": true
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
To unsuspend a user, use the same endpoint with a body of:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"suspend": false
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Reset password
|
## Reset password
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
||||||
@@ -504,9 +476,9 @@ with a body of:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
## List joined rooms of a user
|
## List room memberships of a user
|
||||||
|
|
||||||
Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
|
Gets a list of all `room_id` that a specific `user_id` is member.
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
|
|
||||||
@@ -543,73 +515,6 @@ The following fields are returned in the JSON response body:
|
|||||||
- `joined_rooms` - An array of `room_id`.
|
- `joined_rooms` - An array of `room_id`.
|
||||||
- `total` - Number of rooms.
|
- `total` - Number of rooms.
|
||||||
|
|
||||||
## Get the number of invites sent by the user
|
|
||||||
|
|
||||||
Fetches the number of invites sent by the provided user ID across all rooms
|
|
||||||
after the given timestamp.
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/users/$user_id/sent_invite_count
|
|
||||||
```
|
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
|
|
||||||
The following parameters should be set in the URL:
|
|
||||||
|
|
||||||
* `user_id`: fully qualified: for example, `@user:server.com`
|
|
||||||
|
|
||||||
The following should be set as query parameters in the URL:
|
|
||||||
|
|
||||||
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
|
|
||||||
invites sent at or after the provided timestamp will be returned.
|
|
||||||
This works by comparing the provided timestamp to the `received_ts`
|
|
||||||
column in the `events` table.
|
|
||||||
Note: https://currentmillis.com/ is a useful tool for converting dates
|
|
||||||
into timestamps and vice versa.
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"invite_count": 30
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
_Added in Synapse 1.122.0_
|
|
||||||
|
|
||||||
## Get the cumulative number of rooms a user has joined after a given timestamp
|
|
||||||
|
|
||||||
Fetches the number of rooms that the user joined after the given timestamp, even
|
|
||||||
if they have subsequently left/been banned from those rooms.
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/users/$<user_id/cumulative_joined_room_count
|
|
||||||
```
|
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
|
|
||||||
The following parameters should be set in the URL:
|
|
||||||
|
|
||||||
* `user_id`: fully qualified: for example, `@user:server.com`
|
|
||||||
|
|
||||||
The following should be set as query parameters in the URL:
|
|
||||||
|
|
||||||
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
|
|
||||||
invites sent at or after the provided timestamp will be returned.
|
|
||||||
This works by comparing the provided timestamp to the `received_ts`
|
|
||||||
column in the `events` table.
|
|
||||||
Note: https://currentmillis.com/ is a useful tool for converting dates
|
|
||||||
into timestamps and vice versa.
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"cumulative_joined_room_count": 30
|
|
||||||
}
|
|
||||||
```
|
|
||||||
_Added in Synapse 1.122.0_
|
|
||||||
|
|
||||||
## Account Data
|
## Account Data
|
||||||
Gets information about account data for a specific `user_id`.
|
Gets information about account data for a specific `user_id`.
|
||||||
|
|
||||||
@@ -1460,9 +1365,6 @@ _Added in Synapse 1.72.0._
|
|||||||
|
|
||||||
## Redact all the events of a user
|
## Redact all the events of a user
|
||||||
|
|
||||||
This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
|
|
||||||
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.
|
|
||||||
|
|
||||||
The API is
|
The API is
|
||||||
```
|
```
|
||||||
POST /_synapse/admin/v1/user/$user_id/redact
|
POST /_synapse/admin/v1/user/$user_id/redact
|
||||||
@@ -1494,13 +1396,13 @@ The following JSON body parameter must be provided:
|
|||||||
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
|
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
|
||||||
the user is a member of will be redacted
|
the user is a member of will be redacted
|
||||||
|
|
||||||
|
_Added in Synapse 1.116.0._
|
||||||
|
|
||||||
The following JSON body parameters are optional:
|
The following JSON body parameters are optional:
|
||||||
|
|
||||||
- `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
|
- `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
|
||||||
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
|
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
|
||||||
|
|
||||||
_Added in Synapse 1.116.0._
|
|
||||||
|
|
||||||
|
|
||||||
## Check the status of a redaction process
|
## Check the status of a redaction process
|
||||||
|
|
||||||
@@ -1539,5 +1441,3 @@ The following fields are returned in the JSON response body:
|
|||||||
the corresponding error that caused the redaction to fail
|
the corresponding error that caused the redaction to fail
|
||||||
|
|
||||||
_Added in Synapse 1.116.0._
|
_Added in Synapse 1.116.0._
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -322,7 +322,7 @@ The following command will let you run the integration test with the most common
|
|||||||
configuration:
|
configuration:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
|
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
|
||||||
```
|
```
|
||||||
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
|
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
|
||||||
|
|
||||||
|
|||||||
@@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency infor
|
|||||||
* Whether the update requires a previous update to be complete.
|
* Whether the update requires a previous update to be complete.
|
||||||
* A rough ordering for which to complete updates.
|
* A rough ordering for which to complete updates.
|
||||||
|
|
||||||
A new background update needs to be added to the `background_updates` table:
|
A new background updates needs to be added to the `background_updates` table:
|
||||||
|
|
||||||
```sql
|
```sql
|
||||||
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
|
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES
|
||||||
|
|||||||
@@ -150,28 +150,6 @@ $ poetry shell
|
|||||||
$ poetry install --extras all
|
$ poetry install --extras all
|
||||||
```
|
```
|
||||||
|
|
||||||
If you want to go even further and remove the Poetry caches:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
# Find your Poetry cache directory
|
|
||||||
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
|
|
||||||
$ poetry config cache-dir
|
|
||||||
|
|
||||||
# Remove packages from all cached repositories
|
|
||||||
$ poetry cache clear --all .
|
|
||||||
|
|
||||||
# Go completely nuclear and clear out everything Poetry cache related
|
|
||||||
# including the wheel artifacts which is not covered by the above command
|
|
||||||
# (see https://github.com/python-poetry/poetry/issues/10304)
|
|
||||||
#
|
|
||||||
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
|
|
||||||
# the `icu` library in on your system, you will need to rebuild the PyICU Python package
|
|
||||||
# in order to incorporate the correct dynamically linked library locations otherwise you
|
|
||||||
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
|
|
||||||
$ rm -rf $(poetry config cache-dir)
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
## ...run a command in the `poetry` virtualenv?
|
## ...run a command in the `poetry` virtualenv?
|
||||||
|
|
||||||
Use `poetry run cmd args` when you need the python virtualenv context.
|
Use `poetry run cmd args` when you need the python virtualenv context.
|
||||||
@@ -209,7 +187,7 @@ useful.
|
|||||||
## ...add a new dependency?
|
## ...add a new dependency?
|
||||||
|
|
||||||
Either:
|
Either:
|
||||||
- manually update `pyproject.toml`; then `poetry lock`; or else
|
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
|
||||||
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
|
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
|
||||||
`--extras` and `--optional` flags in particular.
|
`--extras` and `--optional` flags in particular.
|
||||||
|
|
||||||
@@ -224,12 +202,12 @@ poetry remove packagename
|
|||||||
```
|
```
|
||||||
|
|
||||||
ought to do the trick. Alternatively, manually update `pyproject.toml` and
|
ought to do the trick. Alternatively, manually update `pyproject.toml` and
|
||||||
`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock`
|
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
|
||||||
files in your commit.
|
files in your commit.
|
||||||
|
|
||||||
## ...update the version range for an existing dependency?
|
## ...update the version range for an existing dependency?
|
||||||
|
|
||||||
Best done by manually editing `pyproject.toml`, then `poetry lock`.
|
Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
|
||||||
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
|
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
|
||||||
|
|
||||||
## ...update a dependency in the locked environment?
|
## ...update a dependency in the locked environment?
|
||||||
@@ -255,7 +233,7 @@ poetry add packagename==1.2.3
|
|||||||
|
|
||||||
# Get poetry to recompute the content-hash of pyproject.toml without changing
|
# Get poetry to recompute the content-hash of pyproject.toml without changing
|
||||||
# the locked package versions.
|
# the locked package versions.
|
||||||
poetry lock
|
poetry lock --no-update
|
||||||
```
|
```
|
||||||
|
|
||||||
Either way, include the updated `poetry.lock` file in your commit.
|
Either way, include the updated `poetry.lock` file in your commit.
|
||||||
|
|||||||
@@ -76,9 +76,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
|
|||||||
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
|
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when processing an invitation, both when one is created locally or when
|
Called when processing an invitation. Both inviter and invitee are
|
||||||
receiving an invite over federation. Both inviter and invitee are represented by
|
represented by their Matrix user ID (e.g. `@alice:example.com`).
|
||||||
their Matrix user ID (e.g. `@alice:example.com`).
|
|
||||||
|
|
||||||
|
|
||||||
The callback must return one of:
|
The callback must return one of:
|
||||||
@@ -113,9 +112,7 @@ async def user_may_send_3pid_invite(
|
|||||||
```
|
```
|
||||||
|
|
||||||
Called when processing an invitation using a third-party identifier (also called a 3PID,
|
Called when processing an invitation using a third-party identifier (also called a 3PID,
|
||||||
e.g. an email address or a phone number). It is only called when a 3PID invite is created
|
e.g. an email address or a phone number).
|
||||||
locally - not when one is received in a room over federation. If the 3PID is already associated
|
|
||||||
with a Matrix ID, the spam check will go through the `user_may_invite` callback instead.
|
|
||||||
|
|
||||||
The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
|
The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
|
||||||
invitee is represented by its medium (e.g. "email") and its address
|
invitee is represented by its medium (e.g. "email") and its address
|
||||||
@@ -245,7 +242,7 @@ this callback.
|
|||||||
_First introduced in Synapse v1.37.0_
|
_First introduced in Synapse v1.37.0_
|
||||||
|
|
||||||
```python
|
```python
|
||||||
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile, requester_id: str) -> bool
|
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
|
||||||
```
|
```
|
||||||
|
|
||||||
Called when computing search results in the user directory. The module must return a
|
Called when computing search results in the user directory. The module must return a
|
||||||
@@ -264,8 +261,6 @@ The profile is represented as a dictionary with the following keys:
|
|||||||
The module is given a copy of the original dictionary, so modifying it from within the
|
The module is given a copy of the original dictionary, so modifying it from within the
|
||||||
module cannot modify a user's profile when included in user directory search results.
|
module cannot modify a user's profile when included in user directory search results.
|
||||||
|
|
||||||
The requester_id parameter is the ID of the user that called the user directory API.
|
|
||||||
|
|
||||||
If multiple modules implement this callback, they will be considered in order. If a
|
If multiple modules implement this callback, they will be considered in order. If a
|
||||||
callback returns `False`, Synapse falls through to the next one. The value of the first
|
callback returns `False`, Synapse falls through to the next one. The value of the first
|
||||||
callback that does not return `False` will be used. If this happens, Synapse will not call
|
callback that does not return `False` will be used. If this happens, Synapse will not call
|
||||||
|
|||||||
@@ -336,36 +336,6 @@ but it has a `response_types_supported` which excludes "code" (which we rely on,
|
|||||||
is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)),
|
is even mentioned in their [documentation](https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow#login)),
|
||||||
so we have to disable discovery and configure the URIs manually.
|
so we have to disable discovery and configure the URIs manually.
|
||||||
|
|
||||||
### Forgejo
|
|
||||||
|
|
||||||
Forgejo is a fork of Gitea that can act as an OAuth2 provider.
|
|
||||||
|
|
||||||
The implementation of OAuth2 is improved compared to Gitea, as it provides a correctly defined `subject_claim` and `scopes`.
|
|
||||||
|
|
||||||
Synapse config:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
oidc_providers:
|
|
||||||
- idp_id: forgejo
|
|
||||||
idp_name: Forgejo
|
|
||||||
discover: false
|
|
||||||
issuer: "https://your-forgejo.com/"
|
|
||||||
client_id: "your-client-id" # TO BE FILLED
|
|
||||||
client_secret: "your-client-secret" # TO BE FILLED
|
|
||||||
client_auth_method: client_secret_post
|
|
||||||
scopes: ["openid", "profile", "email", "groups"]
|
|
||||||
authorization_endpoint: "https://your-forgejo.com/login/oauth/authorize"
|
|
||||||
token_endpoint: "https://your-forgejo.com/login/oauth/access_token"
|
|
||||||
userinfo_endpoint: "https://your-forgejo.com/api/v1/user"
|
|
||||||
user_mapping_provider:
|
|
||||||
config:
|
|
||||||
subject_claim: "sub"
|
|
||||||
picture_claim: "picture"
|
|
||||||
localpart_template: "{{ user.preferred_username }}"
|
|
||||||
display_name_template: "{{ user.name }}"
|
|
||||||
email_template: "{{ user.email }}"
|
|
||||||
```
|
|
||||||
|
|
||||||
### GitHub
|
### GitHub
|
||||||
|
|
||||||
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but
|
[GitHub][github-idp] is a bit special as it is not an OpenID Connect compliant provider, but
|
||||||
|
|||||||
@@ -100,10 +100,6 @@ database:
|
|||||||
keepalives_count: 3
|
keepalives_count: 3
|
||||||
```
|
```
|
||||||
|
|
||||||
## Backups
|
|
||||||
|
|
||||||
Don't forget to [back up](./usage/administration/backups.md#database) your database!
|
|
||||||
|
|
||||||
## Tuning Postgres
|
## Tuning Postgres
|
||||||
|
|
||||||
The default settings should be fine for most deployments. For larger
|
The default settings should be fine for most deployments. For larger
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ server {
|
|||||||
proxy_pass http://localhost:8008;
|
proxy_pass http://localhost:8008;
|
||||||
proxy_set_header X-Forwarded-For $remote_addr;
|
proxy_set_header X-Forwarded-For $remote_addr;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header Host $host:$server_port;
|
proxy_set_header Host $host;
|
||||||
|
|
||||||
# Nginx by default only allows file uploads up to 1M in size
|
# Nginx by default only allows file uploads up to 1M in size
|
||||||
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
|
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
|
||||||
|
|||||||
@@ -157,7 +157,7 @@ sudo pip install py-bcrypt
|
|||||||
|
|
||||||
#### Alpine Linux
|
#### Alpine Linux
|
||||||
|
|
||||||
Jahway603 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:
|
6543 maintains [Synapse packages for Alpine Linux](https://pkgs.alpinelinux.org/packages?name=synapse&branch=edge) in the community repository. Install with:
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
sudo apk add synapse
|
sudo apk add synapse
|
||||||
@@ -208,7 +208,7 @@ When following this route please make sure that the [Platform-specific prerequis
|
|||||||
System requirements:
|
System requirements:
|
||||||
|
|
||||||
- POSIX-compliant system (tested on Linux & OS X)
|
- POSIX-compliant system (tested on Linux & OS X)
|
||||||
- Python 3.9 or later, up to Python 3.13.
|
- Python 3.8 or later, up to Python 3.11.
|
||||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||||
|
|
||||||
If building on an uncommon architecture for which pre-built wheels are
|
If building on an uncommon architecture for which pre-built wheels are
|
||||||
@@ -310,18 +310,29 @@ sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
|||||||
sudo dnf group install "Development Tools"
|
sudo dnf group install "Development Tools"
|
||||||
```
|
```
|
||||||
|
|
||||||
##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux
|
##### Red Hat Enterprise Linux / Rocky Linux
|
||||||
|
|
||||||
*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
|
*Note: The term "RHEL" below refers to both Red Hat Enterprise Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
|
||||||
|
|
||||||
It's recommended to use the latest Python versions.
|
It's recommended to use the latest Python versions.
|
||||||
|
|
||||||
RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
|
RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ship with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
|
||||||
|
|
||||||
Python 3.11 and 3.12 are available for both RHEL 8 and 9.
|
Python 3.11 and 3.12 are available for both RHEL 8 and 9.
|
||||||
|
|
||||||
These commands should be run as root user.
|
These commands should be run as root user.
|
||||||
|
|
||||||
|
RHEL 8
|
||||||
|
```bash
|
||||||
|
# Enable PowerTools repository
|
||||||
|
dnf config-manager --set-enabled powertools
|
||||||
|
```
|
||||||
|
RHEL 9
|
||||||
|
```bash
|
||||||
|
# Enable CodeReady Linux Builder repository
|
||||||
|
crb enable
|
||||||
|
```
|
||||||
|
|
||||||
Install new version of Python. You only need one of these:
|
Install new version of Python. You only need one of these:
|
||||||
```bash
|
```bash
|
||||||
# Python 3.11
|
# Python 3.11
|
||||||
@@ -645,10 +656,6 @@ This also requires the optional `lxml` python dependency to be installed. This
|
|||||||
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
|
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
|
||||||
means `apt-get install libxml2-dev`, or equivalent for your OS.
|
means `apt-get install libxml2-dev`, or equivalent for your OS.
|
||||||
|
|
||||||
### Backups
|
|
||||||
|
|
||||||
Don't forget to take [backups](../usage/administration/backups.md) of your new server!
|
|
||||||
|
|
||||||
### Troubleshooting Installation
|
### Troubleshooting Installation
|
||||||
|
|
||||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
||||||
|
|||||||
@@ -72,8 +72,8 @@ class ExampleSpamChecker:
|
|||||||
async def user_may_publish_room(self, userid, room_id):
|
async def user_may_publish_room(self, userid, room_id):
|
||||||
return True # allow publishing of all rooms
|
return True # allow publishing of all rooms
|
||||||
|
|
||||||
async def check_username_for_spam(self, user_profile, requester_id):
|
async def check_username_for_spam(self, user_profile):
|
||||||
return False # allow all usernames regardless of requester
|
return False # allow all usernames
|
||||||
|
|
||||||
async def check_registration_for_spam(
|
async def check_registration_for_spam(
|
||||||
self,
|
self,
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
|
|||||||
to turn that into a displayname when creating a Matrix user for this individual.
|
to turn that into a displayname when creating a Matrix user for this individual.
|
||||||
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
|
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
|
||||||
variations. As each Synapse configuration may want something different, this is
|
variations. As each Synapse configuration may want something different, this is
|
||||||
where SSO mapping providers come into play.
|
where SAML mapping providers come into play.
|
||||||
|
|
||||||
SSO mapping providers are currently supported for OpenID and SAML SSO
|
SSO mapping providers are currently supported for OpenID and SAML SSO
|
||||||
configurations. Please see the details below for how to implement your own.
|
configurations. Please see the details below for how to implement your own.
|
||||||
|
|||||||
@@ -117,97 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
|
|||||||
stacking them up. You can monitor the currently running background updates with
|
stacking them up. You can monitor the currently running background updates with
|
||||||
[the Admin API](usage/administration/admin_api/background_updates.html#status).
|
[the Admin API](usage/administration/admin_api/background_updates.html#status).
|
||||||
|
|
||||||
# Upgrading to v1.126.0
|
|
||||||
|
|
||||||
## Room list publication rules change
|
|
||||||
|
|
||||||
The default [`room_list_publication_rules`] setting was changed to disallow
|
|
||||||
anyone (except server admins) from publishing to the room list by default.
|
|
||||||
|
|
||||||
This is in line with Synapse policy of locking down features by default that can
|
|
||||||
be abused without moderation.
|
|
||||||
|
|
||||||
To keep the previous behavior of allowing publication by default, add the
|
|
||||||
following to the config:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
room_list_publication_rules:
|
|
||||||
- "action": "allow"
|
|
||||||
```
|
|
||||||
|
|
||||||
[`room_list_publication_rules`]: usage/configuration/config_documentation.md#room_list_publication_rules
|
|
||||||
|
|
||||||
## Change of signing key expiry date for the Debian/Ubuntu package repository
|
|
||||||
|
|
||||||
Administrators using the Debian/Ubuntu packages from `packages.matrix.org`,
|
|
||||||
please be aware that we have recently updated the expiry date on the repository's GPG signing key,
|
|
||||||
but this change must be imported into your keyring.
|
|
||||||
|
|
||||||
If you have the `matrix-org-archive-keyring` package installed and it updates before the current key expires, this should
|
|
||||||
happen automatically.
|
|
||||||
|
|
||||||
Otherwise, if you see an error similar to `The following signatures were invalid: EXPKEYSIG F473DD4473365DE1`, you
|
|
||||||
will need to get a fresh copy of the keys. You can do so with:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
|
||||||
```
|
|
||||||
|
|
||||||
The old version of the key will expire on `2025-03-15`.
|
|
||||||
|
|
||||||
# Upgrading to v1.122.0
|
|
||||||
|
|
||||||
## Dropping support for PostgreSQL 11 and 12
|
|
||||||
|
|
||||||
In line with our [deprecation policy](deprecation_policy.md), we've dropped
|
|
||||||
support for PostgreSQL 11 and 12, as they are no longer supported upstream.
|
|
||||||
This release of Synapse requires PostgreSQL 13+.
|
|
||||||
|
|
||||||
# Upgrading to v1.120.0
|
|
||||||
|
|
||||||
## Removal of experimental MSC3886 feature
|
|
||||||
|
|
||||||
[MSC3886](https://github.com/matrix-org/matrix-spec-proposals/pull/3886)
|
|
||||||
has been closed (and will not enter the Matrix spec). As such, we are
|
|
||||||
removing the experimental support for it in this release.
|
|
||||||
|
|
||||||
The `experimental_features.msc3886_endpoint` configuration option has
|
|
||||||
been removed.
|
|
||||||
|
|
||||||
## Authenticated media is now enforced by default
|
|
||||||
|
|
||||||
The [`enable_authenticated_media`] configuration option now defaults to true.
|
|
||||||
|
|
||||||
This means that clients and remote (federated) homeservers now need to use
|
|
||||||
the authenticated media endpoints in order to download media from your
|
|
||||||
homeserver.
|
|
||||||
|
|
||||||
As an exception, existing media that was stored on the server prior to
|
|
||||||
this option changing to `true` will still be accessible over the
|
|
||||||
unauthenticated endpoints.
|
|
||||||
|
|
||||||
The matrix.org homeserver has already been running with this option enabled
|
|
||||||
since September 2024, so most common clients and homeservers should already
|
|
||||||
be compatible.
|
|
||||||
|
|
||||||
With that said, administrators who wish to disable this feature for broader
|
|
||||||
compatibility can still do so by manually configuring
|
|
||||||
`enable_authenticated_media: False`.
|
|
||||||
|
|
||||||
[`enable_authenticated_media`]: usage/configuration/config_documentation.md#enable_authenticated_media
|
|
||||||
|
|
||||||
|
|
||||||
# Upgrading to v1.119.0
|
|
||||||
|
|
||||||
## Minimum supported Python version
|
|
||||||
|
|
||||||
The minimum supported Python version has been increased from v3.8 to v3.9.
|
|
||||||
You will need Python 3.9+ to run Synapse v1.119.0 (due out Nov 7th, 2024).
|
|
||||||
|
|
||||||
If you use current versions of the Matrix.org-distributed Docker images, no action is required.
|
|
||||||
Please note that support for Ubuntu `focal` was dropped as well since it uses Python 3.8.
|
|
||||||
|
|
||||||
|
|
||||||
# Upgrading to v1.111.0
|
# Upgrading to v1.111.0
|
||||||
|
|
||||||
## New worker endpoints for authenticated client and federation media
|
## New worker endpoints for authenticated client and federation media
|
||||||
|
|||||||
@@ -160,7 +160,7 @@ Using the following curl command:
|
|||||||
```console
|
```console
|
||||||
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
|
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
|
||||||
```
|
```
|
||||||
`<access-token>` - can be obtained in element by looking in All settings, clicking Help & About and down the bottom is:
|
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
|
||||||
Access Token:\<click to reveal\>
|
Access Token:\<click to reveal\>
|
||||||
|
|
||||||
`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org
|
`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org
|
||||||
@@ -255,8 +255,6 @@ line to `/etc/default/matrix-synapse`:
|
|||||||
|
|
||||||
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2
|
LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libjemalloc.so.2
|
||||||
|
|
||||||
*Note*: You may need to set `PYTHONMALLOC=malloc` to ensure that `jemalloc` can accurately calculate memory usage. By default, Python uses its internal small-object allocator, which may interfere with jemalloc's ability to track memory consumption correctly. This could prevent the [cache_autotuning](../configuration/config_documentation.md#caches-and-associated-values) feature from functioning as expected, as the Python allocator may not reach the memory threshold set by `max_cache_memory_usage`, thus not triggering the cache eviction process.
|
|
||||||
|
|
||||||
This made a significant difference on Python 2.7 - it's unclear how
|
This made a significant difference on Python 2.7 - it's unclear how
|
||||||
much of an improvement it provides on Python 3.x.
|
much of an improvement it provides on Python 3.x.
|
||||||
|
|
||||||
|
|||||||
@@ -1,125 +0,0 @@
|
|||||||
# How to back up a Synapse homeserver
|
|
||||||
|
|
||||||
It is critical to maintain good backups of your server, to guard against
|
|
||||||
hardware failure as well as potential corruption due to bugs or administrator
|
|
||||||
error.
|
|
||||||
|
|
||||||
This page documents the things you will need to consider backing up as part of
|
|
||||||
a Synapse installation.
|
|
||||||
|
|
||||||
## Configuration files
|
|
||||||
|
|
||||||
Keep a copy of your configuration file (`homeserver.yaml`), as well as any
|
|
||||||
auxiliary config files it refers to such as the
|
|
||||||
[`log_config`](../configuration/config_documentation.md#log_config) file,
|
|
||||||
[`app_service_config_files`](../configuration/config_documentation.md#app_service_config_files).
|
|
||||||
Often, all such config files will be kept in a single directory such as
|
|
||||||
`/etc/synapse`, which will make this easier.
|
|
||||||
|
|
||||||
## Server signing key
|
|
||||||
|
|
||||||
Your server has a [signing
|
|
||||||
key](../configuration/config_documentation.md#signing_key_path) which it uses
|
|
||||||
to sign events and outgoing federation requests. It is easiest to back it up
|
|
||||||
with your configuration files, but an alternative is to have Synapse create a
|
|
||||||
new signing key if you have to restore.
|
|
||||||
|
|
||||||
If you do decide to replace the signing key, you should add the old *public*
|
|
||||||
key to
|
|
||||||
[`old_signing_keys`](../configuration/config_documentation.md#old_signing_keys).
|
|
||||||
|
|
||||||
## Database
|
|
||||||
|
|
||||||
Synapse's support for SQLite is only suitable for testing purposes, so for the
|
|
||||||
purposes of this document, we'll assume you are using
|
|
||||||
[PostgreSQL](../../postgres.md).
|
|
||||||
|
|
||||||
A full discussion of backup strategies for PostgreSQL is out of scope for this
|
|
||||||
document; see the [PostgreSQL
|
|
||||||
documentation](https://www.postgresql.org/docs/current/backup.html) for
|
|
||||||
detailed information.
|
|
||||||
|
|
||||||
### Synapse-specific details
|
|
||||||
|
|
||||||
* Be very careful not to restore into a database that already has tables
|
|
||||||
present. At best, this will error; at worst, it will lead to subtle database
|
|
||||||
inconsistencies.
|
|
||||||
|
|
||||||
* The `e2e_one_time_keys_json` table should **not** be backed up, or if it is
|
|
||||||
backed up, should be
|
|
||||||
[`TRUNCATE`d](https://www.postgresql.org/docs/current/sql-truncate.html)
|
|
||||||
after restoring the database before Synapse is started.
|
|
||||||
|
|
||||||
[Background: restoring the database to an older backup can cause
|
|
||||||
used one-time-keys to be re-issued, causing subsequent [message decryption
|
|
||||||
errors](https://github.com/element-hq/element-meta/issues/2155). Clearing
|
|
||||||
all one-time-keys from the database ensures that this cannot happen, and
|
|
||||||
will prompt clients to generate and upload new one-time-keys.]
|
|
||||||
|
|
||||||
### Quick and easy database backup and restore
|
|
||||||
|
|
||||||
Typically, the easiest solution is to use `pg_dump` to take a copy of the whole
|
|
||||||
database. We recommend `pg_dump`'s custom dump format, as it produces
|
|
||||||
significantly smaller backup files.
|
|
||||||
|
|
||||||
```shell
|
|
||||||
sudo -u postgres pg_dump -Fc --exclude-table-data e2e_one_time_keys_json synapse > synapse.dump
|
|
||||||
```
|
|
||||||
|
|
||||||
There is no need to stop Postgres or Synapse while `pg_dump` is running: it
|
|
||||||
will take a consistent snapshot of the database.
|
|
||||||
|
|
||||||
To restore, you will need to recreate the database as described in [Using
|
|
||||||
Postgres](../../postgres.md#set-up-database),
|
|
||||||
then load the dump into it with `pg_restore`:
|
|
||||||
|
|
||||||
```shell
|
|
||||||
sudo -u postgres createdb --encoding=UTF8 --locale=C --template=template0 --owner=synapse_user synapse
|
|
||||||
sudo -u postgres pg_restore -d synapse < synapse.dump
|
|
||||||
```
|
|
||||||
|
|
||||||
(If you forgot to exclude `e2e_one_time_keys_json` during `pg_dump`, remember
|
|
||||||
to connect to the new database and `TRUNCATE e2e_one_time_keys_json;` before
|
|
||||||
starting Synapse.)
|
|
||||||
|
|
||||||
To reiterate: do **not** restore a dump over an existing database.
|
|
||||||
|
|
||||||
Again, if you plan to run your homeserver at any sort of production level, we
|
|
||||||
recommend studying the PostgreSQL documentation on backup options.
|
|
||||||
|
|
||||||
## Media store
|
|
||||||
|
|
||||||
Synapse keeps a copy of media uploaded by users, including avatars and message
|
|
||||||
attachments, in its [Media
|
|
||||||
store](../configuration/config_documentation.md#media-store).
|
|
||||||
|
|
||||||
It is a directory on the local disk, containing the following directories:
|
|
||||||
|
|
||||||
* `local_content`: this is content uploaded by your local users. As a general
|
|
||||||
rule, you should back this up: it may represent the only copy of those
|
|
||||||
media files anywhere in the federation, and if they are lost, users will
|
|
||||||
see errors when viewing user or room avatars, and messages with attachments.
|
|
||||||
|
|
||||||
* `local_thumbnails`: "thumbnails" of images uploaded by your users. If
|
|
||||||
[`dynamic_thumbnails`](../configuration/config_documentation.md#dynamic_thumbnails)
|
|
||||||
is enabled, these will be regenerated if they are removed from the disk, and
|
|
||||||
there is therefore no need to back them up.
|
|
||||||
|
|
||||||
If `dynamic_thumbnails` is *not* enabled (the default): although this can
|
|
||||||
theoretically be regenerated from `local_content`, there is no tooling to do
|
|
||||||
so. We recommend that these are backed up too.
|
|
||||||
|
|
||||||
* `remote_content`: this is a cache of content that was uploaded by a user on
|
|
||||||
another server, and has since been requested by a user on your own server.
|
|
||||||
|
|
||||||
Typically there is no need to back up this directory: if a file in this directory
|
|
||||||
is removed, Synapse will attempt to fetch it again from the remote
|
|
||||||
server.
|
|
||||||
|
|
||||||
* `remote_thumbnails`: thumbnails of images uploaded by users on other
|
|
||||||
servers. As with `remote_content`, there is normally no need to back this
|
|
||||||
up.
|
|
||||||
|
|
||||||
* `url_cache`, `url_cache_thumbnails`: temporary caches of files downloaded
|
|
||||||
by the [URL previews](../../setup/installation.md#url-previews) feature.
|
|
||||||
These do not need to be backed up.
|
|
||||||
@@ -162,53 +162,6 @@ Example configuration:
|
|||||||
pid_file: DATADIR/homeserver.pid
|
pid_file: DATADIR/homeserver.pid
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
### `daemonize`
|
|
||||||
|
|
||||||
Specifies whether Synapse should be started as a daemon process. If Synapse is being
|
|
||||||
managed by [systemd](../../systemd-with-workers/), this option must be omitted or set to
|
|
||||||
`false`.
|
|
||||||
|
|
||||||
This can also be set by the `--daemonize` (`-D`) argument when starting Synapse.
|
|
||||||
|
|
||||||
See `worker_daemonize` for more information on daemonizing workers.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
daemonize: true
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `print_pidfile`
|
|
||||||
|
|
||||||
Print the path to the pidfile just before daemonizing. Defaults to false.
|
|
||||||
|
|
||||||
This can also be set by the `--print-pidfile` argument when starting Synapse.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
print_pidfile: true
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `user_agent_suffix`
|
|
||||||
|
|
||||||
A suffix that is appended to the Synapse user-agent (ex. `Synapse/v1.123.0`). Defaults
|
|
||||||
to None
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
user_agent_suffix: " (I'm a teapot; Linux x86_64)"
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `use_frozen_dicts`
|
|
||||||
|
|
||||||
Determines whether we should freeze the internal dict object in `FrozenEvent`. Freezing
|
|
||||||
prevents bugs where we accidentally share e.g. signature dicts. However, freezing a
|
|
||||||
dict is expensive. Defaults to false.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
use_frozen_dicts: true
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `web_client_location`
|
### `web_client_location`
|
||||||
|
|
||||||
The absolute URL to the web client which `/` will redirect to. Defaults to none.
|
The absolute URL to the web client which `/` will redirect to. Defaults to none.
|
||||||
@@ -642,17 +595,6 @@ listeners:
|
|||||||
- names: [client, federation]
|
- names: [client, federation]
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
|
||||||
### `manhole`
|
|
||||||
|
|
||||||
Turn on the Twisted telnet manhole service on the given port. Defaults to none.
|
|
||||||
|
|
||||||
This can also be set by the `--manhole` argument when starting Synapse.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
manhole: 1234
|
|
||||||
```
|
|
||||||
---
|
---
|
||||||
### `manhole_settings`
|
### `manhole_settings`
|
||||||
|
|
||||||
@@ -731,9 +673,8 @@ This setting has the following sub-options:
|
|||||||
TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
|
TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
|
||||||
Synapse will refuse to connect unless the server supports STARTTLS.
|
Synapse will refuse to connect unless the server supports STARTTLS.
|
||||||
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
|
* `enable_tls`: By default, if the server supports TLS, it will be used, and the server
|
||||||
must present a certificate that is valid for `tlsname`. If this option
|
must present a certificate that is valid for 'smtp_host'. If this option
|
||||||
is set to false, TLS will not be used.
|
is set to false, TLS will not be used.
|
||||||
* `tlsname`: The domain name the SMTP server's TLS certificate must be valid for, defaulting to `smtp_host`.
|
|
||||||
* `notif_from`: defines the "From" address to use when sending emails.
|
* `notif_from`: defines the "From" address to use when sending emails.
|
||||||
It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
|
It must be set if email sending is enabled. The placeholder '%(app)s' will be replaced by the application name,
|
||||||
which is normally set in `app_name`, but may be overridden by the
|
which is normally set in `app_name`, but may be overridden by the
|
||||||
@@ -800,7 +741,6 @@ email:
|
|||||||
force_tls: true
|
force_tls: true
|
||||||
require_transport_security: true
|
require_transport_security: true
|
||||||
enable_tls: false
|
enable_tls: false
|
||||||
tlsname: mail.server.example.com
|
|
||||||
notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
|
notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
|
||||||
app_name: my_branded_matrix_server
|
app_name: my_branded_matrix_server
|
||||||
enable_notifs: true
|
enable_notifs: true
|
||||||
@@ -1494,7 +1434,7 @@ number of entries that can be stored.
|
|||||||
Please see the [Config Conventions](#config-conventions) for information on how to specify memory size and cache expiry
|
Please see the [Config Conventions](#config-conventions) for information on how to specify memory size and cache expiry
|
||||||
durations.
|
durations.
|
||||||
* `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
|
* `max_cache_memory_usage` sets a ceiling on how much memory the cache can use before caches begin to be continuously evicted.
|
||||||
They will continue to be evicted until the memory usage drops below the `target_cache_memory_usage`, set in
|
They will continue to be evicted until the memory usage drops below the `target_memory_usage`, set in
|
||||||
the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
|
the setting below, or until the `min_cache_ttl` is hit. There is no default value for this option.
|
||||||
* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
|
* `target_cache_memory_usage` sets a rough target for the desired memory usage of the caches. There is no default value
|
||||||
for this option.
|
for this option.
|
||||||
@@ -1926,50 +1866,6 @@ rc_federation:
|
|||||||
concurrent: 5
|
concurrent: 5
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
### `rc_presence`
|
|
||||||
|
|
||||||
This option sets ratelimiting for presence.
|
|
||||||
|
|
||||||
The `rc_presence.per_user` option sets rate limits on how often a specific
|
|
||||||
user's presence updates are evaluated. Ratelimited presence updates sent via sync are
|
|
||||||
ignored, and no error is returned to the client.
|
|
||||||
This option also sets the rate limit for the
|
|
||||||
[`PUT /_matrix/client/v3/presence/{userId}/status`](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3presenceuseridstatus)
|
|
||||||
endpoint.
|
|
||||||
|
|
||||||
`per_user` defaults to `per_second: 0.1`, `burst_count: 1`.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
rc_presence:
|
|
||||||
per_user:
|
|
||||||
per_second: 0.05
|
|
||||||
burst_count: 1
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `rc_delayed_event_mgmt`
|
|
||||||
|
|
||||||
Ratelimiting settings for delayed event management.
|
|
||||||
|
|
||||||
This is a ratelimiting option that ratelimits
|
|
||||||
attempts to restart, cancel, or view delayed events
|
|
||||||
based on the sending client's account and device ID.
|
|
||||||
It defaults to: `per_second: 1`, `burst_count: 5`.
|
|
||||||
|
|
||||||
Attempts to create or send delayed events are ratelimited not by this setting, but by `rc_message`.
|
|
||||||
|
|
||||||
Setting this to a high value allows clients to make delayed event management requests often
|
|
||||||
(such as repeatedly restarting a delayed event with a short timeout,
|
|
||||||
or restarting several different delayed events all at once)
|
|
||||||
without the risk of being ratelimited.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
rc_delayed_event_mgmt:
|
|
||||||
per_second: 2
|
|
||||||
burst_count: 20
|
|
||||||
```
|
|
||||||
---
|
|
||||||
### `federation_rr_transactions_per_room_per_second`
|
### `federation_rr_transactions_per_room_per_second`
|
||||||
|
|
||||||
Sets outgoing federation transaction frequency for sending read-receipts,
|
Sets outgoing federation transaction frequency for sending read-receipts,
|
||||||
@@ -1991,33 +1887,12 @@ Config options related to Synapse's media store.
|
|||||||
|
|
||||||
When set to true, all subsequent media uploads will be marked as authenticated, and will not be available over legacy
|
When set to true, all subsequent media uploads will be marked as authenticated, and will not be available over legacy
|
||||||
unauthenticated media endpoints (`/_matrix/media/(r0|v3|v1)/download` and `/_matrix/media/(r0|v3|v1)/thumbnail`) - requests for authenticated media over these endpoints will result in a 404. All media, including authenticated media, will be available over the authenticated media endpoints `_matrix/client/v1/media/download` and `_matrix/client/v1/media/thumbnail`. Media uploaded prior to setting this option to true will still be available over the legacy endpoints. Note if the setting is switched to false
|
unauthenticated media endpoints (`/_matrix/media/(r0|v3|v1)/download` and `/_matrix/media/(r0|v3|v1)/thumbnail`) - requests for authenticated media over these endpoints will result in a 404. All media, including authenticated media, will be available over the authenticated media endpoints `_matrix/client/v1/media/download` and `_matrix/client/v1/media/thumbnail`. Media uploaded prior to setting this option to true will still be available over the legacy endpoints. Note if the setting is switched to false
|
||||||
after enabling, media marked as authenticated will be available over legacy endpoints. Defaults to true (previously false). In a future release of Synapse, this option will be removed and become always-on.
|
after enabling, media marked as authenticated will be available over legacy endpoints. Defaults to false, but
|
||||||
|
this will change to true in a future Synapse release.
|
||||||
In all cases, authenticated requests to download media will succeed, but for unauthenticated requests, this
|
|
||||||
case-by-case breakdown describes whether media downloads are permitted:
|
|
||||||
|
|
||||||
* `enable_authenticated_media = False`:
|
|
||||||
* unauthenticated client or homeserver requesting local media: allowed
|
|
||||||
* unauthenticated client or homeserver requesting remote media: allowed as long as the media is in the cache,
|
|
||||||
or as long as the remote homeserver does not require authentication to retrieve the media
|
|
||||||
* `enable_authenticated_media = True`:
|
|
||||||
* unauthenticated client or homeserver requesting local media:
|
|
||||||
allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist);
|
|
||||||
otherwise denied.
|
|
||||||
* unauthenticated client or homeserver requesting remote media: the same as for local media;
|
|
||||||
allowed if the media was stored on the server whilst `enable_authenticated_media` was `False` (or in a previous Synapse version where this option did not exist);
|
|
||||||
otherwise denied.
|
|
||||||
|
|
||||||
It is especially notable that media downloaded before this option existed (in older Synapse versions), or whilst this option was set to `False`,
|
|
||||||
will perpetually be available over the legacy, unauthenticated endpoint, even after this option is set to `True`.
|
|
||||||
This is for backwards compatibility with older clients and homeservers that do not yet support requesting authenticated media;
|
|
||||||
those older clients or homeservers will not be cut off from media they can already see.
|
|
||||||
|
|
||||||
_Changed in Synapse 1.120:_ This option now defaults to `True` when not set, whereas before this version it defaulted to `False`.
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
enable_authenticated_media: false
|
enable_authenticated_media: true
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
### `enable_media_repo`
|
### `enable_media_repo`
|
||||||
@@ -2615,14 +2490,6 @@ This is primarily intended for use with the `register_new_matrix_user` script
|
|||||||
(see [Registering a user](../../setup/installation.md#registering-a-user));
|
(see [Registering a user](../../setup/installation.md#registering-a-user));
|
||||||
however, the interface is [documented](../../admin_api/register_api.html).
|
however, the interface is [documented](../../admin_api/register_api.html).
|
||||||
|
|
||||||
Replacing an existing `registration_shared_secret` with a new one requires users
|
|
||||||
of the [Shared-Secret Registration API](../../admin_api/register_api.html) to
|
|
||||||
start using the new secret for requesting any further one-time nonces.
|
|
||||||
|
|
||||||
> ⚠️ **Warning** – The additional consequences of replacing
|
|
||||||
> [`macaroon_secret_key`](#macaroon_secret_key) will apply in case it delegates
|
|
||||||
> to `registration_shared_secret`.
|
|
||||||
|
|
||||||
See also [`registration_shared_secret_path`](#registration_shared_secret_path).
|
See also [`registration_shared_secret_path`](#registration_shared_secret_path).
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
@@ -3199,31 +3066,10 @@ A secret which is used to sign
|
|||||||
If none is specified, the `registration_shared_secret` is used, if one is given;
|
If none is specified, the `registration_shared_secret` is used, if one is given;
|
||||||
otherwise, a secret key is derived from the signing key.
|
otherwise, a secret key is derived from the signing key.
|
||||||
|
|
||||||
> ⚠️ **Warning** – Replacing an existing `macaroon_secret_key` with a new one
|
|
||||||
> will lead to invalidation of access tokens for all guest users. It will also
|
|
||||||
> break unsubscribe links in emails sent before the change. An unlucky user
|
|
||||||
> might encounter a broken SSO login flow and would have to start again.
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
macaroon_secret_key: <PRIVATE STRING>
|
macaroon_secret_key: <PRIVATE STRING>
|
||||||
```
|
```
|
||||||
---
|
|
||||||
### `macaroon_secret_key_path`
|
|
||||||
|
|
||||||
An alternative to [`macaroon_secret_key`](#macaroon_secret_key):
|
|
||||||
allows the secret key to be specified in an external file.
|
|
||||||
|
|
||||||
The file should be a plain text file, containing only the secret key.
|
|
||||||
Synapse reads the secret key from the given file once at startup.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
macaroon_secret_key_path: /path/to/secrets/file
|
|
||||||
```
|
|
||||||
|
|
||||||
_Added in Synapse 1.121.0._
|
|
||||||
|
|
||||||
---
|
---
|
||||||
### `form_secret`
|
### `form_secret`
|
||||||
|
|
||||||
@@ -3231,29 +3077,10 @@ A secret which is used to calculate HMACs for form values, to stop
|
|||||||
falsification of values. Must be specified for the User Consent
|
falsification of values. Must be specified for the User Consent
|
||||||
forms to work.
|
forms to work.
|
||||||
|
|
||||||
Replacing an existing `form_secret` with a new one might break the user consent
|
|
||||||
page for an unlucky user and require them to reopen the page from a new link.
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
form_secret: <PRIVATE STRING>
|
form_secret: <PRIVATE STRING>
|
||||||
```
|
```
|
||||||
---
|
|
||||||
### `form_secret_path`
|
|
||||||
|
|
||||||
An alternative to [`form_secret`](#form_secret):
|
|
||||||
allows the secret to be specified in an external file.
|
|
||||||
|
|
||||||
The file should be a plain text file, containing only the secret.
|
|
||||||
Synapse reads the secret from the given file once at startup.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
form_secret_path: /path/to/secrets/file
|
|
||||||
```
|
|
||||||
|
|
||||||
_Added in Synapse 1.126.0._
|
|
||||||
|
|
||||||
---
|
---
|
||||||
## Signing Keys
|
## Signing Keys
|
||||||
Config options relating to signing keys
|
Config options relating to signing keys
|
||||||
@@ -3281,15 +3108,6 @@ it was last used.
|
|||||||
It is possible to build an entry from an old `signing.key` file using the
|
It is possible to build an entry from an old `signing.key` file using the
|
||||||
`export_signing_key` script which is provided with synapse.
|
`export_signing_key` script which is provided with synapse.
|
||||||
|
|
||||||
If you have lost the private key file, you can ask another server you trust to
|
|
||||||
tell you the public keys it has seen from your server. To fetch the keys from
|
|
||||||
`matrix.org`, try something like:
|
|
||||||
|
|
||||||
```
|
|
||||||
curl https://matrix-federation.matrix.org/_matrix/key/v2/query/myserver.example.com |
|
|
||||||
jq '.server_keys | map(.verify_keys) | add'
|
|
||||||
```
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
old_signing_keys:
|
old_signing_keys:
|
||||||
@@ -3450,9 +3268,8 @@ This setting has the following sub-options:
|
|||||||
The default is 'uid'.
|
The default is 'uid'.
|
||||||
* `attribute_requirements`: It is possible to configure Synapse to only allow logins if SAML attributes
|
* `attribute_requirements`: It is possible to configure Synapse to only allow logins if SAML attributes
|
||||||
match particular values. The requirements can be listed under
|
match particular values. The requirements can be listed under
|
||||||
`attribute_requirements` as shown in the example. All of the listed attributes must
|
`attribute_requirements` as shown in the example. All of the listed attributes must
|
||||||
match for the login to be permitted. Values can be specified in a `one_of` list to allow
|
match for the login to be permitted.
|
||||||
multiple values for an attribute.
|
|
||||||
* `idp_entityid`: If the metadata XML contains multiple IdP entities then the `idp_entityid`
|
* `idp_entityid`: If the metadata XML contains multiple IdP entities then the `idp_entityid`
|
||||||
option must be set to the entity to redirect users to.
|
option must be set to the entity to redirect users to.
|
||||||
Most deployments only have a single IdP entity and so should omit this option.
|
Most deployments only have a single IdP entity and so should omit this option.
|
||||||
@@ -3533,9 +3350,7 @@ saml2_config:
|
|||||||
- attribute: userGroup
|
- attribute: userGroup
|
||||||
value: "staff"
|
value: "staff"
|
||||||
- attribute: department
|
- attribute: department
|
||||||
one_of:
|
value: "sales"
|
||||||
- "sales"
|
|
||||||
- "admins"
|
|
||||||
|
|
||||||
idp_entityid: 'https://our_idp/entityid'
|
idp_entityid: 'https://our_idp/entityid'
|
||||||
```
|
```
|
||||||
@@ -3618,24 +3433,6 @@ Options for each entry include:
|
|||||||
to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
|
to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
|
||||||
to force enable PKCE or `never` to force disable PKCE.
|
to force enable PKCE or `never` to force disable PKCE.
|
||||||
|
|
||||||
* `id_token_signing_alg_values_supported`: List of the JWS signing algorithms (`alg`
|
|
||||||
values) that are supported for signing the `id_token`.
|
|
||||||
|
|
||||||
This is *not* required if `discovery` is disabled. We default to supporting `RS256` in
|
|
||||||
the downstream usage if no algorithms are configured here or in the discovery
|
|
||||||
document.
|
|
||||||
|
|
||||||
According to the spec, the algorithm `"RS256"` MUST be included. The absolute rigid
|
|
||||||
approach would be to reject this provider as non-compliant if it's not included but we
|
|
||||||
simply allow whatever and see what happens (you're the one that configured the value
|
|
||||||
and cooperating with the identity provider).
|
|
||||||
|
|
||||||
The `alg` value `"none"` MAY be supported but can only be used if the Authorization
|
|
||||||
Endpoint does not include `id_token` in the `response_type` (ex.
|
|
||||||
`/authorize?response_type=code` where `none` can apply,
|
|
||||||
`/authorize?response_type=code%20id_token` where `none` can't apply) (such as when
|
|
||||||
using the Authorization Code Flow).
|
|
||||||
|
|
||||||
* `scopes`: list of scopes to request. This should normally include the "openid"
|
* `scopes`: list of scopes to request. This should normally include the "openid"
|
||||||
scope. Defaults to `["openid"]`.
|
scope. Defaults to `["openid"]`.
|
||||||
|
|
||||||
@@ -3662,13 +3459,6 @@ Options for each entry include:
|
|||||||
not included in `scopes`. Set to `userinfo_endpoint` to always use the
|
not included in `scopes`. Set to `userinfo_endpoint` to always use the
|
||||||
userinfo endpoint.
|
userinfo endpoint.
|
||||||
|
|
||||||
* `redirect_uri`: An optional string, that if set will override the `redirect_uri`
|
|
||||||
parameter sent in the requests to the authorization and token endpoints.
|
|
||||||
Useful if you want to redirect the client to another endpoint as part of the
|
|
||||||
OIDC login. Be aware that the client must then call Synapse's OIDC callback
|
|
||||||
URL (`<public_baseurl>/_synapse/client/oidc/callback`) manually afterwards.
|
|
||||||
Must be a valid URL including scheme and path.
|
|
||||||
|
|
||||||
* `additional_authorization_parameters`: String to string dictionary that will be passed as
|
* `additional_authorization_parameters`: String to string dictionary that will be passed as
|
||||||
additional parameters to the authorization grant URL.
|
additional parameters to the authorization grant URL.
|
||||||
|
|
||||||
@@ -3932,8 +3722,6 @@ Additional sub-options for this setting include:
|
|||||||
Required if `enabled` is set to true.
|
Required if `enabled` is set to true.
|
||||||
* `subject_claim`: Name of the claim containing a unique identifier for the user.
|
* `subject_claim`: Name of the claim containing a unique identifier for the user.
|
||||||
Optional, defaults to `sub`.
|
Optional, defaults to `sub`.
|
||||||
* `display_name_claim`: Name of the claim containing the display name for the user. Optional.
|
|
||||||
If provided, the display name will be set to the value of this claim upon first login.
|
|
||||||
* `issuer`: The issuer to validate the "iss" claim against. Optional. If provided the
|
* `issuer`: The issuer to validate the "iss" claim against. Optional. If provided the
|
||||||
"iss" claim will be required and validated for all JSON web tokens.
|
"iss" claim will be required and validated for all JSON web tokens.
|
||||||
* `audiences`: A list of audiences to validate the "aud" claim against. Optional.
|
* `audiences`: A list of audiences to validate the "aud" claim against. Optional.
|
||||||
@@ -3948,7 +3736,6 @@ jwt_config:
|
|||||||
secret: "provided-by-your-issuer"
|
secret: "provided-by-your-issuer"
|
||||||
algorithm: "provided-by-your-issuer"
|
algorithm: "provided-by-your-issuer"
|
||||||
subject_claim: "name_of_claim"
|
subject_claim: "name_of_claim"
|
||||||
display_name_claim: "name_of_claim"
|
|
||||||
issuer: "provided-by-your-issuer"
|
issuer: "provided-by-your-issuer"
|
||||||
audiences:
|
audiences:
|
||||||
- "provided-by-your-issuer"
|
- "provided-by-your-issuer"
|
||||||
@@ -4291,8 +4078,8 @@ unwanted entries from being published in the public room list.
|
|||||||
|
|
||||||
The format of this option is the same as that for
|
The format of this option is the same as that for
|
||||||
[`alias_creation_rules`](#alias_creation_rules): an optional list of 0 or more
|
[`alias_creation_rules`](#alias_creation_rules): an optional list of 0 or more
|
||||||
rules. By default, no list is provided, meaning that no one may publish to the
|
rules. By default, no list is provided, meaning that all rooms may be
|
||||||
room list (except server admins).
|
published to the room list.
|
||||||
|
|
||||||
Otherwise, requests to publish a room are matched against each rule in order.
|
Otherwise, requests to publish a room are matched against each rule in order.
|
||||||
The first rule that matches decides if the request is allowed or denied. If no
|
The first rule that matches decides if the request is allowed or denied. If no
|
||||||
@@ -4318,10 +4105,6 @@ Note that the patterns match against fully qualified IDs, e.g. against
|
|||||||
of `alice`, `room` and `abcedgghijk`.
|
of `alice`, `room` and `abcedgghijk`.
|
||||||
|
|
||||||
|
|
||||||
_Changed in Synapse 1.126.0: The default was changed to deny publishing to the
|
|
||||||
room list by default_
|
|
||||||
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
@@ -4527,29 +4310,10 @@ HTTP requests from workers.
|
|||||||
The default, this value is omitted (equivalently `null`), which means that
|
The default, this value is omitted (equivalently `null`), which means that
|
||||||
traffic between the workers and the main process is not authenticated.
|
traffic between the workers and the main process is not authenticated.
|
||||||
|
|
||||||
Replacing an existing `worker_replication_secret` with a new one will break
|
|
||||||
communication with all workers that have not yet updated their secret.
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
worker_replication_secret: "secret_secret"
|
worker_replication_secret: "secret_secret"
|
||||||
```
|
```
|
||||||
---
|
|
||||||
### `worker_replication_secret_path`
|
|
||||||
|
|
||||||
An alternative to [`worker_replication_secret`](#worker_replication_secret):
|
|
||||||
allows the secret to be specified in an external file.
|
|
||||||
|
|
||||||
The file should be a plain text file, containing only the secret.
|
|
||||||
Synapse reads the secret from the given file once at startup.
|
|
||||||
|
|
||||||
Example configuration:
|
|
||||||
```yaml
|
|
||||||
worker_replication_secret_path: /path/to/secrets/file
|
|
||||||
```
|
|
||||||
|
|
||||||
_Added in Synapse 1.126.0._
|
|
||||||
|
|
||||||
---
|
---
|
||||||
### `start_pushers`
|
### `start_pushers`
|
||||||
|
|
||||||
@@ -4606,12 +4370,6 @@ a `federation_sender_instances` map. Doing so will remove handling of this funct
|
|||||||
the main process. Multiple workers can be added to this map, in which case the work is
|
the main process. Multiple workers can be added to this map, in which case the work is
|
||||||
balanced across them.
|
balanced across them.
|
||||||
|
|
||||||
The way that the load balancing works is any outbound federation request will be assigned
|
|
||||||
to a federation sender worker based on the hash of the destination server name. This
|
|
||||||
means that all requests being sent to the same destination will be processed by the same
|
|
||||||
worker instance. Multiple `federation_sender_instances` are useful if there is a federation
|
|
||||||
with multiple servers.
|
|
||||||
|
|
||||||
This configuration setting must be shared between all workers handling federation
|
This configuration setting must be shared between all workers handling federation
|
||||||
sending, and if changed all federation sender workers must be stopped at the same time
|
sending, and if changed all federation sender workers must be stopped at the same time
|
||||||
and then started, to ensure that all instances are running with the same config (otherwise
|
and then started, to ensure that all instances are running with the same config (otherwise
|
||||||
@@ -4650,10 +4408,6 @@ instance_map:
|
|||||||
worker1:
|
worker1:
|
||||||
host: localhost
|
host: localhost
|
||||||
port: 8034
|
port: 8034
|
||||||
other:
|
|
||||||
host: localhost
|
|
||||||
port: 8035
|
|
||||||
tls: true
|
|
||||||
```
|
```
|
||||||
Example configuration(#2, for UNIX sockets):
|
Example configuration(#2, for UNIX sockets):
|
||||||
```yaml
|
```yaml
|
||||||
@@ -4764,9 +4518,6 @@ This setting has the following sub-options:
|
|||||||
* `path`: The full path to a local Unix socket file. **If this is used, `host` and
|
* `path`: The full path to a local Unix socket file. **If this is used, `host` and
|
||||||
`port` are ignored.** Defaults to `/tmp/redis.sock'
|
`port` are ignored.** Defaults to `/tmp/redis.sock'
|
||||||
* `password`: Optional password if configured on the Redis instance.
|
* `password`: Optional password if configured on the Redis instance.
|
||||||
* `password_path`: Alternative to `password`, reading the password from an
|
|
||||||
external file. The file should be a plain text file, containing only the
|
|
||||||
password. Synapse reads the password from the given file once at startup.
|
|
||||||
* `dbid`: Optional redis dbid if needs to connect to specific redis logical db.
|
* `dbid`: Optional redis dbid if needs to connect to specific redis logical db.
|
||||||
* `use_tls`: Whether to use tls connection. Defaults to false.
|
* `use_tls`: Whether to use tls connection. Defaults to false.
|
||||||
* `certificate_file`: Optional path to the certificate file
|
* `certificate_file`: Optional path to the certificate file
|
||||||
@@ -4780,16 +4531,13 @@ This setting has the following sub-options:
|
|||||||
|
|
||||||
_Changed in Synapse 1.85.0: Added path option to use a local Unix socket_
|
_Changed in Synapse 1.85.0: Added path option to use a local Unix socket_
|
||||||
|
|
||||||
_Changed in Synapse 1.116.0: Added password\_path_
|
|
||||||
|
|
||||||
Example configuration:
|
Example configuration:
|
||||||
```yaml
|
```yaml
|
||||||
redis:
|
redis:
|
||||||
enabled: true
|
enabled: true
|
||||||
host: localhost
|
host: localhost
|
||||||
port: 6379
|
port: 6379
|
||||||
password_path: <path_to_the_password_file>
|
password: <secret_password>
|
||||||
# OR password: <secret_password>
|
|
||||||
dbid: <dbid>
|
dbid: <dbid>
|
||||||
#use_tls: True
|
#use_tls: True
|
||||||
#certificate_file: <path_to_the_certificate_file>
|
#certificate_file: <path_to_the_certificate_file>
|
||||||
|
|||||||
@@ -177,11 +177,11 @@ The following applies to Synapse installations that have been installed from sou
|
|||||||
|
|
||||||
You can start the main Synapse process with Poetry by running the following command:
|
You can start the main Synapse process with Poetry by running the following command:
|
||||||
```console
|
```console
|
||||||
poetry run synapse_homeserver --config-path [your homeserver.yaml]
|
poetry run synapse_homeserver --config-file [your homeserver.yaml]
|
||||||
```
|
```
|
||||||
For worker setups, you can run the following command
|
For worker setups, you can run the following command
|
||||||
```console
|
```console
|
||||||
poetry run synapse_worker --config-path [your homeserver.yaml] --config-path [your worker.yaml]
|
poetry run synapse_worker --config-file [your homeserver.yaml] --config-file [your worker.yaml]
|
||||||
```
|
```
|
||||||
## Available worker applications
|
## Available worker applications
|
||||||
|
|
||||||
@@ -255,7 +255,7 @@ information.
|
|||||||
^/_matrix/client/(r0|v3|unstable)/keys/changes$
|
^/_matrix/client/(r0|v3|unstable)/keys/changes$
|
||||||
^/_matrix/client/(r0|v3|unstable)/keys/claim$
|
^/_matrix/client/(r0|v3|unstable)/keys/claim$
|
||||||
^/_matrix/client/(r0|v3|unstable)/room_keys/
|
^/_matrix/client/(r0|v3|unstable)/room_keys/
|
||||||
^/_matrix/client/(r0|v3|unstable)/keys/upload$
|
^/_matrix/client/(r0|v3|unstable)/keys/upload/
|
||||||
|
|
||||||
# Registration/login requests
|
# Registration/login requests
|
||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
|
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
|
||||||
@@ -273,6 +273,17 @@ information.
|
|||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
|
^/_matrix/client/(api/v1|r0|v3|unstable)/knock/
|
||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/
|
^/_matrix/client/(api/v1|r0|v3|unstable)/profile/
|
||||||
|
|
||||||
|
# Account data requests
|
||||||
|
^/_matrix/client/(r0|v3|unstable)/.*/tags
|
||||||
|
^/_matrix/client/(r0|v3|unstable)/.*/account_data
|
||||||
|
|
||||||
|
# Receipts requests
|
||||||
|
^/_matrix/client/(r0|v3|unstable)/rooms/.*/receipt
|
||||||
|
^/_matrix/client/(r0|v3|unstable)/rooms/.*/read_markers
|
||||||
|
|
||||||
|
# Presence requests
|
||||||
|
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/
|
||||||
|
|
||||||
# User directory search requests
|
# User directory search requests
|
||||||
^/_matrix/client/(r0|v3|unstable)/user_directory/search$
|
^/_matrix/client/(r0|v3|unstable)/user_directory/search$
|
||||||
|
|
||||||
@@ -281,13 +292,6 @@ Additionally, the following REST endpoints can be handled for GET requests:
|
|||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
|
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
|
||||||
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
|
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
|
||||||
|
|
||||||
# Account data requests
|
|
||||||
^/_matrix/client/(r0|v3|unstable)/.*/tags
|
|
||||||
^/_matrix/client/(r0|v3|unstable)/.*/account_data
|
|
||||||
|
|
||||||
# Presence requests
|
|
||||||
^/_matrix/client/(api/v1|r0|v3|unstable)/presence/
|
|
||||||
|
|
||||||
Pagination requests can also be handled, but all requests for a given
|
Pagination requests can also be handled, but all requests for a given
|
||||||
room must be routed to the same instance. Additionally, care must be taken to
|
room must be routed to the same instance. Additionally, care must be taken to
|
||||||
ensure that the purge history admin API is not used while pagination requests
|
ensure that the purge history admin API is not used while pagination requests
|
||||||
|
|||||||
56
flake.lock
generated
56
flake.lock
generated
@@ -56,6 +56,24 @@
|
|||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"flake-utils_2": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems_2"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681202837,
|
||||||
|
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
"gitignore": {
|
"gitignore": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixpkgs": [
|
"nixpkgs": [
|
||||||
@@ -168,27 +186,27 @@
|
|||||||
},
|
},
|
||||||
"nixpkgs_2": {
|
"nixpkgs_2": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1729265718,
|
"lastModified": 1690535733,
|
||||||
"narHash": "sha256-4HQI+6LsO3kpWTYuVGIzhJs1cetFcwT7quWCk/6rqeo=",
|
"narHash": "sha256-WgjUPscQOw3cB8yySDGlyzo6cZNihnRzUwE9kadv/5I=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "ccc0c2126893dd20963580b6478d1a10a4512185",
|
"rev": "8cacc05fbfffeaab910e8c2c9e2a7c6b32ce881a",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "nixpkgs-unstable",
|
"ref": "master",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"nixpkgs_3": {
|
"nixpkgs_3": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1728538411,
|
"lastModified": 1681358109,
|
||||||
"narHash": "sha256-f0SBJz1eZ2yOuKUr5CA9BHULGXVSn6miBuUWdTyhUhU=",
|
"narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "b69de56fac8c2b6f8fd27f2eca01dcda8e0a4221",
|
"rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -231,19 +249,20 @@
|
|||||||
"devenv": "devenv",
|
"devenv": "devenv",
|
||||||
"nixpkgs": "nixpkgs_2",
|
"nixpkgs": "nixpkgs_2",
|
||||||
"rust-overlay": "rust-overlay",
|
"rust-overlay": "rust-overlay",
|
||||||
"systems": "systems_2"
|
"systems": "systems_3"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"rust-overlay": {
|
"rust-overlay": {
|
||||||
"inputs": {
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils_2",
|
||||||
"nixpkgs": "nixpkgs_3"
|
"nixpkgs": "nixpkgs_3"
|
||||||
},
|
},
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1731897198,
|
"lastModified": 1693966243,
|
||||||
"narHash": "sha256-Ou7vLETSKwmE/HRQz4cImXXJBr/k9gp4J4z/PF8LzTE=",
|
"narHash": "sha256-a2CA1aMIPE67JWSVIGoGtD3EGlFdK9+OlJQs0FOWCKY=",
|
||||||
"owner": "oxalica",
|
"owner": "oxalica",
|
||||||
"repo": "rust-overlay",
|
"repo": "rust-overlay",
|
||||||
"rev": "0be641045af6d8666c11c2c40e45ffc9667839b5",
|
"rev": "a8b4bb4cbb744baaabc3e69099f352f99164e2c1",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
@@ -281,6 +300,21 @@
|
|||||||
"repo": "default",
|
"repo": "default",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"systems_3": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"root": "root",
|
"root": "root",
|
||||||
|
|||||||
26
flake.nix
26
flake.nix
@@ -3,13 +3,13 @@
|
|||||||
# (https://github.com/matrix-org/complement) Matrix homeserver test suites are also
|
# (https://github.com/matrix-org/complement) Matrix homeserver test suites are also
|
||||||
# installed automatically.
|
# installed automatically.
|
||||||
#
|
#
|
||||||
# You must have already installed Nix (https://nixos.org/download/) on your system to use this.
|
# You must have already installed Nix (https://nixos.org) on your system to use this.
|
||||||
# Nix can be installed on any Linux distribiution or MacOS; NixOS is not required.
|
# Nix can be installed on Linux or MacOS; NixOS is not required. Windows is not
|
||||||
# Windows is not directly supported, but Nix can be installed inside of WSL2 or even Docker
|
# directly supported, but Nix can be installed inside of WSL2 or even Docker
|
||||||
# containers. Please refer to https://nixos.org/download for details.
|
# containers. Please refer to https://nixos.org/download for details.
|
||||||
#
|
#
|
||||||
# You must also enable support for flakes in Nix. See the following for how to
|
# You must also enable support for flakes in Nix. See the following for how to
|
||||||
# do so permanently: https://wiki.nixos.org/wiki/Flakes#Other_Distros,_without_Home-Manager
|
# do so permanently: https://nixos.wiki/wiki/Flakes#Enable_flakes
|
||||||
#
|
#
|
||||||
# Be warned: you'll need over 3.75 GB of free space to download all the dependencies.
|
# Be warned: you'll need over 3.75 GB of free space to download all the dependencies.
|
||||||
#
|
#
|
||||||
@@ -20,7 +20,7 @@
|
|||||||
# locally from "services", such as PostgreSQL and Redis.
|
# locally from "services", such as PostgreSQL and Redis.
|
||||||
#
|
#
|
||||||
# You should now be dropped into a new shell with all programs and dependencies
|
# You should now be dropped into a new shell with all programs and dependencies
|
||||||
# available to you!
|
# availabile to you!
|
||||||
#
|
#
|
||||||
# You can start up pre-configured local Synapse, PostgreSQL and Redis instances by
|
# You can start up pre-configured local Synapse, PostgreSQL and Redis instances by
|
||||||
# running: `devenv up`. To stop them, use Ctrl-C.
|
# running: `devenv up`. To stop them, use Ctrl-C.
|
||||||
@@ -39,9 +39,9 @@
|
|||||||
|
|
||||||
{
|
{
|
||||||
inputs = {
|
inputs = {
|
||||||
# Use the rolling/unstable branch of nixpkgs. Used to fetch the latest
|
# Use the master/unstable branch of nixpkgs. Used to fetch the latest
|
||||||
# available versions of packages.
|
# available versions of packages.
|
||||||
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
|
nixpkgs.url = "github:NixOS/nixpkgs/master";
|
||||||
# Output a development shell for x86_64/aarch64 Linux/Darwin (MacOS).
|
# Output a development shell for x86_64/aarch64 Linux/Darwin (MacOS).
|
||||||
systems.url = "github:nix-systems/default";
|
systems.url = "github:nix-systems/default";
|
||||||
# A development environment manager built on Nix. See https://devenv.sh.
|
# A development environment manager built on Nix. See https://devenv.sh.
|
||||||
@@ -50,7 +50,7 @@
|
|||||||
rust-overlay.url = "github:oxalica/rust-overlay";
|
rust-overlay.url = "github:oxalica/rust-overlay";
|
||||||
};
|
};
|
||||||
|
|
||||||
outputs = { nixpkgs, devenv, systems, rust-overlay, ... } @ inputs:
|
outputs = { self, nixpkgs, devenv, systems, rust-overlay, ... } @ inputs:
|
||||||
let
|
let
|
||||||
forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
||||||
in {
|
in {
|
||||||
@@ -82,7 +82,7 @@
|
|||||||
#
|
#
|
||||||
# NOTE: We currently need to set the Rust version unnecessarily high
|
# NOTE: We currently need to set the Rust version unnecessarily high
|
||||||
# in order to work around https://github.com/matrix-org/synapse/issues/15939
|
# in order to work around https://github.com/matrix-org/synapse/issues/15939
|
||||||
(rust-bin.stable."1.82.0".default.override {
|
(rust-bin.stable."1.71.1".default.override {
|
||||||
# Additionally install the "rust-src" extension to allow diving into the
|
# Additionally install the "rust-src" extension to allow diving into the
|
||||||
# Rust source code in an IDE (rust-analyzer will also make use of it).
|
# Rust source code in an IDE (rust-analyzer will also make use of it).
|
||||||
extensions = [ "rust-src" ];
|
extensions = [ "rust-src" ];
|
||||||
@@ -126,7 +126,7 @@
|
|||||||
# Automatically activate the poetry virtualenv upon entering the shell.
|
# Automatically activate the poetry virtualenv upon entering the shell.
|
||||||
languages.python.poetry.activate.enable = true;
|
languages.python.poetry.activate.enable = true;
|
||||||
# Install all extra Python dependencies; this is needed to run the unit
|
# Install all extra Python dependencies; this is needed to run the unit
|
||||||
# tests and utilise all Synapse features.
|
# tests and utilitise all Synapse features.
|
||||||
languages.python.poetry.install.arguments = ["--extras all"];
|
languages.python.poetry.install.arguments = ["--extras all"];
|
||||||
# Install the 'matrix-synapse' package from the local checkout.
|
# Install the 'matrix-synapse' package from the local checkout.
|
||||||
languages.python.poetry.install.installRootPackage = true;
|
languages.python.poetry.install.installRootPackage = true;
|
||||||
@@ -163,8 +163,8 @@
|
|||||||
# Create a postgres user called 'synapse_user' which has ownership
|
# Create a postgres user called 'synapse_user' which has ownership
|
||||||
# over the 'synapse' database.
|
# over the 'synapse' database.
|
||||||
services.postgres.initialScript = ''
|
services.postgres.initialScript = ''
|
||||||
CREATE USER synapse_user;
|
CREATE USER synapse_user;
|
||||||
ALTER DATABASE synapse OWNER TO synapse_user;
|
ALTER DATABASE synapse OWNER TO synapse_user;
|
||||||
'';
|
'';
|
||||||
|
|
||||||
# Redis is needed in order to run Synapse in worker mode.
|
# Redis is needed in order to run Synapse in worker mode.
|
||||||
@@ -205,7 +205,7 @@
|
|||||||
# corresponding Nix packages on https://search.nixos.org/packages.
|
# corresponding Nix packages on https://search.nixos.org/packages.
|
||||||
#
|
#
|
||||||
# This was done until `./install-deps.pl --dryrun` produced no output.
|
# This was done until `./install-deps.pl --dryrun` produced no output.
|
||||||
env.PERL5LIB = "${with pkgs.perl538Packages; makePerlPath [
|
env.PERL5LIB = "${with pkgs.perl536Packages; makePerlPath [
|
||||||
DBI
|
DBI
|
||||||
ClassMethodModifiers
|
ClassMethodModifiers
|
||||||
CryptEd25519
|
CryptEd25519
|
||||||
|
|||||||
2
mypy.ini
2
mypy.ini
@@ -26,7 +26,7 @@ strict_equality = True
|
|||||||
|
|
||||||
# Run mypy type checking with the minimum supported Python version to catch new usage
|
# Run mypy type checking with the minimum supported Python version to catch new usage
|
||||||
# that isn't backwards-compatible (types, overloads, etc).
|
# that isn't backwards-compatible (types, overloads, etc).
|
||||||
python_version = 3.9
|
python_version = 3.8
|
||||||
|
|
||||||
files =
|
files =
|
||||||
docker/,
|
docker/,
|
||||||
|
|||||||
1683
poetry.lock
generated
1683
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -36,7 +36,7 @@
|
|||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
line-length = 88
|
line-length = 88
|
||||||
target-version = "py39"
|
target-version = "py38"
|
||||||
|
|
||||||
[tool.ruff.lint]
|
[tool.ruff.lint]
|
||||||
# See https://beta.ruff.rs/docs/rules/#error-e
|
# See https://beta.ruff.rs/docs/rules/#error-e
|
||||||
@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
|
|||||||
|
|
||||||
[tool.poetry]
|
[tool.poetry]
|
||||||
name = "matrix-synapse"
|
name = "matrix-synapse"
|
||||||
version = "1.128.0rc1"
|
version = "1.116.0rc2"
|
||||||
description = "Homeserver for the Matrix decentralised comms protocol"
|
description = "Homeserver for the Matrix decentralised comms protocol"
|
||||||
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
|
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
|
||||||
license = "AGPL-3.0-or-later"
|
license = "AGPL-3.0-or-later"
|
||||||
@@ -155,7 +155,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
|
|||||||
update_synapse_database = "synapse._scripts.update_synapse_database:main"
|
update_synapse_database = "synapse._scripts.update_synapse_database:main"
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
[tool.poetry.dependencies]
|
||||||
python = "^3.9.0"
|
python = "^3.8.0"
|
||||||
|
|
||||||
# Mandatory Dependencies
|
# Mandatory Dependencies
|
||||||
# ----------------------
|
# ----------------------
|
||||||
@@ -178,7 +178,7 @@ Twisted = {extras = ["tls"], version = ">=18.9.0"}
|
|||||||
treq = ">=15.1"
|
treq = ">=15.1"
|
||||||
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
|
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
|
||||||
pyOpenSSL = ">=16.0.0"
|
pyOpenSSL = ">=16.0.0"
|
||||||
PyYAML = ">=5.3"
|
PyYAML = ">=3.13"
|
||||||
pyasn1 = ">=0.1.9"
|
pyasn1 = ">=0.1.9"
|
||||||
pyasn1-modules = ">=0.0.7"
|
pyasn1-modules = ">=0.0.7"
|
||||||
bcrypt = ">=3.1.7"
|
bcrypt = ">=3.1.7"
|
||||||
@@ -241,7 +241,7 @@ authlib = { version = ">=0.15.1", optional = true }
|
|||||||
# `contrib/systemd/log_config.yaml`.
|
# `contrib/systemd/log_config.yaml`.
|
||||||
# Note: systemd-python 231 appears to have been yanked from pypi
|
# Note: systemd-python 231 appears to have been yanked from pypi
|
||||||
systemd-python = { version = ">=231", optional = true }
|
systemd-python = { version = ">=231", optional = true }
|
||||||
lxml = { version = ">=4.5.2", optional = true }
|
lxml = { version = ">=4.2.0", optional = true }
|
||||||
sentry-sdk = { version = ">=0.7.2", optional = true }
|
sentry-sdk = { version = ">=0.7.2", optional = true }
|
||||||
opentracing = { version = ">=2.2.0", optional = true }
|
opentracing = { version = ">=2.2.0", optional = true }
|
||||||
jaeger-client = { version = ">=4.0.0", optional = true }
|
jaeger-client = { version = ">=4.0.0", optional = true }
|
||||||
@@ -320,7 +320,7 @@ all = [
|
|||||||
# failing on new releases. Keeping lower bounds loose here means that dependabot
|
# failing on new releases. Keeping lower bounds loose here means that dependabot
|
||||||
# can bump versions without having to update the content-hash in the lockfile.
|
# can bump versions without having to update the content-hash in the lockfile.
|
||||||
# This helps prevents merge conflicts when running a batch of dependabot updates.
|
# This helps prevents merge conflicts when running a batch of dependabot updates.
|
||||||
ruff = "0.7.3"
|
ruff = "0.6.7"
|
||||||
# Type checking only works with the pydantic.v1 compat module from pydantic v2
|
# Type checking only works with the pydantic.v1 compat module from pydantic v2
|
||||||
pydantic = "^2"
|
pydantic = "^2"
|
||||||
|
|
||||||
@@ -370,7 +370,7 @@ tomli = ">=1.2.3"
|
|||||||
# runtime errors caused by build system changes.
|
# runtime errors caused by build system changes.
|
||||||
# We are happy to raise these upper bounds upon request,
|
# We are happy to raise these upper bounds upon request,
|
||||||
# provided we check that it's safe to do so (i.e. that CI passes).
|
# provided we check that it's safe to do so (i.e. that CI passes).
|
||||||
requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
|
requires = ["poetry-core>=1.1.0,<=1.9.0", "setuptools_rust>=1.3,<=1.8.1"]
|
||||||
build-backend = "poetry.core.masonry.api"
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
|
||||||
|
|
||||||
@@ -378,19 +378,16 @@ build-backend = "poetry.core.masonry.api"
|
|||||||
# Skip unsupported platforms (by us or by Rust).
|
# Skip unsupported platforms (by us or by Rust).
|
||||||
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
|
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
|
||||||
# We skip:
|
# We skip:
|
||||||
# - CPython 3.6, 3.7 and 3.8: EOLed
|
# - CPython 3.6 and 3.7: EOLed
|
||||||
# - PyPy 3.7 and 3.8: we only support Python 3.9+
|
# - PyPy 3.7: we only support Python 3.8+
|
||||||
# - musllinux i686: excluded to reduce number of wheels we build.
|
# - musllinux i686: excluded to reduce number of wheels we build.
|
||||||
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
|
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
|
||||||
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
|
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
|
||||||
# c.f. https://github.com/matrix-org/synapse/pull/14259
|
# c.f. https://github.com/matrix-org/synapse/pull/14259
|
||||||
skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
|
skip = "cp36* cp37* pp37* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
|
||||||
|
|
||||||
# We need a rust compiler.
|
# We need a rust compiler
|
||||||
#
|
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"
|
||||||
# We temporarily pin Rust to 1.82.0 to work around
|
|
||||||
# https://github.com/element-hq/synapse/issues/17988
|
|
||||||
before-all = "sh .ci/before_build_wheel.sh"
|
|
||||||
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
|
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
|
||||||
|
|
||||||
# For some reason if we don't manually clean the build directory we
|
# For some reason if we don't manually clean the build directory we
|
||||||
|
|||||||
@@ -30,14 +30,14 @@ http = "1.1.0"
|
|||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
log = "0.4.17"
|
log = "0.4.17"
|
||||||
mime = "0.3.17"
|
mime = "0.3.17"
|
||||||
pyo3 = { version = "0.23.5", features = [
|
pyo3 = { version = "0.21.0", features = [
|
||||||
"macros",
|
"macros",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"abi3",
|
"abi3",
|
||||||
"abi3-py39",
|
"abi3-py38",
|
||||||
] }
|
] }
|
||||||
pyo3-log = "0.12.0"
|
pyo3-log = "0.10.0"
|
||||||
pythonize = "0.23.0"
|
pythonize = "0.21.0"
|
||||||
regex = "1.6.0"
|
regex = "1.6.0"
|
||||||
sha2 = "0.10.8"
|
sha2 = "0.10.8"
|
||||||
serde = { version = "1.0.144", features = ["derive"] }
|
serde = { version = "1.0.144", features = ["derive"] }
|
||||||
|
|||||||
@@ -60,7 +60,6 @@ fn bench_match_exact(b: &mut Bencher) {
|
|||||||
true,
|
true,
|
||||||
vec![],
|
vec![],
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -106,7 +105,6 @@ fn bench_match_word(b: &mut Bencher) {
|
|||||||
true,
|
true,
|
||||||
vec![],
|
vec![],
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -152,7 +150,6 @@ fn bench_match_word_miss(b: &mut Bencher) {
|
|||||||
true,
|
true,
|
||||||
vec![],
|
vec![],
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -198,7 +195,6 @@ fn bench_eval_message(b: &mut Bencher) {
|
|||||||
true,
|
true,
|
||||||
vec![],
|
vec![],
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -209,7 +205,6 @@ fn bench_eval_message(b: &mut Bencher) {
|
|||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
false,
|
||||||
false,
|
|
||||||
);
|
);
|
||||||
|
|
||||||
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
|
b.iter(|| eval.run(&rules, Some("bob"), Some("person")));
|
||||||
|
|||||||
@@ -32,14 +32,14 @@ use crate::push::utils::{glob_to_regex, GlobMatchType};
|
|||||||
|
|
||||||
/// Called when registering modules with python.
|
/// Called when registering modules with python.
|
||||||
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
||||||
let child_module = PyModule::new(py, "acl")?;
|
let child_module = PyModule::new_bound(py, "acl")?;
|
||||||
child_module.add_class::<ServerAclEvaluator>()?;
|
child_module.add_class::<ServerAclEvaluator>()?;
|
||||||
|
|
||||||
m.add_submodule(&child_module)?;
|
m.add_submodule(&child_module)?;
|
||||||
|
|
||||||
// We need to manually add the module to sys.modules to make `from
|
// We need to manually add the module to sys.modules to make `from
|
||||||
// synapse.synapse_rust import acl` work.
|
// synapse.synapse_rust import acl` work.
|
||||||
py.import("sys")?
|
py.import_bound("sys")?
|
||||||
.getattr("modules")?
|
.getattr("modules")?
|
||||||
.set_item("synapse.synapse_rust.acl", child_module)?;
|
.set_item("synapse.synapse_rust.acl", child_module)?;
|
||||||
|
|
||||||
|
|||||||
@@ -1,107 +0,0 @@
|
|||||||
/*
|
|
||||||
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
*
|
|
||||||
* Copyright (C) 2024 New Vector, Ltd
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as
|
|
||||||
* published by the Free Software Foundation, either version 3 of the
|
|
||||||
* License, or (at your option) any later version.
|
|
||||||
*
|
|
||||||
* See the GNU Affero General Public License for more details:
|
|
||||||
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
*/
|
|
||||||
|
|
||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
use pyo3::{exceptions::PyValueError, pyfunction, PyResult};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
identifier::UserID,
|
|
||||||
matrix_const::{
|
|
||||||
HISTORY_VISIBILITY_INVITED, HISTORY_VISIBILITY_JOINED, MEMBERSHIP_INVITE, MEMBERSHIP_JOIN,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[pyfunction(name = "event_visible_to_server")]
|
|
||||||
pub fn event_visible_to_server_py(
|
|
||||||
sender: String,
|
|
||||||
target_server_name: String,
|
|
||||||
history_visibility: String,
|
|
||||||
erased_senders: HashMap<String, bool>,
|
|
||||||
partial_state_invisible: bool,
|
|
||||||
memberships: Vec<(String, String)>, // (state_key, membership)
|
|
||||||
) -> PyResult<bool> {
|
|
||||||
event_visible_to_server(
|
|
||||||
sender,
|
|
||||||
target_server_name,
|
|
||||||
history_visibility,
|
|
||||||
erased_senders,
|
|
||||||
partial_state_invisible,
|
|
||||||
memberships,
|
|
||||||
)
|
|
||||||
.map_err(|e| PyValueError::new_err(format!("{e}")))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return whether the target server is allowed to see the event.
|
|
||||||
///
|
|
||||||
/// For a fully stated room, the target server is allowed to see an event E if:
|
|
||||||
/// - the state at E has world readable or shared history vis, OR
|
|
||||||
/// - the state at E says that the target server is in the room.
|
|
||||||
///
|
|
||||||
/// For a partially stated room, the target server is allowed to see E if:
|
|
||||||
/// - E was created by this homeserver, AND:
|
|
||||||
/// - the partial state at E has world readable or shared history vis, OR
|
|
||||||
/// - the partial state at E says that the target server is in the room.
|
|
||||||
pub fn event_visible_to_server(
|
|
||||||
sender: String,
|
|
||||||
target_server_name: String,
|
|
||||||
history_visibility: String,
|
|
||||||
erased_senders: HashMap<String, bool>,
|
|
||||||
partial_state_invisible: bool,
|
|
||||||
memberships: Vec<(String, String)>, // (state_key, membership)
|
|
||||||
) -> anyhow::Result<bool> {
|
|
||||||
if let Some(&erased) = erased_senders.get(&sender) {
|
|
||||||
if erased {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if partial_state_invisible {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
if history_visibility != HISTORY_VISIBILITY_INVITED
|
|
||||||
&& history_visibility != HISTORY_VISIBILITY_JOINED
|
|
||||||
{
|
|
||||||
return Ok(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut visible = false;
|
|
||||||
for (state_key, membership) in memberships {
|
|
||||||
let state_key = UserID::try_from(state_key.as_ref())
|
|
||||||
.map_err(|e| anyhow::anyhow!(format!("invalid user_id ({state_key}): {e}")))?;
|
|
||||||
if state_key.server_name() != target_server_name {
|
|
||||||
return Err(anyhow::anyhow!(
|
|
||||||
"state_key.server_name ({}) does not match target_server_name ({target_server_name})",
|
|
||||||
state_key.server_name()
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
match membership.as_str() {
|
|
||||||
MEMBERSHIP_INVITE => {
|
|
||||||
if history_visibility == HISTORY_VISIBILITY_INVITED {
|
|
||||||
visible = true;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
MEMBERSHIP_JOIN => {
|
|
||||||
visible = true;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
_ => continue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(visible)
|
|
||||||
}
|
|
||||||
@@ -41,11 +41,9 @@ use pyo3::{
|
|||||||
pybacked::PyBackedStr,
|
pybacked::PyBackedStr,
|
||||||
pyclass, pymethods,
|
pyclass, pymethods,
|
||||||
types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
|
types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
|
||||||
Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
|
Bound, IntoPy, PyAny, PyObject, PyResult, Python,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::UnwrapInfallible;
|
|
||||||
|
|
||||||
/// Definitions of the various fields of the internal metadata.
|
/// Definitions of the various fields of the internal metadata.
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
enum EventInternalMetadataData {
|
enum EventInternalMetadataData {
|
||||||
@@ -62,59 +60,31 @@ enum EventInternalMetadataData {
|
|||||||
|
|
||||||
impl EventInternalMetadataData {
|
impl EventInternalMetadataData {
|
||||||
/// Convert the field to its name and python object.
|
/// Convert the field to its name and python object.
|
||||||
fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a Bound<'a, PyString>, Bound<'a, PyAny>) {
|
fn to_python_pair<'a>(&self, py: Python<'a>) -> (&'a Bound<'a, PyString>, PyObject) {
|
||||||
match self {
|
match self {
|
||||||
EventInternalMetadataData::OutOfBandMembership(o) => (
|
EventInternalMetadataData::OutOfBandMembership(o) => {
|
||||||
pyo3::intern!(py, "out_of_band_membership"),
|
(pyo3::intern!(py, "out_of_band_membership"), o.into_py(py))
|
||||||
o.into_pyobject(py)
|
}
|
||||||
.unwrap_infallible()
|
EventInternalMetadataData::SendOnBehalfOf(o) => {
|
||||||
.to_owned()
|
(pyo3::intern!(py, "send_on_behalf_of"), o.into_py(py))
|
||||||
.into_any(),
|
}
|
||||||
),
|
EventInternalMetadataData::RecheckRedaction(o) => {
|
||||||
EventInternalMetadataData::SendOnBehalfOf(o) => (
|
(pyo3::intern!(py, "recheck_redaction"), o.into_py(py))
|
||||||
pyo3::intern!(py, "send_on_behalf_of"),
|
}
|
||||||
o.into_pyobject(py).unwrap_infallible().into_any(),
|
EventInternalMetadataData::SoftFailed(o) => {
|
||||||
),
|
(pyo3::intern!(py, "soft_failed"), o.into_py(py))
|
||||||
EventInternalMetadataData::RecheckRedaction(o) => (
|
}
|
||||||
pyo3::intern!(py, "recheck_redaction"),
|
EventInternalMetadataData::ProactivelySend(o) => {
|
||||||
o.into_pyobject(py)
|
(pyo3::intern!(py, "proactively_send"), o.into_py(py))
|
||||||
.unwrap_infallible()
|
}
|
||||||
.to_owned()
|
EventInternalMetadataData::Redacted(o) => {
|
||||||
.into_any(),
|
(pyo3::intern!(py, "redacted"), o.into_py(py))
|
||||||
),
|
}
|
||||||
EventInternalMetadataData::SoftFailed(o) => (
|
EventInternalMetadataData::TxnId(o) => (pyo3::intern!(py, "txn_id"), o.into_py(py)),
|
||||||
pyo3::intern!(py, "soft_failed"),
|
EventInternalMetadataData::TokenId(o) => (pyo3::intern!(py, "token_id"), o.into_py(py)),
|
||||||
o.into_pyobject(py)
|
EventInternalMetadataData::DeviceId(o) => {
|
||||||
.unwrap_infallible()
|
(pyo3::intern!(py, "device_id"), o.into_py(py))
|
||||||
.to_owned()
|
}
|
||||||
.into_any(),
|
|
||||||
),
|
|
||||||
EventInternalMetadataData::ProactivelySend(o) => (
|
|
||||||
pyo3::intern!(py, "proactively_send"),
|
|
||||||
o.into_pyobject(py)
|
|
||||||
.unwrap_infallible()
|
|
||||||
.to_owned()
|
|
||||||
.into_any(),
|
|
||||||
),
|
|
||||||
EventInternalMetadataData::Redacted(o) => (
|
|
||||||
pyo3::intern!(py, "redacted"),
|
|
||||||
o.into_pyobject(py)
|
|
||||||
.unwrap_infallible()
|
|
||||||
.to_owned()
|
|
||||||
.into_any(),
|
|
||||||
),
|
|
||||||
EventInternalMetadataData::TxnId(o) => (
|
|
||||||
pyo3::intern!(py, "txn_id"),
|
|
||||||
o.into_pyobject(py).unwrap_infallible().into_any(),
|
|
||||||
),
|
|
||||||
EventInternalMetadataData::TokenId(o) => (
|
|
||||||
pyo3::intern!(py, "token_id"),
|
|
||||||
o.into_pyobject(py).unwrap_infallible().into_any(),
|
|
||||||
),
|
|
||||||
EventInternalMetadataData::DeviceId(o) => (
|
|
||||||
pyo3::intern!(py, "device_id"),
|
|
||||||
o.into_pyobject(py).unwrap_infallible().into_any(),
|
|
||||||
),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -277,7 +247,7 @@ impl EventInternalMetadata {
|
|||||||
///
|
///
|
||||||
/// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
|
/// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
|
||||||
fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
|
fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
|
||||||
let dict = PyDict::new(py);
|
let dict = PyDict::new_bound(py);
|
||||||
|
|
||||||
for entry in &self.data {
|
for entry in &self.data {
|
||||||
let (key, value) = entry.to_python_pair(py);
|
let (key, value) = entry.to_python_pair(py);
|
||||||
|
|||||||
@@ -22,23 +22,21 @@
|
|||||||
|
|
||||||
use pyo3::{
|
use pyo3::{
|
||||||
types::{PyAnyMethods, PyModule, PyModuleMethods},
|
types::{PyAnyMethods, PyModule, PyModuleMethods},
|
||||||
wrap_pyfunction, Bound, PyResult, Python,
|
Bound, PyResult, Python,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub mod filter;
|
|
||||||
mod internal_metadata;
|
mod internal_metadata;
|
||||||
|
|
||||||
/// Called when registering modules with python.
|
/// Called when registering modules with python.
|
||||||
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
||||||
let child_module = PyModule::new(py, "events")?;
|
let child_module = PyModule::new_bound(py, "events")?;
|
||||||
child_module.add_class::<internal_metadata::EventInternalMetadata>()?;
|
child_module.add_class::<internal_metadata::EventInternalMetadata>()?;
|
||||||
child_module.add_function(wrap_pyfunction!(filter::event_visible_to_server_py, m)?)?;
|
|
||||||
|
|
||||||
m.add_submodule(&child_module)?;
|
m.add_submodule(&child_module)?;
|
||||||
|
|
||||||
// We need to manually add the module to sys.modules to make `from
|
// We need to manually add the module to sys.modules to make `from
|
||||||
// synapse.synapse_rust import events` work.
|
// synapse.synapse_rust import events` work.
|
||||||
py.import("sys")?
|
py.import_bound("sys")?
|
||||||
.getattr("modules")?
|
.getattr("modules")?
|
||||||
.set_item("synapse.synapse_rust.events", child_module)?;
|
.set_item("synapse.synapse_rust.events", child_module)?;
|
||||||
|
|
||||||
|
|||||||
@@ -70,7 +70,7 @@ pub fn http_request_from_twisted(request: &Bound<'_, PyAny>) -> PyResult<Request
|
|||||||
let headers_iter = request
|
let headers_iter = request
|
||||||
.getattr("requestHeaders")?
|
.getattr("requestHeaders")?
|
||||||
.call_method0("getAllRawHeaders")?
|
.call_method0("getAllRawHeaders")?
|
||||||
.try_iter()?;
|
.iter()?;
|
||||||
|
|
||||||
for header in headers_iter {
|
for header in headers_iter {
|
||||||
let header = header?;
|
let header = header?;
|
||||||
|
|||||||
@@ -1,252 +0,0 @@
|
|||||||
/*
|
|
||||||
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
*
|
|
||||||
* Copyright (C) 2024 New Vector, Ltd
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as
|
|
||||||
* published by the Free Software Foundation, either version 3 of the
|
|
||||||
* License, or (at your option) any later version.
|
|
||||||
*
|
|
||||||
* See the GNU Affero General Public License for more details:
|
|
||||||
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
*/
|
|
||||||
|
|
||||||
//! # Matrix Identifiers
|
|
||||||
//!
|
|
||||||
//! This module contains definitions and utilities for working with matrix identifiers.
|
|
||||||
|
|
||||||
use std::{fmt, ops::Deref};
|
|
||||||
|
|
||||||
/// Errors that can occur when parsing a matrix identifier.
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub enum IdentifierError {
|
|
||||||
IncorrectSigil,
|
|
||||||
MissingColon,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for IdentifierError {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{:?}", self)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A Matrix user_id.
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct UserID(String);
|
|
||||||
|
|
||||||
impl UserID {
|
|
||||||
/// Returns the `localpart` of the user_id.
|
|
||||||
pub fn localpart(&self) -> &str {
|
|
||||||
&self[1..self.colon_pos()]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the `server_name` / `domain` of the user_id.
|
|
||||||
pub fn server_name(&self) -> &str {
|
|
||||||
&self[self.colon_pos() + 1..]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the position of the ':' inside of the user_id.
|
|
||||||
/// Used when splitting the user_id into it's respective parts.
|
|
||||||
fn colon_pos(&self) -> usize {
|
|
||||||
self.find(':').unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for UserID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `UserID` from the provided `&str`.
|
|
||||||
/// Can fail if the user_id is incorrectly formatted.
|
|
||||||
fn try_from(s: &str) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('@') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
if s.find(':').is_none() {
|
|
||||||
return Err(IdentifierError::MissingColon);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(UserID(s.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<String> for UserID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `UserID` from the provided `&str`.
|
|
||||||
/// Can fail if the user_id is incorrectly formatted.
|
|
||||||
fn try_from(s: String) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('@') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
if s.find(':').is_none() {
|
|
||||||
return Err(IdentifierError::MissingColon);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(UserID(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> serde::Deserialize<'de> for UserID {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let s: String = serde::Deserialize::deserialize(deserializer)?;
|
|
||||||
UserID::try_from(s).map_err(serde::de::Error::custom)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for UserID {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for UserID {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A Matrix room_id.
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct RoomID(String);
|
|
||||||
|
|
||||||
impl RoomID {
|
|
||||||
/// Returns the `localpart` of the room_id.
|
|
||||||
pub fn localpart(&self) -> &str {
|
|
||||||
&self[1..self.colon_pos()]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the `server_name` / `domain` of the room_id.
|
|
||||||
pub fn server_name(&self) -> &str {
|
|
||||||
&self[self.colon_pos() + 1..]
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the position of the ':' inside of the room_id.
|
|
||||||
/// Used when splitting the room_id into it's respective parts.
|
|
||||||
fn colon_pos(&self) -> usize {
|
|
||||||
self.find(':').unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<&str> for RoomID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `RoomID` from the provided `&str`.
|
|
||||||
/// Can fail if the room_id is incorrectly formatted.
|
|
||||||
fn try_from(s: &str) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('!') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
if s.find(':').is_none() {
|
|
||||||
return Err(IdentifierError::MissingColon);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(RoomID(s.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<String> for RoomID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `RoomID` from the provided `String`.
|
|
||||||
/// Can fail if the room_id is incorrectly formatted.
|
|
||||||
fn try_from(s: String) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('!') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
if s.find(':').is_none() {
|
|
||||||
return Err(IdentifierError::MissingColon);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(RoomID(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> serde::Deserialize<'de> for RoomID {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let s: String = serde::Deserialize::deserialize(deserializer)?;
|
|
||||||
RoomID::try_from(s).map_err(serde::de::Error::custom)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for RoomID {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for RoomID {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A Matrix event_id.
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
|
||||||
pub struct EventID(String);
|
|
||||||
|
|
||||||
impl TryFrom<&str> for EventID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `EventID` from the provided `&str`.
|
|
||||||
/// Can fail if the event_id is incorrectly formatted.
|
|
||||||
fn try_from(s: &str) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('$') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(EventID(s.to_string()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TryFrom<String> for EventID {
|
|
||||||
type Error = IdentifierError;
|
|
||||||
|
|
||||||
/// Will try creating a `EventID` from the provided `String`.
|
|
||||||
/// Can fail if the event_id is incorrectly formatted.
|
|
||||||
fn try_from(s: String) -> Result<Self, Self::Error> {
|
|
||||||
if !s.starts_with('$') {
|
|
||||||
return Err(IdentifierError::IncorrectSigil);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(EventID(s))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'de> serde::Deserialize<'de> for EventID {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
let s: String = serde::Deserialize::deserialize(deserializer)?;
|
|
||||||
EventID::try_from(s).map_err(serde::de::Error::custom)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Deref for EventID {
|
|
||||||
type Target = str;
|
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for EventID {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
write!(f, "{}", self.0)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,3 @@
|
|||||||
use std::convert::Infallible;
|
|
||||||
|
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use pyo3::prelude::*;
|
use pyo3::prelude::*;
|
||||||
use pyo3_log::ResetHandle;
|
use pyo3_log::ResetHandle;
|
||||||
@@ -8,8 +6,6 @@ pub mod acl;
|
|||||||
pub mod errors;
|
pub mod errors;
|
||||||
pub mod events;
|
pub mod events;
|
||||||
pub mod http;
|
pub mod http;
|
||||||
pub mod identifier;
|
|
||||||
pub mod matrix_const;
|
|
||||||
pub mod push;
|
pub mod push;
|
||||||
pub mod rendezvous;
|
pub mod rendezvous;
|
||||||
|
|
||||||
@@ -54,16 +50,3 @@ fn synapse_rust(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
|||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait UnwrapInfallible<T> {
|
|
||||||
fn unwrap_infallible(self) -> T;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> UnwrapInfallible<T> for Result<T, Infallible> {
|
|
||||||
fn unwrap_infallible(self) -> T {
|
|
||||||
match self {
|
|
||||||
Ok(val) => val,
|
|
||||||
Err(never) => match never {},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,28 +0,0 @@
|
|||||||
/*
|
|
||||||
* This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
*
|
|
||||||
* Copyright (C) 2024 New Vector, Ltd
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as
|
|
||||||
* published by the Free Software Foundation, either version 3 of the
|
|
||||||
* License, or (at your option) any later version.
|
|
||||||
*
|
|
||||||
* See the GNU Affero General Public License for more details:
|
|
||||||
* <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
*/
|
|
||||||
|
|
||||||
//! # Matrix Constants
|
|
||||||
//!
|
|
||||||
//! This module contains definitions for constant values described by the matrix specification.
|
|
||||||
|
|
||||||
pub const HISTORY_VISIBILITY_WORLD_READABLE: &str = "world_readable";
|
|
||||||
pub const HISTORY_VISIBILITY_SHARED: &str = "shared";
|
|
||||||
pub const HISTORY_VISIBILITY_INVITED: &str = "invited";
|
|
||||||
pub const HISTORY_VISIBILITY_JOINED: &str = "joined";
|
|
||||||
|
|
||||||
pub const MEMBERSHIP_BAN: &str = "ban";
|
|
||||||
pub const MEMBERSHIP_LEAVE: &str = "leave";
|
|
||||||
pub const MEMBERSHIP_KNOCK: &str = "knock";
|
|
||||||
pub const MEMBERSHIP_INVITE: &str = "invite";
|
|
||||||
pub const MEMBERSHIP_JOIN: &str = "join";
|
|
||||||
@@ -81,7 +81,7 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
|
|||||||
))]),
|
))]),
|
||||||
actions: Cow::Borrowed(&[Action::Notify]),
|
actions: Cow::Borrowed(&[Action::Notify]),
|
||||||
default: true,
|
default: true,
|
||||||
default_enabled: true,
|
default_enabled: false,
|
||||||
},
|
},
|
||||||
PushRule {
|
PushRule {
|
||||||
rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
|
rule_id: Cow::Borrowed("global/override/.m.rule.suppress_notices"),
|
||||||
|
|||||||
@@ -105,9 +105,6 @@ pub struct PushRuleEvaluator {
|
|||||||
/// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
|
/// If MSC3931 (room version feature flags) is enabled. Usually controlled by the same
|
||||||
/// flag as MSC1767 (extensible events core).
|
/// flag as MSC1767 (extensible events core).
|
||||||
msc3931_enabled: bool,
|
msc3931_enabled: bool,
|
||||||
|
|
||||||
// If MSC4210 (remove legacy mentions) is enabled.
|
|
||||||
msc4210_enabled: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pymethods]
|
#[pymethods]
|
||||||
@@ -125,7 +122,6 @@ impl PushRuleEvaluator {
|
|||||||
related_event_match_enabled,
|
related_event_match_enabled,
|
||||||
room_version_feature_flags,
|
room_version_feature_flags,
|
||||||
msc3931_enabled,
|
msc3931_enabled,
|
||||||
msc4210_enabled,
|
|
||||||
))]
|
))]
|
||||||
pub fn py_new(
|
pub fn py_new(
|
||||||
flattened_keys: BTreeMap<String, JsonValue>,
|
flattened_keys: BTreeMap<String, JsonValue>,
|
||||||
@@ -137,7 +133,6 @@ impl PushRuleEvaluator {
|
|||||||
related_event_match_enabled: bool,
|
related_event_match_enabled: bool,
|
||||||
room_version_feature_flags: Vec<String>,
|
room_version_feature_flags: Vec<String>,
|
||||||
msc3931_enabled: bool,
|
msc3931_enabled: bool,
|
||||||
msc4210_enabled: bool,
|
|
||||||
) -> Result<Self, Error> {
|
) -> Result<Self, Error> {
|
||||||
let body = match flattened_keys.get("content.body") {
|
let body = match flattened_keys.get("content.body") {
|
||||||
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
|
Some(JsonValue::Value(SimpleJsonValue::Str(s))) => s.clone().into_owned(),
|
||||||
@@ -155,7 +150,6 @@ impl PushRuleEvaluator {
|
|||||||
related_event_match_enabled,
|
related_event_match_enabled,
|
||||||
room_version_feature_flags,
|
room_version_feature_flags,
|
||||||
msc3931_enabled,
|
msc3931_enabled,
|
||||||
msc4210_enabled,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -167,7 +161,6 @@ impl PushRuleEvaluator {
|
|||||||
///
|
///
|
||||||
/// Returns the set of actions, if any, that match (filtering out any
|
/// Returns the set of actions, if any, that match (filtering out any
|
||||||
/// `dont_notify` and `coalesce` actions).
|
/// `dont_notify` and `coalesce` actions).
|
||||||
#[pyo3(signature = (push_rules, user_id=None, display_name=None))]
|
|
||||||
pub fn run(
|
pub fn run(
|
||||||
&self,
|
&self,
|
||||||
push_rules: &FilteredPushRules,
|
push_rules: &FilteredPushRules,
|
||||||
@@ -183,8 +176,7 @@ impl PushRuleEvaluator {
|
|||||||
|
|
||||||
// For backwards-compatibility the legacy mention rules are disabled
|
// For backwards-compatibility the legacy mention rules are disabled
|
||||||
// if the event contains the 'm.mentions' property.
|
// if the event contains the 'm.mentions' property.
|
||||||
// Additionally, MSC4210 always disables the legacy rules.
|
if self.has_mentions
|
||||||
if (self.has_mentions || self.msc4210_enabled)
|
|
||||||
&& (rule_id == "global/override/.m.rule.contains_display_name"
|
&& (rule_id == "global/override/.m.rule.contains_display_name"
|
||||||
|| rule_id == "global/content/.m.rule.contains_user_name"
|
|| rule_id == "global/content/.m.rule.contains_user_name"
|
||||||
|| rule_id == "global/override/.m.rule.roomnotif")
|
|| rule_id == "global/override/.m.rule.roomnotif")
|
||||||
@@ -237,7 +229,6 @@ impl PushRuleEvaluator {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Check if the given condition matches.
|
/// Check if the given condition matches.
|
||||||
#[pyo3(signature = (condition, user_id=None, display_name=None))]
|
|
||||||
fn matches(
|
fn matches(
|
||||||
&self,
|
&self,
|
||||||
condition: Condition,
|
condition: Condition,
|
||||||
@@ -535,7 +526,6 @@ fn push_rule_evaluator() {
|
|||||||
true,
|
true,
|
||||||
vec![],
|
vec![],
|
||||||
true,
|
true,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -565,7 +555,6 @@ fn test_requires_room_version_supports_condition() {
|
|||||||
false,
|
false,
|
||||||
flags,
|
flags,
|
||||||
true,
|
true,
|
||||||
false,
|
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
@@ -593,7 +582,7 @@ fn test_requires_room_version_supports_condition() {
|
|||||||
};
|
};
|
||||||
let rules = PushRules::new(vec![custom_rule]);
|
let rules = PushRules::new(vec![custom_rule]);
|
||||||
result = evaluator.run(
|
result = evaluator.run(
|
||||||
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
|
&FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false),
|
||||||
None,
|
None,
|
||||||
None,
|
None,
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -65,8 +65,8 @@ use anyhow::{Context, Error};
|
|||||||
use log::warn;
|
use log::warn;
|
||||||
use pyo3::exceptions::PyTypeError;
|
use pyo3::exceptions::PyTypeError;
|
||||||
use pyo3::prelude::*;
|
use pyo3::prelude::*;
|
||||||
use pyo3::types::{PyBool, PyInt, PyList, PyString};
|
use pyo3::types::{PyBool, PyList, PyLong, PyString};
|
||||||
use pythonize::{depythonize, pythonize, PythonizeError};
|
use pythonize::{depythonize_bound, pythonize};
|
||||||
use serde::de::Error as _;
|
use serde::de::Error as _;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
@@ -79,7 +79,7 @@ pub mod utils;
|
|||||||
|
|
||||||
/// Called when registering modules with python.
|
/// Called when registering modules with python.
|
||||||
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
||||||
let child_module = PyModule::new(py, "push")?;
|
let child_module = PyModule::new_bound(py, "push")?;
|
||||||
child_module.add_class::<PushRule>()?;
|
child_module.add_class::<PushRule>()?;
|
||||||
child_module.add_class::<PushRules>()?;
|
child_module.add_class::<PushRules>()?;
|
||||||
child_module.add_class::<FilteredPushRules>()?;
|
child_module.add_class::<FilteredPushRules>()?;
|
||||||
@@ -90,7 +90,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
|
|||||||
|
|
||||||
// We need to manually add the module to sys.modules to make `from
|
// We need to manually add the module to sys.modules to make `from
|
||||||
// synapse.synapse_rust import push` work.
|
// synapse.synapse_rust import push` work.
|
||||||
py.import("sys")?
|
py.import_bound("sys")?
|
||||||
.getattr("modules")?
|
.getattr("modules")?
|
||||||
.set_item("synapse.synapse_rust.push", child_module)?;
|
.set_item("synapse.synapse_rust.push", child_module)?;
|
||||||
|
|
||||||
@@ -182,16 +182,12 @@ pub enum Action {
|
|||||||
Unknown(Value),
|
Unknown(Value),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'py> IntoPyObject<'py> for Action {
|
impl IntoPy<PyObject> for Action {
|
||||||
type Target = PyAny;
|
fn into_py(self, py: Python<'_>) -> PyObject {
|
||||||
type Output = Bound<'py, Self::Target>;
|
|
||||||
type Error = PythonizeError;
|
|
||||||
|
|
||||||
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
|
|
||||||
// When we pass the `Action` struct to Python we want it to be converted
|
// When we pass the `Action` struct to Python we want it to be converted
|
||||||
// to a dict. We use `pythonize`, which converts the struct using the
|
// to a dict. We use `pythonize`, which converts the struct using the
|
||||||
// `serde` serialization.
|
// `serde` serialization.
|
||||||
pythonize(py, &self)
|
pythonize(py, &self).expect("valid action")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -274,13 +270,13 @@ pub enum SimpleJsonValue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'source> FromPyObject<'source> for SimpleJsonValue {
|
impl<'source> FromPyObject<'source> for SimpleJsonValue {
|
||||||
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
|
fn extract(ob: &'source PyAny) -> PyResult<Self> {
|
||||||
if let Ok(s) = ob.downcast::<PyString>() {
|
if let Ok(s) = ob.downcast::<PyString>() {
|
||||||
Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string())))
|
Ok(SimpleJsonValue::Str(Cow::Owned(s.to_string())))
|
||||||
// A bool *is* an int, ensure we try bool first.
|
// A bool *is* an int, ensure we try bool first.
|
||||||
} else if let Ok(b) = ob.downcast::<PyBool>() {
|
} else if let Ok(b) = ob.downcast::<PyBool>() {
|
||||||
Ok(SimpleJsonValue::Bool(b.extract()?))
|
Ok(SimpleJsonValue::Bool(b.extract()?))
|
||||||
} else if let Ok(i) = ob.downcast::<PyInt>() {
|
} else if let Ok(i) = ob.downcast::<PyLong>() {
|
||||||
Ok(SimpleJsonValue::Int(i.extract()?))
|
Ok(SimpleJsonValue::Int(i.extract()?))
|
||||||
} else if ob.is_none() {
|
} else if ob.is_none() {
|
||||||
Ok(SimpleJsonValue::Null)
|
Ok(SimpleJsonValue::Null)
|
||||||
@@ -302,19 +298,15 @@ pub enum JsonValue {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl<'source> FromPyObject<'source> for JsonValue {
|
impl<'source> FromPyObject<'source> for JsonValue {
|
||||||
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
|
fn extract(ob: &'source PyAny) -> PyResult<Self> {
|
||||||
if let Ok(l) = ob.downcast::<PyList>() {
|
if let Ok(l) = ob.downcast::<PyList>() {
|
||||||
match l
|
match l.iter().map(SimpleJsonValue::extract).collect() {
|
||||||
.iter()
|
|
||||||
.map(|it| SimpleJsonValue::extract_bound(&it))
|
|
||||||
.collect()
|
|
||||||
{
|
|
||||||
Ok(a) => Ok(JsonValue::Array(a)),
|
Ok(a) => Ok(JsonValue::Array(a)),
|
||||||
Err(e) => Err(PyTypeError::new_err(format!(
|
Err(e) => Err(PyTypeError::new_err(format!(
|
||||||
"Can't convert to JsonValue::Array: {e}"
|
"Can't convert to JsonValue::Array: {e}"
|
||||||
))),
|
))),
|
||||||
}
|
}
|
||||||
} else if let Ok(v) = SimpleJsonValue::extract_bound(ob) {
|
} else if let Ok(v) = SimpleJsonValue::extract(ob) {
|
||||||
Ok(JsonValue::Value(v))
|
Ok(JsonValue::Value(v))
|
||||||
} else {
|
} else {
|
||||||
Err(PyTypeError::new_err(format!(
|
Err(PyTypeError::new_err(format!(
|
||||||
@@ -371,19 +363,15 @@ pub enum KnownCondition {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'source> IntoPyObject<'source> for Condition {
|
impl IntoPy<PyObject> for Condition {
|
||||||
type Target = PyAny;
|
fn into_py(self, py: Python<'_>) -> PyObject {
|
||||||
type Output = Bound<'source, Self::Target>;
|
pythonize(py, &self).expect("valid condition")
|
||||||
type Error = PythonizeError;
|
|
||||||
|
|
||||||
fn into_pyobject(self, py: Python<'source>) -> Result<Self::Output, Self::Error> {
|
|
||||||
pythonize(py, &self)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'source> FromPyObject<'source> for Condition {
|
impl<'source> FromPyObject<'source> for Condition {
|
||||||
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
|
fn extract_bound(ob: &Bound<'source, PyAny>) -> PyResult<Self> {
|
||||||
Ok(depythonize(ob)?)
|
Ok(depythonize_bound(ob.clone())?)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -546,7 +534,6 @@ pub struct FilteredPushRules {
|
|||||||
msc3381_polls_enabled: bool,
|
msc3381_polls_enabled: bool,
|
||||||
msc3664_enabled: bool,
|
msc3664_enabled: bool,
|
||||||
msc4028_push_encrypted_events: bool,
|
msc4028_push_encrypted_events: bool,
|
||||||
msc4210_enabled: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[pymethods]
|
#[pymethods]
|
||||||
@@ -559,7 +546,6 @@ impl FilteredPushRules {
|
|||||||
msc3381_polls_enabled: bool,
|
msc3381_polls_enabled: bool,
|
||||||
msc3664_enabled: bool,
|
msc3664_enabled: bool,
|
||||||
msc4028_push_encrypted_events: bool,
|
msc4028_push_encrypted_events: bool,
|
||||||
msc4210_enabled: bool,
|
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
push_rules,
|
push_rules,
|
||||||
@@ -568,7 +554,6 @@ impl FilteredPushRules {
|
|||||||
msc3381_polls_enabled,
|
msc3381_polls_enabled,
|
||||||
msc3664_enabled,
|
msc3664_enabled,
|
||||||
msc4028_push_encrypted_events,
|
msc4028_push_encrypted_events,
|
||||||
msc4210_enabled,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -611,14 +596,6 @@ impl FilteredPushRules {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.msc4210_enabled
|
|
||||||
&& (rule.rule_id == "global/override/.m.rule.contains_display_name"
|
|
||||||
|| rule.rule_id == "global/content/.m.rule.contains_user_name"
|
|
||||||
|| rule.rule_id == "global/override/.m.rule.roomnotif")
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
true
|
true
|
||||||
})
|
})
|
||||||
.map(|r| {
|
.map(|r| {
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ use anyhow::bail;
|
|||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use anyhow::Error;
|
use anyhow::Error;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
|
use regex;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use regex::RegexBuilder;
|
use regex::RegexBuilder;
|
||||||
|
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ use pyo3::{
|
|||||||
exceptions::PyValueError,
|
exceptions::PyValueError,
|
||||||
pyclass, pymethods,
|
pyclass, pymethods,
|
||||||
types::{PyAnyMethods, PyModule, PyModuleMethods},
|
types::{PyAnyMethods, PyModule, PyModuleMethods},
|
||||||
Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
|
Bound, Py, PyAny, PyObject, PyResult, Python, ToPyObject,
|
||||||
};
|
};
|
||||||
use ulid::Ulid;
|
use ulid::Ulid;
|
||||||
|
|
||||||
@@ -37,7 +37,6 @@ use self::session::Session;
|
|||||||
use crate::{
|
use crate::{
|
||||||
errors::{NotFoundError, SynapseError},
|
errors::{NotFoundError, SynapseError},
|
||||||
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
|
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
|
||||||
UnwrapInfallible,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
mod session;
|
mod session;
|
||||||
@@ -47,7 +46,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
|
|||||||
headers.typed_insert(AccessControlAllowOrigin::ANY);
|
headers.typed_insert(AccessControlAllowOrigin::ANY);
|
||||||
headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
|
headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
|
||||||
headers.typed_insert(Pragma::no_cache());
|
headers.typed_insert(Pragma::no_cache());
|
||||||
headers.typed_insert(CacheControl::new().with_no_store().with_no_transform());
|
headers.typed_insert(CacheControl::new().with_no_store());
|
||||||
headers.typed_insert(session.etag());
|
headers.typed_insert(session.etag());
|
||||||
headers.typed_insert(session.expires());
|
headers.typed_insert(session.expires());
|
||||||
headers.typed_insert(session.last_modified());
|
headers.typed_insert(session.last_modified());
|
||||||
@@ -126,11 +125,7 @@ impl RendezvousHandler {
|
|||||||
let base = Uri::try_from(format!("{base}_synapse/client/rendezvous"))
|
let base = Uri::try_from(format!("{base}_synapse/client/rendezvous"))
|
||||||
.map_err(|_| PyValueError::new_err("Invalid base URI"))?;
|
.map_err(|_| PyValueError::new_err("Invalid base URI"))?;
|
||||||
|
|
||||||
let clock = homeserver
|
let clock = homeserver.call_method0("get_clock")?.to_object(py);
|
||||||
.call_method0("get_clock")?
|
|
||||||
.into_pyobject(py)
|
|
||||||
.unwrap_infallible()
|
|
||||||
.unbind();
|
|
||||||
|
|
||||||
// Construct a Python object so that we can get a reference to the
|
// Construct a Python object so that we can get a reference to the
|
||||||
// evict method and schedule it to run.
|
// evict method and schedule it to run.
|
||||||
@@ -192,12 +187,10 @@ impl RendezvousHandler {
|
|||||||
"url": uri,
|
"url": uri,
|
||||||
})
|
})
|
||||||
.to_string();
|
.to_string();
|
||||||
let length = response.len() as _;
|
|
||||||
|
|
||||||
let mut response = Response::new(response.as_bytes());
|
let mut response = Response::new(response.as_bytes());
|
||||||
*response.status_mut() = StatusCode::CREATED;
|
*response.status_mut() = StatusCode::CREATED;
|
||||||
response.headers_mut().typed_insert(ContentType::json());
|
response.headers_mut().typed_insert(ContentType::json());
|
||||||
response.headers_mut().typed_insert(ContentLength(length));
|
|
||||||
prepare_headers(response.headers_mut(), &session);
|
prepare_headers(response.headers_mut(), &session);
|
||||||
http_response_to_twisted(twisted_request, response)?;
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
@@ -295,14 +288,6 @@ impl RendezvousHandler {
|
|||||||
let mut response = Response::new(Bytes::new());
|
let mut response = Response::new(Bytes::new());
|
||||||
*response.status_mut() = StatusCode::ACCEPTED;
|
*response.status_mut() = StatusCode::ACCEPTED;
|
||||||
prepare_headers(response.headers_mut(), session);
|
prepare_headers(response.headers_mut(), session);
|
||||||
|
|
||||||
// Even though this isn't mandated by the MSC, we set a Content-Type on the response. It
|
|
||||||
// doesn't do any harm as the body is empty, but this helps escape a bug in some reverse
|
|
||||||
// proxy/cache setup which strips the ETag header if there is no Content-Type set.
|
|
||||||
// Specifically, we noticed this behaviour when placing Synapse behind Cloudflare.
|
|
||||||
response.headers_mut().typed_insert(ContentType::text());
|
|
||||||
response.headers_mut().typed_insert(ContentLength(0));
|
|
||||||
|
|
||||||
http_response_to_twisted(twisted_request, response)?;
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -319,7 +304,6 @@ impl RendezvousHandler {
|
|||||||
response
|
response
|
||||||
.headers_mut()
|
.headers_mut()
|
||||||
.typed_insert(AccessControlAllowOrigin::ANY);
|
.typed_insert(AccessControlAllowOrigin::ANY);
|
||||||
response.headers_mut().typed_insert(ContentLength(0));
|
|
||||||
http_response_to_twisted(twisted_request, response)?;
|
http_response_to_twisted(twisted_request, response)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
@@ -327,7 +311,7 @@ impl RendezvousHandler {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
|
||||||
let child_module = PyModule::new(py, "rendezvous")?;
|
let child_module = PyModule::new_bound(py, "rendezvous")?;
|
||||||
|
|
||||||
child_module.add_class::<RendezvousHandler>()?;
|
child_module.add_class::<RendezvousHandler>()?;
|
||||||
|
|
||||||
@@ -335,7 +319,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
|
|||||||
|
|
||||||
// We need to manually add the module to sys.modules to make `from
|
// We need to manually add the module to sys.modules to make `from
|
||||||
// synapse.synapse_rust import rendezvous` work.
|
// synapse.synapse_rust import rendezvous` work.
|
||||||
py.import("sys")?
|
py.import_bound("sys")?
|
||||||
.getattr("modules")?
|
.getattr("modules")?
|
||||||
.set_item("synapse.synapse_rust.rendezvous", child_module)?;
|
.set_item("synapse.synapse_rust.rendezvous", child_module)?;
|
||||||
|
|
||||||
|
|||||||
@@ -28,11 +28,12 @@ from typing import Collection, Optional, Sequence, Set
|
|||||||
# example)
|
# example)
|
||||||
DISTS = (
|
DISTS = (
|
||||||
"debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
|
"debian:bullseye", # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
|
||||||
"debian:bookworm", # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
|
"debian:bookworm", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
|
||||||
"debian:sid", # (rolling distro, no EOL)
|
"debian:sid", # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
|
||||||
|
"ubuntu:focal", # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
|
||||||
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
|
"ubuntu:jammy", # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
|
||||||
"ubuntu:noble", # 24.04 LTS (EOL 2029-06)
|
"ubuntu:lunar", # 23.04 (EOL 2024-01) (our EOL forced by Python 3.11 is 2027-10-24)
|
||||||
"ubuntu:oracular", # 24.10 (EOL 2025-07)
|
"ubuntu:mantic", # 23.10 (EOL 2024-07) (our EOL forced by Python 3.11 is 2027-10-24)
|
||||||
"debian:trixie", # (EOL not specified yet)
|
"debian:trixie", # (EOL not specified yet)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -195,10 +195,6 @@ if [ -z "$skip_docker_build" ]; then
|
|||||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||||
$CONTAINER_RUNTIME build -t complement-synapse \
|
$CONTAINER_RUNTIME build -t complement-synapse \
|
||||||
`# This is the tag we end up pushing to the registry (see` \
|
|
||||||
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
|
|
||||||
`# so people can reference it by the same name locally.` \
|
|
||||||
-t ghcr.io/element-hq/synapse/complement-synapse \
|
|
||||||
-f "docker/complement/Dockerfile" "docker/complement"
|
-f "docker/complement/Dockerfile" "docker/complement"
|
||||||
echo_if_github "::endgroup::"
|
echo_if_github "::endgroup::"
|
||||||
|
|
||||||
|
|||||||
@@ -360,7 +360,7 @@ def is_cacheable(
|
|||||||
# For a type alias, check if the underlying real type is cachable.
|
# For a type alias, check if the underlying real type is cachable.
|
||||||
return is_cacheable(mypy.types.get_proper_type(rt), signature, verbose)
|
return is_cacheable(mypy.types.get_proper_type(rt), signature, verbose)
|
||||||
|
|
||||||
elif isinstance(rt, UninhabitedType):
|
elif isinstance(rt, UninhabitedType) and rt.is_noreturn:
|
||||||
# There is no return value, just consider it cachable. This is only used
|
# There is no return value, just consider it cachable. This is only used
|
||||||
# in tests.
|
# in tests.
|
||||||
return True, None
|
return True, None
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ import commonmark
|
|||||||
import git
|
import git
|
||||||
from click.exceptions import ClickException
|
from click.exceptions import ClickException
|
||||||
from git import GitCommandError, Repo
|
from git import GitCommandError, Repo
|
||||||
from github import BadCredentialsException, Github
|
from github import Github
|
||||||
from packaging import version
|
from packaging import version
|
||||||
|
|
||||||
|
|
||||||
@@ -323,8 +323,10 @@ def tag(gh_token: Optional[str]) -> None:
|
|||||||
def _tag(gh_token: Optional[str]) -> None:
|
def _tag(gh_token: Optional[str]) -> None:
|
||||||
"""Tags the release and generates a draft GitHub release"""
|
"""Tags the release and generates a draft GitHub release"""
|
||||||
|
|
||||||
# Test that the GH Token is valid before continuing.
|
if gh_token:
|
||||||
check_valid_gh_token(gh_token)
|
# Test that the GH Token is valid before continuing.
|
||||||
|
gh = Github(gh_token)
|
||||||
|
gh.get_user()
|
||||||
|
|
||||||
# Make sure we're in a git repo.
|
# Make sure we're in a git repo.
|
||||||
repo = get_repo_and_check_clean_checkout()
|
repo = get_repo_and_check_clean_checkout()
|
||||||
@@ -467,8 +469,10 @@ def upload(gh_token: Optional[str]) -> None:
|
|||||||
def _upload(gh_token: Optional[str]) -> None:
|
def _upload(gh_token: Optional[str]) -> None:
|
||||||
"""Upload release to pypi."""
|
"""Upload release to pypi."""
|
||||||
|
|
||||||
# Test that the GH Token is valid before continuing.
|
if gh_token:
|
||||||
check_valid_gh_token(gh_token)
|
# Test that the GH Token is valid before continuing.
|
||||||
|
gh = Github(gh_token)
|
||||||
|
gh.get_user()
|
||||||
|
|
||||||
current_version = get_package_version()
|
current_version = get_package_version()
|
||||||
tag_name = f"v{current_version}"
|
tag_name = f"v{current_version}"
|
||||||
@@ -565,8 +569,10 @@ def wait_for_actions(gh_token: Optional[str]) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def _wait_for_actions(gh_token: Optional[str]) -> None:
|
def _wait_for_actions(gh_token: Optional[str]) -> None:
|
||||||
# Test that the GH Token is valid before continuing.
|
if gh_token:
|
||||||
check_valid_gh_token(gh_token)
|
# Test that the GH Token is valid before continuing.
|
||||||
|
gh = Github(gh_token)
|
||||||
|
gh.get_user()
|
||||||
|
|
||||||
# Find out the version and tag name.
|
# Find out the version and tag name.
|
||||||
current_version = get_package_version()
|
current_version = get_package_version()
|
||||||
@@ -592,7 +598,7 @@ def _wait_for_actions(gh_token: Optional[str]) -> None:
|
|||||||
if all(
|
if all(
|
||||||
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
|
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
|
||||||
):
|
):
|
||||||
success = all(
|
success = (
|
||||||
workflow["status"] == "completed" for workflow in resp["workflow_runs"]
|
workflow["status"] == "completed" for workflow in resp["workflow_runs"]
|
||||||
)
|
)
|
||||||
if success:
|
if success:
|
||||||
@@ -800,22 +806,6 @@ def get_repo_and_check_clean_checkout(
|
|||||||
return repo
|
return repo
|
||||||
|
|
||||||
|
|
||||||
def check_valid_gh_token(gh_token: Optional[str]) -> None:
|
|
||||||
"""Check that a github token is valid, if supplied"""
|
|
||||||
|
|
||||||
if not gh_token:
|
|
||||||
# No github token supplied, so nothing to do.
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
gh = Github(gh_token)
|
|
||||||
|
|
||||||
# We need to lookup name to trigger a request.
|
|
||||||
_name = gh.get_user().name
|
|
||||||
except BadCredentialsException as e:
|
|
||||||
raise click.ClickException(f"Github credentials are bad: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
|
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
|
||||||
"""Find the branch/ref, looking first locally then in the remote."""
|
"""Find the branch/ref, looking first locally then in the remote."""
|
||||||
if ref_name in repo.references:
|
if ref_name in repo.references:
|
||||||
|
|||||||
@@ -39,8 +39,8 @@ ImageFile.LOAD_TRUNCATED_IMAGES = True
|
|||||||
# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
|
# Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
|
||||||
# if-statement completely.
|
# if-statement completely.
|
||||||
py_version = sys.version_info
|
py_version = sys.version_info
|
||||||
if py_version < (3, 9):
|
if py_version < (3, 8):
|
||||||
print("Synapse requires Python 3.9 or above.")
|
print("Synapse requires Python 3.8 or above.")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# Allow using the asyncio reactor via env var.
|
# Allow using the asyncio reactor via env var.
|
||||||
|
|||||||
@@ -42,12 +42,12 @@ from typing import (
|
|||||||
Set,
|
Set,
|
||||||
Tuple,
|
Tuple,
|
||||||
Type,
|
Type,
|
||||||
TypedDict,
|
|
||||||
TypeVar,
|
TypeVar,
|
||||||
cast,
|
cast,
|
||||||
)
|
)
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
|
from typing_extensions import TypedDict
|
||||||
|
|
||||||
from twisted.internet import defer, reactor as reactor_
|
from twisted.internet import defer, reactor as reactor_
|
||||||
|
|
||||||
@@ -88,7 +88,6 @@ from synapse.storage.databases.main.relations import RelationsWorkerStore
|
|||||||
from synapse.storage.databases.main.room import RoomBackgroundUpdateStore
|
from synapse.storage.databases.main.room import RoomBackgroundUpdateStore
|
||||||
from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore
|
from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore
|
||||||
from synapse.storage.databases.main.search import SearchBackgroundUpdateStore
|
from synapse.storage.databases.main.search import SearchBackgroundUpdateStore
|
||||||
from synapse.storage.databases.main.sliding_sync import SlidingSyncStore
|
|
||||||
from synapse.storage.databases.main.state import MainStateBackgroundUpdateStore
|
from synapse.storage.databases.main.state import MainStateBackgroundUpdateStore
|
||||||
from synapse.storage.databases.main.stats import StatsStore
|
from synapse.storage.databases.main.stats import StatsStore
|
||||||
from synapse.storage.databases.main.user_directory import (
|
from synapse.storage.databases.main.user_directory import (
|
||||||
@@ -128,7 +127,6 @@ BOOLEAN_COLUMNS = {
|
|||||||
"pushers": ["enabled"],
|
"pushers": ["enabled"],
|
||||||
"redactions": ["have_censored"],
|
"redactions": ["have_censored"],
|
||||||
"remote_media_cache": ["authenticated"],
|
"remote_media_cache": ["authenticated"],
|
||||||
"room_memberships": ["participant"],
|
|
||||||
"room_stats_state": ["is_federatable"],
|
"room_stats_state": ["is_federatable"],
|
||||||
"rooms": ["is_public", "has_auth_chain_index"],
|
"rooms": ["is_public", "has_auth_chain_index"],
|
||||||
"sliding_sync_joined_rooms": ["is_encrypted"],
|
"sliding_sync_joined_rooms": ["is_encrypted"],
|
||||||
@@ -192,11 +190,6 @@ APPEND_ONLY_TABLES = [
|
|||||||
|
|
||||||
|
|
||||||
IGNORED_TABLES = {
|
IGNORED_TABLES = {
|
||||||
# Porting the auto generated sequence in this table is non-trivial.
|
|
||||||
# None of the entries in this list are mandatory for Synapse to keep working.
|
|
||||||
# If state group disk space is an issue after the port, the
|
|
||||||
# `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
|
|
||||||
"state_groups_pending_deletion",
|
|
||||||
# We don't port these tables, as they're a faff and we can regenerate
|
# We don't port these tables, as they're a faff and we can regenerate
|
||||||
# them anyway.
|
# them anyway.
|
||||||
"user_directory",
|
"user_directory",
|
||||||
@@ -222,15 +215,6 @@ IGNORED_TABLES = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
# These background updates will not be applied upon creation of the postgres database.
|
|
||||||
IGNORED_BACKGROUND_UPDATES = {
|
|
||||||
# Reapplying this background update to the postgres database is unnecessary after
|
|
||||||
# already having waited for the SQLite database to complete all running background
|
|
||||||
# updates.
|
|
||||||
"mark_unreferenced_state_groups_for_deletion_bg_update",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Error returned by the run function. Used at the top-level part of the script to
|
# Error returned by the run function. Used at the top-level part of the script to
|
||||||
# handle errors and return codes.
|
# handle errors and return codes.
|
||||||
end_error: Optional[str] = None
|
end_error: Optional[str] = None
|
||||||
@@ -271,7 +255,6 @@ class Store(
|
|||||||
ReceiptsBackgroundUpdateStore,
|
ReceiptsBackgroundUpdateStore,
|
||||||
RelationsWorkerStore,
|
RelationsWorkerStore,
|
||||||
EventFederationWorkerStore,
|
EventFederationWorkerStore,
|
||||||
SlidingSyncStore,
|
|
||||||
):
|
):
|
||||||
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
|
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
|
||||||
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
||||||
@@ -702,20 +685,6 @@ class Porter:
|
|||||||
# 0 means off. 1 means full. 2 means incremental.
|
# 0 means off. 1 means full. 2 means incremental.
|
||||||
return autovacuum_setting != 0
|
return autovacuum_setting != 0
|
||||||
|
|
||||||
async def remove_ignored_background_updates_from_database(self) -> None:
|
|
||||||
def _remove_delete_unreferenced_state_groups_bg_updates(
|
|
||||||
txn: LoggingTransaction,
|
|
||||||
) -> None:
|
|
||||||
txn.execute(
|
|
||||||
"DELETE FROM background_updates WHERE update_name = ANY(?)",
|
|
||||||
(list(IGNORED_BACKGROUND_UPDATES),),
|
|
||||||
)
|
|
||||||
|
|
||||||
await self.postgres_store.db_pool.runInteraction(
|
|
||||||
"remove_delete_unreferenced_state_groups_bg_updates",
|
|
||||||
_remove_delete_unreferenced_state_groups_bg_updates,
|
|
||||||
)
|
|
||||||
|
|
||||||
async def run(self) -> None:
|
async def run(self) -> None:
|
||||||
"""Ports the SQLite database to a PostgreSQL database.
|
"""Ports the SQLite database to a PostgreSQL database.
|
||||||
|
|
||||||
@@ -761,8 +730,6 @@ class Porter:
|
|||||||
self.hs_config.database.get_single_database()
|
self.hs_config.database.get_single_database()
|
||||||
)
|
)
|
||||||
|
|
||||||
await self.remove_ignored_background_updates_from_database()
|
|
||||||
|
|
||||||
await self.run_background_updates_on_postgres()
|
await self.run_background_updates_on_postgres()
|
||||||
|
|
||||||
self.progress.set_state("Creating port tables")
|
self.progress.set_state("Creating port tables")
|
||||||
|
|||||||
@@ -18,7 +18,9 @@
|
|||||||
# [This file includes modifications made by New Vector Limited]
|
# [This file includes modifications made by New Vector Limited]
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
|
from typing import TYPE_CHECKING, Optional, Tuple
|
||||||
|
|
||||||
|
from typing_extensions import Protocol
|
||||||
|
|
||||||
from twisted.web.server import Request
|
from twisted.web.server import Request
|
||||||
|
|
||||||
|
|||||||
@@ -19,8 +19,7 @@
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
import logging
|
import logging
|
||||||
from dataclasses import dataclass
|
from typing import TYPE_CHECKING, Any, Dict, List, Optional
|
||||||
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
|
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from authlib.oauth2 import ClientAuth
|
from authlib.oauth2 import ClientAuth
|
||||||
@@ -48,7 +47,6 @@ from synapse.logging.context import make_deferred_yieldable
|
|||||||
from synapse.types import Requester, UserID, create_requester
|
from synapse.types import Requester, UserID, create_requester
|
||||||
from synapse.util import json_decoder
|
from synapse.util import json_decoder
|
||||||
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
|
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
|
||||||
from synapse.util.caches.response_cache import ResponseCache
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from synapse.rest.admin.experimental_features import ExperimentalFeature
|
from synapse.rest.admin.experimental_features import ExperimentalFeature
|
||||||
@@ -78,61 +76,6 @@ def scope_to_list(scope: str) -> List[str]:
|
|||||||
return scope.strip().split(" ")
|
return scope.strip().split(" ")
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class IntrospectionResult:
|
|
||||||
_inner: IntrospectionToken
|
|
||||||
|
|
||||||
# when we retrieved this token,
|
|
||||||
# in milliseconds since the Unix epoch
|
|
||||||
retrieved_at_ms: int
|
|
||||||
|
|
||||||
def is_active(self, now_ms: int) -> bool:
|
|
||||||
if not self._inner.get("active"):
|
|
||||||
return False
|
|
||||||
|
|
||||||
expires_in = self._inner.get("expires_in")
|
|
||||||
if expires_in is None:
|
|
||||||
return True
|
|
||||||
if not isinstance(expires_in, int):
|
|
||||||
raise InvalidClientTokenError("token `expires_in` is not an int")
|
|
||||||
|
|
||||||
absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
|
|
||||||
return now_ms < absolute_expiry_ms
|
|
||||||
|
|
||||||
def get_scope_list(self) -> List[str]:
|
|
||||||
value = self._inner.get("scope")
|
|
||||||
if not isinstance(value, str):
|
|
||||||
return []
|
|
||||||
return scope_to_list(value)
|
|
||||||
|
|
||||||
def get_sub(self) -> Optional[str]:
|
|
||||||
value = self._inner.get("sub")
|
|
||||||
if not isinstance(value, str):
|
|
||||||
return None
|
|
||||||
return value
|
|
||||||
|
|
||||||
def get_username(self) -> Optional[str]:
|
|
||||||
value = self._inner.get("username")
|
|
||||||
if not isinstance(value, str):
|
|
||||||
return None
|
|
||||||
return value
|
|
||||||
|
|
||||||
def get_name(self) -> Optional[str]:
|
|
||||||
value = self._inner.get("name")
|
|
||||||
if not isinstance(value, str):
|
|
||||||
return None
|
|
||||||
return value
|
|
||||||
|
|
||||||
def get_device_id(self) -> Optional[str]:
|
|
||||||
value = self._inner.get("device_id")
|
|
||||||
if value is not None and not isinstance(value, str):
|
|
||||||
raise AuthError(
|
|
||||||
500,
|
|
||||||
"Invalid device ID in introspection result",
|
|
||||||
)
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
|
class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
|
||||||
"""An implementation of the private_key_jwt client auth method that includes a kid header.
|
"""An implementation of the private_key_jwt client auth method that includes a kid header.
|
||||||
|
|
||||||
@@ -176,32 +119,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
self._clock = hs.get_clock()
|
self._clock = hs.get_clock()
|
||||||
self._http_client = hs.get_proxied_http_client()
|
self._http_client = hs.get_proxied_http_client()
|
||||||
self._hostname = hs.hostname
|
self._hostname = hs.hostname
|
||||||
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
|
self._admin_token = self._config.admin_token
|
||||||
|
|
||||||
# # Token Introspection Cache
|
|
||||||
# This remembers what users/devices are represented by which access tokens,
|
|
||||||
# in order to reduce overall system load:
|
|
||||||
# - on Synapse (as requests are relatively expensive)
|
|
||||||
# - on the network
|
|
||||||
# - on MAS
|
|
||||||
#
|
|
||||||
# Since there is no invalidation mechanism currently,
|
|
||||||
# the entries expire after 2 minutes.
|
|
||||||
# This does mean tokens can be treated as valid by Synapse
|
|
||||||
# for longer than reality.
|
|
||||||
#
|
|
||||||
# Ideally, tokens should logically be invalidated in the following circumstances:
|
|
||||||
# - If a session logout happens.
|
|
||||||
# In this case, MAS will delete the device within Synapse
|
|
||||||
# anyway and this is good enough as an invalidation.
|
|
||||||
# - If the client refreshes their token in MAS.
|
|
||||||
# In this case, the device still exists and it's not the end of the world for
|
|
||||||
# the old access token to continue working for a short time.
|
|
||||||
self._introspection_cache: ResponseCache[str] = ResponseCache(
|
|
||||||
self._clock,
|
|
||||||
"token_introspection",
|
|
||||||
timeout_ms=120_000,
|
|
||||||
)
|
|
||||||
|
|
||||||
self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
|
self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
|
||||||
self._load_metadata
|
self._load_metadata
|
||||||
@@ -215,10 +133,9 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
# Else use the client secret
|
# Else use the client secret
|
||||||
client_secret = self._config.client_secret()
|
assert self._config.client_secret, "No client_secret provided"
|
||||||
assert client_secret, "No client_secret provided"
|
|
||||||
self._client_auth = ClientAuth(
|
self._client_auth = ClientAuth(
|
||||||
self._config.client_id, client_secret, auth_method
|
self._config.client_id, self._config.client_secret, auth_method
|
||||||
)
|
)
|
||||||
|
|
||||||
async def _load_metadata(self) -> OpenIDProviderMetadata:
|
async def _load_metadata(self) -> OpenIDProviderMetadata:
|
||||||
@@ -257,12 +174,6 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
logger.warning("Failed to load metadata:", exc_info=True)
|
logger.warning("Failed to load metadata:", exc_info=True)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
async def auth_metadata(self) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Returns the auth metadata dict
|
|
||||||
"""
|
|
||||||
return await self._issuer_metadata.get()
|
|
||||||
|
|
||||||
async def _introspection_endpoint(self) -> str:
|
async def _introspection_endpoint(self) -> str:
|
||||||
"""
|
"""
|
||||||
Returns the introspection endpoint of the issuer
|
Returns the introspection endpoint of the issuer
|
||||||
@@ -275,7 +186,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
metadata = await self._issuer_metadata.get()
|
metadata = await self._issuer_metadata.get()
|
||||||
return metadata.get("introspection_endpoint")
|
return metadata.get("introspection_endpoint")
|
||||||
|
|
||||||
async def _introspect_token(self, token: str) -> IntrospectionResult:
|
async def _introspect_token(self, token: str) -> IntrospectionToken:
|
||||||
"""
|
"""
|
||||||
Send a token to the introspection endpoint and returns the introspection response
|
Send a token to the introspection endpoint and returns the introspection response
|
||||||
|
|
||||||
@@ -296,9 +207,6 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
"Content-Type": "application/x-www-form-urlencoded",
|
"Content-Type": "application/x-www-form-urlencoded",
|
||||||
"User-Agent": str(self._http_client.user_agent, "utf-8"),
|
"User-Agent": str(self._http_client.user_agent, "utf-8"),
|
||||||
"Accept": "application/json",
|
"Accept": "application/json",
|
||||||
# Tell MAS that we support reading the device ID as an explicit
|
|
||||||
# value, not encoded in the scope. This is supported by MAS 0.15+
|
|
||||||
"X-MAS-Supports-Device-Id": "1",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
args = {"token": token, "token_type_hint": "access_token"}
|
args = {"token": token, "token_type_hint": "access_token"}
|
||||||
@@ -348,9 +256,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
"The introspection endpoint returned an invalid JSON response."
|
"The introspection endpoint returned an invalid JSON response."
|
||||||
)
|
)
|
||||||
|
|
||||||
return IntrospectionResult(
|
return IntrospectionToken(**resp)
|
||||||
IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec()
|
|
||||||
)
|
|
||||||
|
|
||||||
async def is_server_admin(self, requester: Requester) -> bool:
|
async def is_server_admin(self, requester: Requester) -> bool:
|
||||||
return "urn:synapse:admin:*" in requester.scope
|
return "urn:synapse:admin:*" in requester.scope
|
||||||
@@ -371,7 +277,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
requester = await self.get_user_by_access_token(access_token, allow_expired)
|
requester = await self.get_user_by_access_token(access_token, allow_expired)
|
||||||
|
|
||||||
# Do not record requests from MAS using the virtual `__oidc_admin` user.
|
# Do not record requests from MAS using the virtual `__oidc_admin` user.
|
||||||
if access_token != self._admin_token():
|
if access_token != self._admin_token:
|
||||||
await self._record_request(request, requester)
|
await self._record_request(request, requester)
|
||||||
|
|
||||||
if not allow_guest and requester.is_guest:
|
if not allow_guest and requester.is_guest:
|
||||||
@@ -412,8 +318,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
token: str,
|
token: str,
|
||||||
allow_expired: bool = False,
|
allow_expired: bool = False,
|
||||||
) -> Requester:
|
) -> Requester:
|
||||||
admin_token = self._admin_token()
|
if self._admin_token is not None and token == self._admin_token:
|
||||||
if admin_token is not None and token == admin_token:
|
|
||||||
# XXX: This is a temporary solution so that the admin API can be called by
|
# XXX: This is a temporary solution so that the admin API can be called by
|
||||||
# the OIDC provider. This will be removed once we have OIDC client
|
# the OIDC provider. This will be removed once we have OIDC client
|
||||||
# credentials grant support in matrix-authentication-service.
|
# credentials grant support in matrix-authentication-service.
|
||||||
@@ -428,22 +333,20 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
)
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
introspection_result = await self._introspection_cache.wrap(
|
introspection_result = await self._introspect_token(token)
|
||||||
token, self._introspect_token, token
|
|
||||||
)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
logger.exception("Failed to introspect token")
|
logger.exception("Failed to introspect token")
|
||||||
raise SynapseError(503, "Unable to introspect the access token")
|
raise SynapseError(503, "Unable to introspect the access token")
|
||||||
|
|
||||||
logger.debug("Introspection result: %r", introspection_result)
|
logger.info(f"Introspection result: {introspection_result!r}")
|
||||||
|
|
||||||
# TODO: introspection verification should be more extensive, especially:
|
# TODO: introspection verification should be more extensive, especially:
|
||||||
# - verify the audience
|
# - verify the audience
|
||||||
if not introspection_result.is_active(self._clock.time_msec()):
|
if not introspection_result.get("active"):
|
||||||
raise InvalidClientTokenError("Token is not active")
|
raise InvalidClientTokenError("Token is not active")
|
||||||
|
|
||||||
# Let's look at the scope
|
# Let's look at the scope
|
||||||
scope: List[str] = introspection_result.get_scope_list()
|
scope: List[str] = scope_to_list(introspection_result.get("scope", ""))
|
||||||
|
|
||||||
# Determine type of user based on presence of particular scopes
|
# Determine type of user based on presence of particular scopes
|
||||||
has_user_scope = SCOPE_MATRIX_API in scope
|
has_user_scope = SCOPE_MATRIX_API in scope
|
||||||
@@ -453,7 +356,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
raise InvalidClientTokenError("No scope in token granting user rights")
|
raise InvalidClientTokenError("No scope in token granting user rights")
|
||||||
|
|
||||||
# Match via the sub claim
|
# Match via the sub claim
|
||||||
sub: Optional[str] = introspection_result.get_sub()
|
sub: Optional[str] = introspection_result.get("sub")
|
||||||
if sub is None:
|
if sub is None:
|
||||||
raise InvalidClientTokenError(
|
raise InvalidClientTokenError(
|
||||||
"Invalid sub claim in the introspection result"
|
"Invalid sub claim in the introspection result"
|
||||||
@@ -467,7 +370,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
# or the external_id was never recorded
|
# or the external_id was never recorded
|
||||||
|
|
||||||
# TODO: claim mapping should be configurable
|
# TODO: claim mapping should be configurable
|
||||||
username: Optional[str] = introspection_result.get_username()
|
username: Optional[str] = introspection_result.get("username")
|
||||||
if username is None or not isinstance(username, str):
|
if username is None or not isinstance(username, str):
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
500,
|
500,
|
||||||
@@ -485,7 +388,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
|
|
||||||
# TODO: claim mapping should be configurable
|
# TODO: claim mapping should be configurable
|
||||||
# If present, use the name claim as the displayname
|
# If present, use the name claim as the displayname
|
||||||
name: Optional[str] = introspection_result.get_name()
|
name: Optional[str] = introspection_result.get("name")
|
||||||
|
|
||||||
await self.store.register_user(
|
await self.store.register_user(
|
||||||
user_id=user_id.to_string(), create_profile_with_displayname=name
|
user_id=user_id.to_string(), create_profile_with_displayname=name
|
||||||
@@ -498,34 +401,29 @@ class MSC3861DelegatedAuth(BaseAuth):
|
|||||||
else:
|
else:
|
||||||
user_id = UserID.from_string(user_id_str)
|
user_id = UserID.from_string(user_id_str)
|
||||||
|
|
||||||
# MAS 0.15+ will give us the device ID as an explicit value for compatibility sessions
|
# Find device_ids in scope
|
||||||
# If present, we get it from here, if not we get it in thee scope
|
# We only allow a single device_id in the scope, so we find them all in the
|
||||||
device_id = introspection_result.get_device_id()
|
# scope list, and raise if there are more than one. The OIDC server should be
|
||||||
if device_id is None:
|
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
|
||||||
# Find device_ids in scope
|
device_ids = [
|
||||||
# We only allow a single device_id in the scope, so we find them all in the
|
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
|
||||||
# scope list, and raise if there are more than one. The OIDC server should be
|
for tok in scope
|
||||||
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
|
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
|
||||||
device_ids = [
|
]
|
||||||
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
|
|
||||||
for tok in scope
|
|
||||||
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
|
|
||||||
]
|
|
||||||
|
|
||||||
if len(device_ids) > 1:
|
if len(device_ids) > 1:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
500,
|
500,
|
||||||
"Multiple device IDs in scope",
|
"Multiple device IDs in scope",
|
||||||
)
|
)
|
||||||
|
|
||||||
device_id = device_ids[0] if device_ids else None
|
|
||||||
|
|
||||||
|
device_id = device_ids[0] if device_ids else None
|
||||||
if device_id is not None:
|
if device_id is not None:
|
||||||
# Sanity check the device_id
|
# Sanity check the device_id
|
||||||
if len(device_id) > 255 or len(device_id) < 1:
|
if len(device_id) > 255 or len(device_id) < 1:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
500,
|
500,
|
||||||
"Invalid device ID in introspection result",
|
"Invalid device ID in scope",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create the device on the fly if it does not exist
|
# Create the device on the fly if it does not exist
|
||||||
|
|||||||
@@ -29,13 +29,8 @@ from typing import Final
|
|||||||
# the max size of a (canonical-json-encoded) event
|
# the max size of a (canonical-json-encoded) event
|
||||||
MAX_PDU_SIZE = 65536
|
MAX_PDU_SIZE = 65536
|
||||||
|
|
||||||
# Max/min size of ints in canonical JSON
|
# the "depth" field on events is limited to 2**63 - 1
|
||||||
CANONICALJSON_MAX_INT = (2**53) - 1
|
MAX_DEPTH = 2**63 - 1
|
||||||
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
|
|
||||||
|
|
||||||
# the "depth" field on events is limited to the same as what
|
|
||||||
# canonicaljson accepts
|
|
||||||
MAX_DEPTH = CANONICALJSON_MAX_INT
|
|
||||||
|
|
||||||
# the maximum length for a room alias is 255 characters
|
# the maximum length for a room alias is 255 characters
|
||||||
MAX_ALIAS_LENGTH = 255
|
MAX_ALIAS_LENGTH = 255
|
||||||
@@ -236,8 +231,6 @@ class EventContentFields:
|
|||||||
ROOM_NAME: Final = "name"
|
ROOM_NAME: Final = "name"
|
||||||
|
|
||||||
MEMBERSHIP: Final = "membership"
|
MEMBERSHIP: Final = "membership"
|
||||||
MEMBERSHIP_DISPLAYNAME: Final = "displayname"
|
|
||||||
MEMBERSHIP_AVATAR_URL: Final = "avatar_url"
|
|
||||||
|
|
||||||
# Used in m.room.guest_access events.
|
# Used in m.room.guest_access events.
|
||||||
GUEST_ACCESS: Final = "guest_access"
|
GUEST_ACCESS: Final = "guest_access"
|
||||||
@@ -325,8 +318,3 @@ class ApprovalNoticeMedium:
|
|||||||
class Direction(enum.Enum):
|
class Direction(enum.Enum):
|
||||||
BACKWARDS = "b"
|
BACKWARDS = "b"
|
||||||
FORWARDS = "f"
|
FORWARDS = "f"
|
||||||
|
|
||||||
|
|
||||||
class ProfileFields:
|
|
||||||
DISPLAYNAME: Final = "displayname"
|
|
||||||
AVATAR_URL: Final = "avatar_url"
|
|
||||||
|
|||||||
@@ -87,7 +87,8 @@ class Codes(str, Enum):
|
|||||||
WEAK_PASSWORD = "M_WEAK_PASSWORD"
|
WEAK_PASSWORD = "M_WEAK_PASSWORD"
|
||||||
INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
|
INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
|
||||||
USER_DEACTIVATED = "M_USER_DEACTIVATED"
|
USER_DEACTIVATED = "M_USER_DEACTIVATED"
|
||||||
USER_LOCKED = "M_USER_LOCKED"
|
# USER_LOCKED = "M_USER_LOCKED"
|
||||||
|
USER_LOCKED = "ORG_MATRIX_MSC3939_USER_LOCKED"
|
||||||
NOT_YET_UPLOADED = "M_NOT_YET_UPLOADED"
|
NOT_YET_UPLOADED = "M_NOT_YET_UPLOADED"
|
||||||
CANNOT_OVERWRITE_MEDIA = "M_CANNOT_OVERWRITE_MEDIA"
|
CANNOT_OVERWRITE_MEDIA = "M_CANNOT_OVERWRITE_MEDIA"
|
||||||
|
|
||||||
@@ -100,9 +101,8 @@ class Codes(str, Enum):
|
|||||||
# The account has been suspended on the server.
|
# The account has been suspended on the server.
|
||||||
# By opposition to `USER_DEACTIVATED`, this is a reversible measure
|
# By opposition to `USER_DEACTIVATED`, this is a reversible measure
|
||||||
# that can possibly be appealed and reverted.
|
# that can possibly be appealed and reverted.
|
||||||
# Introduced by MSC3823
|
# Part of MSC3823.
|
||||||
# https://github.com/matrix-org/matrix-spec-proposals/pull/3823
|
USER_ACCOUNT_SUSPENDED = "ORG.MATRIX.MSC3823.USER_ACCOUNT_SUSPENDED"
|
||||||
USER_ACCOUNT_SUSPENDED = "M_USER_SUSPENDED"
|
|
||||||
|
|
||||||
BAD_ALIAS = "M_BAD_ALIAS"
|
BAD_ALIAS = "M_BAD_ALIAS"
|
||||||
# For restricted join rules.
|
# For restricted join rules.
|
||||||
@@ -132,10 +132,6 @@ class Codes(str, Enum):
|
|||||||
# connection.
|
# connection.
|
||||||
UNKNOWN_POS = "M_UNKNOWN_POS"
|
UNKNOWN_POS = "M_UNKNOWN_POS"
|
||||||
|
|
||||||
# Part of MSC4133
|
|
||||||
PROFILE_TOO_LARGE = "M_PROFILE_TOO_LARGE"
|
|
||||||
KEY_TOO_LARGE = "M_KEY_TOO_LARGE"
|
|
||||||
|
|
||||||
|
|
||||||
class CodeMessageException(RuntimeError):
|
class CodeMessageException(RuntimeError):
|
||||||
"""An exception with integer code, a message string attributes and optional headers.
|
"""An exception with integer code, a message string attributes and optional headers.
|
||||||
|
|||||||
@@ -275,7 +275,6 @@ class Ratelimiter:
|
|||||||
update: bool = True,
|
update: bool = True,
|
||||||
n_actions: int = 1,
|
n_actions: int = 1,
|
||||||
_time_now_s: Optional[float] = None,
|
_time_now_s: Optional[float] = None,
|
||||||
pause: Optional[float] = 0.5,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Checks if an action can be performed. If not, raises a LimitExceededError
|
"""Checks if an action can be performed. If not, raises a LimitExceededError
|
||||||
|
|
||||||
@@ -299,8 +298,6 @@ class Ratelimiter:
|
|||||||
at all.
|
at all.
|
||||||
_time_now_s: The current time. Optional, defaults to the current time according
|
_time_now_s: The current time. Optional, defaults to the current time according
|
||||||
to self.clock. Only used by tests.
|
to self.clock. Only used by tests.
|
||||||
pause: Time in seconds to pause when an action is being limited. Defaults to 0.5
|
|
||||||
to stop clients from "tight-looping" on retrying their request.
|
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
LimitExceededError: If an action could not be performed, along with the time in
|
LimitExceededError: If an action could not be performed, along with the time in
|
||||||
@@ -319,8 +316,9 @@ class Ratelimiter:
|
|||||||
)
|
)
|
||||||
|
|
||||||
if not allowed:
|
if not allowed:
|
||||||
if pause:
|
# We pause for a bit here to stop clients from "tight-looping" on
|
||||||
await self.clock.sleep(pause)
|
# retrying their request.
|
||||||
|
await self.clock.sleep(0.5)
|
||||||
|
|
||||||
raise LimitExceededError(
|
raise LimitExceededError(
|
||||||
limiter_name=self._limiter_name,
|
limiter_name=self._limiter_name,
|
||||||
|
|||||||
@@ -23,8 +23,7 @@
|
|||||||
|
|
||||||
import hmac
|
import hmac
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from typing import Optional
|
from urllib.parse import urlencode
|
||||||
from urllib.parse import urlencode, urljoin
|
|
||||||
|
|
||||||
from synapse.config import ConfigError
|
from synapse.config import ConfigError
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
@@ -67,42 +66,3 @@ class ConsentURIBuilder:
|
|||||||
urlencode({"u": user_id, "h": mac}),
|
urlencode({"u": user_id, "h": mac}),
|
||||||
)
|
)
|
||||||
return consent_uri
|
return consent_uri
|
||||||
|
|
||||||
|
|
||||||
class LoginSSORedirectURIBuilder:
|
|
||||||
def __init__(self, hs_config: HomeServerConfig):
|
|
||||||
self._public_baseurl = hs_config.server.public_baseurl
|
|
||||||
|
|
||||||
def build_login_sso_redirect_uri(
|
|
||||||
self, *, idp_id: Optional[str], client_redirect_url: str
|
|
||||||
) -> str:
|
|
||||||
"""Build a `/login/sso/redirect` URI for the given identity provider.
|
|
||||||
|
|
||||||
Builds `/_matrix/client/v3/login/sso/redirect/{idpId}?redirectUrl=xxx` when `idp_id` is specified.
|
|
||||||
Otherwise, builds `/_matrix/client/v3/login/sso/redirect?redirectUrl=xxx` when `idp_id` is `None`.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
idp_id: Optional ID of the identity provider
|
|
||||||
client_redirect_url: URL to redirect the user to after login
|
|
||||||
|
|
||||||
Returns
|
|
||||||
The URI to follow when choosing a specific identity provider.
|
|
||||||
"""
|
|
||||||
base_url = urljoin(
|
|
||||||
self._public_baseurl,
|
|
||||||
f"{CLIENT_API_PREFIX}/v3/login/sso/redirect",
|
|
||||||
)
|
|
||||||
|
|
||||||
serialized_query_parameters = urlencode({"redirectUrl": client_redirect_url})
|
|
||||||
|
|
||||||
if idp_id:
|
|
||||||
resultant_url = urljoin(
|
|
||||||
# We have to add a trailing slash to the base URL to ensure that the
|
|
||||||
# last path segment is not stripped away when joining with another path.
|
|
||||||
f"{base_url}/",
|
|
||||||
f"{idp_id}?{serialized_query_parameters}",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
resultant_url = f"{base_url}?{serialized_query_parameters}"
|
|
||||||
|
|
||||||
return resultant_url
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
#
|
#
|
||||||
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023-2024 New Vector, Ltd
|
# Copyright (C) 2023 New Vector, Ltd
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
|||||||
@@ -87,7 +87,6 @@ class ApplicationService:
|
|||||||
ip_range_whitelist: Optional[IPSet] = None,
|
ip_range_whitelist: Optional[IPSet] = None,
|
||||||
supports_ephemeral: bool = False,
|
supports_ephemeral: bool = False,
|
||||||
msc3202_transaction_extensions: bool = False,
|
msc3202_transaction_extensions: bool = False,
|
||||||
msc4190_device_management: bool = False,
|
|
||||||
):
|
):
|
||||||
self.token = token
|
self.token = token
|
||||||
self.url = (
|
self.url = (
|
||||||
@@ -101,7 +100,6 @@ class ApplicationService:
|
|||||||
self.ip_range_whitelist = ip_range_whitelist
|
self.ip_range_whitelist = ip_range_whitelist
|
||||||
self.supports_ephemeral = supports_ephemeral
|
self.supports_ephemeral = supports_ephemeral
|
||||||
self.msc3202_transaction_extensions = msc3202_transaction_extensions
|
self.msc3202_transaction_extensions = msc3202_transaction_extensions
|
||||||
self.msc4190_device_management = msc4190_device_management
|
|
||||||
|
|
||||||
if "|" in self.id:
|
if "|" in self.id:
|
||||||
raise Exception("application service ID cannot contain '|' character")
|
raise Exception("application service ID cannot contain '|' character")
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user