Compare commits
15 Commits
rei/thread ... rei/ci_deb

| Author | SHA1 | Date |
|---|---|---|
|  | c9f212ab44 |  |
|  | 85e3adba86 |  |
|  | d3bdf8b091 |  |
|  | d8ab5434d5 |  |
|  | 4333eff1d5 |  |
|  | c9f04f3484 |  |
|  | a387d6ecf8 |  |
|  | 9e473d9e38 |  |
|  | d2ea7e32f5 |  |
|  | 2db0f1e49b |  |
|  | a256423553 |  |
|  | e91aa4fd2f |  |
|  | 499c1631de |  |
|  | 4ecd9aba95 |  |
|  | 4e947d05ab |  |
@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff
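The `--restrict-key=TESTING` pin removed here matches the "Fix `portdb` CI by hardcoding the new `pg_dump` restrict key" entry in the CHANGES.md hunk further down: per that changelog note, `pg_dump` builds that include the CVE-2025-8714 fix add a restrict token to plain-format output, and unless that token is pinned it is randomized per run, so two otherwise-identical dumps would never diff clean. A minimal sketch of the idea, assuming a `pg_dump` new enough to accept `--restrict-key` and using placeholder database names:

    # Pin the restrict key so the guard lines are identical in both dumps (illustrative databases).
    pg_dump --format=plain --schema-only --restrict-key=TESTING db_a > a.sql
    pg_dump --format=plain --schema-only --restrict-key=TESTING db_b > b.sql

    # `diff -u` prints any remaining differences; `tee` keeps a copy for the CI log.
    diff -u a.sql b.sql | tee schema_diff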
6  .github/workflows/docker.yml  vendored

@@ -31,7 +31,7 @@ jobs:
 uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

 - name: Checkout repository
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Extract version from pyproject.toml
 # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -95,7 +95,7 @@ jobs:
 - build
 steps:
 - name: Download digests
-uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
 with:
 path: ${{ runner.temp }}/digests
 pattern: digests-*
@@ -123,7 +123,7 @@ jobs:
 uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2

 - name: Calculate docker image tag
-uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
 with:
 images: ${{ matrix.repository }}
 flavor: |
4  .github/workflows/docs-pr.yaml  vendored

@@ -13,7 +13,7 @@ jobs:
 name: GitHub Pages
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 # Fetch all history so that the schema_versions script works.
 fetch-depth: 0
@@ -50,7 +50,7 @@ jobs:
 name: Check links in documentation
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Setup mdbook
 uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
2  .github/workflows/docs.yaml  vendored

@@ -50,7 +50,7 @@ jobs:
 needs:
 - pre
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 # Fetch all history so that the schema_versions script works.
 fetch-depth: 0
2  .github/workflows/fix_lint.yaml  vendored

@@ -18,7 +18,7 @@ jobs:

 steps:
 - name: Checkout repository
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
10  .github/workflows/latest_deps.yml  vendored

@@ -42,7 +42,7 @@ jobs:
 if: needs.check_repo.outputs.should_run_workflow == 'true'
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
 with:
@@ -77,7 +77,7 @@ jobs:
 postgres-version: "14"

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -152,7 +152,7 @@ jobs:
 BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -202,7 +202,7 @@ jobs:

 steps:
 - name: Check out synapse codebase
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 path: synapse

@@ -234,7 +234,7 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2  .github/workflows/poetry_lockfile.yaml  vendored

@@ -16,7 +16,7 @@ jobs:
 name: "Check locked dependencies have sdists"
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: '3.x'
8  .github/workflows/push_complement_image.yml  vendored

@@ -33,17 +33,17 @@ jobs:
 packages: write
 steps:
 - name: Checkout specific branch (debug build)
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 if: github.event_name == 'workflow_dispatch'
 with:
 ref: ${{ inputs.branch }}
 - name: Checkout clean copy of develop (scheduled build)
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 if: github.event_name == 'schedule'
 with:
 ref: develop
 - name: Checkout clean copy of master (on-push)
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 if: github.event_name == 'push'
 with:
 ref: master
@@ -55,7 +55,7 @@ jobs:
 password: ${{ secrets.GITHUB_TOKEN }}
 - name: Work out labels for complement image
 id: meta
-uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
 with:
 images: ghcr.io/${{ github.repository }}/complement-synapse
 tags: |
12  .github/workflows/release-artifacts.yml  vendored

@@ -27,7 +27,7 @@ jobs:
 name: "Calculate list of debian distros"
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -55,7 +55,7 @@ jobs:

 steps:
 - name: Checkout
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 path: src

@@ -66,7 +66,7 @@ jobs:
 install: true

 - name: Set up docker layer caching
-uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
 with:
 path: /tmp/.buildx-cache
 key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -132,7 +132,7 @@ jobs:
 os: "ubuntu-24.04-arm"

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
@@ -165,7 +165,7 @@ jobs:
 if: ${{ !startsWith(github.ref, 'refs/pull/') }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.10"
@@ -191,7 +191,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Download all workflow run artifacts
-uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
 - name: Build a tarball for the debs
 # We need to merge all the debs uploads into one folder, then compress
 # that.
4  .github/workflows/schema.yaml  vendored

@@ -14,7 +14,7 @@ jobs:
 name: Ensure Synapse config schema is valid
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -40,7 +40,7 @@ jobs:
 name: Ensure generated documentation is up-to-date
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
70  .github/workflows/tests.yml  vendored

@@ -86,7 +86,7 @@ jobs:
 if: ${{ needs.changes.outputs.linting == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
 with:
@@ -106,7 +106,7 @@ jobs:
 if: ${{ needs.changes.outputs.linting == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -116,7 +116,7 @@ jobs:
 check-lockfile:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -129,7 +129,7 @@ jobs:

 steps:
 - name: Checkout repository
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Setup Poetry
 uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,7 +151,7 @@ jobs:

 steps:
 - name: Checkout repository
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -174,7 +174,7 @@ jobs:
 # Cribbed from
 # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
 - name: Restore/persist mypy's cache
-uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
 with:
 path: |
 .mypy_cache
@@ -187,7 +187,7 @@ jobs:
 lint-crlf:
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - name: Check line endings
 run: scripts-dev/check_line_terminators.sh

@@ -195,7 +195,7 @@ jobs:
 if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 ref: ${{ github.event.pull_request.head.sha }}
 fetch-depth: 0
@@ -213,7 +213,7 @@ jobs:
 if: ${{ needs.changes.outputs.linting == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 ref: ${{ github.event.pull_request.head.sha }}
 - name: Install Rust
@@ -233,7 +233,7 @@ jobs:
 if: ${{ needs.changes.outputs.rust == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -252,7 +252,7 @@ jobs:
 if: ${{ needs.changes.outputs.rust == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -270,7 +270,7 @@ jobs:

 steps:
 - name: Checkout repository
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -306,7 +306,7 @@ jobs:
 if: ${{ needs.changes.outputs.rust == 'true' }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -326,7 +326,7 @@ jobs:
 needs: changes
 if: ${{ needs.changes.outputs.linting_readme == 'true' }}
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -373,10 +373,10 @@ jobs:

 calculate-test-jobs:
 if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
-needs: linting-done
+# needs: linting-done
 runs-on: ubuntu-latest
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
 with:
 python-version: "3.x"
@@ -393,11 +393,12 @@ jobs:
 - changes
 runs-on: ubuntu-latest
 strategy:
 fail-fast: false
 matrix:
 job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - run: sudo apt-get -qq install xmlsec1
 - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
 if: ${{ matrix.job.postgres-version }}
@@ -426,7 +427,24 @@ jobs:
 if: ${{ matrix.job.postgres-version }}
 timeout-minutes: 2
 run: until pg_isready -h localhost; do sleep 1; done
-- run: poetry run trial --jobs=6 tests
+- run: |
+  (
+  while true; do
+  echo "......."
+  date
+  df -h | grep root
+  free -m
+  sleep 10
+  done
+  ) &
+  MONITOR_PID=$!
+
+  poetry run trial --jobs=6 tests
+  STATUS=$?
+
+  kill $MONITOR_PID
+  exit $STATUS
+
 env:
 SYNAPSE_POSTGRES: ${{ matrix.job.database == 'postgres' || '' }}
 SYNAPSE_POSTGRES_HOST: /var/run/postgresql
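The replacement `run:` block above is the CI-debugging change this branch carries (see the `changelog.d/18787.misc` entry, "CI debugging.", further down): while `trial` runs, a background loop prints the date, root-filesystem usage and free memory every ten seconds, and the step then exits with `trial`'s own status. A standalone sketch of the same pattern, with a placeholder workload command:

    #!/usr/bin/env bash
    # Periodically report disk and memory usage while a long-running command executes.
    (
      while true; do
        date
        df -h | grep root
        free -m
        sleep 10
      done
    ) &
    MONITOR_PID=$!

    ./long-running-command   # placeholder, e.g. the trial invocation above
    STATUS=$?                # capture the workload's exit code first

    kill "$MONITOR_PID"      # stop the monitor loop
    exit "$STATUS"           # report the workload's result, not the monitor's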
@@ -453,7 +471,7 @@ jobs:
 - changes
 runs-on: ubuntu-22.04
 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -518,7 +536,7 @@ jobs:
 extras: ["all"]

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 # Install libs necessary for PyPy to build binary wheels for dependencies
 - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
 - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -568,7 +586,7 @@ jobs:
 job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - name: Prepare test blacklist
 run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers

@@ -615,7 +633,7 @@ jobs:
 --health-retries 5

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - run: sudo apt-get -qq install xmlsec1 postgresql-client
 - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
 with:
@@ -659,7 +677,7 @@ jobs:
 --health-retries 5

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - name: Add PostgreSQL apt repository
 # We need a version of pg_dump that can handle the version of
 # PostgreSQL being tested against. The Ubuntu package repository lags
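The comment above (cut off at the hunk boundary) is explaining why this job pulls its PostgreSQL client from the upstream repository rather than Ubuntu's: `pg_dump` has to be at least as new as the server version under test. A rough sketch of one common way to add the PGDG apt repository on an Ubuntu runner — the key handling and client version here are illustrative, not the workflow's exact commands:

    # Add the upstream PostgreSQL (PGDG) apt repository and install a newer client (illustrative).
    sudo sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
    wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
    sudo apt-get update
    sudo apt-get -y install postgresql-client-16   # pick a client at least as new as the server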
@@ -714,7 +732,7 @@ jobs:

 steps:
 - name: Checkout synapse codebase
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 path: synapse

@@ -750,7 +768,7 @@ jobs:
 - changes

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -770,7 +788,7 @@ jobs:
 - changes

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
2  .github/workflows/triage_labelled.yml  vendored

@@ -11,7 +11,7 @@ jobs:
 if: >
 contains(github.event.issue.labels.*.name, 'X-Needs-Info')
 steps:
-- uses: actions/add-to-project@c0c5949b017d0d4a39f7ba888255881bdac2a823 # v1.0.2
+- uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits)
 id: add_project
 with:
 project-url: "https://github.com/orgs/matrix-org/projects/67"
10  .github/workflows/twisted_trunk.yml  vendored

@@ -43,7 +43,7 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -70,7 +70,7 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - run: sudo apt-get -qq install xmlsec1

 - name: Install Rust
@@ -117,7 +117,7 @@ jobs:
 - ${{ github.workspace }}:/src

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

 - name: Install Rust
 uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b # master
@@ -175,7 +175,7 @@ jobs:

 steps:
 - name: Run actions/checkout@v4 for synapse
-uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 with:
 path: synapse

@@ -217,7 +217,7 @@ jobs:
 runs-on: ubuntu-latest

 steps:
-- uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4.3.0
+- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
 env:
 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
143  CHANGES.md

@@ -1,146 +1,3 @@
-# Synapse 1.137.0rc1 (2025-08-19)
-
-### Bugfixes
-
-- Fix a bug which could corrupt auth chains making it impossible to perform state resolution. ([\#18746](https://github.com/element-hq/synapse/issues/18746))
-- Fix error message in `register_new_matrix_user` utility script for empty `registration_shared_secret`. ([\#18780](https://github.com/element-hq/synapse/issues/18780))
-- Allow enabling [MSC4108](https://github.com/matrix-org/matrix-spec-proposals/pull/4108) when the stable Matrix Authentication Service integration is enabled. ([\#18832](https://github.com/element-hq/synapse/issues/18832))
-
-### Improved Documentation
-
-- Include IPv6 networks in `denied-peer-ips` of coturn setup. Contributed by @litetex. ([\#18781](https://github.com/element-hq/synapse/issues/18781))
-
-### Internal Changes
-
-- Update tests to ensure all database tables are emptied when purging a room. ([\#18794](https://github.com/element-hq/synapse/issues/18794))
-- Instrument the `encode_response` part of Sliding Sync requests for more complete traces in Jaeger. ([\#18815](https://github.com/element-hq/synapse/issues/18815))
-- Tag Sliding Sync traces when we `wait_for_events`. ([\#18816](https://github.com/element-hq/synapse/issues/18816))
-- Fix `portdb` CI by hardcoding the new `pg_dump` restrict key that was added due to [CVE-2025-8714](https://nvd.nist.gov/vuln/detail/cve-2025-8714). ([\#18824](https://github.com/element-hq/synapse/issues/18824))
-
-
-
-### Updates to locked dependencies
-
-* Bump actions/add-to-project from 5b1a254a3546aef88e0a7724a77a623fa2e47c36 to 0c37450c4be3b6a7582b2fb013c9ebfd9c8e9300. ([\#18557](https://github.com/element-hq/synapse/issues/18557))
-* Bump actions/cache from 4.2.3 to 4.2.4. ([\#18799](https://github.com/element-hq/synapse/issues/18799))
-* Bump actions/checkout from 4.2.2 to 4.3.0. ([\#18800](https://github.com/element-hq/synapse/issues/18800))
-* Bump actions/download-artifact from 4.3.0 to 5.0.0. ([\#18801](https://github.com/element-hq/synapse/issues/18801))
-* Bump docker/metadata-action from 5.7.0 to 5.8.0. ([\#18773](https://github.com/element-hq/synapse/issues/18773))
-* Bump mypy from 1.16.1 to 1.17.1. ([\#18775](https://github.com/element-hq/synapse/issues/18775))
-* Bump phonenumbers from 9.0.10 to 9.0.11. ([\#18797](https://github.com/element-hq/synapse/issues/18797))
-* Bump pygithub from 2.6.1 to 2.7.0. ([\#18779](https://github.com/element-hq/synapse/issues/18779))
-* Bump serde_json from 1.0.141 to 1.0.142. ([\#18776](https://github.com/element-hq/synapse/issues/18776))
-* Bump slab from 0.4.10 to 0.4.11. ([\#18809](https://github.com/element-hq/synapse/issues/18809))
-* Bump tokio from 1.47.0 to 1.47.1. ([\#18774](https://github.com/element-hq/synapse/issues/18774))
-* Bump types-pyyaml from 6.0.12.20250516 to 6.0.12.20250809. ([\#18798](https://github.com/element-hq/synapse/issues/18798))
-* Bump types-setuptools from 80.9.0.20250529 to 80.9.0.20250809. ([\#18796](https://github.com/element-hq/synapse/issues/18796))
-
-# Synapse 1.136.0 (2025-08-12)
-
-Note: This release includes the security fixes from `1.135.2` and `1.136.0rc2`, detailed below.
-
-### Bugfixes
-
-- Fix bug introduced in 1.135.2 and 1.136.0rc2 where the [Make Room Admin API](https://element-hq.github.io/synapse/latest/admin_api/rooms.html#make-room-admin-api) would not treat a room v12's creator power level as the highest in room. ([\#18805](https://github.com/element-hq/synapse/issues/18805))
-
-
-# Synapse 1.135.2 (2025-08-11)
-
-This is the Synapse portion of the [Matrix coordinated security release](https://matrix.org/blog/2025/07/security-predisclosure/). This release includes support for [room version](https://spec.matrix.org/v1.15/rooms/) 12 which fixes a number of security vulnerabilities, including [CVE-2025-49090](https://www.cve.org/CVERecord?id=CVE-2025-49090).
-
-The default room version is not changed. Not all clients will support room version 12 immediately, and not all users will be using the latest version of their clients. Large, public rooms are advised to wait a few weeks before upgrading to room version 12 to allow users throughout the Matrix ecosystem to update their clients.
-
-Note: release 1.135.1 was skipped due to issues discovered during the release process.
-
-Two patched Synapse releases are now available:
-
-* `1.135.2`: stable release comprised of `1.135.0` + security patches
-  * Upgrade to this release **if you are currently running 1.135.0 or below**.
-* `1.136.0rc2`: unstable release candidate comprised of `1.136.0rc1` + security patches.
-  * Upgrade to this release **only if you are on 1.136.0rc1**.
-
-### Bugfixes
-
-- Fix invalidation of storage cache that was broken in 1.135.0. ([\#18786](https://github.com/element-hq/synapse/issues/18786))
-
-### Internal Changes
-
-- Add a parameter to `upgrade_rooms(..)` to allow auto join local users. ([\#82](https://github.com/element-hq/synapse/issues/82))
-- Speed up upgrading a room with large numbers of banned users. ([\#18574](https://github.com/element-hq/synapse/issues/18574))
-
-
-# Synapse 1.136.0rc2 (2025-08-11)
-
-- Update MSC4293 redaction logic for room v12. ([\#80](https://github.com/element-hq/synapse/issues/80))
-
-### Internal Changes
-
-- Add a parameter to `upgrade_rooms(..)` to allow auto join local users. ([\#83](https://github.com/element-hq/synapse/issues/83))
-
-
-# Synapse 1.136.0rc1 (2025-08-05)
-
-Please check [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11360) as this release contains changes to MAS support, metrics labels and the module API which may require your attention when upgrading.
-
-### Features
-
-- Add configurable rate limiting for the creation of rooms. ([\#18514](https://github.com/element-hq/synapse/issues/18514))
-- Add support for [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban. ([\#18540](https://github.com/element-hq/synapse/issues/18540))
-- When admins enable themselves to see soft-failed events, they will also see if the cause is due to the policy server flagging them as spam via `unsigned`. ([\#18585](https://github.com/element-hq/synapse/issues/18585))
-- Add ability to configure forward/outbound proxy via homeserver config instead of environment variables. See `http_proxy`, `https_proxy`, `no_proxy_hosts`. ([\#18686](https://github.com/element-hq/synapse/issues/18686))
-- Advertise experimental support for [MSC4306](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) (Thread Subscriptions) through `/_matrix/clients/versions` if enabled. ([\#18722](https://github.com/element-hq/synapse/issues/18722))
-- Stabilise support for delegating authentication to [Matrix Authentication Service](https://github.com/element-hq/matrix-authentication-service/). ([\#18759](https://github.com/element-hq/synapse/issues/18759))
-- Implement the push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306). ([\#18762](https://github.com/element-hq/synapse/issues/18762))
-
-### Bugfixes
-
-- Allow return code 403 (allowed by C2S Spec since v1.2) when fetching profiles via federation. ([\#18696](https://github.com/element-hq/synapse/issues/18696))
-- Register the MSC4306 (Thread Subscriptions) endpoints in the CS API when the experimental feature is enabled. ([\#18726](https://github.com/element-hq/synapse/issues/18726))
-- Fix a long-standing bug where suspended users could not have server notices sent to them (a 403 was returned to the admin). ([\#18750](https://github.com/element-hq/synapse/issues/18750))
-- Fix an issue that could cause logcontexts to be lost on rate-limited requests. Found by @realtyem. ([\#18763](https://github.com/element-hq/synapse/issues/18763))
-- Fix invalidation of storage cache that was broken in 1.135.0. ([\#18786](https://github.com/element-hq/synapse/issues/18786))
-
-### Improved Documentation
-
-- Minor improvements to README. ([\#18700](https://github.com/element-hq/synapse/issues/18700))
-- Document that there can be multiple workers handling the `receipts` stream. ([\#18760](https://github.com/element-hq/synapse/issues/18760))
-- Improve worker documentation for some device paths. ([\#18761](https://github.com/element-hq/synapse/issues/18761))
-
-### Deprecations and Removals
-
-- Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`. See [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11360) for more information. ([\#18737](https://github.com/element-hq/synapse/issues/18737))
-
-### Internal Changes
-
-- Add debug logging for HMAC digest verification failures when using the admin API to register users. ([\#18474](https://github.com/element-hq/synapse/issues/18474))
-- Speed up upgrading a room with large numbers of banned users. ([\#18574](https://github.com/element-hq/synapse/issues/18574))
-- Fix config documentation generation script on Windows by enforcing UTF-8. ([\#18580](https://github.com/element-hq/synapse/issues/18580))
-- Refactor cache, background process, `Counter`, `LaterGauge`, `GaugeBucketCollector`, `Histogram`, and `Gauge` metrics to be homeserver-scoped. ([\#18656](https://github.com/element-hq/synapse/issues/18656), [\#18714](https://github.com/element-hq/synapse/issues/18714), [\#18715](https://github.com/element-hq/synapse/issues/18715), [\#18724](https://github.com/element-hq/synapse/issues/18724), [\#18753](https://github.com/element-hq/synapse/issues/18753), [\#18725](https://github.com/element-hq/synapse/issues/18725), [\#18670](https://github.com/element-hq/synapse/issues/18670), [\#18748](https://github.com/element-hq/synapse/issues/18748), [\#18751](https://github.com/element-hq/synapse/issues/18751))
-- Reduce database usage in Sliding Sync by not querying for background update completion after the update is known to be complete. ([\#18718](https://github.com/element-hq/synapse/issues/18718))
-- Improve order of validation and ratelimiting in room creation. ([\#18723](https://github.com/element-hq/synapse/issues/18723))
-- Bump minimum version bound on Twisted to 21.2.0. ([\#18727](https://github.com/element-hq/synapse/issues/18727), [\#18729](https://github.com/element-hq/synapse/issues/18729))
-- Use `twisted.internet.testing` module in tests instead of deprecated `twisted.test.proto_helpers`. ([\#18728](https://github.com/element-hq/synapse/issues/18728))
-- Remove obsolete `/send_event` replication endpoint. ([\#18730](https://github.com/element-hq/synapse/issues/18730))
-- Update metrics linting to be able to handle custom metrics. ([\#18733](https://github.com/element-hq/synapse/issues/18733))
-- Work around `twisted.protocols.amp.TooLong` error by reducing logging in some tests. ([\#18736](https://github.com/element-hq/synapse/issues/18736))
-- Prevent "Move labelled issues to correct projects" GitHub Actions workflow from failing when an issue is already on the project board. ([\#18755](https://github.com/element-hq/synapse/issues/18755))
-- Bump minimum supported Rust version (MSRV) to 1.82.0. Missed in [#18553](https://github.com/element-hq/synapse/pull/18553) (released in Synapse 1.134.0). ([\#18757](https://github.com/element-hq/synapse/issues/18757))
-- Make `Clock.sleep(...)` return a coroutine, so that mypy can catch places where we don't await on it. ([\#18772](https://github.com/element-hq/synapse/issues/18772))
-- Update implementation of [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to include automatic subscription conflict prevention as introduced in later drafts. ([\#18756](https://github.com/element-hq/synapse/issues/18756))
-
-
-
-### Updates to locked dependencies
-
-* Bump gitpython from 3.1.44 to 3.1.45. ([\#18743](https://github.com/element-hq/synapse/issues/18743))
-* Bump mypy-zope from 1.0.12 to 1.0.13. ([\#18744](https://github.com/element-hq/synapse/issues/18744))
-* Bump phonenumbers from 9.0.9 to 9.0.10. ([\#18741](https://github.com/element-hq/synapse/issues/18741))
-* Bump ruff from 0.12.4 to 0.12.5. ([\#18742](https://github.com/element-hq/synapse/issues/18742))
-* Bump sentry-sdk from 2.32.0 to 2.33.2. ([\#18745](https://github.com/element-hq/synapse/issues/18745))
-* Bump tokio from 1.46.1 to 1.47.0. ([\#18740](https://github.com/element-hq/synapse/issues/18740))
-* Bump types-jsonschema from 4.24.0.20250708 to 4.25.0.20250720. ([\#18703](https://github.com/element-hq/synapse/issues/18703))
-* Bump types-psycopg2 from 2.9.21.20250516 to 2.9.21.20250718. ([\#18706](https://github.com/element-hq/synapse/issues/18706))
-
 # Synapse 1.135.0 (2025-08-01)
 
 No significant changes since 1.135.0rc2.
12  Cargo.lock  generated

@@ -1270,9 +1270,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.142"
+version = "1.0.141"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7"
+checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
 dependencies = [
  "itoa",
  "memchr",
@@ -1322,9 +1322,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"

 [[package]]
 name = "slab"
-version = "0.4.11"
+version = "0.4.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
+checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d"

 [[package]]
 name = "smallvec"
@@ -1478,9 +1478,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "tokio"
-version = "1.47.1"
+version = "1.47.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
+checksum = "43864ed400b6043a4757a25c7a64a8efde741aed79a056a2fb348a406701bb35"
 dependencies = [
  "backtrace",
  "bytes",
1  changelog.d/18474.misc  Normal file
@@ -0,0 +1 @@
+Add debug logging for HMAC digest verification failures when using the admin API to register users.

1  changelog.d/18514.feature  Normal file
@@ -0,0 +1 @@
+Add configurable rate limiting for the creation of rooms.

1  changelog.d/18540.feature  Normal file
@@ -0,0 +1 @@
+Add support for [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) - Redact on Kick/Ban.

1  changelog.d/18574.misc  Normal file
@@ -0,0 +1 @@
+Speed up upgrading a room with large numbers of banned users.

1  changelog.d/18580.misc  Normal file
@@ -0,0 +1 @@
+Fix config documentation generation script on Windows by enforcing UTF-8.

1  changelog.d/18585.feature  Normal file
@@ -0,0 +1 @@
+When admins enable themselves to see soft-failed events, they will also see if the cause is due to the policy server flagging them as spam via `unsigned`.

1  changelog.d/18656.misc  Normal file
@@ -0,0 +1 @@
+Refactor `Counter` metrics to be homeserver-scoped.

1  changelog.d/18670.misc  Normal file
@@ -0,0 +1 @@
+Refactor background process metrics to be homeserver-scoped.

1  changelog.d/18686.feature  Normal file
@@ -0,0 +1 @@
+Add ability to configure forward/outbound proxy via homeserver config instead of environment variables. See `http_proxy`, `https_proxy`, `no_proxy_hosts`.

@@ -1 +0,0 @@
-Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled.

1  changelog.d/18696.bugfix  Normal file
@@ -0,0 +1 @@
+Allow return code 403 (allowed by C2S Spec since v1.2) when fetching profiles via federation.

1  changelog.d/18700.doc  Normal file
@@ -0,0 +1 @@
+Minor improvements to README.

1  changelog.d/18714.misc  Normal file
@@ -0,0 +1 @@
+Refactor `LaterGauge` metrics to be homeserver-scoped.

1  changelog.d/18715.misc  Normal file
@@ -0,0 +1 @@
+Refactor `GaugeBucketCollector` metrics to be homeserver-scoped.

1  changelog.d/18718.misc  Normal file
@@ -0,0 +1 @@
+Reduce database usage in Sliding Sync by not querying for background update completion after the update is known to be complete.

1  changelog.d/18722.feature  Normal file
@@ -0,0 +1 @@
+Advertise experimental support for [MSC4306](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) through `/_matrix/clients/versions` if enabled.

1  changelog.d/18723.misc  Normal file
@@ -0,0 +1 @@
+Improve order of validation and ratelimiting in room creation.

1  changelog.d/18724.misc  Normal file
@@ -0,0 +1 @@
+Refactor `Histogram` metrics to be homeserver-scoped.

1  changelog.d/18725.misc  Normal file
@@ -0,0 +1 @@
+Refactor `Gauge` metrics to be homeserver-scoped.

1  changelog.d/18726.bugfix  Normal file
@@ -0,0 +1 @@
+Register the MSC4306 endpoints in the CS API when the experimental feature is enabled.

1  changelog.d/18727.misc  Normal file
@@ -0,0 +1 @@
+Bump minimum version bound on Twisted to 21.2.0.

1  changelog.d/18728.misc  Normal file
@@ -0,0 +1 @@
+Use `twisted.internet.testing` module in tests instead of deprecated `twisted.test.proto_helpers`.

1  changelog.d/18729.misc  Normal file
@@ -0,0 +1 @@
+Bump minimum version bound on Twisted to 21.2.0.

1  changelog.d/18730.misc  Normal file
@@ -0,0 +1 @@
+Remove obsolete `/send_event` replication endpoint.

1  changelog.d/18733.misc  Normal file
@@ -0,0 +1 @@
+Update metrics linting to be able to handle custom metrics.

1  changelog.d/18736.misc  Normal file
@@ -0,0 +1 @@
+Work around `twisted.protocols.amp.TooLong` error by reducing logging in some tests.

1  changelog.d/18737.removal  Normal file
@@ -0,0 +1 @@
+Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`. See the relevant section in the upgrade notes for more information.

1  changelog.d/18748.misc  Normal file
@@ -0,0 +1 @@
+Refactor cache metrics to be homeserver-scoped.

1  changelog.d/18750.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a long-standing bug where suspended users could not have server notices sent to them (a 403 was returned to the admin).

1  changelog.d/18753.misc  Normal file
@@ -0,0 +1 @@
+Refactor `Histogram` metrics to be homeserver-scoped.

1  changelog.d/18755.misc  Normal file
@@ -0,0 +1 @@
+Prevent "Move labelled issues to correct projects" GitHub Actions workflow from failing when an issue is already on the project board.

1  changelog.d/18756.misc  Normal file
@@ -0,0 +1 @@
+Update implementation of [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to include automatic subscription conflict prevention as introduced in later drafts.

1  changelog.d/18757.misc  Normal file
@@ -0,0 +1 @@
+Bump minimum supported Rust version (MSRV) to 1.82.0. Missed in [#18553](https://github.com/element-hq/synapse/pull/18553) (released in Synapse 1.134.0).

1  changelog.d/18759.feature  Normal file
@@ -0,0 +1 @@
+Stable support for delegating authentication to [Matrix Authentication Service](https://github.com/element-hq/matrix-authentication-service/).

1  changelog.d/18760.doc.md  Normal file
@@ -0,0 +1 @@
+Document that there can be multiple workers handling the `receipts` stream.

1  changelog.d/18761.doc.md  Normal file
@@ -0,0 +1 @@
+Improve worker documentation for some device paths.

1  changelog.d/18762.feature  Normal file
@@ -0,0 +1 @@
+Implement the push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306).

1  changelog.d/18763.bugfix  Normal file
@@ -0,0 +1 @@
+Fix an issue that could cause logcontexts to be lost on rate-limited requests. Found by @realtyem.

1  changelog.d/18772.misc  Normal file
@@ -0,0 +1 @@
+Make `Clock.sleep(..)` return a coroutine, so that mypy can catch places where we don't await on it.

1  changelog.d/18787.misc  Normal file
@@ -0,0 +1 @@
+CI debugging.

@@ -1 +0,0 @@
-Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow newer draft.
36  debian/changelog  vendored

@@ -1,39 +1,3 @@
-matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.137.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Aug 2025 10:55:22 +0100
-
-matrix-synapse-py3 (1.136.0) stable; urgency=medium
-
-  * New Synapse release 1.136.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Aug 2025 13:18:03 +0100
-
-matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 12:18:52 -0600
-
-matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.136.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Aug 2025 08:13:30 -0600
-
-matrix-synapse-py3 (1.135.2) stable; urgency=medium
-
-  * New Synapse release 1.135.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:52:01 -0600
-
-matrix-synapse-py3 (1.135.1) stable; urgency=medium
-
-  * New Synapse release 1.135.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 11 Aug 2025 11:13:15 -0600
-
 matrix-synapse-py3 (1.135.0) stable; urgency=medium

   * New Synapse release 1.135.0.

@@ -88,8 +88,7 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=172.16.0.0-172.31.255.255

 # recommended additional local peers to block, to mitigate external access to internal services.
-# https://www.enablesecurity.com/blog/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
-# https://www.enablesecurity.com/blog/cve-2020-26262-bypass-of-coturns-access-control-protection/#further-concerns-what-else
+# https://www.rtcsec.com/article/slack-webrtc-turn-compromise-and-bug-bounty/#how-to-fix-an-open-turn-relay-to-address-this-vulnerability
 no-multicast-peers
 denied-peer-ip=0.0.0.0-0.255.255.255
 denied-peer-ip=100.64.0.0-100.127.255.255

@@ -102,14 +101,6 @@ This will install and start a systemd service called `coturn`.
 denied-peer-ip=198.51.100.0-198.51.100.255
 denied-peer-ip=203.0.113.0-203.0.113.255
 denied-peer-ip=240.0.0.0-255.255.255.255
-denied-peer-ip=::1
-denied-peer-ip=64:ff9b::-64:ff9b::ffff:ffff
-denied-peer-ip=::ffff:0.0.0.0-::ffff:255.255.255.255
-denied-peer-ip=100::-100::ffff:ffff:ffff:ffff
-denied-peer-ip=2001::-2001:1ff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=2002::-2002:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fc00::-fdff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
-denied-peer-ip=fe80::-febf:ffff:ffff:ffff:ffff:ffff:ffff:ffff

 # special case the turn server itself so that client->TURN->TURN->client flows work
 # this should be one of the turn server's listening IPs

@@ -4174,7 +4174,7 @@ The default power levels for each preset are:
 "m.room.history_visibility": 100
 "m.room.canonical_alias": 50
 "m.room.avatar": 50
-"m.room.tombstone": 100 (150 if MSC4289 is used)
+"m.room.tombstone": 100
 "m.room.server_acl": 100
 "m.room.encryption": 100
 ```
202  poetry.lock  generated

@@ -441,6 +441,24 @@ files = [
  {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
 ]

+[[package]]
+name = "deprecated"
+version = "1.2.13"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+groups = ["dev"]
+files = [
+    {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"},
+    {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"},
+]
+
+[package.dependencies]
+wrapt = ">=1.10,<2"
+
+[package.extras]
+dev = ["PyTest (<5) ; python_version < \"3.6\"", "PyTest ; python_version >= \"3.6\"", "PyTest-Cov (<2.6) ; python_version < \"3.6\"", "PyTest-Cov ; python_version >= \"3.6\"", "bump2version (<1)", "configparser (<5) ; python_version < \"3\"", "importlib-metadata (<3) ; python_version < \"3\"", "importlib-resources (<4) ; python_version < \"3\"", "sphinx (<2)", "sphinxcontrib-websupport (<2) ; python_version < \"3\"", "tox", "zipp (<2) ; python_version < \"3\""]
+
 [[package]]
 name = "docutils"
 version = "0.19"
@@ -1369,50 +1387,44 @@ docs = ["sphinx (>=8,<9)", "sphinx-autobuild"]

 [[package]]
 name = "mypy"
-version = "1.17.1"
+version = "1.16.1"
 description = "Optional static typing for Python"
 optional = false
 python-versions = ">=3.9"
 groups = ["dev"]
 files = [
-    {file = "mypy-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972"},
-    {file = "mypy-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7"},
-    {file = "mypy-1.17.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df"},
-    {file = "mypy-1.17.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390"},
-    {file = "mypy-1.17.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94"},
-    {file = "mypy-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b"},
-    {file = "mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58"},
-    {file = "mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5"},
-    {file = "mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd"},
-    {file = "mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b"},
-    {file = "mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5"},
-    {file = "mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b"},
-    {file = "mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb"},
-    {file = "mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403"},
-    {file = "mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056"},
-    {file = "mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341"},
-    {file = "mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb"},
-    {file = "mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19"},
-    {file = "mypy-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7"},
-    {file = "mypy-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81"},
-    {file = "mypy-1.17.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6"},
-    {file = "mypy-1.17.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849"},
-    {file = "mypy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14"},
-    {file = "mypy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a"},
-    {file = "mypy-1.17.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733"},
-    {file = "mypy-1.17.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd"},
-    {file = "mypy-1.17.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0"},
-    {file = "mypy-1.17.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a"},
-    {file = "mypy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91"},
-    {file = "mypy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed"},
-    {file = "mypy-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9"},
-    {file = "mypy-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99"},
-    {file = "mypy-1.17.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8"},
-    {file = "mypy-1.17.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8"},
-    {file = "mypy-1.17.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259"},
-    {file = "mypy-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d"},
-    {file = "mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9"},
-    {file = "mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01"},
+    {file = "mypy-1.16.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b4f0fed1022a63c6fec38f28b7fc77fca47fd490445c69d0a66266c59dd0b88a"},
+    {file = "mypy-1.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:86042bbf9f5a05ea000d3203cf87aa9d0ccf9a01f73f71c58979eb9249f46d72"},
+    {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea7469ee5902c95542bea7ee545f7006508c65c8c54b06dc2c92676ce526f3ea"},
+    {file = "mypy-1.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:352025753ef6a83cb9e7f2427319bb7875d1fdda8439d1e23de12ab164179574"},
+    {file = "mypy-1.16.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff9fa5b16e4c1364eb89a4d16bcda9987f05d39604e1e6c35378a2987c1aac2d"},
+    {file = "mypy-1.16.1-cp310-cp310-win_amd64.whl", hash = "sha256:1256688e284632382f8f3b9e2123df7d279f603c561f099758e66dd6ed4e8bd6"},
+    {file = "mypy-1.16.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:472e4e4c100062488ec643f6162dd0d5208e33e2f34544e1fc931372e806c0cc"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea16e2a7d2714277e349e24d19a782a663a34ed60864006e8585db08f8ad1782"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08e850ea22adc4d8a4014651575567b0318ede51e8e9fe7a68f25391af699507"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22d76a63a42619bfb90122889b903519149879ddbf2ba4251834727944c8baca"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2c7ce0662b6b9dc8f4ed86eb7a5d505ee3298c04b40ec13b30e572c0e5ae17c4"},
|
||||
{file = "mypy-1.16.1-cp311-cp311-win_amd64.whl", hash = "sha256:211287e98e05352a2e1d4e8759c5490925a7c784ddc84207f4714822f8cf99b6"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:af4792433f09575d9eeca5c63d7d90ca4aeceda9d8355e136f80f8967639183d"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:66df38405fd8466ce3517eda1f6640611a0b8e70895e2a9462d1d4323c5eb4b9"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44e7acddb3c48bd2713994d098729494117803616e116032af192871aed80b79"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ab5eca37b50188163fa7c1b73c685ac66c4e9bdee4a85c9adac0e91d8895e15"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb6229b2c9086247e21a83c309754b9058b438704ad2f6807f0d8227f6ebdd"},
|
||||
{file = "mypy-1.16.1-cp312-cp312-win_amd64.whl", hash = "sha256:1f0435cf920e287ff68af3d10a118a73f212deb2ce087619eb4e648116d1fe9b"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ddc91eb318c8751c69ddb200a5937f1232ee8efb4e64e9f4bc475a33719de438"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:87ff2c13d58bdc4bbe7dc0dedfe622c0f04e2cb2a492269f3b418df2de05c536"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a7cfb0fe29fe5a9841b7c8ee6dffb52382c45acdf68f032145b75620acfbd6f"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be"},
|
||||
{file = "mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40"},
|
||||
{file = "mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b"},
|
||||
{file = "mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37"},
|
||||
{file = "mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1531,14 +1543,14 @@ files = [

[[package]]
name = "phonenumbers"
version = "9.0.11"
version = "9.0.10"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "phonenumbers-9.0.11-py2.py3-none-any.whl", hash = "sha256:a8ebb2136f1f14dfdbadb98be01cb71b96f880dea011eb5e0921967fe3a23abf"},
{file = "phonenumbers-9.0.11.tar.gz", hash = "sha256:6573858dcf0a7a2753a071375e154d9fc11791546c699b575af95d2ba7d84a1d"},
{file = "phonenumbers-9.0.10-py2.py3-none-any.whl", hash = "sha256:13b12d269be1f2b363c9bc2868656a7e2e8b50f1a1cef629c75005da6c374c6b"},
{file = "phonenumbers-9.0.10.tar.gz", hash = "sha256:c2d15a6a9d0534b14a7764f51246ada99563e263f65b80b0251d1a760ac4a1ba"},
]

[[package]]
@@ -1908,21 +1920,22 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pygithub"
version = "2.7.0"
version = "2.6.1"
description = "Use the full Github API v3"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pygithub-2.7.0-py3-none-any.whl", hash = "sha256:40ecbfe26dc55cc34ab4b0ffa1d455e6f816ef9a2bc8d6f5ad18ce572f163700"},
{file = "pygithub-2.7.0.tar.gz", hash = "sha256:7cd6eafabb09b5369afba3586d86b1f1ad6f1326d2ff01bc47bb26615dce4cbb"},
{file = "PyGithub-2.6.1-py3-none-any.whl", hash = "sha256:6f2fa6d076ccae475f9fc392cc6cdbd54db985d4f69b8833a28397de75ed6ca3"},
{file = "pygithub-2.6.1.tar.gz", hash = "sha256:b5c035392991cca63959e9453286b41b54d83bf2de2daa7d7ff7e4312cebf3bf"},
]

[package.dependencies]
Deprecated = "*"
pyjwt = {version = ">=2.4.0", extras = ["crypto"]}
pynacl = ">=1.4.0"
requests = ">=2.14.0"
typing-extensions = ">=4.5.0"
typing-extensions = ">=4.0.0"
urllib3 = ">=1.26.0"

[[package]]
@@ -2998,14 +3011,14 @@ types-cffi = "*"

[[package]]
name = "types-pyyaml"
version = "6.0.12.20250809"
version = "6.0.12.20250516"
description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_pyyaml-6.0.12.20250809-py3-none-any.whl", hash = "sha256:032b6003b798e7de1a1ddfeefee32fac6486bdfe4845e0ae0e7fb3ee4512b52f"},
{file = "types_pyyaml-6.0.12.20250809.tar.gz", hash = "sha256:af4a1aca028f18e75297da2ee0da465f799627370d74073e96fee876524f61b5"},
{file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"},
{file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"},
]

[[package]]
@@ -3025,14 +3038,14 @@ urllib3 = ">=2"

[[package]]
name = "types-setuptools"
version = "80.9.0.20250809"
version = "80.9.0.20250529"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "types_setuptools-80.9.0.20250809-py3-none-any.whl", hash = "sha256:7c6539b4c7ac7b4ab4db2be66d8a58fb1e28affa3ee3834be48acafd94f5976a"},
{file = "types_setuptools-80.9.0.20250809.tar.gz", hash = "sha256:e986ba37ffde364073d76189e1d79d9928fb6f5278c7d07589cde353d0218864"},
{file = "types_setuptools-80.9.0.20250529-py3-none-any.whl", hash = "sha256:00dfcedd73e333a430e10db096e4d46af93faf9314f832f13b6bbe3d6757e95f"},
{file = "types_setuptools-80.9.0.20250529.tar.gz", hash = "sha256:79e088ba0cba2186c8d6499cbd3e143abb142d28a44b042c28d3148b1e353c91"},
]

[[package]]
|
||||
@@ -3104,6 +3117,91 @@ files = [
|
||||
{file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wrapt"
|
||||
version = "1.15.0"
|
||||
description = "Module for decorators, wrappers and monkey patching."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
|
||||
{file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
|
||||
{file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
|
||||
{file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
|
||||
{file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
|
||||
{file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
|
||||
{file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
|
||||
{file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
|
||||
{file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
|
||||
{file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
|
||||
{file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xmlschema"
|
||||
version = "2.4.0"
|
||||
|
||||
@@ -101,7 +101,7 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
version = "1.137.0rc1"
version = "1.135.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"

@@ -289,10 +289,10 @@ pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
default_enabled: true,
}];

pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.unsubscribed_thread"),
priority_class: 6,
rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.unsubscribed_thread"),
priority_class: 1,
conditions: Cow::Borrowed(&[Condition::Known(
KnownCondition::Msc4306ThreadSubscription { subscribed: false },
)]),
@@ -301,8 +301,8 @@ pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
default_enabled: true,
},
PushRule {
rule_id: Cow::Borrowed("global/postcontent/.io.element.msc4306.rule.subscribed_thread"),
priority_class: 6,
rule_id: Cow::Borrowed("global/content/.io.element.msc4306.rule.subscribed_thread"),
priority_class: 1,
conditions: Cow::Borrowed(&[Condition::Known(
KnownCondition::Msc4306ThreadSubscription { subscribed: true },
)]),
@@ -310,9 +310,6 @@ pub const BASE_APPEND_POSTCONTENT_RULES: &[PushRule] = &[
default: true,
default_enabled: true,
},
];

pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/underride/.m.rule.call"),
priority_class: 1,
@@ -729,7 +726,6 @@ lazy_static! {
.iter()
.chain(BASE_APPEND_OVERRIDE_RULES.iter())
.chain(BASE_APPEND_CONTENT_RULES.iter())
.chain(BASE_APPEND_POSTCONTENT_RULES.iter())
.chain(BASE_APPEND_UNDERRIDE_RULES.iter())
.map(|rule| { (&*rule.rule_id, rule) })
.collect();

@@ -527,7 +527,6 @@ impl PushRules {
.chain(base_rules::BASE_APPEND_OVERRIDE_RULES.iter())
.chain(self.content.iter())
.chain(base_rules::BASE_APPEND_CONTENT_RULES.iter())
.chain(base_rules::BASE_APPEND_POSTCONTENT_RULES.iter())
.chain(self.room.iter())
.chain(self.sender.iter())
.chain(self.underride.iter())

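For orientation only, and not part of the diff itself: the chained iterators in the two hunks above fix the order in which rule kinds are flattened into a single ruleset, with the `postcontent` kind present on one side of this comparison. A rough sketch of that ordering (the labels are descriptive only, not an API):

```python
# Illustrative only: the flattening order implied by the chained iterators above.
PUSH_RULE_FLATTENING_ORDER = [
    "server-default override rules",
    "user-defined content rules",
    "server-default content rules",
    "server-default postcontent rules",  # MSC4306 thread-subscription rules sit here on one branch
    "user-defined room rules",
    "user-defined sender rules",
    "user-defined underride rules",
]
```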
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.137/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.135/synapse-config.schema.json
type: object
properties:
modules:
@@ -5184,7 +5184,7 @@ properties:

"m.room.avatar": 50

"m.room.tombstone": 100 (150 if MSC4289 is used)
"m.room.tombstone": 100

"m.room.server_acl": 100

@@ -30,7 +30,6 @@ from typing import Any, Callable, Dict, Optional

import requests
import yaml
from typing_extensions import Never

_CONFLICTING_SHARED_SECRET_OPTS_ERROR = """\
Conflicting options 'registration_shared_secret' and 'registration_shared_secret_path'
@@ -41,10 +40,6 @@ _NO_SHARED_SECRET_OPTS_ERROR = """\
No 'registration_shared_secret' or 'registration_shared_secret_path' defined in config.
"""

_EMPTY_SHARED_SECRET_PATH_OPTS_ERROR = """\
The secret given via `registration_shared_secret_path` must not be empty.
"""

_DEFAULT_SERVER_URL = "http://localhost:8008"


@@ -175,12 +170,6 @@ def register_new_user(
)


def bail(err_msg: str) -> Never:
"""Prints the given message to stderr and exits."""
print(err_msg, file=sys.stderr)
sys.exit(1)


def main() -> None:
logging.captureWarnings(True)

@@ -273,20 +262,15 @@ def main() -> None:
assert config is not None

secret = config.get("registration_shared_secret")
if not isinstance(secret, (str, type(None))):
bail("registration_shared_secret is not a string.")
secret_file = config.get("registration_shared_secret_path")
if not isinstance(secret_file, (str, type(None))):
bail("registration_shared_secret_path is not a string.")

if not secret and not secret_file:
bail(_NO_SHARED_SECRET_OPTS_ERROR)
elif secret and secret_file:
bail(_CONFLICTING_SHARED_SECRET_OPTS_ERROR)
elif not secret and secret_file:
if secret_file:
if secret:
print(_CONFLICTING_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
sys.exit(1)
secret = _read_file(secret_file, "registration_shared_secret_path").strip()
if not secret:
bail(_EMPTY_SHARED_SECRET_PATH_OPTS_ERROR)
if not secret:
print(_NO_SHARED_SECRET_OPTS_ERROR, file=sys.stderr)
sys.exit(1)

if args.password_file:
password = _read_file(args.password_file, "password-file").strip()

@@ -296,4 +296,4 @@ class InternalAuth(BaseAuth):
Returns:
True if the user is an admin
"""
return await self.store.is_server_admin(requester.user.to_string())
return await self.store.is_server_admin(requester.user)

@@ -46,9 +46,6 @@ MAX_USERID_LENGTH = 255
# Constant value used for the pseudo-thread which is the main timeline.
MAIN_TIMELINE: Final = "main"

# MAX_INT + 1, so it always trumps any PL in canonical JSON.
CREATOR_POWER_LEVEL = 2**53


class Membership:
"""Represents the membership states of a user in a room."""
@@ -238,8 +235,6 @@ class EventContentFields:
#
# This is deprecated in MSC2175.
ROOM_CREATOR: Final = "creator"
# MSC4289
ADDITIONAL_CREATORS: Final = "additional_creators"

# The version of the room for `m.room.create` events.
ROOM_VERSION: Final = "room_version"

@@ -36,14 +36,12 @@ class EventFormatVersions:
ROOM_V1_V2 = 1 # $id:server event id format: used for room v1 and v2
ROOM_V3 = 2 # MSC1659-style $hash event id format: used for room v3
ROOM_V4_PLUS = 3 # MSC1884-style $hash format: introduced for room v4
ROOM_V11_HYDRA_PLUS = 4 # MSC4291 room IDs as hashes: introduced for room HydraV11


KNOWN_EVENT_FORMAT_VERSIONS = {
EventFormatVersions.ROOM_V1_V2,
EventFormatVersions.ROOM_V3,
EventFormatVersions.ROOM_V4_PLUS,
EventFormatVersions.ROOM_V11_HYDRA_PLUS,
}


@@ -52,7 +50,6 @@ class StateResolutionVersions:

V1 = 1 # room v1 state res
V2 = 2 # MSC1442 state res: room v2 and later
V2_1 = 3 # MSC4297 state res

class RoomDisposition:
|
||||
@@ -112,10 +109,6 @@ class RoomVersion:
|
||||
msc3931_push_features: Tuple[str, ...] # values from PushRuleRoomFlag
|
||||
# MSC3757: Restricting who can overwrite a state event
|
||||
msc3757_enabled: bool
|
||||
# MSC4289: Creator power enabled
|
||||
msc4289_creator_power_enabled: bool
|
||||
# MSC4291: Room IDs as hashes of the create event
|
||||
msc4291_room_ids_as_hashes: bool
|
||||
|
||||
|
||||
class RoomVersions:
|
||||
@@ -138,8 +131,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V2 = RoomVersion(
|
||||
"2",
|
||||
@@ -160,8 +151,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V3 = RoomVersion(
|
||||
"3",
|
||||
@@ -182,8 +171,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V4 = RoomVersion(
|
||||
"4",
|
||||
@@ -204,8 +191,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V5 = RoomVersion(
|
||||
"5",
|
||||
@@ -226,8 +211,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V6 = RoomVersion(
|
||||
"6",
|
||||
@@ -248,8 +231,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V7 = RoomVersion(
|
||||
"7",
|
||||
@@ -270,8 +251,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V8 = RoomVersion(
|
||||
"8",
|
||||
@@ -292,8 +271,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V9 = RoomVersion(
|
||||
"9",
|
||||
@@ -314,8 +291,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=False,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V10 = RoomVersion(
|
||||
"10",
|
||||
@@ -336,8 +311,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
MSC1767v10 = RoomVersion(
|
||||
# MSC1767 (Extensible Events) based on room version "10"
|
||||
@@ -359,8 +332,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(PushRuleRoomFlag.EXTENSIBLE_EVENTS,),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
MSC3757v10 = RoomVersion(
|
||||
# MSC3757 (Restricting who can overwrite a state event) based on room version "10"
|
||||
@@ -382,8 +353,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=True,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
V11 = RoomVersion(
|
||||
"11",
|
||||
@@ -404,8 +373,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
MSC3757v11 = RoomVersion(
|
||||
# MSC3757 (Restricting who can overwrite a state event) based on room version "11"
|
||||
@@ -427,52 +394,6 @@ class RoomVersions:
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=True,
|
||||
msc4289_creator_power_enabled=False,
|
||||
msc4291_room_ids_as_hashes=False,
|
||||
)
|
||||
HydraV11 = RoomVersion(
|
||||
"org.matrix.hydra.11",
|
||||
RoomDisposition.UNSTABLE,
|
||||
EventFormatVersions.ROOM_V11_HYDRA_PLUS,
|
||||
StateResolutionVersions.V2_1, # Changed from v11
|
||||
enforce_key_validity=True,
|
||||
special_case_aliases_auth=False,
|
||||
strict_canonicaljson=True,
|
||||
limit_notifications_power_levels=True,
|
||||
implicit_room_creator=True, # Used by MSC3820
|
||||
updated_redaction_rules=True, # Used by MSC3820
|
||||
restricted_join_rule=True,
|
||||
restricted_join_rule_fix=True,
|
||||
knock_join_rule=True,
|
||||
msc3389_relation_redactions=False,
|
||||
knock_restricted_join_rule=True,
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=True, # Changed from v11
|
||||
msc4291_room_ids_as_hashes=True, # Changed from v11
|
||||
)
|
||||
V12 = RoomVersion(
|
||||
"12",
|
||||
RoomDisposition.STABLE,
|
||||
EventFormatVersions.ROOM_V11_HYDRA_PLUS,
|
||||
StateResolutionVersions.V2_1, # Changed from v11
|
||||
enforce_key_validity=True,
|
||||
special_case_aliases_auth=False,
|
||||
strict_canonicaljson=True,
|
||||
limit_notifications_power_levels=True,
|
||||
implicit_room_creator=True, # Used by MSC3820
|
||||
updated_redaction_rules=True, # Used by MSC3820
|
||||
restricted_join_rule=True,
|
||||
restricted_join_rule_fix=True,
|
||||
knock_join_rule=True,
|
||||
msc3389_relation_redactions=False,
|
||||
knock_restricted_join_rule=True,
|
||||
enforce_int_power_levels=True,
|
||||
msc3931_push_features=(),
|
||||
msc3757_enabled=False,
|
||||
msc4289_creator_power_enabled=True, # Changed from v11
|
||||
msc4291_room_ids_as_hashes=True, # Changed from v11
|
||||
)
|
||||
|
||||
|
||||
@@ -490,10 +411,8 @@ KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
|
||||
RoomVersions.V9,
|
||||
RoomVersions.V10,
|
||||
RoomVersions.V11,
|
||||
RoomVersions.V12,
|
||||
RoomVersions.MSC3757v10,
|
||||
RoomVersions.MSC3757v11,
|
||||
RoomVersions.HydraV11,
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -535,15 +535,11 @@ class ExperimentalConfig(Config):
"msc4108_delegation_endpoint", None
)

auth_delegated = self.msc3861.enabled or (
config.get("matrix_authentication_service") or {}
).get("enabled", False)

if (
self.msc4108_enabled or self.msc4108_delegation_endpoint is not None
) and not auth_delegated:
) and not self.msc3861.enabled:
raise ConfigError(
"MSC4108 requires MSC3861 or matrix_authentication_service to be enabled",
"MSC4108 requires MSC3861 to be enabled",
("experimental", "msc4108_delegation_endpoint"),
)

@@ -590,5 +586,5 @@ class ExperimentalConfig(Config):
self.msc4293_enabled: bool = experimental.get("msc4293_enabled", False)

# MSC4306: Thread Subscriptions
# (and MSC4308: Thread Subscriptions extension to Sliding Sync)
# (and MSC4308: sliding sync extension for thread subscriptions)
self.msc4306_enabled: bool = experimental.get("msc4306_enabled", False)

@@ -101,9 +101,6 @@ def compute_content_hash(
event_dict.pop("outlier", None)
event_dict.pop("destinations", None)

# N.B. no need to pop the room_id from create events in MSC4291 rooms
# as they shouldn't have one.

event_json_bytes = encode_canonical_json(event_dict)

hashed = hash_algorithm(event_json_bytes)

@@ -45,7 +45,6 @@ from signedjson.sign import SignatureVerifyException, verify_signed_json
from unpaddedbase64 import decode_base64

from synapse.api.constants import (
CREATOR_POWER_LEVEL,
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
@@ -65,7 +64,6 @@ from synapse.api.room_versions import (
RoomVersion,
RoomVersions,
)
from synapse.events import is_creator
from synapse.state import CREATE_KEY
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
from synapse.types import (
@@ -263,8 +261,7 @@ async def check_state_independent_auth_rules(
|
||||
f"Event {event.event_id} has unexpected auth_event for {k}: {auth_event_id}",
|
||||
)
|
||||
|
||||
# 2.3 ... If there are entries which were themselves rejected under the checks performed on receipt
|
||||
# of a PDU, reject.
|
||||
# We also need to check that the auth event itself is not rejected.
|
||||
if auth_event.rejected_reason:
|
||||
raise AuthError(
|
||||
403,
|
||||
@@ -274,7 +271,7 @@ async def check_state_independent_auth_rules(
|
||||
|
||||
auth_dict[k] = auth_event_id
|
||||
|
||||
# 2.4. If event does not have a m.room.create in its auth_events, reject.
|
||||
# 3. If event does not have a m.room.create in its auth_events, reject.
|
||||
creation_event = auth_dict.get((EventTypes.Create, ""), None)
|
||||
if not creation_event:
|
||||
raise AuthError(403, "No create event in auth events")
|
||||
@@ -314,14 +311,13 @@ def check_state_dependent_auth_rules(
|
||||
|
||||
# Later code relies on there being a create event e.g _can_federate, _is_membership_change_allowed
|
||||
# so produce a more intelligible error if we don't have one.
|
||||
create_event = auth_dict.get(CREATE_KEY)
|
||||
if create_event is None:
|
||||
if auth_dict.get(CREATE_KEY) is None:
|
||||
raise AuthError(
|
||||
403, f"Event {event.event_id} is missing a create event in auth_events."
|
||||
)
|
||||
|
||||
# additional check for m.federate
|
||||
creating_domain = get_domain_from_id(create_event.sender)
|
||||
creating_domain = get_domain_from_id(event.room_id)
|
||||
originating_domain = get_domain_from_id(event.sender)
|
||||
if creating_domain != originating_domain:
|
||||
if not _can_federate(event, auth_dict):
|
||||
@@ -474,20 +470,12 @@ def _check_create(event: "EventBase") -> None:
if event.prev_event_ids():
raise AuthError(403, "Create event has prev events")

if event.room_version.msc4291_room_ids_as_hashes:
# 1.2 If the create event has a room_id, reject
if "room_id" in event:
raise AuthError(403, "Create event has a room_id")
else:
# 1.2 If the domain of the room_id does not match the domain of the sender,
# reject.
if not event.room_version.msc4291_room_ids_as_hashes:
sender_domain = get_domain_from_id(event.sender)
room_id_domain = get_domain_from_id(event.room_id)
if room_id_domain != sender_domain:
raise AuthError(
403, "Creation event's room_id domain does not match sender's"
)
# 1.2 If the domain of the room_id does not match the domain of the sender,
# reject.
sender_domain = get_domain_from_id(event.sender)
room_id_domain = get_domain_from_id(event.room_id)
if room_id_domain != sender_domain:
raise AuthError(403, "Creation event's room_id domain does not match sender's")

# 1.3 If content.room_version is present and is not a recognised version, reject
room_version_prop = event.content.get("room_version", "1")

@@ -504,16 +492,6 @@ def _check_create(event: "EventBase") -> None:
|
||||
):
|
||||
raise AuthError(403, "Create event lacks a 'creator' property")
|
||||
|
||||
# 1.5 If the additional_creators field is present and is not an array of strings where each
|
||||
# string is a valid user ID, reject.
|
||||
if (
|
||||
event.room_version.msc4289_creator_power_enabled
|
||||
and EventContentFields.ADDITIONAL_CREATORS in event.content
|
||||
):
|
||||
check_valid_additional_creators(
|
||||
event.content[EventContentFields.ADDITIONAL_CREATORS]
|
||||
)
|
||||
|
||||
|
||||
def _can_federate(event: "EventBase", auth_events: StateMap["EventBase"]) -> bool:
|
||||
creation_event = auth_events.get((EventTypes.Create, ""))
|
||||
@@ -555,13 +533,7 @@ def _is_membership_change_allowed(
|
||||
|
||||
target_user_id = event.state_key
|
||||
|
||||
# We need the create event in order to check if we can federate or not.
|
||||
# If it's missing, yell loudly. Previously we only did this inside the
|
||||
# _can_federate check.
|
||||
create_event = auth_events.get((EventTypes.Create, ""))
|
||||
if not create_event:
|
||||
raise AuthError(403, "Create event missing from auth_events")
|
||||
creating_domain = get_domain_from_id(create_event.sender)
|
||||
creating_domain = get_domain_from_id(event.room_id)
|
||||
target_domain = get_domain_from_id(target_user_id)
|
||||
if creating_domain != target_domain:
|
||||
if not _can_federate(event, auth_events):
|
||||
@@ -931,32 +903,6 @@ def _check_power_levels(
|
||||
except Exception:
|
||||
raise SynapseError(400, "Not a valid power level: %s" % (v,))
|
||||
|
||||
if room_version_obj.msc4289_creator_power_enabled:
|
||||
# Enforce the creator does not appear in the users map
|
||||
create_event = auth_events.get((EventTypes.Create, ""))
|
||||
if not create_event:
|
||||
raise SynapseError(
|
||||
400, "Cannot check power levels without a create event in auth_events"
|
||||
)
|
||||
if create_event.sender in user_list:
|
||||
raise SynapseError(
|
||||
400,
|
||||
"Creator user %s must not appear in content.users"
|
||||
% (create_event.sender,),
|
||||
)
|
||||
additional_creators = create_event.content.get(
|
||||
EventContentFields.ADDITIONAL_CREATORS, []
|
||||
)
|
||||
if additional_creators:
|
||||
creators_in_user_list = set(additional_creators).intersection(
|
||||
set(user_list)
|
||||
)
|
||||
if len(creators_in_user_list) > 0:
|
||||
raise SynapseError(
|
||||
400,
|
||||
"Additional creators users must not appear in content.users",
|
||||
)
|
||||
|
||||
# Reject events with stringy power levels if required by room version
|
||||
if (
|
||||
event.type == EventTypes.PowerLevels
|
||||
@@ -1082,9 +1028,6 @@ def get_user_power_level(user_id: str, auth_events: StateMap["EventBase"]) -> in
"A create event in the auth events chain is required to calculate user power level correctly,"
" but was not found. This indicates a bug"
)
if create_event.room_version.msc4289_creator_power_enabled:
if is_creator(create_event, user_id):
return CREATOR_POWER_LEVEL
power_level_event = get_power_level_event(auth_events)
if power_level_event:
level = power_level_event.content.get("users", {}).get(user_id)
@@ -1245,26 +1188,3 @@ def auth_types_for_event(
|
||||
auth_types.add(key)
|
||||
|
||||
return auth_types
|
||||
|
||||
|
||||
def check_valid_additional_creators(additional_creators: Any) -> None:
"""Check if the additional_creators provided is valid according to MSC4289.

The additional_creators can be supplied from an m.room.create event or from an /upgrade request.

Raises:
AuthError if the additional_creators is invalid for some reason.
"""
if type(additional_creators) is not list:
raise AuthError(400, "additional_creators must be an array")
for entry in additional_creators:
if type(entry) is not str:
raise AuthError(400, "entry in additional_creators is not a string")
if not UserID.is_valid(entry):
raise AuthError(400, "entry in additional_creators is not a valid user ID")
# UserID.is_valid doesn't actually validate everything, so check the rest manually.
if len(entry) > 255 or len(entry.encode("utf-8")) > 255:
raise AuthError(
400,
"entry in additional_creators too long",
)

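A quick illustration of how the validation above behaves. This is a sketch, not part of the diff; the import paths are assumptions based on where these symbols are defined in this changeset:

```python
# Sketch only: exercises check_valid_additional_creators() as defined above.
# Import paths are assumed from where these symbols live in this diff.
from synapse.api.errors import AuthError
from synapse.event_auth import check_valid_additional_creators

check_valid_additional_creators(["@alice:example.org"])  # a list of valid user IDs passes

for bad in ("not-a-list", [42], ["not a user id"]):
    try:
        check_valid_additional_creators(bad)
    except AuthError as e:
        print(e.code, e.msg)  # each case is rejected with a 400
```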
@@ -41,13 +41,10 @@ from typing import (
|
||||
import attr
|
||||
from unpaddedbase64 import encode_base64
|
||||
|
||||
from synapse.api.constants import EventContentFields, EventTypes, RelationTypes
|
||||
from synapse.api.constants import EventTypes, RelationTypes
|
||||
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
|
||||
from synapse.synapse_rust.events import EventInternalMetadata
|
||||
from synapse.types import (
|
||||
JsonDict,
|
||||
StrCollection,
|
||||
)
|
||||
from synapse.types import JsonDict, StrCollection
|
||||
from synapse.util.caches import intern_dict
|
||||
from synapse.util.frozenutils import freeze
|
||||
|
||||
@@ -212,6 +209,7 @@ class EventBase(metaclass=abc.ABCMeta):
|
||||
content: DictProperty[JsonDict] = DictProperty("content")
|
||||
hashes: DictProperty[Dict[str, str]] = DictProperty("hashes")
|
||||
origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts")
|
||||
room_id: DictProperty[str] = DictProperty("room_id")
|
||||
sender: DictProperty[str] = DictProperty("sender")
|
||||
# TODO state_key should be Optional[str]. This is generally asserted in Synapse
|
||||
# by calling is_state() first (which ensures it is not None), but it is hard (not possible?)
|
||||
@@ -226,10 +224,6 @@ class EventBase(metaclass=abc.ABCMeta):
|
||||
def event_id(self) -> str:
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def room_id(self) -> str:
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def membership(self) -> str:
|
||||
return self.content["membership"]
|
||||
@@ -392,10 +386,6 @@ class FrozenEvent(EventBase):
|
||||
def event_id(self) -> str:
|
||||
return self._event_id
|
||||
|
||||
@property
|
||||
def room_id(self) -> str:
|
||||
return self._dict["room_id"]
|
||||
|
||||
|
||||
class FrozenEventV2(EventBase):
|
||||
format_version = EventFormatVersions.ROOM_V3 # All events of this type are V2
|
||||
@@ -453,10 +443,6 @@ class FrozenEventV2(EventBase):
|
||||
self._event_id = "$" + encode_base64(compute_event_reference_hash(self)[1])
|
||||
return self._event_id
|
||||
|
||||
@property
|
||||
def room_id(self) -> str:
|
||||
return self._dict["room_id"]
|
||||
|
||||
def prev_event_ids(self) -> List[str]:
|
||||
"""Returns the list of prev event IDs. The order matches the order
|
||||
specified in the event, though there is no meaning to it.
|
||||
@@ -495,67 +481,6 @@ class FrozenEventV3(FrozenEventV2):
|
||||
return self._event_id
|
||||
|
||||
|
||||
class FrozenEventV4(FrozenEventV3):
"""FrozenEventV4 for MSC4291, where room IDs are hashes of the create event."""

format_version = EventFormatVersions.ROOM_V11_HYDRA_PLUS

"""Override the room_id for m.room.create events"""

def __init__(
self,
event_dict: JsonDict,
room_version: RoomVersion,
internal_metadata_dict: Optional[JsonDict] = None,
rejected_reason: Optional[str] = None,
):
super().__init__(
event_dict=event_dict,
room_version=room_version,
internal_metadata_dict=internal_metadata_dict,
rejected_reason=rejected_reason,
)
self._room_id: Optional[str] = None

@property
def room_id(self) -> str:
# if we have calculated the room ID already, don't do it again.
if self._room_id:
return self._room_id

is_create_event = self.type == EventTypes.Create and self.get_state_key() == ""

# for non-create events: use the supplied value from the JSON, as per FrozenEventV3
if not is_create_event:
self._room_id = self._dict["room_id"]
assert self._room_id is not None
return self._room_id

# for create events: calculate the room ID
from synapse.crypto.event_signing import compute_event_reference_hash

self._room_id = "!" + encode_base64(
compute_event_reference_hash(self)[1], urlsafe=True
)
return self._room_id

def auth_event_ids(self) -> StrCollection:
"""Returns the list of auth event IDs. The order matches the order
specified in the event, though there is no meaning to it.
Returns:
The list of event IDs of this event's auth_events
Includes the create event ID for convenience of all the codepaths
which expect the auth chain to include the create event ID, even though
it's explicitly not included on the wire. Excludes the create event
for the create event itself.
"""
create_event_id = "$" + self.room_id[1:]
assert create_event_id not in self._dict["auth_events"]
if self.type == EventTypes.Create and self.get_state_key() == "":
return self._dict["auth_events"] # should be []
return self._dict["auth_events"] + [create_event_id]

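To make the identifier relationship in `FrozenEventV4` above easier to follow: under MSC4291 the room ID and the create-event ID are both derived from the create event's reference hash, differing only in their sigil. A minimal sketch; the canonical-JSON/redaction steps that produce the reference hash are elided, and `ref_hash` stands in for the bytes `compute_event_reference_hash` returns:

```python
# Sketch of the MSC4291 identifier relationship used in FrozenEventV4 above.
from unpaddedbase64 import encode_base64

def msc4291_ids(ref_hash: bytes) -> tuple[str, str]:
    # Room ID: "!" + URL-safe unpadded base64 of the create event's reference hash.
    room_id = "!" + encode_base64(ref_hash, urlsafe=True)
    # Create event ID, as reconstructed in auth_event_ids(): "$" + the same encoded hash.
    create_event_id = "$" + room_id[1:]
    return room_id, create_event_id
```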
def _event_type_from_format_version(
format_version: int,
) -> Type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]:
@@ -575,8 +500,6 @@ def _event_type_from_format_version(
return FrozenEventV2
elif format_version == EventFormatVersions.ROOM_V4_PLUS:
return FrozenEventV3
elif format_version == EventFormatVersions.ROOM_V11_HYDRA_PLUS:
return FrozenEventV4
else:
raise Exception("No event format %r" % (format_version,))

@@ -636,23 +559,6 @@ def relation_from_event(event: EventBase) -> Optional[_EventRelation]:
return _EventRelation(parent_id, rel_type, aggregation_key)


def is_creator(create: EventBase, user_id: str) -> bool:
"""
Return true if the provided user ID is the room creator.

This includes additional creators in MSC4289.
"""
assert create.type == EventTypes.Create
if create.sender == user_id:
return True
if create.room_version.msc4289_creator_power_enabled:
additional_creators = set(
create.content.get(EventContentFields.ADDITIONAL_CREATORS, [])
)
return user_id in additional_creators
return False


@attr.s(slots=True, frozen=True, auto_attribs=True)
class StrippedStateEvent:
"""

@@ -82,8 +82,7 @@ class EventBuilder:
|
||||
|
||||
room_version: RoomVersion
|
||||
|
||||
# MSC4291 makes the room ID == the create event ID. This means the create event has no room_id.
|
||||
room_id: Optional[str]
|
||||
room_id: str
|
||||
type: str
|
||||
sender: str
|
||||
|
||||
@@ -143,14 +142,7 @@ class EventBuilder:
|
||||
Returns:
|
||||
The signed and hashed event.
|
||||
"""
|
||||
# Create events always have empty auth_events.
|
||||
if self.type == EventTypes.Create and self.is_state() and self.state_key == "":
|
||||
auth_event_ids = []
|
||||
|
||||
# Calculate auth_events for non-create events
|
||||
if auth_event_ids is None:
|
||||
# Every non-create event must have a room ID
|
||||
assert self.room_id is not None
|
||||
state_ids = await self._state.compute_state_after_events(
|
||||
self.room_id,
|
||||
prev_event_ids,
|
||||
@@ -232,31 +224,12 @@ class EventBuilder:
|
||||
"auth_events": auth_events,
|
||||
"prev_events": prev_events,
|
||||
"type": self.type,
|
||||
"room_id": self.room_id,
|
||||
"sender": self.sender,
|
||||
"content": self.content,
|
||||
"unsigned": self.unsigned,
|
||||
"depth": depth,
|
||||
}
|
||||
if self.room_id is not None:
|
||||
event_dict["room_id"] = self.room_id
|
||||
|
||||
if self.room_version.msc4291_room_ids_as_hashes:
|
||||
# In MSC4291: the create event has no room ID as the create event ID /is/ the room ID.
|
||||
if (
|
||||
self.type == EventTypes.Create
|
||||
and self.is_state()
|
||||
and self._state_key == ""
|
||||
):
|
||||
assert self.room_id is None
|
||||
else:
|
||||
# All other events do not reference the create event in auth_events, as the room ID
|
||||
# /is/ the create event. However, the rest of the code (for consistency between room
|
||||
# versions) assume that the create event remains part of the auth events. c.f. event
|
||||
# class which automatically adds the create event when `.auth_event_ids()` is called
|
||||
assert self.room_id is not None
|
||||
create_event_id = "$" + self.room_id[1:]
|
||||
auth_event_ids.remove(create_event_id)
|
||||
event_dict["auth_events"] = auth_event_ids
|
||||
|
||||
if self.is_state():
|
||||
event_dict["state_key"] = self._state_key
|
||||
@@ -312,7 +285,7 @@ class EventBuilderFactory:
|
||||
room_version=room_version,
|
||||
type=key_values["type"],
|
||||
state_key=key_values.get("state_key"),
|
||||
room_id=key_values.get("room_id"),
|
||||
room_id=key_values["room_id"],
|
||||
sender=key_values["sender"],
|
||||
content=key_values.get("content", {}),
|
||||
unsigned=key_values.get("unsigned", {}),
|
||||
|
||||
@@ -26,8 +26,8 @@ from typing import (
|
||||
Any,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Collection,
|
||||
Dict,
|
||||
Iterable,
|
||||
List,
|
||||
Mapping,
|
||||
Match,
|
||||
@@ -49,7 +49,6 @@ from synapse.api.constants import (
|
||||
)
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
from synapse.api.room_versions import RoomVersion
|
||||
from synapse.logging.opentracing import SynapseTags, set_tag, trace
|
||||
from synapse.types import JsonDict, Requester
|
||||
|
||||
from . import EventBase, StrippedStateEvent, make_event_from_dict
|
||||
@@ -177,12 +176,9 @@ def prune_event_dict(room_version: RoomVersion, event_dict: JsonDict) -> JsonDic
if room_version.updated_redaction_rules:
# MSC2176 rules state that create events cannot have their `content` redacted.
new_content = event_dict["content"]
if not room_version.implicit_room_creator:
elif not room_version.implicit_room_creator:
# Some room versions give meaning to `creator`
add_fields("creator")
if room_version.msc4291_room_ids_as_hashes:
# room_id is not allowed on the create event as it's derived from the event ID
allowed_keys.remove("room_id")

elif event_type == EventTypes.JoinRules:
add_fields("join_rule")
@@ -531,10 +527,6 @@ def serialize_event(
if config.as_client_event:
d = config.event_format(d)

# Ensure the room_id field is set for create events in MSC4291 rooms
if e.type == EventTypes.Create and e.room_version.msc4291_room_ids_as_hashes:
d["room_id"] = e.room_id

# If the event is a redaction, the field with the redacted event ID appears
# in a different location depending on the room version. e.redacts handles
# fetching from the proper location; copy it to the other location for forwards-
@@ -711,10 +703,9 @@ class EventClientSerializer:
"m.relations", {}
).update(serialized_aggregations)

@trace
async def serialize_events(
self,
events: Collection[Union[JsonDict, EventBase]],
events: Iterable[Union[JsonDict, EventBase]],
time_now: int,
*,
config: SerializeEventConfig = _DEFAULT_SERIALIZE_EVENT_CONFIG,
@@ -733,11 +724,6 @@ class EventClientSerializer:
Returns:
The list of serialized events
"""
set_tag(
SynapseTags.FUNC_ARG_PREFIX + "events.length",
str(len(events)),
)

return [
await self.serialize_event(
event,
@@ -886,14 +872,6 @@ def strip_event(event: EventBase) -> JsonDict:
Stripped state events can only have the `sender`, `type`, `state_key` and `content`
properties present.
"""
# MSC4311: Ensure the create event is available on invites and knocks.
# TODO: Implement the rest of MSC4311
if (
event.room_version.msc4291_room_ids_as_hashes
and event.type == EventTypes.Create
and event.get_state_key() == ""
):
return event.get_pdu_json()

return {
"type": event.type,
@@ -183,18 +183,8 @@ class EventValidator:
fields an event would have
"""

create_event_as_room_id = (
event.room_version.msc4291_room_ids_as_hashes
and event.type == EventTypes.Create
and hasattr(event, "state_key")
and event.state_key == ""
)

strings = ["room_id", "sender", "type"]

if create_event_as_room_id:
strings.remove("room_id")

if hasattr(event, "state_key"):
strings.append("state_key")
@@ -202,14 +192,7 @@ class EventValidator:
if not isinstance(getattr(event, s), str):
raise SynapseError(400, "Not '%s' a string type" % (s,))

if not create_event_as_room_id:
assert event.room_id is not None
RoomID.from_string(event.room_id)
if event.room_version.msc4291_room_ids_as_hashes and not RoomID.is_valid(
event.room_id
):
raise SynapseError(400, f"Invalid room ID '{event.room_id}'")

RoomID.from_string(event.room_id)
UserID.from_string(event.sender)

if event.type == EventTypes.Message:
@@ -343,21 +343,6 @@ def event_from_pdu_json(pdu_json: JsonDict, room_version: RoomVersion) -> EventB
if room_version.strict_canonicaljson:
validate_canonicaljson(pdu_json)

# enforce that MSC4291 auth events don't include the create event.
# N.B. if they DO include a spurious create event, it'll fail auth checks elsewhere, so we don't
# need to do expensive DB lookups to find which event ID is the create event here.
if room_version.msc4291_room_ids_as_hashes:
room_id = pdu_json.get("room_id")
if room_id:
create_event_id = "$" + room_id[1:]
auth_events = pdu_json.get("auth_events")
if auth_events:
if create_event_id in auth_events:
raise SynapseError(
400,
"auth_events must not contain the create event",
Codes.BAD_JSON,
)
event = make_event_from_dict(pdu_json, room_version)
return event
@@ -135,7 +135,7 @@ class PublicRoomList(BaseFederationServlet):
if not self.allow_access:
raise FederationDeniedError(origin)

limit: Optional[int] = parse_integer_from_args(query, "limit", 0)
limit = parse_integer_from_args(query, "limit", 0)
since_token = parse_string_from_args(query, "since", None)
include_all_networks = parse_boolean_from_args(
query, "include_all_networks", default=False
@@ -23,8 +23,6 @@ from typing import TYPE_CHECKING, List, Mapping, Optional, Union

from synapse import event_auth
from synapse.api.constants import (
CREATOR_POWER_LEVEL,
EventContentFields,
EventTypes,
JoinRules,
Membership,
@@ -143,8 +141,6 @@ class EventAuthHandler:
Raises:
SynapseError if no appropriate user is found.
"""
create_event_id = current_state_ids[(EventTypes.Create, "")]
create_event = await self._store.get_event(create_event_id)
power_level_event_id = current_state_ids.get((EventTypes.PowerLevels, ""))
invite_level = 0
users_default_level = 0
@@ -160,28 +156,15 @@ class EventAuthHandler:

# Find the user with the highest power level (only interested in local
# users).
user_power_level = 0
chosen_user = None
local_users_in_room = await self._store.get_local_users_in_room(room_id)
if create_event.room_version.msc4289_creator_power_enabled:
creators = set(
create_event.content.get(EventContentFields.ADDITIONAL_CREATORS, [])
)
creators.add(create_event.sender)
local_creators = creators.intersection(set(local_users_in_room))
if len(local_creators) > 0:
chosen_user = local_creators.pop() # random creator
user_power_level = CREATOR_POWER_LEVEL
else:
chosen_user = max(
local_users_in_room,
key=lambda user: users.get(user, users_default_level),
default=None,
)
# Return the chosen if they can issue invites.
if chosen_user:
user_power_level = users.get(chosen_user, users_default_level)
chosen_user = max(
local_users_in_room,
key=lambda user: users.get(user, users_default_level),
default=None,
)

# Return the chosen if they can issue invites.
user_power_level = users.get(chosen_user, users_default_level)
if chosen_user and user_power_level >= invite_level:
logger.debug(
"Found a user who can issue invites %s with power level %d >= invite level %d",
@@ -1728,9 +1728,6 @@ class FederationEventHandler:
event,
auth_event_id,
)
# Drop the event from the auth_map too, else we may incorrectly persist
# events which depend on this dropped event.
auth_map.pop(event.event_id, None)
return
auth.append(ae)
@@ -688,10 +688,7 @@ class EventCreationHandler:
Codes.USER_ACCOUNT_SUSPENDED,
)

is_create_event = (
event_dict["type"] == EventTypes.Create and event_dict["state_key"] == ""
)
if is_create_event:
if event_dict["type"] == EventTypes.Create and event_dict["state_key"] == "":
room_version_id = event_dict["content"]["room_version"]
maybe_room_version_obj = KNOWN_ROOM_VERSIONS.get(room_version_id)
if not maybe_room_version_obj:
@@ -797,7 +794,6 @@ class EventCreationHandler:
"""
# the only thing the user can do is join the server notices room.
if builder.type == EventTypes.Member:
assert builder.room_id is not None
membership = builder.content.get("membership", None)
if membership == Membership.JOIN:
return await self.store.is_server_notice_room(builder.room_id)
@@ -1263,40 +1259,13 @@ class EventCreationHandler:
for_verification=False,
)

if (
builder.room_version.msc4291_room_ids_as_hashes
and builder.type == EventTypes.Create
and builder.is_state()
):
if builder.room_id is not None:
raise SynapseError(
400,
"Cannot resend m.room.create event",
Codes.INVALID_PARAM,
)
else:
assert builder.room_id is not None

if prev_event_ids is not None:
assert len(prev_event_ids) <= 10, (
"Attempting to create an event with %i prev_events"
% (len(prev_event_ids),)
)
else:
if builder.room_id:
prev_event_ids = await self.store.get_prev_events_for_room(
builder.room_id
)
else:
prev_event_ids = [] # can only happen for the create event in MSC4291 rooms

if builder.type == EventTypes.Create and builder.is_state():
if len(prev_event_ids) != 0:
raise SynapseError(
400,
"Cannot resend m.room.create event",
Codes.INVALID_PARAM,
)
prev_event_ids = await self.store.get_prev_events_for_room(builder.room_id)

# We now ought to have some `prev_events` (unless it's a create event).
#
@@ -1560,7 +1529,7 @@ class EventCreationHandler:
self,
requester: Requester,
room_id: str,
prev_event_id: Optional[str],
prev_event_id: str,
event_dicts: Sequence[JsonDict],
ratelimit: bool = True,
ignore_shadow_ban: bool = False,
@@ -1591,12 +1560,6 @@ class EventCreationHandler:
# Nothing to do.
return

if prev_event_id is None:
# Pick the latest forward extremity as the previous event ID.
prev_event_ids = await self.store.get_forward_extremities_for_room(room_id)
prev_event_ids.sort(key=lambda x: x[2]) # Sort by depth.
prev_event_id = prev_event_ids[-1][0]

state_groups = await self._storage_controllers.state.get_state_group_for_events(
[prev_event_id]
)
@@ -2265,7 +2228,6 @@ class EventCreationHandler:
original_event.room_version, third_party_result
)
self.validator.validate_builder(builder)
assert builder.room_id is not None
except SynapseError as e:
raise Exception(
"Third party rules module created an invalid event: " + e.msg,
@@ -82,7 +82,6 @@ from synapse.types import (
Requester,
RoomAlias,
RoomID,
RoomIdWithDomain,
RoomStreamToken,
StateMap,
StrCollection,
@@ -94,7 +93,6 @@ from synapse.types import (
from synapse.types.handlers import ShutdownRoomParams, ShutdownRoomResponse
from synapse.types.state import StateFilter
from synapse.util import stringutils
from synapse.util.async_helpers import concurrently_execute
from synapse.util.caches.response_cache import ResponseCache
from synapse.util.iterutils import batch_iter
from synapse.util.stringutils import parse_and_validate_server_name
@@ -197,13 +195,7 @@ class RoomCreationHandler:
)

async def upgrade_room(
self,
requester: Requester,
old_room_id: str,
new_version: RoomVersion,
additional_creators: Optional[List[str]],
auto_member: bool = False,
ratelimit: bool = True,
self, requester: Requester, old_room_id: str, new_version: RoomVersion
) -> str:
"""Replace a room with a new room with a different version
@@ -211,9 +203,6 @@ class RoomCreationHandler:
requester: the user requesting the upgrade
old_room_id: the id of the room to be replaced
new_version: the new room version to use
additional_creators: additional room creators, for MSC4289.
auto_member: Whether to automatically join local users to the new
room and send out invites to remote users.

Returns:
the new room id
@@ -221,12 +210,11 @@ class RoomCreationHandler:
Raises:
ShadowBanError if the requester is shadow-banned.
"""
if ratelimit:
await self.creation_ratelimiter.ratelimit(requester, update=False)
await self.creation_ratelimiter.ratelimit(requester, update=False)

# then apply the ratelimits
await self.common_request_ratelimiter.ratelimit(requester)
await self.creation_ratelimiter.ratelimit(requester)
# then apply the ratelimits
await self.common_request_ratelimiter.ratelimit(requester)
await self.creation_ratelimiter.ratelimit(requester)

user_id = requester.user.to_string()
@@ -247,29 +235,8 @@ class RoomCreationHandler:
old_room = await self.store.get_room(old_room_id)
if old_room is None:
raise NotFoundError("Unknown room id %s" % (old_room_id,))
old_room_is_public, _ = old_room

creation_event_with_context = None
if new_version.msc4291_room_ids_as_hashes:
old_room_create_event = await self.store.get_create_event_for_room(
old_room_id
)
creation_content = self._calculate_upgraded_room_creation_content(
old_room_create_event,
tombstone_event_id=None,
new_room_version=new_version,
additional_creators=additional_creators,
)
creation_event_with_context = await self._generate_create_event_for_room_id(
requester,
creation_content,
old_room_is_public,
new_version,
)
(create_event, _) = creation_event_with_context
new_room_id = create_event.room_id
else:
new_room_id = self._generate_room_id()
new_room_id = self._generate_room_id()

# Try several times, it could fail with PartialStateConflictError
# in _upgrade_room, cf comment in except block.
@@ -318,9 +285,6 @@ class RoomCreationHandler:
new_version,
tombstone_event,
tombstone_context,
additional_creators,
creation_event_with_context,
auto_member=auto_member,
)

return ret
@@ -344,11 +308,6 @@ class RoomCreationHandler:
new_version: RoomVersion,
tombstone_event: EventBase,
tombstone_context: synapse.events.snapshot.EventContext,
additional_creators: Optional[List[str]],
creation_event_with_context: Optional[
Tuple[EventBase, synapse.events.snapshot.EventContext]
] = None,
auto_member: bool = False,
) -> str:
"""
Args:
@@ -360,10 +319,6 @@ class RoomCreationHandler:
new_version: the version to upgrade the room to
tombstone_event: the tombstone event to send to the old room
tombstone_context: the context for the tombstone event
additional_creators: additional room creators, for MSC4289.
creation_event_with_context: The new room's create event, for room IDs as create event IDs.
auto_member: Whether to automatically join local users to the new
room and send out invites to remote users.

Raises:
ShadowBanError if the requester is shadow-banned.
@@ -373,16 +328,14 @@ class RoomCreationHandler:

logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)

# We've already stored the room if we have the create event
if not creation_event_with_context:
# create the new room. may raise a `StoreError` in the exceedingly unlikely
# event of a room ID collision.
await self.store.store_room(
room_id=new_room_id,
room_creator_user_id=user_id,
is_public=old_room[0],
room_version=new_version,
)
# create the new room. may raise a `StoreError` in the exceedingly unlikely
# event of a room ID collision.
await self.store.store_room(
room_id=new_room_id,
room_creator_user_id=user_id,
is_public=old_room[0],
room_version=new_version,
)

await self.clone_existing_room(
requester,
@@ -390,9 +343,6 @@ class RoomCreationHandler:
|
||||
new_room_id=new_room_id,
|
||||
new_room_version=new_version,
|
||||
tombstone_event_id=tombstone_event.event_id,
|
||||
additional_creators=additional_creators,
|
||||
creation_event_with_context=creation_event_with_context,
|
||||
auto_member=auto_member,
|
||||
)
|
||||
|
||||
# now send the tombstone
|
||||
@@ -426,7 +376,6 @@ class RoomCreationHandler:
|
||||
old_room_id,
|
||||
new_room_id,
|
||||
old_room_state,
|
||||
additional_creators,
|
||||
)
|
||||
|
||||
return new_room_id
|
||||
@@ -437,7 +386,6 @@ class RoomCreationHandler:
|
||||
old_room_id: str,
|
||||
new_room_id: str,
|
||||
old_room_state: StateMap[str],
|
||||
additional_creators: Optional[List[str]],
|
||||
) -> None:
|
||||
"""Send updated power levels in both rooms after an upgrade
|
||||
|
||||
@@ -446,7 +394,7 @@ class RoomCreationHandler:
|
||||
old_room_id: the id of the room to be replaced
|
||||
new_room_id: the id of the replacement room
|
||||
old_room_state: the state map for the old room
|
||||
additional_creators: Additional creators in the new room.
|
||||
|
||||
Raises:
|
||||
ShadowBanError if the requester is shadow-banned.
|
||||
"""
|
||||
@@ -502,14 +450,6 @@ class RoomCreationHandler:
|
||||
except AuthError as e:
|
||||
logger.warning("Unable to update PLs in old room: %s", e)
|
||||
|
||||
new_room_version = await self.store.get_room_version(new_room_id)
|
||||
if new_room_version.msc4289_creator_power_enabled:
|
||||
self._remove_creators_from_pl_users_map(
|
||||
old_room_pl_state.content.get("users", {}),
|
||||
requester.user.to_string(),
|
||||
additional_creators,
|
||||
)
|
||||
|
||||
await self.event_creation_handler.create_and_send_nonmember_event(
|
||||
requester,
|
||||
{
|
||||
@@ -524,36 +464,6 @@ class RoomCreationHandler:
|
||||
ratelimit=False,
|
||||
)
|
||||
|
||||
def _calculate_upgraded_room_creation_content(
|
||||
self,
|
||||
old_room_create_event: EventBase,
|
||||
tombstone_event_id: Optional[str],
|
||||
new_room_version: RoomVersion,
|
||||
additional_creators: Optional[List[str]],
|
||||
) -> JsonDict:
|
||||
creation_content: JsonDict = {
|
||||
"room_version": new_room_version.identifier,
|
||||
"predecessor": {
|
||||
"room_id": old_room_create_event.room_id,
|
||||
},
|
||||
}
|
||||
if tombstone_event_id is not None:
|
||||
creation_content["predecessor"]["event_id"] = tombstone_event_id
|
||||
if (
|
||||
additional_creators is not None
|
||||
and new_room_version.msc4289_creator_power_enabled
|
||||
):
|
||||
creation_content["additional_creators"] = additional_creators
|
||||
# Check if old room was non-federatable
|
||||
if not old_room_create_event.content.get(EventContentFields.FEDERATE, True):
|
||||
# If so, mark the new room as non-federatable as well
|
||||
creation_content[EventContentFields.FEDERATE] = False
|
||||
# Copy the room type as per MSC3818.
|
||||
room_type = old_room_create_event.content.get(EventContentFields.ROOM_TYPE)
|
||||
if room_type is not None:
|
||||
creation_content[EventContentFields.ROOM_TYPE] = room_type
|
||||
return creation_content
|
||||
|
||||
async def clone_existing_room(
|
||||
self,
|
||||
requester: Requester,
|
||||
@@ -561,11 +471,6 @@ class RoomCreationHandler:
|
||||
new_room_id: str,
|
||||
new_room_version: RoomVersion,
|
||||
tombstone_event_id: str,
|
||||
additional_creators: Optional[List[str]],
|
||||
creation_event_with_context: Optional[
|
||||
Tuple[EventBase, synapse.events.snapshot.EventContext]
|
||||
] = None,
|
||||
auto_member: bool = False,
|
||||
) -> None:
|
||||
"""Populate a new room based on an old room
|
||||
|
||||
@@ -576,27 +481,24 @@ class RoomCreationHandler:
|
||||
created with _generate_room_id())
|
||||
new_room_version: the new room version to use
|
||||
tombstone_event_id: the ID of the tombstone event in the old room.
|
||||
additional_creators: additional room creators, for MSC4289.
|
||||
creation_event_with_context: The create event of the new room, if the new room supports
|
||||
room ID as create event ID hash.
|
||||
auto_member: Whether to automatically join local users to the new
|
||||
room and send out invites to remote users.
|
||||
"""
|
||||
user_id = requester.user.to_string()
|
||||
|
||||
creation_content: JsonDict = {
|
||||
"room_version": new_room_version.identifier,
|
||||
"predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id},
|
||||
}
|
||||
|
||||
# Check if old room was non-federatable
|
||||
|
||||
# Get old room's create event
|
||||
old_room_create_event = await self.store.get_create_event_for_room(old_room_id)
|
||||
|
||||
if creation_event_with_context:
|
||||
create_event, _ = creation_event_with_context
|
||||
creation_content = create_event.content
|
||||
else:
|
||||
creation_content = self._calculate_upgraded_room_creation_content(
|
||||
old_room_create_event,
|
||||
tombstone_event_id,
|
||||
new_room_version,
|
||||
additional_creators=additional_creators,
|
||||
)
|
||||
# Check if the create event specified a non-federatable room
|
||||
if not old_room_create_event.content.get(EventContentFields.FEDERATE, True):
|
||||
# If so, mark the new room as non-federatable as well
|
||||
creation_content[EventContentFields.FEDERATE] = False
|
||||
|
||||
initial_state = {}
|
||||
|
||||
# Replicate relevant room events
|
||||
@@ -612,8 +514,11 @@ class RoomCreationHandler:
|
||||
(EventTypes.PowerLevels, ""),
|
||||
]
|
||||
|
||||
# Copy the room type as per MSC3818.
|
||||
room_type = old_room_create_event.content.get(EventContentFields.ROOM_TYPE)
|
||||
if room_type is not None:
|
||||
creation_content[EventContentFields.ROOM_TYPE] = room_type
|
||||
|
||||
# If the old room was a space, copy over the rooms in the space.
|
||||
if room_type == RoomTypes.SPACE:
|
||||
types_to_copy.append((EventTypes.SpaceChild, None))
|
||||
@@ -685,14 +590,6 @@ class RoomCreationHandler:
|
||||
if current_power_level_int < needed_power_level:
|
||||
user_power_levels[user_id] = needed_power_level
|
||||
|
||||
if new_room_version.msc4289_creator_power_enabled:
|
||||
# the creator(s) cannot be in the users map
|
||||
self._remove_creators_from_pl_users_map(
|
||||
user_power_levels,
|
||||
user_id,
|
||||
additional_creators,
|
||||
)
|
||||
|
||||
# We construct what the body of a call to /createRoom would look like for passing
|
||||
# to the spam checker. We don't include a preset here, as we expect the
|
||||
# initial state to contain everything we need.
|
||||
@@ -721,7 +618,6 @@ class RoomCreationHandler:
|
||||
invite_list=[],
|
||||
initial_state=initial_state,
|
||||
creation_content=creation_content,
|
||||
creation_event_with_context=creation_event_with_context,
|
||||
)
|
||||
|
||||
# Transfer membership events
|
||||
@@ -735,7 +631,7 @@ class RoomCreationHandler:
|
||||
# `update_membership`, however in this case its fine to bypass as
|
||||
# these bans don't need any special treatment, i.e. the sender is in
|
||||
# the room and they don't need any extra signatures, etc.
|
||||
for batched_ban_events in batch_iter(ban_events, 1000):
|
||||
for batched_events in batch_iter(ban_events, 1000):
|
||||
await self.event_creation_handler.create_and_send_new_client_events(
|
||||
requester=requester,
|
||||
room_id=new_room_id,
|
||||
@@ -748,201 +644,13 @@ class RoomCreationHandler:
|
||||
"sender": requester.user.to_string(),
|
||||
"content": ban_event.content,
|
||||
}
|
||||
for ban_event in batched_ban_events
|
||||
for ban_event in batched_events
|
||||
],
|
||||
ratelimit=False, # We ratelimit the entire upgrade, not individual events.
|
||||
ratelimit=False,
|
||||
)
|
||||
|
||||
if auto_member:
|
||||
logger.info("Joining local users to %s", new_room_id)
|
||||
|
||||
# 1. Copy over all joins for local
|
||||
joined_profiles = await self.store.get_users_in_room_with_profiles(
|
||||
old_room_id
|
||||
)
|
||||
|
||||
local_user_ids = [
|
||||
user_id for user_id in joined_profiles if self.hs.is_mine_id(user_id)
|
||||
]
|
||||
|
||||
logger.info("Local user IDs %s", local_user_ids)
|
||||
|
||||
for batched_local_user_ids in batch_iter(local_user_ids, 1000):
|
||||
invites_to_send = []
|
||||
|
||||
# For each local user we create an invite event (from the
|
||||
# upgrading user) plus a join event.
|
||||
for local_user_id in batched_local_user_ids:
|
||||
if local_user_id == user_id:
|
||||
# Ignore the upgrading user, as they are already in the
|
||||
# new room.
|
||||
continue
|
||||
|
||||
invites_to_send.append(
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
"state_key": local_user_id,
|
||||
"room_id": new_room_id,
|
||||
"sender": requester.user.to_string(),
|
||||
"content": {
|
||||
"membership": Membership.INVITE,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
# If the user has profile information in the previous join,
|
||||
# add it to the content.
|
||||
#
|
||||
# We could instead copy over the contents from the old join
|
||||
# event, however a) that would require us to fetch all the
|
||||
# old join events (which is slow), and b) generally the join
|
||||
# events have no extra information in them. (We also believe
|
||||
# that most clients don't copy this information over either,
|
||||
# but we could be wrong.)
|
||||
content_profile = {}
|
||||
user_profile = joined_profiles[local_user_id]
|
||||
if user_profile.display_name:
|
||||
content_profile["displayname"] = user_profile.display_name
|
||||
if user_profile.avatar_url:
|
||||
content_profile["avatar_url"] = user_profile.avatar_url
|
||||
|
||||
invites_to_send.append(
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
"state_key": local_user_id,
|
||||
"room_id": new_room_id,
|
||||
"sender": local_user_id,
|
||||
"content": {
|
||||
"membership": Membership.JOIN,
|
||||
**content_profile,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
await self.event_creation_handler.create_and_send_new_client_events(
|
||||
requester=requester,
|
||||
room_id=new_room_id,
|
||||
prev_event_id=None,
|
||||
event_dicts=invites_to_send,
|
||||
ratelimit=False, # We ratelimit the entire upgrade, not individual events.
|
||||
)
|
||||
|
||||
# Invite other users if the room is not public. If the room *is*
|
||||
# public then users can simply directly join, and inviting them as
|
||||
# well may lead to confusion.
|
||||
|
||||
join_rule_content = initial_state.get((EventTypes.JoinRules, ""), None)
|
||||
is_public = False
|
||||
if join_rule_content:
|
||||
is_public = join_rule_content["join_rule"] == JoinRules.PUBLIC
|
||||
|
||||
if not is_public:
|
||||
# Copy invites
|
||||
# TODO: Copy over 3pid invites as well.
|
||||
invited_users = await self.store.get_invited_users_in_room(
|
||||
room_id=old_room_id
|
||||
)
|
||||
|
||||
# For local users we can just batch send the invites.
|
||||
local_invited_users = [
|
||||
user_id for user_id in invited_users if self.hs.is_mine_id(user_id)
|
||||
]
|
||||
|
||||
logger.info(
|
||||
"Joining local user IDs %s to new room %s",
|
||||
local_invited_users,
|
||||
new_room_id,
|
||||
)
|
||||
|
||||
for batched_local_invited_users in batch_iter(
|
||||
local_invited_users, 1000
|
||||
):
|
||||
invites_to_send = []
|
||||
leaves_to_send = []
|
||||
|
||||
# For each local user we create an invite event (from the
|
||||
# upgrading user), and reject the invite event in the old
|
||||
# room.
|
||||
#
|
||||
# This ensures that the user ends up with a single invite to
|
||||
# the new room (rather than multiple invites which may be
|
||||
# noisy and confusing).
|
||||
for local_user_id in batched_local_invited_users:
|
||||
leaves_to_send.append(
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
"state_key": local_user_id,
|
||||
"room_id": old_room_id,
|
||||
"sender": local_user_id,
|
||||
"content": {
|
||||
"membership": Membership.LEAVE,
|
||||
},
|
||||
}
|
||||
)
|
||||
invites_to_send.append(
|
||||
{
|
||||
"type": EventTypes.Member,
|
||||
"state_key": local_user_id,
|
||||
"room_id": new_room_id,
|
||||
"sender": requester.user.to_string(),
|
||||
"content": {
|
||||
"membership": Membership.INVITE,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
await self.event_creation_handler.create_and_send_new_client_events(
|
||||
requester=requester,
|
||||
room_id=old_room_id,
|
||||
prev_event_id=None,
|
||||
event_dicts=leaves_to_send,
|
||||
ratelimit=False, # We ratelimit the entire upgrade, not individual events.
|
||||
)
|
||||
await self.event_creation_handler.create_and_send_new_client_events(
|
||||
requester=requester,
|
||||
room_id=new_room_id,
|
||||
prev_event_id=None,
|
||||
event_dicts=invites_to_send,
|
||||
ratelimit=False,
|
||||
)
|
||||
|
||||
# For remote users we send invites one by one, as we need to
|
||||
# send each one to the remote server.
|
||||
#
|
||||
# We also invite joined remote users who were in the old room.
|
||||
remote_user_ids = [
|
||||
user_id
|
||||
for user_id in itertools.chain(invited_users, joined_profiles)
|
||||
if not self.hs.is_mine_id(user_id)
|
||||
]
|
||||
|
||||
logger.debug("Inviting remote user IDs %s", remote_user_ids)
|
||||
|
||||
async def remote_invite(remote_user: str) -> None:
|
||||
try:
|
||||
await self.room_member_handler.update_membership(
|
||||
requester,
|
||||
UserID.from_string(remote_user),
|
||||
new_room_id,
|
||||
Membership.INVITE,
|
||||
ratelimit=False, # We ratelimit the entire upgrade, not individual events.
|
||||
)
|
||||
except SynapseError as e:
|
||||
# If we fail to invite a remote user, we log it but continue
|
||||
# on with the upgrade.
|
||||
logger.warning(
|
||||
"Failed to invite remote user %s to new room %s: %s",
|
||||
remote_user,
|
||||
new_room_id,
|
||||
e,
|
||||
)
|
||||
|
||||
# We do this concurrently, as it can take a while to invite
|
||||
await concurrently_execute(
|
||||
remote_invite,
|
||||
remote_user_ids,
|
||||
10,
|
||||
)
|
||||
# XXX invites/joins
|
||||
# XXX 3pid invites
|
||||
|
||||
async def _move_aliases_to_new_room(
|
||||
self,
|
||||
@@ -1196,7 +904,6 @@ class RoomCreationHandler:
|
||||
power_level_content_override = config.get("power_level_content_override")
|
||||
if (
|
||||
power_level_content_override
|
||||
and not room_version.msc4289_creator_power_enabled # this validation doesn't apply in MSC4289 rooms
|
||||
and "users" in power_level_content_override
|
||||
and user_id not in power_level_content_override["users"]
|
||||
):
|
||||
@@ -1226,41 +933,11 @@ class RoomCreationHandler:
|
||||
additional_fields=spam_check[1],
|
||||
)
|
||||
|
||||
creation_content = config.get("creation_content", {})
|
||||
# override any attempt to set room versions via the creation_content
|
||||
creation_content["room_version"] = room_version.identifier
|
||||
|
||||
# trusted private chats have the invited users marked as additional creators
|
||||
if (
|
||||
room_version.msc4289_creator_power_enabled
|
||||
and config.get("preset", None) == RoomCreationPreset.TRUSTED_PRIVATE_CHAT
|
||||
and len(config.get("invite", [])) > 0
|
||||
):
|
||||
# the other user(s) are additional creators
|
||||
invitees = config.get("invite", [])
|
||||
# we don't want to replace any additional_creators additionally specified, and we want
|
||||
# to remove duplicates.
|
||||
creation_content[EventContentFields.ADDITIONAL_CREATORS] = list(
|
||||
set(creation_content.get(EventContentFields.ADDITIONAL_CREATORS, []))
|
||||
| set(invitees)
|
||||
)
|
||||
|
||||
creation_event_with_context = None
|
||||
if room_version.msc4291_room_ids_as_hashes:
|
||||
creation_event_with_context = await self._generate_create_event_for_room_id(
|
||||
requester,
|
||||
creation_content,
|
||||
is_public,
|
||||
room_version,
|
||||
)
|
||||
(create_event, _) = creation_event_with_context
|
||||
room_id = create_event.room_id
|
||||
else:
|
||||
room_id = await self._generate_and_create_room_id(
|
||||
creator_id=user_id,
|
||||
is_public=is_public,
|
||||
room_version=room_version,
|
||||
)
|
||||
room_id = await self._generate_and_create_room_id(
|
||||
creator_id=user_id,
|
||||
is_public=is_public,
|
||||
room_version=room_version,
|
||||
)
|
||||
|
||||
# Check whether this visibility value is blocked by a third party module
|
||||
allowed_by_third_party_rules = await (
|
||||
@@ -1297,6 +974,11 @@ class RoomCreationHandler:
|
||||
for val in raw_initial_state:
|
||||
initial_state[(val["type"], val.get("state_key", ""))] = val["content"]
|
||||
|
||||
creation_content = config.get("creation_content", {})
|
||||
|
||||
# override any attempt to set room versions via the creation_content
|
||||
creation_content["room_version"] = room_version.identifier
|
||||
|
||||
(
|
||||
last_stream_id,
|
||||
last_sent_event_id,
|
||||
@@ -1313,7 +995,6 @@ class RoomCreationHandler:
|
||||
power_level_content_override=power_level_content_override,
|
||||
creator_join_profile=creator_join_profile,
|
||||
ignore_forced_encryption=ignore_forced_encryption,
|
||||
creation_event_with_context=creation_event_with_context,
|
||||
)
|
||||
|
||||
# we avoid dropping the lock between invites, as otherwise joins can
|
||||
@@ -1379,38 +1060,6 @@ class RoomCreationHandler:
|
||||
|
||||
return room_id, room_alias, last_stream_id
|
||||
|
||||
async def _generate_create_event_for_room_id(
|
||||
self,
|
||||
creator: Requester,
|
||||
creation_content: JsonDict,
|
||||
is_public: bool,
|
||||
room_version: RoomVersion,
|
||||
) -> Tuple[EventBase, synapse.events.snapshot.EventContext]:
|
||||
(
|
||||
creation_event,
|
||||
new_unpersisted_context,
|
||||
) = await self.event_creation_handler.create_event(
|
||||
creator,
|
||||
{
|
||||
"content": creation_content,
|
||||
"sender": creator.user.to_string(),
|
||||
"type": EventTypes.Create,
|
||||
"state_key": "",
|
||||
},
|
||||
prev_event_ids=[],
|
||||
depth=1,
|
||||
state_map={},
|
||||
for_batch=False,
|
||||
)
|
||||
await self.store.store_room(
|
||||
room_id=creation_event.room_id,
|
||||
room_creator_user_id=creator.user.to_string(),
|
||||
is_public=is_public,
|
||||
room_version=room_version,
|
||||
)
|
||||
creation_context = await new_unpersisted_context.persist(creation_event)
|
||||
return (creation_event, creation_context)
|
||||
|
||||
async def _send_events_for_new_room(
|
||||
self,
|
||||
creator: Requester,
|
||||
@@ -1424,9 +1073,6 @@ class RoomCreationHandler:
|
||||
power_level_content_override: Optional[JsonDict] = None,
|
||||
creator_join_profile: Optional[JsonDict] = None,
|
||||
ignore_forced_encryption: bool = False,
|
||||
creation_event_with_context: Optional[
|
||||
Tuple[EventBase, synapse.events.snapshot.EventContext]
|
||||
] = None,
|
||||
) -> Tuple[int, str, int]:
|
||||
"""Sends the initial events into a new room. Sends the room creation, membership,
|
||||
and power level events into the room sequentially, then creates and batches up the
|
||||
@@ -1463,10 +1109,7 @@ class RoomCreationHandler:
|
||||
user in this room.
|
||||
ignore_forced_encryption:
|
||||
Ignore encryption forced by `encryption_enabled_by_default_for_room_type` setting.
|
||||
creation_event_with_context:
|
||||
Set in MSC4291 rooms where the create event determines the room ID. If provided,
|
||||
does not create an additional create event but instead appends the remaining new
|
||||
events onto the provided create event.
|
||||
|
||||
Returns:
|
||||
A tuple containing the stream ID, event ID and depth of the last
|
||||
event sent to the room.
|
||||
@@ -1531,26 +1174,13 @@ class RoomCreationHandler:
|
||||
|
||||
preset_config, config = self._room_preset_config(room_config)
|
||||
|
||||
if creation_event_with_context is None:
|
||||
# MSC2175 removes the creator field from the create event.
|
||||
if not room_version.implicit_room_creator:
|
||||
creation_content["creator"] = creator_id
|
||||
creation_event, unpersisted_creation_context = await create_event(
|
||||
EventTypes.Create, creation_content, False
|
||||
)
|
||||
creation_context = await unpersisted_creation_context.persist(
|
||||
creation_event
|
||||
)
|
||||
else:
|
||||
(creation_event, creation_context) = creation_event_with_context
|
||||
# we had to do the above already in order to have a room ID, so just updates local vars
|
||||
# and continue.
|
||||
depth = 2
|
||||
prev_event = [creation_event.event_id]
|
||||
state_map[(creation_event.type, creation_event.state_key)] = (
|
||||
creation_event.event_id
|
||||
)
|
||||
|
||||
# MSC2175 removes the creator field from the create event.
|
||||
if not room_version.implicit_room_creator:
|
||||
creation_content["creator"] = creator_id
|
||||
creation_event, unpersisted_creation_context = await create_event(
|
||||
EventTypes.Create, creation_content, False
|
||||
)
|
||||
creation_context = await unpersisted_creation_context.persist(creation_event)
|
||||
logger.debug("Sending %s in new room", EventTypes.Member)
|
||||
ev = await self.event_creation_handler.handle_new_client_event(
|
||||
requester=creator,
|
||||
@@ -1599,9 +1229,7 @@ class RoomCreationHandler:
|
||||
# Please update the docs for `default_power_level_content_override` when
|
||||
# updating the `events` dict below
|
||||
power_level_content: JsonDict = {
|
||||
"users": {creator_id: 100}
|
||||
if not room_version.msc4289_creator_power_enabled
|
||||
else {},
|
||||
"users": {creator_id: 100},
|
||||
"users_default": 0,
|
||||
"events": {
|
||||
EventTypes.Name: 50,
|
||||
@@ -1609,9 +1237,7 @@ class RoomCreationHandler:
|
||||
EventTypes.RoomHistoryVisibility: 100,
|
||||
EventTypes.CanonicalAlias: 50,
|
||||
EventTypes.RoomAvatar: 50,
|
||||
EventTypes.Tombstone: 150
|
||||
if room_version.msc4289_creator_power_enabled
|
||||
else 100,
|
||||
EventTypes.Tombstone: 100,
|
||||
EventTypes.ServerACL: 100,
|
||||
EventTypes.RoomEncryption: 100,
|
||||
},
|
||||
@@ -1624,13 +1250,7 @@ class RoomCreationHandler:
|
||||
"historical": 100,
|
||||
}
|
||||
|
||||
# original_invitees_have_ops is set on preset:trusted_private_chat which will already
|
||||
# have set these users as additional_creators, hence don't set the PL for creators as
|
||||
# that is invalid.
|
||||
if (
|
||||
config["original_invitees_have_ops"]
|
||||
and not room_version.msc4289_creator_power_enabled
|
||||
):
|
||||
if config["original_invitees_have_ops"]:
|
||||
for invitee in invite_list:
|
||||
power_level_content["users"][invitee] = 100
|
||||
|
||||
@@ -1803,19 +1423,6 @@ class RoomCreationHandler:
|
||||
)
|
||||
return preset_name, preset_config
|
||||
|
||||
def _remove_creators_from_pl_users_map(
|
||||
self,
|
||||
users_map: Dict[str, int],
|
||||
creator: str,
|
||||
additional_creators: Optional[List[str]],
|
||||
) -> None:
|
||||
creators = [creator]
|
||||
if additional_creators:
|
||||
creators.extend(additional_creators)
|
||||
for creator in creators:
|
||||
# the creator(s) cannot be in the users map
|
||||
users_map.pop(creator, None)
|
||||
|
||||
def _generate_room_id(self) -> str:
|
||||
"""Generates a random room ID.
|
||||
|
||||
@@ -1833,7 +1440,7 @@ class RoomCreationHandler:
|
||||
A random room ID of the form "!opaque_id:domain".
|
||||
"""
|
||||
random_string = stringutils.random_string(18)
|
||||
return RoomIdWithDomain(random_string, self.hs.hostname).to_string()
|
||||
return RoomID(random_string, self.hs.hostname).to_string()
|
||||
|
||||
async def _generate_and_create_room_id(
|
||||
self,
|
||||
|
||||
@@ -42,7 +42,7 @@ from synapse.api.errors import (
|
||||
)
|
||||
from synapse.api.ratelimiting import Ratelimiter
|
||||
from synapse.event_auth import get_named_level, get_power_level_event
|
||||
from synapse.events import EventBase, is_creator
|
||||
from synapse.events import EventBase
|
||||
from synapse.events.snapshot import EventContext
|
||||
from synapse.handlers.pagination import PURGE_ROOM_ACTION_NAME
|
||||
from synapse.handlers.profile import MAX_AVATAR_URL_LEN, MAX_DISPLAYNAME_LEN
|
||||
@@ -1160,8 +1160,9 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
|
||||
|
||||
elif effective_membership_state == Membership.KNOCK:
|
||||
if not is_host_in_room:
|
||||
# we used to add the domain of the room ID to remote_room_hosts.
|
||||
# This is not safe in MSC4291 rooms which do not have a domain.
|
||||
# The knock needs to be sent over federation instead
|
||||
remote_room_hosts.append(get_domain_from_id(room_id))
|
||||
|
||||
content["membership"] = Membership.KNOCK
|
||||
|
||||
try:
|
||||
@@ -1920,7 +1921,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
|
||||
check_complexity
|
||||
and self.hs.config.server.limit_remote_rooms.admins_can_join
|
||||
):
|
||||
check_complexity = not await self.store.is_server_admin(user.to_string())
|
||||
check_complexity = not await self.store.is_server_admin(user)
|
||||
|
||||
if check_complexity:
|
||||
# Fetch the room complexity
|
||||
@@ -2323,7 +2324,6 @@ def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[s
|
||||
|
||||
# Check which members are able to invite by ensuring they're joined and have
|
||||
# the necessary power level.
|
||||
create_event = auth_events[(EventTypes.Create, "")]
|
||||
for (event_type, state_key), event in auth_events.items():
|
||||
if event_type != EventTypes.Member:
|
||||
continue
|
||||
@@ -2331,12 +2331,8 @@ def get_users_which_can_issue_invite(auth_events: StateMap[EventBase]) -> List[s
|
||||
if event.membership != Membership.JOIN:
|
||||
continue
|
||||
|
||||
if create_event.room_version.msc4289_creator_power_enabled and is_creator(
|
||||
create_event, state_key
|
||||
):
|
||||
result.append(state_key)
|
||||
# Check if the user has a custom power level.
|
||||
elif users.get(state_key, users_default_level) >= invite_level:
|
||||
if users.get(state_key, users_default_level) >= invite_level:
|
||||
result.append(state_key)
|
||||
|
||||
return result
|
||||
|
||||
@@ -116,7 +116,7 @@ class SlidingSyncHandler:
|
||||
sync_config: SlidingSyncConfig,
|
||||
from_token: Optional[SlidingSyncStreamToken] = None,
|
||||
timeout_ms: int = 0,
|
||||
) -> Tuple[SlidingSyncResult, bool]:
|
||||
) -> SlidingSyncResult:
|
||||
"""
|
||||
Get the sync for a client if we have new data for it now. Otherwise
|
||||
wait for new data to arrive on the server. If the timeout expires, then
|
||||
@@ -128,16 +128,9 @@ class SlidingSyncHandler:
|
||||
from_token: The point in the stream to sync from. Token of the end of the
|
||||
previous batch. May be `None` if this is the initial sync request.
|
||||
timeout_ms: The time in milliseconds to wait for new data to arrive. If 0,
|
||||
we will respond immediately but there might not be any new data so we just
|
||||
return an empty response.
|
||||
|
||||
Returns:
|
||||
A tuple containing the `SlidingSyncResult` and whether we waited for new
|
||||
activity before responding. Knowing whether we waited is useful in traces
|
||||
to filter out long-running requests where we were just waiting.
|
||||
we will immediately but there might not be any new data so we just return an
|
||||
empty response.
|
||||
"""
|
||||
did_wait = False
|
||||
|
||||
# If the user is not part of the mau group, then check that limits have
|
||||
# not been exceeded (if not part of the group by this point, almost certain
|
||||
# auth_blocking will occur)
|
||||
@@ -156,7 +149,7 @@ class SlidingSyncHandler:
|
||||
logger.warning(
|
||||
"Timed out waiting for worker to catch up. Returning empty response"
|
||||
)
|
||||
return SlidingSyncResult.empty(from_token), did_wait
|
||||
return SlidingSyncResult.empty(from_token)
|
||||
|
||||
# If we've spent significant time waiting to catch up, take it off
|
||||
# the timeout.
|
||||
@@ -192,9 +185,8 @@ class SlidingSyncHandler:
|
||||
current_sync_callback,
|
||||
from_token=from_token.stream_token,
|
||||
)
|
||||
did_wait = True
|
||||
|
||||
return result, did_wait
|
||||
return result
|
||||
|
||||
@trace
|
||||
async def current_sync_for_user(
|
||||
@@ -211,7 +203,7 @@ class SlidingSyncHandler:
|
||||
|
||||
Args:
|
||||
sync_config: Sync configuration
|
||||
to_token: The latest point in the stream to sync up to.
|
||||
to_token: The point in the stream to sync up to.
|
||||
from_token: The point in the stream to sync from. Token of the end of the
|
||||
previous batch. May be `None` if this is the initial sync request.
|
||||
"""
|
||||
|
||||
@@ -27,7 +27,7 @@ from typing import (
|
||||
cast,
|
||||
)
|
||||
|
||||
from typing_extensions import TypeAlias, assert_never
|
||||
from typing_extensions import assert_never
|
||||
|
||||
from synapse.api.constants import AccountDataTypes, EduTypes
|
||||
from synapse.handlers.receipts import ReceiptEventSource
|
||||
@@ -40,7 +40,6 @@ from synapse.types import (
|
||||
SlidingSyncStreamToken,
|
||||
StrCollection,
|
||||
StreamToken,
|
||||
ThreadSubscriptionsToken,
|
||||
)
|
||||
from synapse.types.handlers.sliding_sync import (
|
||||
HaveSentRoomFlag,
|
||||
@@ -55,13 +54,6 @@ from synapse.util.async_helpers import (
|
||||
gather_optional_coroutines,
|
||||
)
|
||||
|
||||
_ThreadSubscription: TypeAlias = (
|
||||
SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadSubscription
|
||||
)
|
||||
_ThreadUnsubscription: TypeAlias = (
|
||||
SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadUnsubscription
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
@@ -76,7 +68,6 @@ class SlidingSyncExtensionHandler:
|
||||
self.event_sources = hs.get_event_sources()
|
||||
self.device_handler = hs.get_device_handler()
|
||||
self.push_rules_handler = hs.get_push_rules_handler()
|
||||
self._enable_thread_subscriptions = hs.config.experimental.msc4306_enabled
|
||||
|
||||
@trace
|
||||
async def get_extensions_response(
|
||||
@@ -102,7 +93,7 @@ class SlidingSyncExtensionHandler:
|
||||
actual_room_ids: The actual room IDs in the the Sliding Sync response.
|
||||
actual_room_response_map: A map of room ID to room results in the the
|
||||
Sliding Sync response.
|
||||
to_token: The latest point in the stream to sync up to.
|
||||
to_token: The point in the stream to sync up to.
|
||||
from_token: The point in the stream to sync from.
|
||||
"""
|
||||
|
||||
@@ -165,32 +156,18 @@ class SlidingSyncExtensionHandler:
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
thread_subs_coro = None
|
||||
if (
|
||||
sync_config.extensions.thread_subscriptions is not None
|
||||
and self._enable_thread_subscriptions
|
||||
):
|
||||
thread_subs_coro = self.get_thread_subscriptions_extension_response(
|
||||
sync_config=sync_config,
|
||||
thread_subscriptions_request=sync_config.extensions.thread_subscriptions,
|
||||
to_token=to_token,
|
||||
from_token=from_token,
|
||||
)
|
||||
|
||||
(
|
||||
to_device_response,
|
||||
e2ee_response,
|
||||
account_data_response,
|
||||
receipts_response,
|
||||
typing_response,
|
||||
thread_subs_response,
|
||||
) = await gather_optional_coroutines(
|
||||
to_device_coro,
|
||||
e2ee_coro,
|
||||
account_data_coro,
|
||||
receipts_coro,
|
||||
typing_coro,
|
||||
thread_subs_coro,
|
||||
)
|
||||
|
||||
return SlidingSyncResult.Extensions(
|
||||
@@ -199,7 +176,6 @@ class SlidingSyncExtensionHandler:
|
||||
account_data=account_data_response,
|
||||
receipts=receipts_response,
|
||||
typing=typing_response,
|
||||
thread_subscriptions=thread_subs_response,
|
||||
)
|
||||
|
||||
def find_relevant_room_ids_for_extension(
|
||||
@@ -901,72 +877,3 @@ class SlidingSyncExtensionHandler:
|
||||
return SlidingSyncResult.Extensions.TypingExtension(
|
||||
room_id_to_typing_map=room_id_to_typing_map,
|
||||
)
|
||||
|
||||
async def get_thread_subscriptions_extension_response(
|
||||
self,
|
||||
sync_config: SlidingSyncConfig,
|
||||
thread_subscriptions_request: SlidingSyncConfig.Extensions.ThreadSubscriptionsExtension,
|
||||
to_token: StreamToken,
|
||||
from_token: Optional[SlidingSyncStreamToken],
|
||||
) -> Optional[SlidingSyncResult.Extensions.ThreadSubscriptionsExtension]:
|
||||
"""Handle Thread Subscriptions extension (MSC4308)
|
||||
|
||||
Args:
|
||||
sync_config: Sync configuration
|
||||
thread_subscriptions_request: The thread_subscriptions extension from the request
|
||||
to_token: The point in the stream to sync up to.
|
||||
from_token: The point in the stream to sync from.
|
||||
|
||||
Returns:
|
||||
the response (None if empty or thread subscriptions are disabled)
|
||||
"""
|
||||
if not thread_subscriptions_request.enabled:
|
||||
return None
|
||||
|
||||
limit = thread_subscriptions_request.limit
|
||||
|
||||
if from_token:
|
||||
from_stream_id = from_token.stream_token.thread_subscriptions_key
|
||||
else:
|
||||
from_stream_id = StreamToken.START.thread_subscriptions_key
|
||||
|
||||
to_stream_id = to_token.thread_subscriptions_key
|
||||
|
||||
updates = await self.store.get_latest_updated_thread_subscriptions_for_user(
|
||||
user_id=sync_config.user.to_string(),
|
||||
from_id=from_stream_id,
|
||||
to_id=to_stream_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
if len(updates) == 0:
|
||||
return None
|
||||
|
||||
subscribed_threads: Dict[str, Dict[str, _ThreadSubscription]] = {}
|
||||
unsubscribed_threads: Dict[str, Dict[str, _ThreadUnsubscription]] = {}
|
||||
for stream_id, room_id, thread_root_id, subscribed, automatic in updates:
|
||||
if subscribed:
|
||||
subscribed_threads.setdefault(room_id, {})[thread_root_id] = (
|
||||
_ThreadSubscription(
|
||||
automatic=automatic,
|
||||
bump_stamp=stream_id,
|
||||
)
|
||||
)
|
||||
else:
|
||||
unsubscribed_threads.setdefault(room_id, {})[thread_root_id] = (
|
||||
_ThreadUnsubscription(bump_stamp=stream_id)
|
||||
)
|
||||
|
||||
prev_batch = None
|
||||
if len(updates) == limit:
|
||||
# Tell the client about a potential gap where there may be more
|
||||
# thread subscriptions for it to backpaginate.
|
||||
# We subtract one because the 'later in the stream' bound is inclusive,
|
||||
# and we already saw the element at index 0.
|
||||
prev_batch = ThreadSubscriptionsToken(updates[0][0] - 1)
|
||||
|
||||
return SlidingSyncResult.Extensions.ThreadSubscriptionsExtension(
|
||||
subscribed=subscribed_threads,
|
||||
unsubscribed=unsubscribed_threads,
|
||||
prev_batch=prev_batch,
|
||||
)
|
||||
|
||||
@@ -9,7 +9,7 @@ from synapse.storage.databases.main.thread_subscriptions import (
|
||||
AutomaticSubscriptionConflicted,
|
||||
ThreadSubscription,
|
||||
)
|
||||
from synapse.types import EventOrderings, StreamKeyType, UserID
|
||||
from synapse.types import EventOrderings, UserID
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
@@ -22,7 +22,6 @@ class ThreadSubscriptionsHandler:
|
||||
self.store = hs.get_datastores().main
|
||||
self.event_handler = hs.get_event_handler()
|
||||
self.auth = hs.get_auth()
|
||||
self._notifier = hs.get_notifier()
|
||||
|
||||
async def get_thread_subscription_settings(
|
||||
self,
|
||||
@@ -133,15 +132,6 @@ class ThreadSubscriptionsHandler:
|
||||
errcode=Codes.MSC4306_CONFLICTING_UNSUBSCRIPTION,
|
||||
)
|
||||
|
||||
if outcome is not None:
|
||||
# wake up user streams (e.g. sliding sync) on the same worker
|
||||
self._notifier.on_new_event(
|
||||
StreamKeyType.THREAD_SUBSCRIPTIONS,
|
||||
# outcome is a stream_id
|
||||
outcome,
|
||||
users=[user_id.to_string()],
|
||||
)
|
||||
|
||||
return outcome
|
||||
|
||||
async def unsubscribe_user_from_thread(
|
||||
@@ -172,19 +162,8 @@ class ThreadSubscriptionsHandler:
|
||||
logger.info("rejecting thread subscriptions change (thread not accessible)")
|
||||
raise NotFoundError("No such thread root")
|
||||
|
||||
outcome = await self.store.unsubscribe_user_from_thread(
|
||||
return await self.store.unsubscribe_user_from_thread(
|
||||
user_id.to_string(),
|
||||
event.room_id,
|
||||
thread_root_event_id,
|
||||
)
|
||||
|
||||
if outcome is not None:
|
||||
# wake up user streams (e.g. sliding sync) on the same worker
|
||||
self._notifier.on_new_event(
|
||||
StreamKeyType.THREAD_SUBSCRIPTIONS,
|
||||
# outcome is a stream_id
|
||||
outcome,
|
||||
users=[user_id.to_string()],
|
||||
)
|
||||
|
||||
return outcome
|
||||
|
||||
@@ -130,16 +130,6 @@ def parse_integer(
|
||||
return parse_integer_from_args(args, name, default, required, negative)
|
||||
|
||||
|
||||
@overload
|
||||
def parse_integer_from_args(
|
||||
args: Mapping[bytes, Sequence[bytes]],
|
||||
name: str,
|
||||
default: int,
|
||||
required: Literal[False] = False,
|
||||
negative: bool = False,
|
||||
) -> int: ...
|
||||
|
||||
|
||||
@overload
|
||||
def parse_integer_from_args(
|
||||
args: Mapping[bytes, Sequence[bytes]],
|
||||
|
||||
@@ -761,7 +761,7 @@ class ModuleApi:
|
||||
Returns:
|
||||
True if the user is a server admin, False otherwise.
|
||||
"""
|
||||
return await self._store.is_server_admin(user_id)
|
||||
return await self._store.is_server_admin(UserID.from_string(user_id))
|
||||
|
||||
async def set_user_admin(self, user_id: str, admin: bool) -> None:
|
||||
"""Sets if a user is a server admin.
|
||||
|
||||
@@ -522,7 +522,6 @@ class Notifier:
|
||||
StreamKeyType.TO_DEVICE,
|
||||
StreamKeyType.TYPING,
|
||||
StreamKeyType.UN_PARTIAL_STATED_ROOMS,
|
||||
StreamKeyType.THREAD_SUBSCRIPTIONS,
|
||||
],
|
||||
new_token: int,
|
||||
users: Optional[Collection[Union[str, UserID]]] = None,
|
||||
|
||||
@@ -91,7 +91,7 @@ def _rule_to_template(rule: PushRule) -> Optional[Dict[str, Any]]:
|
||||
unscoped_rule_id = _rule_id_from_namespaced(rule.rule_id)
|
||||
|
||||
template_name = _priority_class_to_template_name(rule.priority_class)
|
||||
if template_name in ["override", "underride", "postcontent"]:
|
||||
if template_name in ["override", "underride"]:
|
||||
templaterule = {"conditions": rule.conditions, "actions": rule.actions}
|
||||
elif template_name in ["sender", "room"]:
|
||||
templaterule = {"actions": rule.actions}
|
||||
|
||||
@@ -19,14 +19,10 @@
|
||||
#
|
||||
#
|
||||
|
||||
# Integer literals for push rule `kind`s
|
||||
# This is used to store them in the database.
|
||||
PRIORITY_CLASS_MAP = {
|
||||
"underride": 1,
|
||||
"sender": 2,
|
||||
"room": 3,
|
||||
# MSC4306
|
||||
"postcontent": 6,
|
||||
"content": 4,
|
||||
"override": 5,
|
||||
}
|
||||
|
||||
@@ -44,7 +44,6 @@ from synapse.replication.tcp.streams import (
|
||||
UnPartialStatedEventStream,
|
||||
UnPartialStatedRoomStream,
|
||||
)
|
||||
from synapse.replication.tcp.streams._base import ThreadSubscriptionsStream
|
||||
from synapse.replication.tcp.streams.events import (
|
||||
EventsStream,
|
||||
EventsStreamEventRow,
|
||||
@@ -256,12 +255,6 @@ class ReplicationDataHandler:
|
||||
self._state_storage_controller.notify_event_un_partial_stated(
|
||||
row.event_id
|
||||
)
|
||||
elif stream_name == ThreadSubscriptionsStream.NAME:
|
||||
self.notifier.on_new_event(
|
||||
StreamKeyType.THREAD_SUBSCRIPTIONS,
|
||||
token,
|
||||
users=[row.user_id for row in rows],
|
||||
)
|
||||
|
||||
await self._presence_handler.process_replication_rows(
|
||||
stream_name, instance_name, token, rows
|
||||
|
||||
@@ -627,15 +627,6 @@ class MakeRoomAdminRestServlet(ResolveRoomIdMixin, RestServlet):
|
||||
]
|
||||
admin_users.sort(key=lambda user: user_power[user])
|
||||
|
||||
if create_event.room_version.msc4289_creator_power_enabled:
|
||||
creators = create_event.content.get("additional_creators", []) + [
|
||||
create_event.sender
|
||||
]
|
||||
for creator in creators:
|
||||
if self.is_mine_id(creator):
|
||||
# include the creator as they won't be in the PL users map.
|
||||
admin_users.append(creator)
|
||||
|
||||
if not admin_users:
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST, "No local admin user in room"
|
||||
@@ -675,11 +666,7 @@ class MakeRoomAdminRestServlet(ResolveRoomIdMixin, RestServlet):
|
||||
# updated power level event.
|
||||
new_pl_content = dict(pl_content)
|
||||
new_pl_content["users"] = dict(pl_content.get("users", {}))
|
||||
# give the new user the same PL as the admin, default to 100 in case there is no PL event.
|
||||
# This means in v12+ rooms we get PL100 if the creator promotes us.
|
||||
new_pl_content["users"][user_to_add] = new_pl_content["users"].get(
|
||||
admin_user_id, 100
|
||||
)
|
||||
new_pl_content["users"][user_to_add] = new_pl_content["users"][admin_user_id]
|
||||
|
||||
fake_requester = create_requester(
|
||||
admin_user_id,
|
||||
|
||||
@@ -1000,7 +1000,7 @@ class UserAdminServlet(RestServlet):
|
||||
"Only local users can be admins of this homeserver",
|
||||
)
|
||||
|
||||
is_admin = await self.store.is_server_admin(target_user.to_string())
|
||||
is_admin = await self.store.is_server_admin(target_user)
|
||||
|
||||
return HTTPStatus.OK, {"admin": is_admin}
|
||||
|
||||
|
||||
@@ -19,11 +19,9 @@
|
||||
#
|
||||
#
|
||||
|
||||
from http import HTTPStatus
|
||||
from typing import TYPE_CHECKING, List, Tuple, Union
|
||||
|
||||
from synapse.api.errors import (
|
||||
Codes,
|
||||
NotFoundError,
|
||||
StoreError,
|
||||
SynapseError,
|
||||
@@ -241,15 +239,6 @@ def _rule_spec_from_path(path: List[str]) -> RuleSpec:
|
||||
def _rule_tuple_from_request_object(
|
||||
rule_template: str, rule_id: str, req_obj: JsonDict
|
||||
) -> Tuple[List[JsonDict], List[Union[str, JsonDict]]]:
|
||||
if rule_template == "postcontent":
|
||||
# postcontent is from MSC4306, which says that clients
|
||||
# cannot create their own postcontent rules right now.
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
"user-defined rules using `postcontent` are not accepted",
|
||||
errcode=Codes.INVALID_PARAM,
|
||||
)
|
||||
|
||||
if rule_template in ["override", "underride"]:
|
||||
if "conditions" not in req_obj:
|
||||
raise InvalidRuleException("Missing 'conditions'")
|
||||
|
||||
@@ -24,7 +24,6 @@ from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.api.errors import Codes, ShadowBanError, SynapseError
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.event_auth import check_valid_additional_creators
|
||||
from synapse.handlers.worker_lock import NEW_EVENT_DURING_PURGE_LOCK_NAME
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
@@ -86,18 +85,13 @@ class RoomUpgradeRestServlet(RestServlet):
|
||||
"Your homeserver does not support this room version",
|
||||
Codes.UNSUPPORTED_ROOM_VERSION,
|
||||
)
|
||||
additional_creators = None
|
||||
if new_version.msc4289_creator_power_enabled:
|
||||
additional_creators = content.get("additional_creators")
|
||||
if additional_creators is not None:
|
||||
check_valid_additional_creators(additional_creators)
|
||||
|
||||
try:
|
||||
async with self._worker_lock_handler.acquire_read_write_lock(
|
||||
NEW_EVENT_DURING_PURGE_LOCK_NAME, room_id, write=False
|
||||
):
|
||||
new_room_id = await self._room_creation_handler.upgrade_room(
|
||||
requester, room_id, new_version, additional_creators
|
||||
requester, room_id, new_version
|
||||
)
|
||||
except ShadowBanError:
|
||||
# Generate a random room ID.
|
||||
|
||||
@@ -23,8 +23,6 @@ import logging
|
||||
from collections import defaultdict
|
||||
from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union
|
||||
|
||||
import attr
|
||||
|
||||
from synapse.api.constants import AccountDataTypes, EduTypes, Membership, PresenceState
|
||||
from synapse.api.errors import Codes, StoreError, SynapseError
|
||||
from synapse.api.filtering import FilterCollection
|
||||
@@ -807,21 +805,12 @@ class SlidingSyncE2eeRestServlet(RestServlet):
|
||||
|
||||
class SlidingSyncRestServlet(RestServlet):
"""
API endpoint for MSC4186 Simplified Sliding Sync `/sync`, which was historically derived
from MSC3575 (Sliding Sync; now abandoned). Allows for clients to request a
API endpoint for MSC3575 Sliding Sync `/sync`. Allows for clients to request a
subset (sliding window) of rooms, state, and timeline events (just what they need)
in order to bootstrap quickly and subscribe to only what the client cares about.
Because the client can specify what it cares about, we can respond quickly and skip
all of the work we would normally have to do with a sync v2 response.

Extensions of various features are defined in:
- to-device messaging (MSC3885)
- end-to-end encryption (MSC3884)
- typing notifications (MSC3961)
- receipts (MSC3960)
- account data (MSC3959)
- thread subscriptions (MSC4308)

Request query parameters:
timeout: How long to wait for new events in milliseconds.
pos: Stream position token when asking for incremental deltas.
@@ -1005,18 +994,12 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
extensions=body.extensions,
|
||||
)
|
||||
|
||||
(
|
||||
sliding_sync_results,
|
||||
did_wait,
|
||||
) = await self.sliding_sync_handler.wait_for_sync_for_user(
|
||||
sliding_sync_results = await self.sliding_sync_handler.wait_for_sync_for_user(
|
||||
requester,
|
||||
sync_config,
|
||||
from_token,
|
||||
timeout,
|
||||
)
|
||||
# Knowing whether we waited is useful in traces to filter out long-running
|
||||
# requests where we were just waiting.
|
||||
set_tag("sliding_sync.did_wait", str(did_wait))
|
||||
|
||||
# The client may have disconnected by now; don't bother to serialize the
|
||||
# response if so.
|
||||
@@ -1028,7 +1011,6 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
|
||||
return 200, response_content
|
||||
|
||||
@trace_with_opname("sliding_sync.encode_response")
|
||||
async def encode_response(
|
||||
self,
|
||||
requester: Requester,
|
||||
@@ -1049,7 +1031,6 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
|
||||
return response
|
||||
|
||||
@trace_with_opname("sliding_sync.encode_lists")
|
||||
def encode_lists(
|
||||
self, lists: Mapping[str, SlidingSyncResult.SlidingWindowList]
|
||||
) -> JsonDict:
|
||||
@@ -1071,7 +1052,6 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
|
||||
return serialized_lists
|
||||
|
||||
@trace_with_opname("sliding_sync.encode_rooms")
|
||||
async def encode_rooms(
|
||||
self,
|
||||
requester: Requester,
|
||||
@@ -1192,7 +1172,6 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
|
||||
return serialized_rooms
|
||||
|
||||
@trace_with_opname("sliding_sync.encode_extensions")
|
||||
async def encode_extensions(
|
||||
self, requester: Requester, extensions: SlidingSyncResult.Extensions
|
||||
) -> JsonDict:
|
||||
@@ -1258,48 +1237,9 @@ class SlidingSyncRestServlet(RestServlet):
|
||||
"rooms": extensions.typing.room_id_to_typing_map,
|
||||
}
|
||||
|
||||
# excludes both None and falsy `thread_subscriptions`
|
||||
if extensions.thread_subscriptions:
|
||||
serialized_extensions["io.element.msc4308.thread_subscriptions"] = (
|
||||
_serialise_thread_subscriptions(extensions.thread_subscriptions)
|
||||
)
|
||||
|
||||
return serialized_extensions
|
||||
|
||||
|
||||
def _serialise_thread_subscriptions(
|
||||
thread_subscriptions: SlidingSyncResult.Extensions.ThreadSubscriptionsExtension,
|
||||
) -> JsonDict:
|
||||
out: JsonDict = {}
|
||||
|
||||
if thread_subscriptions.subscribed:
|
||||
out["subscribed"] = {
|
||||
room_id: {
|
||||
thread_root_id: attr.asdict(
|
||||
change, filter=lambda _attr, v: v is not None
|
||||
)
|
||||
for thread_root_id, change in room_threads.items()
|
||||
}
|
||||
for room_id, room_threads in thread_subscriptions.subscribed.items()
|
||||
}
|
||||
|
||||
if thread_subscriptions.unsubscribed:
|
||||
out["unsubscribed"] = {
|
||||
room_id: {
|
||||
thread_root_id: attr.asdict(
|
||||
change, filter=lambda _attr, v: v is not None
|
||||
)
|
||||
for thread_root_id, change in room_threads.items()
|
||||
}
|
||||
for room_id, room_threads in thread_subscriptions.unsubscribed.items()
|
||||
}

if thread_subscriptions.prev_batch:
out["prev_batch"] = thread_subscriptions.prev_batch.to_string()

return out
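To make the wire shape concrete, here is a hedged example of what this serialiser emits for the `io.element.msc4308.thread_subscriptions` extension; the room IDs, event IDs, stream positions and token text are all invented:

    # Illustrative only: `automatic`/`bump_stamp` come from the ThreadSubscription
    # attrs classes, and None-valued fields are filtered out by attr.asdict.
    example_payload = {
        "subscribed": {
            "!room:example.org": {
                "$threadroot:example.org": {"automatic": True, "bump_stamp": 42},
            },
        },
        "unsubscribed": {
            "!room:example.org": {
                "$otherroot:example.org": {"bump_stamp": 43},
            },
        },
        "prev_batch": "<opaque thread subscriptions token>",
    }
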
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
SyncRestServlet(hs).register(http_server)
|
||||
|
||||
|
||||
@@ -1,39 +1,21 @@
|
||||
from http import HTTPStatus
|
||||
from typing import TYPE_CHECKING, Dict, Optional, Tuple
|
||||
|
||||
import attr
|
||||
from typing_extensions import TypeAlias
|
||||
from typing import TYPE_CHECKING, Optional, Tuple
|
||||
|
||||
from synapse.api.errors import Codes, NotFoundError, SynapseError
|
||||
from synapse.http.server import HttpServer
|
||||
from synapse.http.servlet import (
|
||||
RestServlet,
|
||||
parse_and_validate_json_object_from_request,
|
||||
parse_integer,
|
||||
parse_string,
|
||||
)
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.client._base import client_patterns
|
||||
from synapse.types import (
|
||||
JsonDict,
|
||||
RoomID,
|
||||
SlidingSyncStreamToken,
|
||||
ThreadSubscriptionsToken,
|
||||
)
|
||||
from synapse.types.handlers.sliding_sync import SlidingSyncResult
|
||||
from synapse.types import JsonDict, RoomID
|
||||
from synapse.types.rest import RequestBodyModel
|
||||
from synapse.util.pydantic_models import AnyEventId
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
_ThreadSubscription: TypeAlias = (
|
||||
SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadSubscription
|
||||
)
|
||||
_ThreadUnsubscription: TypeAlias = (
|
||||
SlidingSyncResult.Extensions.ThreadSubscriptionsExtension.ThreadUnsubscription
|
||||
)
|
||||
|
||||
|
||||
class ThreadSubscriptionsRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns(
|
||||
@@ -118,129 +100,6 @@ class ThreadSubscriptionsRestServlet(RestServlet):
|
||||
return HTTPStatus.OK, {}
|
||||
|
||||
|
||||
class ThreadSubscriptionsPaginationRestServlet(RestServlet):
|
||||
PATTERNS = client_patterns(
|
||||
"/io.element.msc4308/thread_subscriptions$",
|
||||
unstable=True,
|
||||
releases=(),
|
||||
)
|
||||
CATEGORY = "Thread Subscriptions requests (unstable)"
|
||||
|
||||
# Maximum number of thread subscriptions to return in one request.
|
||||
MAX_LIMIT = 512
|
||||
|
||||
def __init__(self, hs: "HomeServer"):
|
||||
self.auth = hs.get_auth()
|
||||
self.is_mine = hs.is_mine
|
||||
self.store = hs.get_datastores().main
|
||||
|
||||
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
|
||||
requester = await self.auth.get_user_by_req(request)
|
||||
|
||||
limit = min(
|
||||
parse_integer(request, "limit", default=100, negative=False),
|
||||
ThreadSubscriptionsPaginationRestServlet.MAX_LIMIT,
|
||||
)
|
||||
from_end_opt = parse_string(request, "from", required=False)
|
||||
to_start_opt = parse_string(request, "to", required=False)
|
||||
_direction = parse_string(request, "dir", required=True, allowed_values=("b",))
|
||||
|
||||
if limit <= 0:
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
"limit must be greater than 0",
|
||||
errcode=Codes.INVALID_PARAM,
|
||||
)
|
||||
|
||||
if from_end_opt is not None:
|
||||
try:
|
||||
# because of backwards pagination, the `from` token is actually the
|
||||
# bound closest to the end of the stream
|
||||
end_stream_id = ThreadSubscriptionsToken.from_string(
|
||||
from_end_opt
|
||||
).stream_id
|
||||
except ValueError:
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
"`from` is not a valid token",
|
||||
errcode=Codes.INVALID_PARAM,
|
||||
)
|
||||
else:
|
||||
end_stream_id = self.store.get_max_thread_subscriptions_stream_id()
|
||||
|
||||
if to_start_opt is not None:
|
||||
# because of backwards pagination, the `to` token is actually the
|
||||
# bound closest to the start of the stream
|
||||
try:
|
||||
start_stream_id = ThreadSubscriptionsToken.from_string(
|
||||
to_start_opt
|
||||
).stream_id
|
||||
except ValueError:
|
||||
# we also accept sliding sync `pos` tokens on this parameter
|
||||
try:
|
||||
sliding_sync_pos = await SlidingSyncStreamToken.from_string(
|
||||
self.store, to_start_opt
|
||||
)
|
||||
start_stream_id = (
|
||||
sliding_sync_pos.stream_token.thread_subscriptions_key
|
||||
)
|
||||
except ValueError:
|
||||
raise SynapseError(
|
||||
HTTPStatus.BAD_REQUEST,
|
||||
"`to` is not a valid token",
|
||||
errcode=Codes.INVALID_PARAM,
|
||||
)
|
||||
else:
|
||||
# the start of time is ID 1; the lower bound is exclusive though
|
||||
start_stream_id = 0
|
||||
|
||||
subscriptions = (
|
||||
await self.store.get_latest_updated_thread_subscriptions_for_user(
|
||||
requester.user.to_string(),
|
||||
from_id=start_stream_id,
|
||||
to_id=end_stream_id,
|
||||
limit=limit,
|
||||
)
|
||||
)
|
||||
|
||||
subscribed_threads: Dict[str, Dict[str, JsonDict]] = {}
|
||||
unsubscribed_threads: Dict[str, Dict[str, JsonDict]] = {}
|
||||
for stream_id, room_id, thread_root_id, subscribed, automatic in subscriptions:
|
||||
if subscribed:
|
||||
subscribed_threads.setdefault(room_id, {})[thread_root_id] = (
|
||||
attr.asdict(
|
||||
_ThreadSubscription(
|
||||
automatic=automatic,
|
||||
bump_stamp=stream_id,
|
||||
)
|
||||
)
|
||||
)
|
||||
else:
|
||||
unsubscribed_threads.setdefault(room_id, {})[thread_root_id] = (
|
||||
attr.asdict(_ThreadUnsubscription(bump_stamp=stream_id))
|
||||
)
|
||||
|
||||
result: JsonDict = {}
|
||||
if subscribed_threads:
|
||||
result["subscribed"] = subscribed_threads
|
||||
if unsubscribed_threads:
|
||||
result["unsubscribed"] = unsubscribed_threads
|
||||
|
||||
if len(subscriptions) == limit:
|
||||
# We hit the limit, so there might be more entries to return.
|
||||
# Generate a new token that has moved backwards, ready for the next
|
||||
# request.
|
||||
min_returned_stream_id, _, _, _, _ = subscriptions[0]
|
||||
result["end"] = ThreadSubscriptionsToken(
|
||||
# We subtract one because the 'later in the stream' bound is inclusive,
|
||||
# and we already saw the element at index 0.
|
||||
stream_id=min_returned_stream_id - 1
|
||||
).to_string()

return HTTPStatus.OK, result


def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
if hs.config.experimental.msc4306_enabled:
ThreadSubscriptionsRestServlet(hs).register(http_server)
ThreadSubscriptionsPaginationRestServlet(hs).register(http_server)
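The pagination handler above walks the thread-subscriptions stream backwards: `from` is the bound nearest the end of the stream, `to` the bound nearest the start, and the `end` token it returns is the smallest stream id it handed out minus one, because the later-in-the-stream bound is inclusive. A minimal sketch of that arithmetic, using a bare integer in place of the real ThreadSubscriptionsToken:

    from typing import List, Optional

    def next_backwards_token(returned_stream_ids: List[int]) -> Optional[int]:
        # Sketch only: derive the next `end` position after one page of results.
        if not returned_stream_ids:
            return None
        # The entry with the lowest stream id was already returned, so step past it.
        return min(returned_stream_ids) - 1
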
@@ -54,7 +54,6 @@ from synapse.logging.opentracing import tag_args, trace
|
||||
from synapse.metrics import SERVER_NAME_LABEL
|
||||
from synapse.replication.http.state import ReplicationUpdateCurrentStateRestServlet
|
||||
from synapse.state import v1, v2
|
||||
from synapse.storage.databases.main.event_federation import StateDifference
|
||||
from synapse.storage.databases.main.events_worker import EventRedactBehaviour
|
||||
from synapse.types import StateMap, StrCollection
|
||||
from synapse.types.state import StateFilter
|
||||
@@ -991,35 +990,17 @@ class StateResolutionStore:
|
||||
)
|
||||
|
||||
def get_auth_chain_difference(
|
||||
self,
|
||||
room_id: str,
|
||||
state_sets: List[Set[str]],
|
||||
conflicted_state: Optional[Set[str]],
|
||||
additional_backwards_reachable_conflicted_events: Optional[Set[str]],
|
||||
) -> Awaitable[StateDifference]:
|
||||
""" "Given sets of state events figure out the auth chain difference (as
|
||||
self, room_id: str, state_sets: List[Set[str]]
|
||||
) -> Awaitable[Set[str]]:
|
||||
"""Given sets of state events figure out the auth chain difference (as
|
||||
per state res v2 algorithm).
|
||||
|
||||
This is equivalent to fetching the full auth chain for each set of state
|
||||
This equivalent to fetching the full auth chain for each set of state
|
||||
and returning the events that don't appear in each and every auth
|
||||
chain.
|
||||
|
||||
If conflicted_state is not None, calculate and return the conflicted sub-graph as per
|
||||
state res v2.1. The event IDs in the conflicted state MUST be a subset of the event IDs in
|
||||
state_sets.
|
||||
|
||||
If additional_backwards_reachable_conflicted_events is set, the provided events are included
|
||||
when calculating the conflicted subgraph. This is primarily useful for calculating the
|
||||
subgraph across a combination of persisted and unpersisted events.
|
||||
|
||||
Returns:
|
||||
information on the auth chain difference, and also the conflicted subgraph if
|
||||
conflicted_state is not None
|
||||
An awaitable that resolves to a set of event IDs.
|
||||
"""

return self.main_store.get_auth_chain_difference_extended(
room_id,
state_sets,
conflicted_state,
additional_backwards_reachable_conflicted_events,
)
return self.main_store.get_auth_chain_difference(room_id, state_sets)
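As the docstring above describes, the auth chain difference is the set of events that appear in some but not every auth chain. A minimal in-memory sketch of that definition (the real store computes this over persisted chains, so this is illustrative only):

    from typing import List, Set

    def auth_chain_difference(auth_chains: List[Set[str]]) -> Set[str]:
        # Events present in at least one auth chain but missing from at least one other.
        if not auth_chains:
            return set()
        union = set().union(*auth_chains)
        intersection = set.intersection(*auth_chains)
        return union - intersection
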
@@ -39,11 +39,10 @@ from typing import (
|
||||
)
|
||||
|
||||
from synapse import event_auth
|
||||
from synapse.api.constants import CREATOR_POWER_LEVEL, EventTypes
|
||||
from synapse.api.constants import EventTypes
|
||||
from synapse.api.errors import AuthError
|
||||
from synapse.api.room_versions import RoomVersion, StateResolutionVersions
|
||||
from synapse.events import EventBase, is_creator
|
||||
from synapse.storage.databases.main.event_federation import StateDifference
|
||||
from synapse.api.room_versions import RoomVersion
|
||||
from synapse.events import EventBase
|
||||
from synapse.types import MutableStateMap, StateMap, StrCollection
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -64,12 +63,8 @@ class StateResolutionStore(Protocol):
|
||||
) -> Awaitable[Dict[str, EventBase]]: ...
|
||||
|
||||
def get_auth_chain_difference(
|
||||
self,
|
||||
room_id: str,
|
||||
state_sets: List[Set[str]],
|
||||
conflicted_state: Optional[Set[str]],
|
||||
additional_backwards_reachable_conflicted_events: Optional[set[str]],
|
||||
) -> Awaitable[StateDifference]: ...
|
||||
self, room_id: str, state_sets: List[Set[str]]
|
||||
) -> Awaitable[Set[str]]: ...
|
||||
|
||||
|
||||
# We want to await to the reactor occasionally during state res when dealing
|
||||
@@ -128,17 +123,12 @@ async def resolve_events_with_store(
|
||||
logger.debug("%d conflicted state entries", len(conflicted_state))
|
||||
logger.debug("Calculating auth chain difference")
|
||||
|
||||
conflicted_set: Optional[Set[str]] = None
|
||||
if room_version.state_res == StateResolutionVersions.V2_1:
|
||||
# calculate the conflicted subgraph
|
||||
conflicted_set = set(itertools.chain.from_iterable(conflicted_state.values()))
|
||||
# Also fetch all auth events that appear in only some of the state sets'
|
||||
# auth chains.
|
||||
auth_diff = await _get_auth_chain_difference(
|
||||
room_id,
|
||||
state_sets,
|
||||
event_map,
|
||||
state_res_store,
|
||||
conflicted_set,
|
||||
room_id, state_sets, event_map, state_res_store
|
||||
)
|
||||
|
||||
full_conflicted_set = set(
|
||||
itertools.chain(
|
||||
itertools.chain.from_iterable(conflicted_state.values()), auth_diff
|
||||
@@ -178,26 +168,15 @@ async def resolve_events_with_store(
|
||||
|
||||
logger.debug("sorted %d power events", len(sorted_power_events))
|
||||
|
||||
# v2.1 starts iterative auth checks from the empty set and not the unconflicted state.
|
||||
# It relies on IAC behaviour which populates the base state with the events from auth_events
|
||||
# if the state tuple is missing from the base state. This ensures the base state is only
|
||||
# populated from auth_events rather than whatever the unconflicted state is (which could be
|
||||
# completely bogus).
|
||||
base_state = (
|
||||
{}
|
||||
if room_version.state_res == StateResolutionVersions.V2_1
|
||||
else unconflicted_state
|
||||
)
|
||||
|
||||
# Now sequentially auth each one
|
||||
resolved_state = await _iterative_auth_checks(
|
||||
clock,
|
||||
room_id,
|
||||
room_version,
|
||||
event_ids=sorted_power_events,
|
||||
base_state=base_state,
|
||||
event_map=event_map,
|
||||
state_res_store=state_res_store,
|
||||
sorted_power_events,
|
||||
unconflicted_state,
|
||||
event_map,
|
||||
state_res_store,
|
||||
)
|
||||
|
||||
logger.debug("resolved power events")
|
||||
@@ -260,23 +239,13 @@ async def _get_power_level_for_sender(
|
||||
event = await _get_event(room_id, event_id, event_map, state_res_store)
|
||||
|
||||
pl = None
|
||||
create = None
|
||||
for aid in event.auth_event_ids():
|
||||
aev = await _get_event(
|
||||
room_id, aid, event_map, state_res_store, allow_none=True
|
||||
)
|
||||
if aev and (aev.type, aev.state_key) == (EventTypes.PowerLevels, ""):
|
||||
pl = aev
|
||||
if aev and (aev.type, aev.state_key) == (EventTypes.Create, ""):
|
||||
create = aev
|
||||
|
||||
if event.type != EventTypes.Create:
|
||||
# we should always have a create event
|
||||
assert create is not None
|
||||
|
||||
if create and create.room_version.msc4289_creator_power_enabled:
|
||||
if is_creator(create, event.sender):
|
||||
return CREATOR_POWER_LEVEL
|
||||
break
|
||||
|
||||
if pl is None:
|
||||
# Couldn't find power level. Check if they're the creator of the room
|
||||
@@ -317,7 +286,6 @@ async def _get_auth_chain_difference(
|
||||
state_sets: Sequence[StateMap[str]],
|
||||
unpersisted_events: Dict[str, EventBase],
|
||||
state_res_store: StateResolutionStore,
|
||||
conflicted_state: Optional[Set[str]],
|
||||
) -> Set[str]:
|
||||
"""Compare the auth chains of each state set and return the set of events
|
||||
that only appear in some, but not all of the auth chains.
|
||||
@@ -326,18 +294,11 @@ async def _get_auth_chain_difference(
|
||||
state_sets: The input state sets we are trying to resolve across.
|
||||
unpersisted_events: A map from event ID to EventBase containing all unpersisted
|
||||
events involved in this resolution.
|
||||
state_res_store: A way to retrieve events and extract graph information on the auth chains.
|
||||
conflicted_state: which event IDs are conflicted. Used in v2.1 for calculating the conflicted
|
||||
subgraph.
|
||||
state_res_store:
|
||||
|
||||
Returns:
|
||||
The auth difference of the given state sets, as a set of event IDs. Also includes the
|
||||
conflicted subgraph if `conflicted_state` is set.
|
||||
The auth difference of the given state sets, as a set of event IDs.
|
||||
"""
|
||||
is_state_res_v21 = conflicted_state is not None
|
||||
num_conflicted_state = (
|
||||
len(conflicted_state) if conflicted_state is not None else None
|
||||
)
|
||||
|
||||
# The `StateResolutionStore.get_auth_chain_difference` function assumes that
|
||||
# all events passed to it (and their auth chains) have been persisted
|
||||
@@ -357,19 +318,14 @@ async def _get_auth_chain_difference(
|
||||
# the event's auth chain with the events in `unpersisted_events` *plus* their
|
||||
# auth event IDs.
|
||||
events_to_auth_chain: Dict[str, Set[str]] = {}
|
||||
# remember the forward links when doing the graph traversal, we'll need it for v2.1 checks
|
||||
# This is a map from an event to the set of events that contain it as an auth event.
|
||||
event_to_next_event: Dict[str, Set[str]] = {}
|
||||
for event in unpersisted_events.values():
|
||||
chain = {event.event_id}
|
||||
events_to_auth_chain[event.event_id] = chain
|
||||
|
||||
to_search = [event]
|
||||
while to_search:
|
||||
next_event = to_search.pop()
|
||||
for auth_id in next_event.auth_event_ids():
|
||||
for auth_id in to_search.pop().auth_event_ids():
|
||||
chain.add(auth_id)
|
||||
event_to_next_event.setdefault(auth_id, set()).add(next_event.event_id)
|
||||
auth_event = unpersisted_events.get(auth_id)
|
||||
if auth_event:
|
||||
to_search.append(auth_event)
|
||||
@@ -379,8 +335,6 @@ async def _get_auth_chain_difference(
|
||||
#
|
||||
# Note: If there are no `unpersisted_events` (which is the common case), we can do a
|
||||
# much simpler calculation.
|
||||
additional_backwards_reachable_conflicted_events: Set[str] = set()
|
||||
unpersisted_conflicted_events: Set[str] = set()
|
||||
if unpersisted_events:
|
||||
# The list of state sets to pass to the store, where each state set is a set
|
||||
# of the event ids making up the state. This is similar to `state_sets`,
|
||||
@@ -418,16 +372,7 @@ async def _get_auth_chain_difference(
|
||||
)
|
||||
else:
|
||||
set_ids.add(event_id)
|
||||
if conflicted_state:
|
||||
for conflicted_event_id in conflicted_state:
|
||||
# presence in this map means it is unpersisted.
|
||||
event_chain = events_to_auth_chain.get(conflicted_event_id)
|
||||
if event_chain is not None:
|
||||
unpersisted_conflicted_events.add(conflicted_event_id)
|
||||
# tell the DB layer that we have some unpersisted conflicted events
|
||||
additional_backwards_reachable_conflicted_events.update(
|
||||
e for e in event_chain if e not in unpersisted_events
|
||||
)
|
||||
|
||||
# The auth chain difference of the unpersisted events of the state sets
|
||||
# is calculated by taking the difference between the union and
|
||||
# intersections.
|
||||
@@ -439,89 +384,12 @@ async def _get_auth_chain_difference(
|
||||
auth_difference_unpersisted_part = ()
|
||||
state_sets_ids = [set(state_set.values()) for state_set in state_sets]
|
||||
|
||||
if conflicted_state:
|
||||
# to ensure that conflicted state is a subset of state set IDs, we need to remove UNPERSISTED
|
||||
# conflicted state set ids as we removed them above.
|
||||
conflicted_state = conflicted_state - unpersisted_conflicted_events
|
||||
|
||||
difference = await state_res_store.get_auth_chain_difference(
|
||||
room_id,
|
||||
state_sets_ids,
|
||||
conflicted_state,
|
||||
additional_backwards_reachable_conflicted_events,
|
||||
room_id, state_sets_ids
|
||||
)
|
||||
difference.auth_difference.update(auth_difference_unpersisted_part)
|
||||
difference.update(auth_difference_unpersisted_part)
|
||||
|
||||
# if we're doing v2.1 we may need to add or expand the conflicted subgraph
|
||||
if (
|
||||
is_state_res_v21
|
||||
and difference.conflicted_subgraph is not None
|
||||
and unpersisted_events
|
||||
):
|
||||
# we always include the conflicted events themselves in the subgraph.
|
||||
if conflicted_state:
|
||||
difference.conflicted_subgraph.update(conflicted_state)
|
||||
# we may need to expand the subgraph in the case where the subgraph starts in the DB and
|
||||
# ends in unpersisted events. To do this, we first need to see where the subgraph got up to,
|
||||
# which we can do by finding the intersection between the additional backwards reachable
|
||||
# conflicted events and the conflicted subgraph. Events in both sets mean A) some unpersisted
|
||||
# conflicted event could backwards reach it and B) some persisted conflicted event could forward
|
||||
# reach it.
|
||||
subgraph_frontier = difference.conflicted_subgraph.intersection(
|
||||
additional_backwards_reachable_conflicted_events
|
||||
)
|
||||
# we can now combine the 2 scenarios:
|
||||
# - subgraph starts in DB and ends in unpersisted
|
||||
# - subgraph starts in unpersisted and ends in unpersisted
|
||||
# by expanding the frontier into unpersisted events.
|
||||
# The frontier is currently all persisted events. We want to expand this into unpersisted
|
||||
# events. Mark every forwards reachable event from the frontier in the forwards_conflicted_set
|
||||
# but NOT the backwards conflicted set. This mirrors what the DB layer does but in reverse:
|
||||
# we supplied events which are backwards reachable to the DB and now the DB is providing
|
||||
# forwards reachable events from the DB.
|
||||
forwards_conflicted_set: Set[str] = set()
|
||||
# we include unpersisted conflicted events here to process exclusive unpersisted subgraphs
|
||||
search_queue = subgraph_frontier.union(unpersisted_conflicted_events)
|
||||
while search_queue:
|
||||
frontier_event = search_queue.pop()
|
||||
next_event_ids = event_to_next_event.get(frontier_event, set())
|
||||
search_queue.update(next_event_ids)
|
||||
forwards_conflicted_set.add(frontier_event)
|
||||
|
||||
# we've already calculated the backwards form as this is the auth chain for each
|
||||
# unpersisted conflicted event.
|
||||
backwards_conflicted_set: Set[str] = set()
|
||||
for uce in unpersisted_conflicted_events:
|
||||
backwards_conflicted_set.update(events_to_auth_chain.get(uce, []))
|
||||
|
||||
# the unpersisted conflicted subgraph is the intersection of the backwards/forwards sets
|
||||
conflicted_subgraph_unpersisted_part = backwards_conflicted_set.intersection(
|
||||
forwards_conflicted_set
|
||||
)
|
||||
# print(f"event_to_next_event={event_to_next_event}")
|
||||
# print(f"unpersisted_conflicted_events={unpersisted_conflicted_events}")
|
||||
# print(f"unperssited backwards_conflicted_set={backwards_conflicted_set}")
|
||||
# print(f"unperssited forwards_conflicted_set={forwards_conflicted_set}")
|
||||
difference.conflicted_subgraph.update(conflicted_subgraph_unpersisted_part)
|
||||
|
||||
if difference.conflicted_subgraph:
|
||||
old_events = difference.auth_difference.union(
|
||||
conflicted_state if conflicted_state else set()
|
||||
)
|
||||
additional_events = difference.conflicted_subgraph.difference(old_events)
|
||||
|
||||
logger.debug(
|
||||
"v2.1 %s additional events replayed=%d num_conflicts=%d conflicted_subgraph=%d auth_difference=%d",
|
||||
room_id,
|
||||
len(additional_events),
|
||||
num_conflicted_state,
|
||||
len(difference.conflicted_subgraph),
|
||||
len(difference.auth_difference),
|
||||
)
|
||||
# State res v2.1 includes the conflicted subgraph in the difference
|
||||
return difference.auth_difference.union(difference.conflicted_subgraph)

return difference.auth_difference
return difference
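The unpersisted part of the conflicted subgraph computed above is just the intersection of two reachability sets: events backwards-reachable from an unpersisted conflicted event (its auth chain) and events forwards-reachable via `event_to_next_event` from the frontier. A toy example with invented event IDs:

    # Toy illustration of the backwards/forwards intersection (invented event IDs).
    backwards_conflicted_set = {"A", "B", "C", "D"}  # auth chains of unpersisted conflicted events
    forwards_conflicted_set = {"C", "D", "E", "F"}   # reachable forwards from the frontier
    conflicted_subgraph_unpersisted_part = backwards_conflicted_set & forwards_conflicted_set
    assert conflicted_subgraph_unpersisted_part == {"C", "D"}
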
def _seperate(
|
||||
|
||||
@@ -114,12 +114,6 @@ _LONGEST_BACKOFF_PERIOD_MILLISECONDS = (
|
||||
assert 0 < _LONGEST_BACKOFF_PERIOD_MILLISECONDS <= ((2**31) - 1)
|
||||
|
||||
|
||||
# We use 2^53-1 as a "very large number", it has no particular
|
||||
# importance other than knowing synapse can support it (given canonical json
|
||||
# requires it).
|
||||
MAX_CHAIN_LENGTH = (2**53) - 1
|
||||
|
||||
|
||||
# All the info we need while iterating the DAG while backfilling
|
||||
@attr.s(frozen=True, slots=True, auto_attribs=True)
|
||||
class BackfillQueueNavigationItem:
|
||||
@@ -129,14 +123,6 @@ class BackfillQueueNavigationItem:
|
||||
type: str
|
||||
|
||||
|
||||
@attr.s(frozen=True, slots=True, auto_attribs=True)
|
||||
class StateDifference:
|
||||
# The event IDs in the auth difference.
|
||||
auth_difference: Set[str]
|
||||
# The event IDs in the conflicted state subgraph. Used in v2.1 only.
|
||||
conflicted_subgraph: Optional[Set[str]]
|
||||
|
||||
|
||||
class _NoChainCoverIndex(Exception):
|
||||
def __init__(self, room_id: str):
|
||||
super().__init__("Unexpectedly no chain cover for events in %s" % (room_id,))
|
||||
@@ -485,41 +471,17 @@ class EventFederationWorkerStore(
|
||||
return results
|
||||
|
||||
async def get_auth_chain_difference(
|
||||
self,
|
||||
room_id: str,
|
||||
state_sets: List[Set[str]],
|
||||
self, room_id: str, state_sets: List[Set[str]]
|
||||
) -> Set[str]:
|
||||
state_diff = await self.get_auth_chain_difference_extended(
|
||||
room_id, state_sets, None, None
|
||||
)
|
||||
return state_diff.auth_difference
|
||||
|
||||
async def get_auth_chain_difference_extended(
|
||||
self,
|
||||
room_id: str,
|
||||
state_sets: List[Set[str]],
|
||||
conflicted_set: Optional[Set[str]],
|
||||
additional_backwards_reachable_conflicted_events: Optional[Set[str]],
|
||||
) -> StateDifference:
|
||||
""" "Given sets of state events figure out the auth chain difference (as
|
||||
"""Given sets of state events figure out the auth chain difference (as
|
||||
per state res v2 algorithm).
|
||||
|
||||
This is equivalent to fetching the full auth chain for each set of state
|
||||
This equivalent to fetching the full auth chain for each set of state
|
||||
and returning the events that don't appear in each and every auth
|
||||
chain.
|
||||
|
||||
If conflicted_set is not None, calculate and return the conflicted sub-graph as per
|
||||
state res v2.1. The event IDs in the conflicted set MUST be a subset of the event IDs in
|
||||
state_sets.
|
||||
|
||||
If additional_backwards_reachable_conflicted_events is set, the provided events are included
|
||||
when calculating the conflicted subgraph. This is primarily useful for calculating the
|
||||
subgraph across a combination of persisted and unpersisted events. The event IDs in this set
|
||||
MUST be a subset of the event IDs in state_sets.
|
||||
|
||||
Returns:
|
||||
information on the auth chain difference, and also the conflicted subgraph if
|
||||
conflicted_set is not None
|
||||
The set of the difference in auth chains.
|
||||
"""
|
||||
|
||||
# Check if we have indexed the room so we can use the chain cover
|
||||
@@ -533,8 +495,6 @@ class EventFederationWorkerStore(
|
||||
self._get_auth_chain_difference_using_cover_index_txn,
|
||||
room_id,
|
||||
state_sets,
|
||||
conflicted_set,
|
||||
additional_backwards_reachable_conflicted_events,
|
||||
)
|
||||
except _NoChainCoverIndex:
|
||||
# For whatever reason we don't actually have a chain cover index
|
||||
@@ -543,48 +503,25 @@ class EventFederationWorkerStore(
|
||||
if not self.tests_allow_no_chain_cover_index:
|
||||
raise
|
||||
|
||||
# It's been 4 years since we added chain cover, so we expect all rooms to have it.
|
||||
# If they don't, we will error out when trying to do state res v2.1
|
||||
if conflicted_set is not None:
|
||||
raise _NoChainCoverIndex(room_id)
|
||||
|
||||
auth_diff = await self.db_pool.runInteraction(
|
||||
return await self.db_pool.runInteraction(
|
||||
"get_auth_chain_difference",
|
||||
self._get_auth_chain_difference_txn,
|
||||
state_sets,
|
||||
)
|
||||
return StateDifference(auth_difference=auth_diff, conflicted_subgraph=None)
|
||||
|
||||
def _get_auth_chain_difference_using_cover_index_txn(
|
||||
self,
|
||||
txn: LoggingTransaction,
|
||||
room_id: str,
|
||||
state_sets: List[Set[str]],
|
||||
conflicted_set: Optional[Set[str]] = None,
|
||||
additional_backwards_reachable_conflicted_events: Optional[Set[str]] = None,
|
||||
) -> StateDifference:
|
||||
self, txn: LoggingTransaction, room_id: str, state_sets: List[Set[str]]
|
||||
) -> Set[str]:
|
||||
"""Calculates the auth chain difference using the chain index.
|
||||
|
||||
See docs/auth_chain_difference_algorithm.md for details
|
||||
"""
|
||||
is_state_res_v21 = conflicted_set is not None
|
||||
|
||||
# First we look up the chain ID/sequence numbers for all the events, and
|
||||
# work out the chain/sequence numbers reachable from each state set.
|
||||
|
||||
initial_events = set(state_sets[0]).union(*state_sets[1:])
|
||||
|
||||
if is_state_res_v21:
|
||||
# Sanity check v2.1 fields
|
||||
assert conflicted_set is not None
|
||||
assert conflicted_set.issubset(initial_events)
|
||||
# It's possible for the conflicted_set to be empty if all the conflicts are in
|
||||
# unpersisted events, so we don't assert that conflicted_set has len > 0
|
||||
if additional_backwards_reachable_conflicted_events:
|
||||
assert additional_backwards_reachable_conflicted_events.issubset(
|
||||
initial_events
|
||||
)
|
||||
|
||||
# Map from event_id -> (chain ID, seq no)
|
||||
chain_info: Dict[str, Tuple[int, int]] = {}
|
||||
|
||||
@@ -620,14 +557,14 @@ class EventFederationWorkerStore(
|
||||
events_missing_chain_info = initial_events.difference(chain_info)
|
||||
|
||||
# The result set to return, i.e. the auth chain difference.
|
||||
auth_difference_result: Set[str] = set()
|
||||
result: Set[str] = set()
|
||||
|
||||
if events_missing_chain_info:
|
||||
# For some reason we have events we haven't calculated the chain
|
||||
# index for, so we need to handle those separately. This should only
|
||||
# happen for older rooms where the server doesn't have all the auth
|
||||
# events.
|
||||
auth_difference_result = self._fixup_auth_chain_difference_sets(
|
||||
result = self._fixup_auth_chain_difference_sets(
|
||||
txn,
|
||||
room_id,
|
||||
state_sets=state_sets,
|
||||
@@ -646,45 +583,6 @@ class EventFederationWorkerStore(
|
||||
|
||||
fetch_chain_info(new_events_to_fetch)
|
||||
|
||||
# State Res v2.1 needs extra data structures to calculate the conflicted subgraph which
|
||||
# are outlined below.
|
||||
|
||||
# A subset of chain_info for conflicted events only, as we need to
|
||||
# loop all conflicted chain positions. Map from event_id -> (chain ID, seq no)
|
||||
conflicted_chain_positions: Dict[str, Tuple[int, int]] = {}
|
||||
# For each chain, remember the positions where conflicted events are.
|
||||
# We need this for calculating the forward reachable events.
|
||||
conflicted_chain_to_seq: Dict[int, Set[int]] = {} # chain_id => {seq_num}
|
||||
# A subset of chain_info for additional backwards reachable events only, as we need to
|
||||
# loop all additional backwards reachable events for calculating backwards reachable events.
|
||||
additional_backwards_reachable_positions: Dict[
|
||||
str, Tuple[int, int]
|
||||
] = {} # event_id => (chain_id, seq_num)
|
||||
# These next two fields are critical as the intersection of them is the conflicted subgraph.
|
||||
# We'll populate them when we walk the chain links.
|
||||
# chain_id => max(seq_num) backwards reachable (e.g 4 means 1,2,3,4 are backwards reachable)
|
||||
conflicted_backwards_reachable: Dict[int, int] = {}
|
||||
# chain_id => min(seq_num) forwards reachable (e.g 4 means 4,5,6..n are forwards reachable)
|
||||
conflicted_forwards_reachable: Dict[int, int] = {}
|
||||
|
||||
# populate the v2.1 data structures
|
||||
if is_state_res_v21:
|
||||
assert conflicted_set is not None
|
||||
# provide chain positions for each conflicted event
|
||||
for conflicted_event_id in conflicted_set:
|
||||
(chain_id, seq_num) = chain_info[conflicted_event_id]
|
||||
conflicted_chain_positions[conflicted_event_id] = (chain_id, seq_num)
|
||||
conflicted_chain_to_seq.setdefault(chain_id, set()).add(seq_num)
|
||||
if additional_backwards_reachable_conflicted_events:
|
||||
for (
|
||||
additional_event_id
|
||||
) in additional_backwards_reachable_conflicted_events:
|
||||
(chain_id, seq_num) = chain_info[additional_event_id]
|
||||
additional_backwards_reachable_positions[additional_event_id] = (
|
||||
chain_id,
|
||||
seq_num,
|
||||
)
|
||||
|
||||
# Corresponds to `state_sets`, except as a map from chain ID to max
|
||||
# sequence number reachable from the state set.
|
||||
set_to_chain: List[Dict[int, int]] = []
|
||||
@@ -702,8 +600,6 @@ class EventFederationWorkerStore(
|
||||
|
||||
# (We need to take a copy of `seen_chains` as the function mutates it)
|
||||
for links in self._get_chain_links(txn, set(seen_chains)):
|
||||
# `links` encodes the backwards reachable events _from a single chain_ all the way to
|
||||
# the root of the graph.
|
||||
for chains in set_to_chain:
|
||||
for chain_id in links:
|
||||
if chain_id not in chains:
|
||||
@@ -712,87 +608,6 @@ class EventFederationWorkerStore(
|
||||
_materialize(chain_id, chains[chain_id], links, chains)
|
||||
|
||||
seen_chains.update(chains)
|
||||
if is_state_res_v21:
|
||||
# Apply v2.1 conflicted event reachability checks.
|
||||
#
|
||||
# A <-- B <-- C <-- D <-- E
|
||||
#
|
||||
# Backwards reachable from C = {A,B}
|
||||
# Forwards reachable from C = {D,E}
|
||||
|
||||
# this handles calculating forwards reachable information and updates
|
||||
# conflicted_forwards_reachable.
|
||||
accumulate_forwards_reachable_events(
|
||||
conflicted_forwards_reachable,
|
||||
links,
|
||||
conflicted_chain_positions,
|
||||
)
|
||||
|
||||
# handle backwards reachable information
|
||||
for (
|
||||
conflicted_chain_id,
|
||||
conflicted_chain_seq,
|
||||
) in conflicted_chain_positions.values():
|
||||
if conflicted_chain_id not in links:
|
||||
# This conflicted event does not lie on the path to the root.
|
||||
continue
|
||||
|
||||
# The conflicted chain position itself encodes reachability information
|
||||
# _within_ the chain. Set it now before walking to other links.
|
||||
conflicted_backwards_reachable[conflicted_chain_id] = max(
|
||||
conflicted_chain_seq,
|
||||
conflicted_backwards_reachable.get(conflicted_chain_id, 0),
|
||||
)
|
||||
|
||||
# Build backwards reachability paths. This is the same as what the auth difference
|
||||
# code does. We find which chain the conflicted event
|
||||
# belongs to then walk it backwards to the root. We store reachability info
|
||||
# for all conflicted events in the same map 'conflicted_backwards_reachable'
|
||||
# as we don't care about the paths themselves.
|
||||
_materialize(
|
||||
conflicted_chain_id,
|
||||
conflicted_chain_seq,
|
||||
links,
|
||||
conflicted_backwards_reachable,
|
||||
)
|
||||
# Mark some extra events as backwards reachable. This is used when we have some
|
||||
# unpersisted events and want to know the subgraph across the persisted/unpersisted
|
||||
# boundary:
|
||||
# |
|
||||
# A <-- B <-- C <-|- D <-- E <-- F
|
||||
# persisted | unpersisted
|
||||
#
|
||||
# Assume {B,E} are conflicted, we want to return {B,C,D,E}
|
||||
#
|
||||
# The unpersisted code ensures it passes C as an additional backwards reachable
|
||||
# event. C is NOT a conflicted event, but we do need to consider it as part of
|
||||
# the backwards reachable set. When we then calculate the forwards reachable set
|
||||
# from B, C will be in both the backwards and forwards reachable sets and hence
|
||||
# will be included in the conflicted subgraph.
|
||||
for (
|
||||
additional_chain_id,
|
||||
additional_chain_seq,
|
||||
) in additional_backwards_reachable_positions.values():
|
||||
if additional_chain_id not in links:
|
||||
# The additional backwards reachable event does not lie on the path to the root.
|
||||
continue
|
||||
|
||||
# the additional event chain position itself encodes reachability information.
|
||||
# It means that position and all positions earlier in that chain are backwards reachable
|
||||
# by some unpersisted conflicted event.
|
||||
conflicted_backwards_reachable[additional_chain_id] = max(
|
||||
additional_chain_seq,
|
||||
conflicted_backwards_reachable.get(additional_chain_id, 0),
|
||||
)
|
||||
|
||||
# Now walk the chains back, marking backwards reachable events.
|
||||
# This is the same thing we do for auth difference / conflicted events.
|
||||
_materialize(
|
||||
additional_chain_id, # walk all links back, marking them as backwards reachable
|
||||
additional_chain_seq,
|
||||
links,
|
||||
conflicted_backwards_reachable,
|
||||
)
|
||||
|
||||
# Now for each chain we figure out the maximum sequence number reachable
|
||||
# from *any* state set and the minimum sequence number reachable from
|
||||
@@ -801,7 +616,7 @@ class EventFederationWorkerStore(
|
||||
|
||||
# Mapping from chain ID to the range of sequence numbers that should be
|
||||
# pulled from the database.
|
||||
auth_diff_chain_to_gap: Dict[int, Tuple[int, int]] = {}
|
||||
chain_to_gap: Dict[int, Tuple[int, int]] = {}
|
||||
|
||||
for chain_id in seen_chains:
|
||||
min_seq_no = min(chains.get(chain_id, 0) for chains in set_to_chain)
|
||||
@@ -814,76 +629,15 @@ class EventFederationWorkerStore(
|
||||
for seq_no in range(min_seq_no + 1, max_seq_no + 1):
|
||||
event_id = chain_to_event.get(chain_id, {}).get(seq_no)
|
||||
if event_id:
|
||||
auth_difference_result.add(event_id)
|
||||
result.add(event_id)
|
||||
else:
|
||||
auth_diff_chain_to_gap[chain_id] = (min_seq_no, max_seq_no)
|
||||
chain_to_gap[chain_id] = (min_seq_no, max_seq_no)
|
||||
break
|
||||
|
||||
conflicted_subgraph_result: Set[str] = set()
|
||||
# Mapping from chain ID to the range of sequence numbers that should be
|
||||
# pulled from the database.
|
||||
conflicted_subgraph_chain_to_gap: Dict[int, Tuple[int, int]] = {}
|
||||
if is_state_res_v21:
# also include the conflicted subgraph using backward/forward reachability info from all
# the conflicted events. To calculate this, we want to extract the intersection between
# the backwards and forwards reachability sets, e.g:
# A <- B <- C <- D <- E
# Assume B and D are conflicted so we want {C} as the conflicted subgraph.
# B_backwards={A}, B_forwards={C,D,E}
# D_backwards={A,B,C} D_forwards={E}
# ALL_backwards={A,B,C} ALL_forwards={C,D,E}
# Intersection(ALL_backwards, ALL_forwards) = {C}
#
# It's worth noting that once we have the ALL_ sets, we no longer care about the paths.
# We're dealing with chains and not singular events, but we've already got the ALL_ sets.
# As such, we can inspect each chain in isolation and check for overlapping sequence
# numbers:
# 1,2,3,4,5 Seq Num
# Chain N [A,B,C,D,E]
#
# if (N,4) is in the backwards set and (N,2) is in the forwards set, then the
# intersection is events between 2 < 4. We will include the conflicted events themselves
# in the subgraph, but they will already be, hence the full set of events is {B,C,D}.
for chain_id, backwards_seq_num in conflicted_backwards_reachable.items():
|
||||
forwards_seq_num = conflicted_forwards_reachable.get(chain_id)
|
||||
if forwards_seq_num is None:
|
||||
continue # this chain isn't in both sets so can't intersect
|
||||
if forwards_seq_num > backwards_seq_num:
|
||||
continue  # this chain is in both sets but they don't overlap
|
||||
for seq_no in range(
|
||||
forwards_seq_num, backwards_seq_num + 1
|
||||
): # inclusive of both
|
||||
event_id = chain_to_event.get(chain_id, {}).get(seq_no)
|
||||
if event_id:
|
||||
conflicted_subgraph_result.add(event_id)
|
||||
else:
|
||||
conflicted_subgraph_chain_to_gap[chain_id] = (
|
||||
# _fetch_event_ids_from_chains_txn is exclusive of the min value
|
||||
forwards_seq_num - 1,
|
||||
backwards_seq_num,
|
||||
)
|
||||
break
|
||||
if not chain_to_gap:
|
||||
# If there are no gaps to fetch, we're done!
|
||||
return result
|
||||
|
||||
if auth_diff_chain_to_gap:
|
||||
auth_difference_result.update(
|
||||
self._fetch_event_ids_from_chains_txn(txn, auth_diff_chain_to_gap)
|
||||
)
|
||||
if conflicted_subgraph_chain_to_gap:
|
||||
conflicted_subgraph_result.update(
|
||||
self._fetch_event_ids_from_chains_txn(
|
||||
txn, conflicted_subgraph_chain_to_gap
|
||||
)
|
||||
)
|
||||
|
||||
return StateDifference(
auth_difference=auth_difference_result,
conflicted_subgraph=conflicted_subgraph_result,
)
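The per-chain overlap check sketched in the comments above boils down to comparing two sequence numbers on each chain: the minimum forwards-reachable position and the maximum backwards-reachable position. Everything between them, inclusive, belongs to the conflicted subgraph. A small hedged helper illustrating that arithmetic:

    from typing import Optional, Tuple

    def chain_overlap(forwards_min: int, backwards_max: int) -> Optional[Tuple[int, int]]:
        # Inclusive range of sequence numbers on one chain that is reachable in
        # both directions, or None when the two sets do not overlap.
        if forwards_min > backwards_max:
            return None
        return (forwards_min, backwards_max)

    # Using the example from the comments: forwards reaches seq 2 onwards, backwards
    # reaches up to seq 4, so events at sequence numbers 2..4 ({B, C, D}) end up in the subgraph.
    assert chain_overlap(2, 4) == (2, 4)
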
def _fetch_event_ids_from_chains_txn(
|
||||
self, txn: LoggingTransaction, chains: Dict[int, Tuple[int, int]]
|
||||
) -> Set[str]:
|
||||
result: Set[str] = set()
|
||||
if isinstance(self.database_engine, PostgresEngine):
|
||||
# We can use `execute_values` to efficiently fetch the gaps when
|
||||
# using postgres.
|
||||
@@ -897,7 +651,7 @@ class EventFederationWorkerStore(
|
||||
|
||||
args = [
|
||||
(chain_id, min_no, max_no)
|
||||
for chain_id, (min_no, max_no) in chains.items()
|
||||
for chain_id, (min_no, max_no) in chain_to_gap.items()
|
||||
]
|
||||
|
||||
rows = txn.execute_values(sql, args)
|
||||
@@ -908,9 +662,10 @@ class EventFederationWorkerStore(
|
||||
SELECT event_id FROM event_auth_chains
|
||||
WHERE chain_id = ? AND ? < sequence_number AND sequence_number <= ?
|
||||
"""
|
||||
for chain_id, (min_no, max_no) in chains.items():
|
||||
for chain_id, (min_no, max_no) in chain_to_gap.items():
|
||||
txn.execute(sql, (chain_id, min_no, max_no))
|
||||
result.update(r for (r,) in txn)
|
||||
|
||||
return result
|
||||
|
||||
def _fixup_auth_chain_difference_sets(
|
||||
@@ -2410,7 +2165,6 @@ def _materialize(
|
||||
origin_sequence_number: int,
|
||||
links: Dict[int, List[Tuple[int, int, int]]],
|
||||
materialized: Dict[int, int],
|
||||
backwards: bool = True,
|
||||
) -> None:
|
||||
"""Helper function for fetching auth chain links. For a given origin chain
|
||||
ID / sequence number and a dictionary of links, updates the materialized
|
||||
@@ -2427,7 +2181,6 @@ def _materialize(
|
||||
target sequence number.
|
||||
materialized: dict to update with new reachability information, as a
|
||||
map from chain ID to max sequence number reachable.
|
||||
backwards: If True, walks backwards down the chains. If False, walks forwards from the chains.
|
||||
"""
|
||||
|
||||
# Do a standard graph traversal.
|
||||
@@ -2442,104 +2195,12 @@ def _materialize(
|
||||
target_chain_id,
|
||||
target_sequence_number,
|
||||
) in chain_links:
|
||||
if backwards:
|
||||
# Ignore any links that are higher up the chain
|
||||
if sequence_number > s:
|
||||
continue
|
||||
# Ignore any links that are higher up the chain
|
||||
if sequence_number > s:
|
||||
continue
|
||||
|
||||
# Check if we have already visited the target chain before, if so we
|
||||
# can skip it.
|
||||
if materialized.get(target_chain_id, 0) < target_sequence_number:
|
||||
stack.append((target_chain_id, target_sequence_number))
|
||||
materialized[target_chain_id] = target_sequence_number
|
||||
else:
|
||||
# Ignore any links that are lower down the chain.
|
||||
if sequence_number < s:
|
||||
continue
|
||||
# Check if we have already visited the target chain before, if so we
|
||||
# can skip it.
|
||||
if (
|
||||
materialized.get(target_chain_id, MAX_CHAIN_LENGTH)
|
||||
> target_sequence_number
|
||||
):
|
||||
stack.append((target_chain_id, target_sequence_number))
|
||||
materialized[target_chain_id] = target_sequence_number
|
||||
|
||||
|
||||
def _generate_forward_links(
|
||||
links: Dict[int, List[Tuple[int, int, int]]],
|
||||
) -> Dict[int, List[Tuple[int, int, int]]]:
|
||||
"""Reverse the input links from the given backwards links"""
|
||||
new_links: Dict[int, List[Tuple[int, int, int]]] = {}
|
||||
for origin_chain_id, chain_links in links.items():
|
||||
for origin_seq_num, target_chain_id, target_seq_num in chain_links:
|
||||
new_links.setdefault(target_chain_id, []).append(
|
||||
(target_seq_num, origin_chain_id, origin_seq_num)
)
return new_links
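A one-edge illustration of what `_generate_forward_links` does (the chain IDs and sequence numbers are invented): each backwards link `(origin_seq, target_chain, target_seq)` stored under an origin chain is re-keyed by the target chain as `(target_seq, origin_chain, origin_seq)`.

    # One backwards link: chain 1 at sequence 3 points back to chain 2 at sequence 7.
    back_links = {1: [(3, 2, 7)]}
    # Inverted, the same edge hangs off chain 2 and points forwards to chain 1.
    assert _generate_forward_links(back_links) == {2: [(7, 1, 3)]}
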
def accumulate_forwards_reachable_events(
|
||||
conflicted_forwards_reachable: Dict[int, int],
|
||||
back_links: Dict[int, List[Tuple[int, int, int]]],
|
||||
conflicted_chain_positions: Dict[str, Tuple[int, int]],
|
||||
) -> None:
|
||||
"""Accumulate new forwards reachable events using the back_links provided.
|
||||
|
||||
Accumulating forwards reachable information is quite different from backwards reachable information
|
||||
because _get_chain_links returns the entire linkage information for backwards reachable events,
|
||||
but not _forwards_ reachable events. We are only interested in the forwards reachable information
|
||||
that is encoded in the backwards reachable links, so we can just invert all the operations we do
|
||||
for backwards reachable events to calculate a subset of forwards reachable information. The
|
||||
caveat with this approach is that it is a _subset_. This means new back_links may encode new
|
||||
forwards reachable information which we also need. Consider this scenario:
|
||||
|
||||
A <-- B <-- C <--- D <-- E <-- F Chain 1
|
||||
|
|
||||
`----- G <-- H <-- I Chain 2
|
||||
|
|
||||
`---- J <-- K Chain 3
|
||||
|
||||
Now consider what happens when B is a conflicted event. _get_chain_links returns the conflicted
|
||||
chain and ALL links heading towards the root of the graph. This means we will know the
|
||||
Chain 1 to Chain 2 link via C (as all links for the chain are returned, not strictly ones with
|
||||
a lower sequence number), but we will NOT know the Chain 2 to Chain 3 link via H. We can be
|
||||
blissfully unaware of Chain 3 entirely, if and only if there isn't some other conflicted event
|
||||
on that chain. Consider what happens when K is /also/ conflicted. _get_chain_links will generate
|
||||
two iterations: one for B and one for K. It's important that we re-evaluate the forwards reachable
|
||||
information for B to include Chain 3 when we process the K iteration, hence we are "accumulating"
|
||||
forwards reachability information.
|
||||
|
||||
NB: We don't consider 'additional backwards reachable events' here because they have no effect
|
||||
on forwards reachability calculations, only backwards.
|
||||
|
||||
Args:
|
||||
conflicted_forwards_reachable: The materialised dict of forwards reachable information.
|
||||
The output to this function are stored here.
|
||||
back_links: One iteration of _get_chain_links which encodes backwards reachable information.
|
||||
conflicted_chain_positions: The conflicted events.
|
||||
"""
|
||||
# links go backwards but we want them to go forwards as well for v2.1
|
||||
fwd_links = _generate_forward_links(back_links)
|
||||
|
||||
# for each conflicted event, accumulate forwards reachability information
|
||||
for (
|
||||
conflicted_chain_id,
|
||||
conflicted_chain_seq,
|
||||
) in conflicted_chain_positions.values():
|
||||
# the conflicted event itself encodes reachability information
|
||||
# e.g if D was conflicted, it encodes E,F as forwards reachable.
|
||||
conflicted_forwards_reachable[conflicted_chain_id] = min(
|
||||
conflicted_chain_seq,
|
||||
conflicted_forwards_reachable.get(conflicted_chain_id, MAX_CHAIN_LENGTH),
|
||||
)
|
||||
# Walk from the conflicted event forwards to explore the links.
|
||||
# This function checks if we've visited the chain before and skips reprocessing, so this
|
||||
# does not repeatedly traverse the graph.
|
||||
_materialize(
|
||||
conflicted_chain_id,
|
||||
conflicted_chain_seq,
|
||||
fwd_links,
|
||||
conflicted_forwards_reachable,
|
||||
backwards=False,
|
||||
)
|
||||
# Check if we have already visited the target chain before, if so we
|
||||
# can skip it.
|
||||
if materialized.get(target_chain_id, 0) < target_sequence_number:
|
||||
stack.append((target_chain_id, target_sequence_number))
|
||||
materialized[target_chain_id] = target_sequence_number

Some files were not shown because too many files have changed in this diff.