Compare commits
release-v1 ... squah/tria (1 commit)
Commit: df61544455
.ci/calculate_jobs.py

@@ -29,12 +29,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest Python version supported (which
-# is Python 3.8 right now)
+# For each type of test we only run on Py3.7 on PRs
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "sqlite",
         "extras": "all",
     }

@@ -47,13 +46,13 @@ if not IS_PR:
             "database": "sqlite",
             "extras": "all",
         }
-        for version in ("3.9", "3.10", "3.11")
+        for version in ("3.8", "3.9", "3.10", "3.11")
     )


 trial_no_extra_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "postgres",
         "postgres-version": "11",
         "extras": "all",

@@ -72,7 +71,7 @@ if not IS_PR:

 trial_no_extra_tests = [
     {
-        "python-version": "3.8",
+        "python-version": "3.7",
         "database": "sqlite",
         "extras": "",
     }

@@ -134,6 +133,11 @@ if not IS_PR:
             {
                 "sytest-tag": "testing",
                 "postgres": "postgres",
             },
+            {
+                "sytest-tag": "buster",
+                "postgres": "multi-postgres",
+                "workers": "workers",
+            },
         ]
     )

.github/workflows/dependabot_changelog.yml (vendored, new file, +49 lines)

@@ -0,0 +1,49 @@
+name: Write changelog for dependabot PR
+on:
+  pull_request:
+    types:
+      - opened
+      - reopened # For debugging!
+
+permissions:
+  # Needed to be able to push the commit. See
+  #   https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
+  # for a similar example
+  contents: write
+
+jobs:
+  add-changelog:
+    runs-on: 'ubuntu-latest'
+    if: ${{ github.actor == 'dependabot[bot]' }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.head.ref }}
+      - name: Write, commit and push changelog
+        env:
+          PR_TITLE: ${{ github.event.pull_request.title }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
+        run: |
+          echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
+          git add changelog.d
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git config user.name "GitHub Actions"
+          git commit -m "Changelog"
+          git push
+        shell: bash
+      # The `git push` above does not trigger CI on the dependabot PR.
+      #
+      # By default, workflows can't trigger other workflows when they're just using the
+      # default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
+      # recursive workflow loops by accident, because that'll get very expensive very
+      # quickly.) Instead, you have to manually call out to another workflow, or else
+      # make your changes (i.e. the `git push` above) using a personal access token.
+      # See
+      #   https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
+      #
+      # I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
+      # See git commit history for previous attempts. If anyone desperately wants to try
+      # again in the future, make a matrix-bot account and use its access token to git push.
+
+# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
+# are sufficiently locked down to dependabot only as above.
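The changelog step in this workflow can be reproduced locally. A minimal sketch, assuming a hypothetical PR title and number (in the workflow both come from the pull_request event payload):

    # Hypothetical values; the workflow reads them from the PR event.
    PR_TITLE="Bump serde from 1.0.163 to 1.0.171"
    PR_NUMBER=15999
    # Same write the workflow performs: a one-line changelog entry named after the PR.
    echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}.misc"

The release script later folds these one-line `.misc` files into the changelog, which is why the changelog check is skipped for dependabot PRs.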
.github/workflows/latest_deps.yml (vendored, 23 lines changed)

@@ -22,21 +22,7 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  check_repo:
-    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
-    # only useful to the Synapse core team.
-    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
-    # of the workflow will be skipped as well.
-    runs-on: ubuntu-latest
-    outputs:
-      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
-    steps:
-      - id: check_condition
-        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
-
   mypy:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3

@@ -61,8 +47,6 @@ jobs:
         run: sed '/warn_unused_ignores = True/d' -i mypy.ini
       - run: poetry run mypy
   trial:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     strategy:
       matrix:

@@ -121,8 +105,6 @@ jobs:
 
 
   sytest:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
       image: matrixdotorg/sytest-synapse:testing

@@ -174,8 +156,7 @@ jobs:
 
 
   complement:
-    needs: check_repo
-    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "${{ !failure() && !cancelled() }}"
     runs-on: ubuntu-latest
 
     strategy:

@@ -211,7 +192,7 @@ jobs:
   # Open an issue if the build fails, so we know about it.
   # Only do this if we're not experimenting with this action in a PR.
   open-issue:
-    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
     needs:
       # TODO: should mypy be included here? It feels more brittle than the others.
       - mypy
.github/workflows/release-artifacts.yml (vendored, 3 lines changed)

@@ -34,7 +34,6 @@ jobs:
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
-          # NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
           dists='["debian:sid"]'
           if [[ $GITHUB_REF == refs/tags/* ]]; then
             dists=$(scripts-dev/build_debian_packages.py --show-dists-json)

@@ -144,7 +143,7 @@ jobs:
 
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
 
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
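For context on the `CIBW_BUILD` line above: cibuildwheel reads that environment variable as a build-selector pattern, so exporting it before the build restricts the run to a single CPython tag. A rough local equivalent, assuming an x86_64 host (the workflow substitutes `${{ matrix.arch }}` instead):

    # Hypothetical local run limited to one wheel, mirroring the PR fast path.
    export CIBW_BUILD="cp38-manylinux_x86_64"
    python -m cibuildwheel --output-dir wheelhouse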
.github/workflows/tests.yml (vendored, 36 lines changed)

@@ -35,7 +35,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:

@@ -92,10 +92,6 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v3
 
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
-      - uses: Swatinem/rust-cache@v2
-
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@v1
         with:

@@ -107,6 +103,10 @@ jobs:
           # To make CI green, err towards caution and install the project.
           install-project: "true"
 
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.58.1
+      - uses: Swatinem/rust-cache@v2
+
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache

@@ -150,7 +150,7 @@ jobs:
         with:
           ref: ${{ github.event.pull_request.head.sha }}
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
       - uses: matrix-org/setup-python-poetry@v1
         with:

@@ -167,7 +167,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
         with:
           components: clippy
       - uses: Swatinem/rust-cache@v2

@@ -268,7 +268,7 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - uses: matrix-org/setup-python-poetry@v1

@@ -308,19 +308,19 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
       - run: |
-          sudo apt-get -qq update
+          sudo apt update
           sudo apt-get -qq install build-essential libffi-dev python-dev \
             libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
       - uses: actions/setup-python@v4
         with:
-          python-version: '3.8'
+          python-version: '3.7'
 
       - name: Prepare old deps
         if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'

@@ -362,7 +362,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["pypy-3.8"]
+        python-version: ["pypy-3.7"]
         extras: ["all"]
 
     steps:

@@ -399,8 +399,8 @@ jobs:
       env:
         SYTEST_BRANCH: ${{ github.head_ref }}
         POSTGRES: ${{ matrix.job.postgres && 1}}
-        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
-        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
+        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
+        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
         WORKERS: ${{ matrix.job.workers && 1 }}
         BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
         TOP: ${{ github.workspace }}

@@ -416,7 +416,7 @@ jobs:
         run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - name: Run SyTest

@@ -477,7 +477,7 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: "3.8"
+          - python-version: "3.7"
            postgres-version: "11"
 
           - python-version: "3.11"

@@ -556,7 +556,7 @@ jobs:
           path: synapse
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - uses: actions/setup-go@v4

@@ -584,7 +584,7 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@1.60.0
+        uses: dtolnay/rust-toolchain@1.58.1
       - uses: Swatinem/rust-cache@v2
 
       - run: cargo test
.github/workflows/twisted_trunk.yml (vendored, 30 lines changed)

@@ -18,22 +18,7 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  check_repo:
-    # Prevent this workflow from running on any fork of Synapse other than matrix-org/synapse, as it is
-    # only useful to the Synapse core team.
-    # All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
-    # of the workflow will be skipped as well.
-    if: github.repository == 'matrix-org/synapse'
-    runs-on: ubuntu-latest
-    outputs:
-      should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
-    steps:
-      - id: check_condition
-        run: echo "should_run_workflow=${{ github.repository == 'matrix-org/synapse' }}" >> "$GITHUB_OUTPUT"
-
   mypy:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
 
     steps:

@@ -56,8 +41,6 @@ jobs:
       - run: poetry run mypy
 
   trial:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
 
     steps:

@@ -92,15 +75,9 @@ jobs:
           || true
 
   sytest:
-    needs: check_repo
-    if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
-      # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
-      # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
-      # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:focal
+      image: matrixdotorg/sytest-synapse:buster
       volumes:
         - ${{ github.workspace }}:/src
 

@@ -142,8 +119,7 @@ jobs:
             /logs/**/*.log*
 
   complement:
-    needs: check_repo
-    if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
+    if: "${{ !failure() && !cancelled() }}"
     runs-on: ubuntu-latest
 
     strategy:

@@ -190,7 +166,7 @@ jobs:
 
   # open an issue if the build fails, so we know about it.
   open-issue:
-    if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
+    if: failure()
     needs:
       - mypy
       - trial
.gitignore (vendored, 1 line changed)

@@ -34,7 +34,6 @@ __pycache__/
 /logs
 /media_store/
 /uploads
-/homeserver-config-overrides.d
 
 # For direnv users
 /.envrc
CHANGES.md (3220 lines changed; diff suppressed because it is too large)
Cargo.lock (generated, 65 lines changed)

@@ -4,18 +4,18 @@ version = 3
 
 [[package]]
 name = "aho-corasick"
-version = "1.0.2"
+version = "0.7.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
+checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "anyhow"
-version = "1.0.72"
+version = "1.0.71"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
+checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
 
 [[package]]
 name = "arc-swap"

@@ -132,9 +132,12 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.19"
+version = "0.4.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
 
 [[package]]
 name = "memchr"

@@ -182,9 +185,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.64"
+version = "1.0.52"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da"
+checksum = "1d0e1ae9e836cc3beddd63db0df682593d7e2d3d891ae8c9083d2113e1744224"
 dependencies = [
  "unicode-ident",
 ]

@@ -229,9 +232,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-log"
-version = "0.8.3"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f47b0777feb17f61eea78667d61103758b243a871edc09a7786500a50467b605"
+checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c"
 dependencies = [
  "arc-swap",
  "log",

@@ -273,9 +276,9 @@ dependencies = [
 
 [[package]]
 name = "quote"
-version = "1.0.29"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105"
+checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc"
 dependencies = [
  "proc-macro2",
 ]

@@ -291,21 +294,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.9.1"
+version = "1.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-automata",
- "regex-syntax",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.3.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
 dependencies = [
  "aho-corasick",
  "memchr",

@@ -314,9 +305,9 @@ dependencies = [
 
 [[package]]
 name = "regex-syntax"
-version = "0.7.3"
+version = "0.6.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
 
 [[package]]
 name = "ryu"

@@ -332,29 +323,29 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "serde"
-version = "1.0.171"
+version = "1.0.163"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9"
+checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.171"
+version = "1.0.163"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682"
+checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn 2.0.25",
+ "syn 2.0.10",
 ]
 
 [[package]]
 name = "serde_json"
-version = "1.0.103"
+version = "1.0.96"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b"
+checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
 dependencies = [
  "itoa",
  "ryu",

@@ -386,9 +377,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.25"
+version = "2.0.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2"
+checksum = "5aad1363ed6d37b84299588d62d3a7d95b5a5c2d9aad5c85609fda12afaa1f40"
 dependencies = [
  "proc-macro2",
  "quote",

Cargo.toml

@@ -3,4 +3,3 @@
 
 [workspace]
 members = ["rust"]
-resolver = "2"
book.toml (12 lines changed)

@@ -34,14 +34,6 @@ additional-css = [
     "docs/website_files/table-of-contents.css",
     "docs/website_files/remove-nav-buttons.css",
     "docs/website_files/indent-section-headers.css",
-    "docs/website_files/version-picker.css",
 ]
-additional-js = [
-    "docs/website_files/table-of-contents.js",
-    "docs/website_files/version-picker.js",
-    "docs/website_files/version.js",
-]
-theme = "docs/website_files/theme"
-
-[preprocessor.schema_versions]
-command = "./scripts-dev/schema_versions.py"
+additional-js = ["docs/website_files/table-of-contents.js"]
+theme = "docs/website_files/theme"
changelog.d/10428.removal (new file, +1)
@@ -0,0 +1 @@
+Remove the old version of the R30 (30-day retained users) phone-home metric.

changelog.d/15464.bugfix (new file, +1)
@@ -0,0 +1 @@
+Fix a long-standing bug where setting the read marker could fail when using message retention. Contributed by Nick @ Beeper (@fizzadar).

changelog.d/15537.misc (new file, +1)
@@ -0,0 +1 @@
+Add not null constraint to column full_user_id of tables profiles and user_filters.

changelog.d/15599.bugfix (new file, +1)
@@ -0,0 +1 @@
+Print full error and stack-trace of any exception that occurs during startup/initialization.

changelog.d/15601.bugfix (new file, +1)
@@ -0,0 +1 @@
+Fix a long-standing bug where the `url_preview_url_blacklist` configuration setting was not applied to oEmbed or image URLs found while previewing a URL.

changelog.d/15602.misc (new file, +1)
@@ -0,0 +1 @@
+Run mypy type checking with the minimum supported Python version to catch new usage that isn't backwards-compatible.

changelog.d/15604.misc (new file, +1)
@@ -0,0 +1 @@
+Fix subscriptable type usage in Python <3.9.

changelog.d/15606.misc (new file, +1)
@@ -0,0 +1 @@
+Update internal terminology.

changelog.d/15610.misc (new file, +1)
@@ -0,0 +1 @@
+Instrument `state` and `state_group` storage-related operations to better picture what's happening when tracing.

changelog.d/15611.feature (new file, +1)
@@ -0,0 +1 @@
+Add a new admin API to create a new device for a user.

changelog.d/15613.doc (new file, +1)
@@ -0,0 +1 @@
+Warn users that at least 3.75GB of space is needed for the nix Synapse development environment.

changelog.d/15614.bugfix (new file, +1)
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.82.0 where the error message displayed when validation of the `app_service_config_files` config option fails would be incorrectly formatted.

changelog.d/15615.misc (new file, +1)
@@ -0,0 +1 @@
+Re-type config paths in `ConfigError`s to be `StrSequence`s instead of `Iterable[str]`s.

changelog.d/15620.misc (new file, +1)
@@ -0,0 +1 @@
+Update internal terminology.

changelog.d/15621.misc (new file, +1)
@@ -0,0 +1 @@
+Update Mutual Rooms (MSC2666) implementation to match new proposal text.

changelog.d/15624.bugfix (new file, +1)
@@ -0,0 +1 @@
+Fix a long-standing bug where deactivated users were still able to login using the custom `org.matrix.login.jwt` login type (if enabled).

changelog.d/15625.misc (new file, +1)
@@ -0,0 +1 @@
+Remove the unstable identifiers from faster joins ([MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706).

changelog.d/15626.misc (new file, +1)
@@ -0,0 +1 @@
+Fix the olddeps CI.

changelog.d/15630.misc (new file, +1)
@@ -0,0 +1 @@
+Fix two memory leaks in `trial` test runs.

changelog.d/15633.misc (new file, +1)
@@ -0,0 +1 @@
+Trace how many new events from the backfill response we need to process.

changelog.d/15639.misc (new file, +1)
@@ -0,0 +1 @@
+Bump types-setuptools from 67.7.0.2 to 67.8.0.0.

changelog.d/15640.misc (new file, +1)
@@ -0,0 +1 @@
+Bump types-pillow from 9.5.0.2 to 9.5.0.4.

changelog.d/15641.misc (new file, +1)
@@ -0,0 +1 @@
+Bump sphinx from 6.1.3 to 6.2.1.

changelog.d/15642.misc (new file, +1)
@@ -0,0 +1 @@
+Bump furo from 2023.3.27 to 2023.5.20.

changelog.d/15646.misc (new file, +1)
@@ -0,0 +1 @@
+Limit the size of the `HomeServerConfig` cache in trial test runs.
(File diff suppressed because it is too large)

@@ -29,7 +29,7 @@
     "level": "error"
   },
   {
-    "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
+    "line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
     "level": "warning"
   },
   {
debian/changelog (vendored, 96 lines changed)

@@ -1,99 +1,3 @@
-matrix-synapse-py3 (1.89.0) stable; urgency=medium
-
-  * New Synapse release 1.89.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Aug 2023 11:07:15 +0100
-
-matrix-synapse-py3 (1.89.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.89.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Jul 2023 14:31:07 +0200
-
-matrix-synapse-py3 (1.88.0) stable; urgency=medium
-
-  * New Synapse release 1.88.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jul 2023 13:59:28 +0100
-
-matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.88.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Jul 2023 10:20:19 +0100
-
-matrix-synapse-py3 (1.87.0) stable; urgency=medium
-
-  * New Synapse release 1.87.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Jul 2023 16:24:00 +0100
-
-matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.87.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 27 Jun 2023 15:27:04 +0000
-
-matrix-synapse-py3 (1.86.0) stable; urgency=medium
-
-  * New Synapse release 1.86.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Jun 2023 17:22:46 +0200
-
-matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.86.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 14 Jun 2023 12:16:27 +0200
-
-matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.86.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jun 2023 14:30:45 +0200
-
-matrix-synapse-py3 (1.85.2) stable; urgency=medium
-
-  * New Synapse release 1.85.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 08 Jun 2023 13:04:18 +0100
-
-matrix-synapse-py3 (1.85.1) stable; urgency=medium
-
-  * New Synapse release 1.85.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 07 Jun 2023 10:51:12 +0100
-
-matrix-synapse-py3 (1.85.0) stable; urgency=medium
-
-  * New Synapse release 1.85.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Jun 2023 09:39:29 +0100
-
-matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.85.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 01 Jun 2023 09:16:18 -0700
-
-matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.85.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 May 2023 13:56:54 +0100
-
-matrix-synapse-py3 (1.84.1) stable; urgency=medium
-
-  * New Synapse release 1.84.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 26 May 2023 16:15:30 +0100
-
-matrix-synapse-py3 (1.84.0) stable; urgency=medium
-
-  * New Synapse release 1.84.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 May 2023 10:57:22 +0100
-
 matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
 
   * New Synapse release 1.84.0rc1.
docker/Dockerfile

@@ -27,7 +27,7 @@ ARG PYTHON_VERSION=3.11
 ###
 # We hardcode the use of Debian bullseye here because this could change upstream
 # and other Dockerfiles used for testing are expecting bullseye.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as requirements
+FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
 
 # RUN --mount is specific to buildkit and is documented at
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.

@@ -87,7 +87,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
 ###
 ### Stage 1: builder
 ###
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye as builder
+FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
 
 # install the OS build deps
 RUN \

@@ -158,7 +158,7 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \
 ### Stage 2: runtime
 ###
 
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye
+FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
 
 LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
 LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
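The only substantive change in this file is the image reference style: `docker.io/library/python` is the fully qualified name of the same Docker Hub official image that the short name `python` resolves to, since `docker.io` is the default registry and `library` the namespace for official images. For example:

    # Both pull the same official image; the long form pins the registry and namespace explicitly.
    docker pull python:3.11-slim-bullseye
    docker pull docker.io/library/python:3.11-slim-bullseye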
docker/Dockerfile-dhvirtualenv

@@ -24,16 +24,16 @@ ARG distro=""
 # https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
 # it's not obviously easier to use that than to build our own.)
 
-FROM docker.io/library/${distro} as builder
+FROM ${distro} as builder
 
 RUN apt-get update -qq -o Acquire::Languages=none
 RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
-    -yqq --no-install-recommends \
-    build-essential \
-    ca-certificates \
-    devscripts \
-    equivs \
-    wget
+    -yqq --no-install-recommends \
+    build-essential \
+    ca-certificates \
+    devscripts \
+    equivs \
+    wget
 
 # fetch and unpack the package
 # We are temporarily using a fork of dh-virtualenv due to an incompatibility with Python 3.11, which ships with

@@ -55,36 +55,40 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
 ###
 ### Stage 1
 ###
-FROM docker.io/library/${distro}
+FROM ${distro}
 
 # Get the distro we want to pull from as a dynamic build variable
 # (We need to define it in each build stage)
 ARG distro=""
 ENV distro ${distro}
 
 # Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
 # http://bugs.python.org/issue19846
 ENV LANG C.UTF-8
 
 # Install the build dependencies
 #
 # NB: keep this list in sync with the list of build-deps in debian/control
 # TODO: it would be nice to do that automatically.
 RUN apt-get update -qq -o Acquire::Languages=none \
     && env DEBIAN_FRONTEND=noninteractive apt-get install \
-        -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
-        build-essential \
-        curl \
-        debhelper \
-        devscripts \
-        libsystemd-dev \
-        lsb-release \
-        pkg-config \
-        python3-dev \
-        python3-pip \
-        python3-setuptools \
-        python3-venv \
-        sqlite3 \
-        libpq-dev \
-        libicu-dev \
-        pkg-config \
-        xmlsec1
+        -yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
+        build-essential \
+        curl \
+        debhelper \
+        devscripts \
+        libsystemd-dev \
+        lsb-release \
+        pkg-config \
+        python3-dev \
+        python3-pip \
+        python3-setuptools \
+        python3-venv \
+        sqlite3 \
+        libpq-dev \
+        libicu-dev \
+        pkg-config \
+        xmlsec1
 
 # Install rust and ensure it's in the PATH
 ENV RUSTUP_HOME=/rust
docker/Dockerfile-workers

@@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
 # target image. For repeated rebuilds, this is much faster than apt installing
 # each time.
 
-FROM docker.io/library/debian:bullseye-slim AS deps_base
+FROM debian:bullseye-slim AS deps_base
 RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \

@@ -21,7 +21,7 @@ FROM docker.io/library/debian:bullseye-slim AS deps_base
 # which makes it much easier to copy (but we need to make sure we use an image
 # based on the same debian version as the synapse image, to make sure we get
 # the expected version of libc.
-FROM docker.io/library/redis:7-bullseye AS redis_base
+FROM redis:6-bullseye AS redis_base
 
 # now build the final image, based on the the regular Synapse docker image
 FROM $FROM
docker/README.md

@@ -73,8 +73,7 @@ The following environment variables are supported in `generate` mode:
   will log sensitive information such as access tokens.
   This should not be needed unless you are a developer attempting to debug something
   particularly tricky.
-* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
-  for testing.
 
 
 ## Postgres
docker/complement/Dockerfile

@@ -7,7 +7,6 @@
 #   https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
 
 ARG SYNAPSE_VERSION=latest
-# This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
 
 FROM $FROM

@@ -20,8 +19,8 @@ FROM $FROM
 # the same debian version as Synapse's docker image (so the versions of the
 # shared libraries match).
 RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
-COPY --from=docker.io/library/postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
-COPY --from=docker.io/library/postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
+COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
+COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
 RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
 ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
 ENV PGDATA=/var/lib/postgresql/data
@@ -92,6 +92,8 @@ allow_device_name_lookup_over_federation: true
 ## Experimental Features ##
 
 experimental_features:
+  # Enable history backfilling support
+  msc2716_enabled: true
   # client-side support for partial state in /send_join responses
   faster_joins: true
   # Enable support for polls
@@ -35,11 +35,7 @@ server {
 
     # Send all other traffic to the main process
     location ~* ^(\\/_matrix|\\/_synapse) {
-{% if using_unix_sockets %}
-        proxy_pass http://unix:/run/main_public.sock;
-{% else %}
         proxy_pass http://localhost:8080;
-{% endif %}
         proxy_set_header X-Forwarded-For $remote_addr;
         proxy_set_header X-Forwarded-Proto $scheme;
         proxy_set_header Host $host;
@@ -6,9 +6,6 @@
 {% if enable_redis %}
 redis:
     enabled: true
-{% if using_unix_sockets %}
-    path: /tmp/redis.sock
-{% endif %}
 {% endif %}
 
 {% if appservice_registrations is not none %}
@@ -19,11 +19,7 @@ username=www-data
 autorestart=true
 
 [program:redis]
-{% if using_unix_sockets %}
-command=/usr/local/bin/prefix-log /usr/local/bin/redis-server --unixsocket /tmp/redis.sock
-{% else %}
 command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
-{% endif %}
 priority=1
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
@@ -8,11 +8,7 @@ worker_name: "{{ name }}"
 
 worker_listeners:
   - type: http
-{% if using_unix_sockets %}
-    path: "/run/worker.{{ port }}"
-{% else %}
     port: {{ port }}
-{% endif %}
 {% if listener_resources %}
     resources:
       - names:
@@ -36,17 +36,12 @@ listeners:
 
   # Allow configuring in case we want to reverse proxy 8008
   # using another process in the same container
-{% if SYNAPSE_USE_UNIX_SOCKET %}
-  # Unix sockets don't care about TLS or IP addresses or ports
-  - path: '/run/main_public.sock'
-    type: http
-{% else %}
   - port: {{ SYNAPSE_HTTP_PORT or 8008 }}
     tls: false
     bind_addresses: ['::']
     type: http
     x_forwarded: false
-{% endif %}
 
     resources:
       - names: [client]
        compress: true

@@ -62,11 +57,8 @@ database:
   user: "{{ POSTGRES_USER or "synapse" }}"
   password: "{{ POSTGRES_PASSWORD }}"
   database: "{{ POSTGRES_DB or "synapse" }}"
-{% if not SYNAPSE_USE_UNIX_SOCKET %}
-  {# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
   host: "{{ POSTGRES_HOST or "db" }}"
   port: "{{ POSTGRES_PORT or "5432" }}"
-{% endif %}
   cp_min: 5
   cp_max: 10
 {% else %}
@@ -49,35 +49,17 @@ handlers:
     class: logging.StreamHandler
     formatter: precise
 
+{% if not SYNAPSE_LOG_SENSITIVE %}
+{#
+  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
+  so that DEBUG entries (containing sensitive information) are not emitted.
+#}
 loggers:
-    # This is just here so we can leave `loggers` in the config regardless of whether
-    # we configure other loggers below (avoid empty yaml dict error).
-    _placeholder:
-        level: "INFO"
-
-{% if not SYNAPSE_LOG_SENSITIVE %}
-{#
-  If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
-  so that DEBUG entries (containing sensitive information) are not emitted.
-#}
     synapse.storage.SQL:
         # beware: increasing this to DEBUG will make synapse log sensitive
         # information such as access tokens.
         level: INFO
-{% endif %}
-
-{% if SYNAPSE_LOG_TESTING %}
-{#
-  If Synapse is under test, log a few more useful things for a developer
-  attempting to debug something particularly tricky.
-
-  With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
-  unexpectedly) filtered out of responses in tests. It's just nice to be able to
-  look at the CI log and figure out why an event isn't being returned.
-#}
-    synapse.visibility.filtered_event_debug:
-        level: DEBUG
-{% endif %}
+{% endif %}
 
 root:
     level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
docker/configure_workers_and_start.py

@@ -40,8 +40,6 @@
 #         log level. INFO is the default.
 #   * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
 #         regardless of the SYNAPSE_LOG_LEVEL setting.
-#   * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
-#         for testing.
 #
 # NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
 # in the project's README), this script may be run multiple times, and functionality should

@@ -74,9 +72,6 @@ MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
 MAIN_PROCESS_INSTANCE_NAME = "main"
 MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
 MAIN_PROCESS_REPLICATION_PORT = 9093
-# Obviously, these would only be used with the UNIX socket option
-MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH = "/run/main_public.sock"
-MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH = "/run/main_private.sock"
 
 # A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
 # during processing with the name of the worker.

@@ -247,6 +242,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
             "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
             "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
+            "^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
         ],
         "shared_extra_conf": {},
         "worker_extra_conf": "",

@@ -410,15 +406,11 @@ def add_worker_roles_to_shared_config(
             )
 
             # Map of stream writer instance names to host/ports combos
-            if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
-                instance_map[worker_name] = {
-                    "path": f"/run/worker.{worker_port}",
-                }
-            else:
-                instance_map[worker_name] = {
-                    "host": "localhost",
-                    "port": worker_port,
-                }
+            instance_map[worker_name] = {
+                "host": "localhost",
+                "port": worker_port,
+            }
 
             # Update the list of stream writers. It's convenient that the name of the worker
             # type is the same as the stream to write. Iterate over the whole list in case there
             # is more than one.

@@ -430,15 +422,10 @@ def add_worker_roles_to_shared_config(
 
         # Map of stream writer instance names to host/ports combos
         # For now, all stream writers need http replication ports
-        if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
-            instance_map[worker_name] = {
-                "path": f"/run/worker.{worker_port}",
-            }
-        else:
-            instance_map[worker_name] = {
-                "host": "localhost",
-                "port": worker_port,
-            }
+        instance_map[worker_name] = {
+            "host": "localhost",
+            "port": worker_port,
+        }
 
 
 def merge_worker_template_configs(

@@ -730,29 +717,17 @@ def generate_worker_files(
     # Note that yaml cares about indentation, so care should be taken to insert lines
     # into files at the correct indentation below.
 
-    # Convenience helper for if using unix sockets instead of host:port
-    using_unix_sockets = environ.get("SYNAPSE_USE_UNIX_SOCKET", False)
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners: List[Any]
-    if using_unix_sockets:
-        listeners = [
-            {
-                "path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
-                "type": "http",
-                "resources": [{"names": ["replication"]}],
-            }
-        ]
-    else:
-        listeners = [
-            {
-                "port": MAIN_PROCESS_REPLICATION_PORT,
-                "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
-                "type": "http",
-                "resources": [{"names": ["replication"]}],
-            }
-        ]
+    listeners = [
+        {
+            "port": MAIN_PROCESS_REPLICATION_PORT,
+            "bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
+            "type": "http",
+            "resources": [{"names": ["replication"]}],
+        }
+    ]
     with open(config_path) as file_stream:
         original_config = yaml.safe_load(file_stream)
         original_listeners = original_config.get("listeners")

@@ -793,17 +768,7 @@ def generate_worker_files(
 
     # A list of internal endpoints to healthcheck, starting with the main process
     # which exists even if no workers do.
-    # This list ends up being part of the command line to curl, (curl added support for
-    # Unix sockets in version 7.40).
-    if using_unix_sockets:
-        healthcheck_urls = [
-            f"--unix-socket {MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH} "
-            # The scheme and hostname from the following URL are ignored.
-            # The only thing that matters is the path `/health`
-            "http://localhost/health"
-        ]
-    else:
-        healthcheck_urls = ["http://localhost:8080/health"]
+    healthcheck_urls = ["http://localhost:8080/health"]
 
     # Get the set of all worker types that we have configured
     all_worker_types_in_use = set(chain(*requested_worker_types.values()))

@@ -840,12 +805,8 @@ def generate_worker_files(
         # given worker_type needs to stay assigned and not be replaced.
         worker_config["shared_extra_conf"].update(shared_config)
         shared_config = worker_config["shared_extra_conf"]
-        if using_unix_sockets:
-            healthcheck_urls.append(
-                f"--unix-socket /run/worker.{worker_port} http://localhost/health"
-            )
-        else:
-            healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
+        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
 
         # Update the shared config with sharding-related options if necessary
         add_worker_roles_to_shared_config(

@@ -864,7 +825,6 @@ def generate_worker_files(
             "/conf/workers/{name}.yaml".format(name=worker_name),
             **worker_config,
             worker_log_config_filepath=log_config_filepath,
-            using_unix_sockets=using_unix_sockets,
         )
 
         # Save this worker's port number to the correct nginx upstreams

@@ -885,13 +845,8 @@ def generate_worker_files(
     nginx_upstream_config = ""
     for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
         body = ""
-        if using_unix_sockets:
-            for port in upstream_worker_ports:
-                body += f"    server unix:/run/worker.{port};\n"
-
-        else:
-            for port in upstream_worker_ports:
-                body += f"    server localhost:{port};\n"
+        for port in upstream_worker_ports:
+            body += f"    server localhost:{port};\n"
 
         # Add to the list of configured upstreams
         nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(

@@ -921,15 +876,10 @@ def generate_worker_files(
     # If there are workers, add the main process to the instance_map too.
     if workers_in_use:
         instance_map = shared_config.setdefault("instance_map", {})
-        if using_unix_sockets:
-            instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
-                "path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
-            }
-        else:
-            instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
-                "host": MAIN_PROCESS_LOCALHOST_ADDRESS,
-                "port": MAIN_PROCESS_REPLICATION_PORT,
-            }
+        instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
+            "host": MAIN_PROCESS_LOCALHOST_ADDRESS,
+            "port": MAIN_PROCESS_REPLICATION_PORT,
+        }
 
     # Shared homeserver config
     convert(

@@ -939,7 +889,6 @@ def generate_worker_files(
         appservice_registrations=appservice_registrations,
         enable_redis=workers_in_use,
         workers_in_use=workers_in_use,
-        using_unix_sockets=using_unix_sockets,
     )
 
     # Nginx config

@@ -950,7 +899,6 @@ def generate_worker_files(
         upstream_directives=nginx_upstream_config,
         tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
         tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
-        using_unix_sockets=using_unix_sockets,
    )
 
     # Supervisord config

@@ -960,7 +908,6 @@ def generate_worker_files(
         "/etc/supervisor/supervisord.conf",
         main_config_path=config_path,
         enable_redis=workers_in_use,
-        using_unix_sockets=using_unix_sockets,
     )
 
     convert(

@@ -1000,7 +947,6 @@ def generate_worker_log_config(
     extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
         "SYNAPSE_LOG_SENSITIVE"
     )
-    extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
 
     # Render and write the file
     log_config_filepath = f"/conf/workers/{worker_name}.log.config"
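The removed branches above build curl commands that reach Synapse over a unix socket rather than TCP; the file's own comment notes that curl gained unix-socket support in version 7.40. A sketch of the two resulting healthchecks, using the paths defined in this file (the -fSs flags are an assumption for quiet fail-fast behaviour):

    # TCP healthcheck (what the retained code generates).
    curl -fSs http://localhost:8080/health
    # Unix-socket healthcheck (what the removed code generated); the scheme and
    # hostname in the URL are ignored, only the /health path matters.
    curl -fSs --unix-socket /run/main_public.sock http://localhost/health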
@@ -10,7 +10,7 @@ ARG PYTHON_VERSION=3.9
 ###
 # We hardcode the use of Debian bullseye here because this could change upstream
 # and other Dockerfiles used for testing are expecting bullseye.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bullseye
+FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
 
 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps
@@ -419,7 +419,7 @@ The following query parameters are available:
 
 * `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
   or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
-* `to` - The token to stop returning events at.
+* `to` - The token to spot returning events at.
 * `limit` - The maximum number of events to return. Defaults to `10`.
 * `filter` - A JSON RoomEventFilter to filter returned events with.
 * `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
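A hypothetical request using these query parameters against this admin endpoint; `$FROM_TOKEN` would come from a previous `/sync` or `/messages` response, and `$ADMIN_TOKEN` and `$ROOM_ID` are placeholders:

    curl -H "Authorization: Bearer $ADMIN_TOKEN" \
        "http://localhost:8008/_synapse/admin/v1/rooms/$ROOM_ID/messages?from=$FROM_TOKEN&limit=10&dir=b"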
docs/admin_api/user_admin_api.md

@@ -242,9 +242,6 @@ The following parameters should be set in the URL:
 
 - `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
   Setting this value to `b` will reverse the above sort order. Defaults to `f`.
-- `not_user_type` - Exclude certain user types, such as bot users, from the request.
-  Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
-  "empty string" here means to exclude users without a type.
 
 Caution. The database only has indexes on the columns `name` and `creation_ts`.
 This means that if a different sort order is used (`is_guest`, `admin`,

@@ -732,8 +729,7 @@ POST /_synapse/admin/v1/users/<user_id>/login
 
 An optional `valid_until_ms` field can be specified in the request body as an
 integer timestamp that specifies when the token should expire. By default tokens
-do not expire. Note that this API does not allow a user to login as themselves
-(to create more tokens).
+do not expire.
 
 A response body like the following is returned:

@@ -1184,7 +1180,7 @@ The following parameters should be set in the URL:
 - `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
   be local.
 
-## Check username availability
+### Check username availability
 
 Checks to see if a username is available, and valid, for the server. See [the client-server
 API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)

@@ -1202,7 +1198,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart
 The request and response format is the same as the
 [/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
 
-## Find a user based on their ID in an auth provider
+### Find a user based on their ID in an auth provider
 
 The API is:

@@ -1241,7 +1237,7 @@ Returns a `404` HTTP status code if no user was found, with a response body like
 _Added in Synapse 1.68.0._
 
 
-## Find a user based on their Third Party ID (ThreePID or 3PID)
+### Find a user based on their Third Party ID (ThreePID or 3PID)
 
 The API is:
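As a worked example of `valid_until_ms` from the login-as-user hunk above: it is a millisecond Unix timestamp, so a token that should expire in two minutes is "now plus 120 seconds, times 1000". A hypothetical call (user ID, host, and token are placeholders):

    # Issue a login token for @user:server.com that expires 120 seconds from now.
    NOW_MS=$(( $(date +%s) * 1000 ))
    curl -X POST -H "Authorization: Bearer $ADMIN_TOKEN" \
        -d "{\"valid_until_ms\": $(( NOW_MS + 120 * 1000 ))}" \
        "http://localhost:8008/_synapse/admin/v1/users/@user:server.com/login"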
(File diff suppressed because it is too large)
docs/deprecation_policy.md

@@ -23,7 +23,7 @@ people building from source should ensure they can fetch recent versions of Rust
 (e.g. by using [rustup](https://rustup.rs/)).
 
 The oldest supported version of SQLite is the version
-[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
+[provided](https://packages.debian.org/buster/libsqlite3-0) by
 [Debian oldstable](https://wiki.debian.org/DebianOldStable).
 
 Context
docs/development/contributing_guide.md

@@ -22,9 +22,6 @@ on Windows is not officially supported.
 
 The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://www.python.org/downloads/). Your Python also needs support for [virtual environments](https://docs.python.org/3/library/venv.html). This is usually built-in, but some Linux distributions like Debian and Ubuntu split it out into its own package. Running `sudo apt install python3-venv` should be enough.
 
-A recent version of the Rust compiler is needed to build the native modules. The
-easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
-
 Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
 
 Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.

@@ -33,6 +30,9 @@ The source code of Synapse is hosted on GitHub. You will also need [a recent ver
 
 For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
 
+A recent version of the Rust compiler is needed to build the native modules. The
+easiest way of installing the latest version is to use [rustup](https://rustup.rs/).
+
 
 # 3. Get the source.
 

@@ -53,11 +53,6 @@ can find many good git tutorials on the web.
 
 # 4. Install the dependencies
 
 
-Before installing the Python dependencies, make sure you have installed a recent version
-of Rust (see the "What do I need?" section above). The easiest way of installing the
-latest version is to use [rustup](https://rustup.rs/).
-
 Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
 and development environment. Once you have installed Python 3 and added the
 source, you should install `poetry`.

@@ -81,8 +76,7 @@ cd path/where/you/have/cloned/the/repository
 poetry install --extras all
 ```
 
-This will install the runtime and developer dependencies for the project. Be sure to check
-that the `poetry install` step completed cleanly.
+This will install the runtime and developer dependencies for the project.
 
 ## Running Synapse via poetry
 

@@ -90,31 +84,14 @@ To start a local instance of Synapse in the locked poetry environment, create a
 
 ```sh
 cp docs/sample_config.yaml homeserver.yaml
-cp docs/sample_log_config.yaml log_config.yaml
 ```
 
-Now edit `homeserver.yaml`, things you might want to change include:
-
-- Set a `server_name`
-- Adjusting paths to be correct for your system like the `log_config` to point to the log config you just copied
-- Using a [PostgreSQL database instead of SQLite](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#database)
-- Adding a [`registration_shared_secret`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration_shared_secret) so you can use [`register_new_matrix_user` command](https://matrix-org.github.io/synapse/latest/setup/installation.html#registering-a-user).
-
-And then run Synapse with the following command:
+Now edit homeserver.yaml, and run Synapse with:
 
 ```sh
 poetry run python -m synapse.app.homeserver -c homeserver.yaml
 ```
 
-If you get an error like the following:
-
-```
-importlib.metadata.PackageNotFoundError: matrix-synapse
-```
-
-this probably indicates that the `poetry install` step did not complete cleanly - go back and
-resolve any issues and re-run until successful.
-
 # 5. Get in touch.
 
 Join our developer community on Matrix: [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org)!

@@ -322,7 +299,7 @@ The following command will let you run the integration test with the most common
 configuration:
 
 ```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
 ```
 (Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
 

@@ -370,7 +347,6 @@ The above will run a monolithic (single-process) Synapse with SQLite as the data
 See the [worker documentation](../workers.md) for additional information on workers.
 - Passing `ASYNCIO_REACTOR=1` as an environment variable to use the Twisted asyncio reactor instead of the default one.
 - Passing `PODMAN=1` will use the [podman](https://podman.io/) container runtime, instead of docker.
-- Passing `UNIX_SOCKETS=1` will utilise Unix socket functionality for Synapse, Redis, and Postgres(when applicable).
 
 To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`, e.g:
 ```sh
@@ -260,17 +260,15 @@ doesn't require poetry. (It's what we use in CI too). However, you could try
|
||||
|
||||
## ...handle a Dependabot pull request?
|
||||
|
||||
Synapse uses Dependabot to keep the `poetry.lock` and `Cargo.lock` file
|
||||
up-to-date with the latest releases of our dependencies. The changelog check is
|
||||
omitted for Dependabot PRs; the release script will include them in the
|
||||
changelog.
|
||||
|
||||
When reviewing a dependabot PR, ensure that:
|
||||
Synapse uses Dependabot to keep the `poetry.lock` file up-to-date. When it
|
||||
creates a pull request a GitHub Action will run to automatically create a changelog
|
||||
file. Ensure that:
|
||||
|
||||
* the lockfile changes look reasonable;
|
||||
* the upstream changelog file (linked in the description) doesn't include any
|
||||
breaking changes;
|
||||
* continuous integration passes.
|
||||
* continuous integration passes (due to permissions, the GitHub Actions run on
|
||||
the changelog commit will fail, look at the initial commit of the pull request);
|
||||
|
||||
In particular, any updates to the type hints (usually packages which start with `types-`)
|
||||
should be safe to merge if linting passes.
|
||||
|
||||
@@ -6,7 +6,7 @@ This is a work-in-progress set of notes with two goals:
|
||||
|
||||
See also [MSC3902](https://github.com/matrix-org/matrix-spec-proposals/pull/3902).
|
||||
|
||||
The key idea is described by [MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706). This allows servers to
|
||||
The key idea is described by [MSC706](https://github.com/matrix-org/matrix-spec-proposals/pull/3902). This allows servers to
|
||||
request a lightweight response to the federation `/send_join` endpoint.
|
||||
This is called a **faster join**, also known as a **partial join**. In these
|
||||
notes we'll usually use the word "partial" as it matches the database schema.
|
||||
|
||||
@@ -46,9 +46,6 @@ instead.
|
||||
|
||||
If the authentication is unsuccessful, the module must return `None`.
|
||||
|
||||
Note that the user is not automatically registered, the `register_user(..)` method of
|
||||
the [module API](writing_a_module.html) can be used to lazily create users.
|
||||
|
||||
If multiple modules register an auth checker for the same login type but with different
|
||||
fields, Synapse will refuse to start.
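
A minimal sketch of an auth checker that lazily creates users on first login might look like the following. The login type `com.example.my_login`, the `secret` field and its expected value are illustrative assumptions, not part of the documented API:

```python
from typing import Any, Callable, Optional, Tuple

import synapse.module_api


class LazyRegistrationAuthProvider:
    def __init__(self, config: dict, api: synapse.module_api.ModuleApi):
        self.api = api
        # Register a checker for a hypothetical login type with one field.
        self.api.register_password_auth_provider_callbacks(
            auth_checkers={("com.example.my_login", ("secret",)): self.check_auth},
        )

    async def check_auth(
        self, username: str, login_type: str, login_dict: dict
    ) -> Optional[Tuple[str, Optional[Callable[..., Any]]]]:
        if login_dict.get("secret") != "hunter2":  # illustrative check only
            return None  # authentication unsuccessful

        user_id = self.api.get_qualified_user_id(username)
        # The user is not registered automatically; create them on first login.
        if not await self.api.check_user_exists(user_id):
            user_id = await self.api.register_user(localpart=username)
        return user_id, None
```

Returning `None` keeps Synapse falling through to any other configured checkers.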

@@ -348,42 +348,6 @@ callback returns `False`, Synapse falls through to the next one. The value of th
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_login_for_spam`

_First introduced in Synapse v1.87.0_

```python
async def check_login_for_spam(
    user_id: str,
    device_id: Optional[str],
    initial_display_name: Optional[str],
    request_info: Collection[Tuple[Optional[str], str]],
    auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```

Called when a user logs in.

The arguments passed to this callback are:

* `user_id`: The user ID the user is logging in with
* `device_id`: The device ID the user is re-logging into.
* `initial_display_name`: The device display name, if any.
* `request_info`: A collection of tuples, whose first item is a user agent, and whose
  second item is an IP address. These user agents and IP addresses are the ones that were
  used during the login process.
* `auth_provider_id`: The identifier of the SSO authentication provider, if any.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
be used. If this happens, Synapse will not call any of the subsequent implementations of
this callback.

*Note:* This will not be called when a user registers.
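
For illustration, a minimal sketch of a module registering this callback might look like the following; the class name, the blocked IP prefix and the choice of `Codes.FORBIDDEN` are assumptions made for the sake of the example:

```python
from typing import Collection, Optional, Tuple, Union

import synapse.module_api


class LoginSpamChecker:
    def __init__(self, config: dict, api: synapse.module_api.ModuleApi):
        self.api = api
        self.api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam,
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]:
        # Block logins coming from an untrusted address range. The prefix is a
        # placeholder (TEST-NET-1), purely for illustration.
        for _user_agent, ip in request_info:
            if ip.startswith("192.0.2."):
                return synapse.module_api.errors.Codes.FORBIDDEN
        return synapse.module_api.NOT_SPAM
```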

## Example

The example below is a module that implements the spam checker callback

@@ -68,7 +68,9 @@ root:
# Write logs to the `buffer` handler, which will buffer them together in memory,
# then write them to a file.
#
# Replace "buffer" with "console" to log to stderr instead.
# Replace "buffer" with "console" to log to stderr instead. (Note that you'll
# also need to update the configuration for the `twisted` logger above, in
# this case.)
#
handlers: [buffer]

@@ -135,8 +135,8 @@ Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 reposi

#### ArchLinux

The quickest way to get up and running with ArchLinux is probably with the package provided by ArchLinux
<https://archlinux.org/packages/extra/x86_64/matrix-synapse/>, which should pull in most of
The quickest way to get up and running with ArchLinux is probably with the community package
<https://archlinux.org/packages/community/x86_64/matrix-synapse/>, which should pull in most of
the necessary dependencies.

pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
@@ -200,7 +200,7 @@ When following this route please make sure that the [Platform-specific prerequis
System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.8 or later, up to Python 3.11.
- Python 3.7 or later, up to Python 3.11.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

If building on an uncommon architecture for which pre-built wheels are

@@ -1,4 +1,8 @@
worker_app: synapse.app.generic_worker
worker_name: background_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/background-worker-log.yaml

@@ -1,5 +1,9 @@
worker_app: synapse.app.generic_worker
worker_name: event_persister1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http

@@ -1,4 +1,8 @@
worker_app: synapse.app.federation_sender
worker_name: federation_sender1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/federation-sender-log.yaml

@@ -1,6 +1,10 @@
worker_app: synapse.app.media_repository
worker_name: media_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8085

@@ -1,4 +1,8 @@
worker_app: synapse.app.pusher
worker_name: pusher_worker1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/pusher-worker-log.yaml

@@ -88,82 +88,19 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.89.0

## Removal of unspecced `user` property for `/register`

Application services can no longer call `/register` with a `user` property to create new users.
The standard `username` property should be used instead. See the
[Application Service specification](https://spec.matrix.org/v1.7/application-service-api/#server-admin-style-permissions)
for more information.

# Upgrading to v1.88.0

## Minimum supported Python version

The minimum supported Python version has been increased from v3.7 to v3.8.
You will need Python 3.8 to run Synapse v1.88.0 (due out July 18th, 2023).

If you use current versions of the Matrix.org-distributed Debian
packages or Docker images, no action is required.

## Removal of `worker_replication_*` settings

As mentioned previously in [Upgrading to v1.84.0](#upgrading-to-v1840), the following deprecated settings
are being removed in this release of Synapse:

* [`worker_replication_host`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_host)
* [`worker_replication_http_port`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_http_port)
* [`worker_replication_http_tls`](https://matrix-org.github.io/synapse/v1.86/usage/configuration/config_documentation.html#worker_replication_http_tls)

Please ensure that you have migrated to using `main` on your shared configuration's `instance_map`
(or create one if necessary). This is required if you have ***any*** workers at all;
administrators of single-process (monolith) installations don't need to do anything.

For an illustrative example, please see [Upgrading to v1.84.0](#upgrading-to-v1840) below.

# Upgrading to v1.86.0

## Minimum supported Rust version

The minimum supported Rust version has been increased from v1.58.1 to v1.60.0.
Users building from source will need to ensure their `rustc` version is up to
date.

# Upgrading to v1.85.0

## Application service registration with "user" property deprecation

Application services should ensure they call the `/register` endpoint with a
`username` property. The legacy `user` property is considered deprecated and
should no longer be included.

A future version of Synapse (v1.88.0 or later) will remove support for legacy
application service login.

# Upgrading to v1.84.0

## Deprecation of `worker_replication_*` configuration settings

When using workers,

When using workers,
* `worker_replication_host`
* `worker_replication_http_port`
* `worker_replication_http_tls`

should now be removed from individual worker YAML configurations and the main process should instead be added to the `instance_map`
in the shared YAML configuration, using the name `main`.

The old `worker_replication_*` settings are now considered deprecated and are expected to be removed in Synapse v1.88.0.

### Example change

#### Before:
can now be removed from individual worker YAML configuration ***if*** you add the main process to the `instance_map` in the shared YAML configuration,
using the name `main`.

### Before:
Shared YAML
```yaml
instance_map:
@@ -172,7 +109,6 @@ instance_map:
    port: 5678
    tls: false
```

Worker YAML
```yaml
worker_app: synapse.app.generic_worker
@@ -194,10 +130,7 @@ worker_listeners:

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
```

#### After:

### After:
Shared YAML
```yaml
instance_map:
@@ -210,7 +143,6 @@ instance_map:
    port: 5678
    tls: false
```

Worker YAML
```yaml
worker_app: synapse.app.generic_worker
@@ -233,6 +165,7 @@ Notes:
* `tls` is optional but mirrors the functionality of `worker_replication_http_tls`

# Upgrading to v1.81.0

## Application service path & authentication deprecations

@@ -27,8 +27,9 @@ What servers are currently participating in this room?
Run this sql query on your db:
```sql
SELECT DISTINCT split_part(state_key, ':', 2)
FROM current_state_events
WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join';
FROM current_state_events AS c
INNER JOIN room_memberships AS m USING (room_id, event_id)
WHERE room_id = '!cURbafjkfsMDVwdRDQ:matrix.org' AND membership = 'join';
```
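
If you prefer to run the query from a script rather than a `psql` prompt, a minimal sketch using `psycopg2` (the PostgreSQL driver Synapse itself depends on) might look like the following; the connection string and room ID are placeholders to adapt to your setup:

```python
import psycopg2  # the driver Synapse already uses for PostgreSQL

ROOM_ID = "!cURbafjkfsMDVwdRDQ:matrix.org"  # placeholder room ID

# Adjust the DSN to match your database configuration.
conn = psycopg2.connect("dbname=synapse user=synapse_user host=localhost")
with conn, conn.cursor() as cur:
    cur.execute(
        """
        SELECT DISTINCT split_part(state_key, ':', 2)
        FROM current_state_events AS c
        INNER JOIN room_memberships AS m USING (room_id, event_id)
        WHERE room_id = %s AND membership = 'join'
        """,
        (ROOM_ID,),
    )
    for (server,) in cur.fetchall():
        print(server)
```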

What users are registered on my server?

@@ -462,20 +462,6 @@ See the docs [request log format](../administration/request_log.md).
* `additional_resources`: Only valid for an 'http' listener. A map of
  additional endpoints which should be loaded via dynamic modules.

Unix socket support (_Added in Synapse 1.89.0_):
* `path`: A path and filename for a Unix socket. Make sure it is located in a
  directory with read and write permissions, and that it already exists (the directory
  will not be created). Defaults to `None`.
  * **Note**: The use of both `path` and `port` options for the same `listener` is not
    compatible.
  * The `x_forwarded` option defaults to true when using Unix sockets and can be omitted.
  * Other options that would not make sense to use with a UNIX socket, such as
    `bind_addresses` and `tls` will be ignored and can be removed.
* `mode`: The file permissions to set on the UNIX socket. Defaults to `666`
  * **Note:** Must be set as `type: http` (does not support `metrics` and `manhole`).
    Also make sure that `metrics` is not included in `resources` -> `names`

Valid resource names are:

* `client`: the client-server API (/_matrix/client), and the synapse admin API (/_synapse/admin). Also implies `media` and `static`.
@@ -488,7 +474,7 @@ Valid resource names are:

* `media`: the media API (/_matrix/media).

* `metrics`: the metrics interface. See [here](../../metrics-howto.md). (Not compatible with Unix sockets)
* `metrics`: the metrics interface. See [here](../../metrics-howto.md).

* `openid`: OpenID authentication. See [here](../../openid.md).

@@ -547,22 +533,6 @@ listeners:
    bind_addresses: ['::1', '127.0.0.1']
    type: manhole
```
Example configuration #3:
```yaml
listeners:
  # Unix socket listener: Ideal for Synapse deployments behind a reverse proxy, offering
  # lightweight interprocess communication without TCP/IP overhead, avoiding port
  # conflicts, and providing enhanced security through system file permissions.
  #
  # Note that x_forwarded will default to true, when using a UNIX socket. Please see
  # https://matrix-org.github.io/synapse/latest/reverse_proxy.html.
  #
  - path: /var/run/synapse/main_public.sock
    type: http
    resources:
      - names: [client, federation]
```

---
### `manhole_settings`

@@ -1226,32 +1196,6 @@ Example configuration:
allow_device_name_lookup_over_federation: true
```
---
### `federation`

The federation section defines some sub-options related to federation.

The following options are related to configuring timeout and retry logic for one request,
independently of the others.
Short retry algorithm is used when something or someone will wait for the request to have an
answer, while long retry is used for requests that happen in the background,
like sending a federation transaction.

* `client_timeout`: timeout for the federation requests. Default to 60s.
* `max_short_retry_delay`: maximum delay to be used for the short retry algo. Default to 2s.
* `max_long_retry_delay`: maximum delay to be used for the long retry algo. Default to 60s.
* `max_short_retries`: maximum number of retries for the short retry algo. Default to 3 attempts.
* `max_long_retries`: maximum number of retries for the long retry algo. Default to 10 attempts.

Example configuration:
```yaml
federation:
  client_timeout: 180s
  max_short_retry_delay: 7s
  max_long_retry_delay: 100s
  max_short_retries: 5
  max_long_retries: 20
```
---
## Caching

Options related to caching.
@@ -2626,50 +2570,7 @@ Example configuration:
```yaml
nonrefreshable_access_token_lifetime: 24h
```
---
### `ui_auth`

The amount of time to allow a user-interactive authentication session to be active.

This defaults to 0, meaning the user is queried for their credentials
before every action, but this can be overridden to allow a single
validation to be re-used. This weakens the protections afforded by
the user-interactive authentication process, by allowing for multiple
(and potentially different) operations to use the same validation session.

This is ignored for potentially "dangerous" operations (including
deactivating an account, modifying an account password, adding a 3PID,
and minting additional login tokens).

Use the `session_timeout` sub-option here to change the time allowed for credential validation.

Example configuration:
```yaml
ui_auth:
  session_timeout: "15s"
```
---
### `login_via_existing_session`

Matrix supports the ability of an existing session to mint a login token for
another client.

Synapse disables this by default as it has security ramifications -- a malicious
client could use the mechanism to spawn more than one session.

The duration of time the generated token is valid for can be configured with the
`token_timeout` sub-option.

User-interactive authentication is required when this is enabled unless the
`require_ui_auth` sub-option is set to `False`.

Example configuration:
```yaml
login_via_existing_session:
  enabled: true
  require_ui_auth: false
  token_timeout: "5m"
```
---
## Metrics
Config options related to metrics.
@@ -3514,6 +3415,28 @@ password_config:
  require_uppercase: true
```
---
### `ui_auth`

The amount of time to allow a user-interactive authentication session to be active.

This defaults to 0, meaning the user is queried for their credentials
before every action, but this can be overridden to allow a single
validation to be re-used. This weakens the protections afforded by
the user-interactive authentication process, by allowing for multiple
(and potentially different) operations to use the same validation session.

This is ignored for potentially "dangerous" operations (including
deactivating an account, modifying an account password, and
adding a 3PID).

Use the `session_timeout` sub-option here to change the time allowed for credential validation.

Example configuration:
```yaml
ui_auth:
  session_timeout: "15s"
```
---
## Push
Configuration settings related to push notifications

@@ -3960,14 +3883,13 @@ federation_sender_instances:
---
### `instance_map`

When using workers this should be a map from [`worker_name`](#worker_name) to the HTTP
replication listener of the worker, if configured, and to the main process. Each worker
declared under [`stream_writers`](../../workers.md#stream-writers) and
[`outbound_federation_restricted_to`](#outbound_federation_restricted_to) needs a HTTP
replication listener, and that listener should be included in the `instance_map`. The
main process also needs an entry on the `instance_map`, and it should be listed under
`main` **if even one other worker exists**. Ensure the port matches with what is
declared inside the `listener` block for a `replication` listener.
When using workers this should be a map from [`worker_name`](#worker_name) to the
HTTP replication listener of the worker, if configured, and to the main process.
Each worker declared under [`stream_writers`](../../workers.md#stream-writers) needs
a HTTP replication listener, and that listener should be included in the `instance_map`.
The main process also needs an entry on the `instance_map`, and it should be listed under
`main` **if even one other worker exists**. Ensure the port matches with what is declared
inside the `listener` block for a `replication` listener.

Example configuration:
@@ -3980,14 +3902,6 @@ instance_map:
    host: localhost
    port: 8034
```
Example configuration (#2, for UNIX sockets):
```yaml
instance_map:
  main:
    path: /var/run/synapse/main_replication.sock
  worker1:
    path: /var/run/synapse/worker1_replication.sock
```
---
### `stream_writers`

@@ -4005,24 +3919,6 @@ stream_writers:
  typing: worker1
```
---
### `outbound_federation_restricted_to`

When using workers, you can restrict outbound federation traffic to only go through a
specific subset of workers. Any worker specified here must also be in the
[`instance_map`](#instance_map).
[`worker_replication_secret`](#worker_replication_secret) must also be configured to
authorize inter-worker communication.

```yaml
outbound_federation_restricted_to:
  - federation_sender1
  - federation_sender2
```

Also see the [worker
documentation](../../workers.md#restrict-outbound-federation-traffic-to-a-specific-set-of-workers)
for more info.
---
### `run_background_tasks_on`

The [worker](../../workers.md#background-tasks) that is used to run
@@ -4083,8 +3979,6 @@ This setting has the following sub-options:
* `enabled`: whether to use Redis support. Defaults to false.
* `host` and `port`: Optional host and port to use to connect to redis. Defaults to
  localhost and 6379
* `path`: The full path to a local Unix socket file. **If this is used, `host` and
  `port` are ignored.** Defaults to `/tmp/redis.sock`
* `password`: Optional password if configured on the Redis instance.
* `dbid`: Optional redis dbid, if you need to connect to a specific redis logical db.
* `use_tls`: Whether to use tls connection. Defaults to false.
@@ -4097,8 +3991,6 @@ This setting has the following sub-options:

_Changed in Synapse 1.84.0: Added use\_tls, certificate\_file, private\_key\_file, ca\_file and ca\_path attributes_

_Changed in Synapse 1.85.0: Added path option to use a local Unix socket_

Example configuration:
```yaml
redis:
@@ -4147,6 +4039,51 @@ Example configuration:
worker_name: generic_worker1
```
---
### `worker_replication_host`
*Deprecated as of version 1.84.0. Place `host` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

The HTTP replication endpoint that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
[`listeners` option](#listeners).

Example configuration:
```yaml
worker_replication_host: 127.0.0.1
```
---
### `worker_replication_http_port`
*Deprecated as of version 1.84.0. Place `port` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

The HTTP replication port that it should talk to on the main Synapse process.
The main Synapse process defines this with a `replication` resource in
[`listeners` option](#listeners).

Example configuration:
```yaml
worker_replication_http_port: 9093
```
---
### `worker_replication_http_tls`
*Deprecated as of version 1.84.0. Place `tls` under `main` entry on the [`instance_map`](#instance_map) in your shared yaml configuration instead.*

Whether TLS should be used for talking to the HTTP replication port on the main
Synapse process.
The main Synapse process defines this with the `tls` option on its [listener](#listeners) that
has the `replication` resource enabled.

**Please note:** by default, it is not safe to expose replication ports to the
public Internet, even with TLS enabled.
See [`worker_replication_secret`](#worker_replication_secret).

Defaults to `false`.

*Added in Synapse 1.72.0.*

Example configuration:
```yaml
worker_replication_http_tls: true
```
---
### `worker_listeners`

A worker can handle HTTP requests. To do so, a `worker_listeners` option
@@ -4165,18 +4102,6 @@ worker_listeners:
    resources:
      - names: [client, federation]
```
Example configuration (#2, using UNIX sockets with a `replication` listener):
```yaml
worker_listeners:
  - type: http
    path: /var/run/synapse/worker_public.sock
    resources:
      - names: [client, federation]
  - type: http
    path: /var/run/synapse/worker_replication.sock
    resources:
      - names: [replication]
```
---
### `worker_manhole`

@@ -24,11 +24,6 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting the
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.

In addition to these modifications, we have added a version picker to the documentation.
Users can switch between documentations for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.

More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and

@@ -131,18 +131,6 @@
            <i class="fa fa-search"></i>
        </button>
        {{/if}}
        <div class="version-picker">
            <div class="dropdown">
                <div class="select">
                    <span></span>
                    <i class="fa fa-chevron-down"></i>
                </div>
                <input type="hidden" name="version">
                <ul class="dropdown-menu">
                    <!-- Versions will be added dynamically in version-picker.js -->
                </ul>
            </div>
        </div>
    </div>

<h1 class="menu-title">{{ book_title }}</h1>
@@ -321,4 +309,4 @@
{{/if}}

</body>
</html>
</html>
@@ -1,78 +0,0 @@
.version-picker {
    display: flex;
    align-items: center;
}

.version-picker .dropdown {
    width: 130px;
    max-height: 29px;
    margin-left: 10px;
    display: inline-block;
    border-radius: 4px;
    border: 1px solid var(--theme-popup-border);
    position: relative;
    font-size: 13px;
    color: var(--fg);
    height: 100%;
    text-align: left;
}
.version-picker .dropdown .select {
    cursor: pointer;
    display: block;
    padding: 5px 2px 5px 15px;
}
.version-picker .dropdown .select > i {
    font-size: 10px;
    color: var(--fg);
    cursor: pointer;
    float: right;
    line-height: 20px !important;
}
.version-picker .dropdown:hover {
    border: 1px solid var(--theme-popup-border);
}
.version-picker .dropdown:active {
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active:hover,
.version-picker .dropdown.active {
    border: 1px solid var(--theme-popup-border);
    border-radius: 2px 2px 0 0;
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active .select > i {
    transform: rotate(-180deg);
}
.version-picker .dropdown .dropdown-menu {
    position: absolute;
    background-color: var(--theme-popup-bg);
    width: 100%;
    left: -1px;
    right: 1px;
    margin-top: 1px;
    border: 1px solid var(--theme-popup-border);
    border-radius: 0 0 4px 4px;
    overflow: hidden;
    display: none;
    max-height: 300px;
    overflow-y: auto;
    z-index: 9;
}
.version-picker .dropdown .dropdown-menu li {
    font-size: 12px;
    padding: 6px 20px;
    cursor: pointer;
}
.version-picker .dropdown .dropdown-menu {
    padding: 0;
    list-style: none;
}
.version-picker .dropdown .dropdown-menu li:hover {
    background-color: var(--theme-hover);
}
.version-picker .dropdown .dropdown-menu li.active::before {
    display: inline-block;
    content: "✓";
    margin-inline-start: -14px;
    width: 14px;
}
@@ -1,127 +0,0 @@

const dropdown = document.querySelector('.version-picker .dropdown');
const dropdownMenu = dropdown.querySelector('.dropdown-menu');

fetchVersions(dropdown, dropdownMenu).then(() => {
    initializeVersionDropdown(dropdown, dropdownMenu);
});

/**
 * Initialize the dropdown functionality for version selection.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 */
function initializeVersionDropdown(dropdown, dropdownMenu) {
    // Toggle the dropdown menu on click
    dropdown.addEventListener('click', function () {
        this.setAttribute('tabindex', 1);
        this.classList.toggle('active');
        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
    });

    // Remove the 'active' class and hide the dropdown menu on focusout
    dropdown.addEventListener('focusout', function () {
        this.classList.remove('active');
        dropdownMenu.style.display = 'none';
    });

    // Handle item selection within the dropdown menu
    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
    dropdownMenuItems.forEach(function (item) {
        item.addEventListener('click', function () {
            dropdownMenuItems.forEach(function (item) {
                item.classList.remove('active');
            });
            this.classList.add('active');
            dropdown.querySelector('span').textContent = this.textContent;
            dropdown.querySelector('input').value = this.getAttribute('id');

            window.location.href = changeVersion(window.location.href, this.textContent);
        });
    });
};

/**
 * This function fetches the available versions from a GitHub repository
 * and inserts them into the version picker.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
 */
function fetchVersions(dropdown, dropdownMenu) {
    return new Promise((resolve, reject) => {
        window.addEventListener("load", () => {

            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
                cache: "force-cache",
            }).then(res =>
                res.json()
            ).then(resObject => {
                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
                const versions = tree.map(item => item.path).sort(sortVersions);

                // Create a list of <li> items for versions
                versions.forEach((version) => {
                    const li = document.createElement("li");
                    li.textContent = version;
                    li.id = version;

                    if (window.SYNAPSE_VERSION === version) {
                        li.classList.add('active');
                        dropdown.querySelector('span').textContent = version;
                        dropdown.querySelector('input').value = version;
                    }

                    dropdownMenu.appendChild(li);
                });

                resolve(versions);

            }).catch(ex => {
                console.error("Failed to fetch version data", ex);
                reject(ex);
            })
        });
    });
}

/**
 * Custom sorting function to sort an array of version strings.
 *
 * @param {string} a - The first version string to compare.
 * @param {string} b - The second version string to compare.
 * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
 */
function sortVersions(a, b) {
    // Put 'develop' and 'latest' at the top
    if (a === 'develop' || a === 'latest') return -1;
    if (b === 'develop' || b === 'latest') return 1;

    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];

    return versionB.localeCompare(versionA);
}

/**
 * Change the version in a URL path.
 *
 * @param {string} url - The original URL to be modified.
 * @param {string} newVersion - The new version to replace the existing version in the URL.
 * @returns {string} The updated URL with the new version.
 */
function changeVersion(url, newVersion) {
    const parsedURL = new URL(url);
    const pathSegments = parsedURL.pathname.split('/');

    // Modify the version
    pathSegments[2] = newVersion;

    // Reconstruct the URL
    parsedURL.pathname = pathSegments.join('/');

    return parsedURL.href;
}
@@ -1 +0,0 @@
window.SYNAPSE_VERSION = 'v1.89';
@@ -95,12 +95,9 @@ for the main process
* Secondly, you need to enable
  [redis-based replication](usage/configuration/config_documentation.md#redis)
* You will need to add an [`instance_map`](usage/configuration/config_documentation.md#instance_map)
  with the `main` process defined, as well as the relevant connection information from
  its HTTP `replication` listener (defined in step 1 above).
  * Note that the `host` defined is the address the worker needs to look for the `main`
    process at, not necessarily the same address that is bound to.
  * If you are using Unix sockets for the `replication` resource, make sure to
    use a `path` to the socket file instead of a `port`.
  with the `main` process defined, as well as the relevant connection information from
  its HTTP `replication` listener (defined in step 1 above). Note that the `host` defined
  is the address the worker needs to look for the `main` process at, not necessarily the same address that is bound to.
* Optionally, a [shared secret](usage/configuration/config_documentation.md#worker_replication_secret)
  can be used to authenticate HTTP traffic between workers. For example:

@@ -148,6 +145,9 @@ In the config file for each worker, you must specify:
  with an `http` listener.
* **Synapse 1.72 and older:** if handling the `^/_matrix/client/v3/keys/upload` endpoint, the HTTP URI for
  the main process (`worker_main_http_uri`). This config option is no longer required and is ignored when running Synapse 1.73 and newer.
* **Synapse 1.83 and older:** The HTTP replication endpoint that the worker should talk to on the main synapse process
  ([`worker_replication_host`](usage/configuration/config_documentation.md#worker_replication_host) and
  [`worker_replication_http_port`](usage/configuration/config_documentation.md#worker_replication_http_port)). If using Synapse 1.84 and newer, these are not needed if `main` is defined on the [shared configuration](#shared-configuration) `instance_map`

For example:

@@ -177,11 +177,11 @@ The following applies to Synapse installations that have been installed from sou

You can start the main Synapse process with Poetry by running the following command:
```console
poetry run synapse_homeserver --config-file [your homeserver.yaml]
poetry run synapse_homeserver -c [your homeserver.yaml]
```
For worker setups, you can run the following command
```console
poetry run synapse_worker --config-file [your homeserver.yaml] --config-file [your worker.yaml]
poetry run synapse_worker -c [your worker.yaml]
```
## Available worker applications

@@ -232,6 +232,7 @@ information.
    ^/_matrix/client/v1/rooms/.*/hierarchy$
    ^/_matrix/client/(v1|unstable)/rooms/.*/relations/
    ^/_matrix/client/v1/rooms/.*/threads$
    ^/_matrix/client/unstable/org.matrix.msc2716/rooms/.*/batch_send$
    ^/_matrix/client/unstable/im.nheko.summary/rooms/.*/summary$
    ^/_matrix/client/(r0|v3|unstable)/account/3pid$
    ^/_matrix/client/(r0|v3|unstable)/account/whoami$
@@ -531,30 +532,6 @@ the stream writer for the `presence` stream:

    ^/_matrix/client/(api/v1|r0|v3|unstable)/presence/

#### Restrict outbound federation traffic to a specific set of workers

The
[`outbound_federation_restricted_to`](usage/configuration/config_documentation.md#outbound_federation_restricted_to)
configuration is useful to make sure outbound federation traffic only goes through a
specified subset of workers. This allows you to set more strict access controls (like a
firewall) for all workers and only allow the `federation_sender`s to contact the
outside world.

```yaml
instance_map:
  main:
    host: localhost
    port: 8030
  federation_sender1:
    host: localhost
    port: 8034

outbound_federation_restricted_to:
  - federation_sender1

worker_replication_secret: "secret_secret"
```

#### Background tasks

There is also support for moving background tasks to a separate

96 flake.lock generated

@@ -22,6 +22,27 @@
      "type": "github"
    }
  },
  "fenix": {
    "inputs": {
      "nixpkgs": [
        "nixpkgs"
      ],
      "rust-analyzer-src": "rust-analyzer-src"
    },
    "locked": {
      "lastModified": 1682490133,
      "narHash": "sha256-tR2Qx0uuk97WySpSSk4rGS/oH7xb5LykbjATcw1vw1I=",
      "owner": "nix-community",
      "repo": "fenix",
      "rev": "4e9412753ab75ef0e038a5fe54a062fb44c27c6a",
      "type": "github"
    },
    "original": {
      "owner": "nix-community",
      "repo": "fenix",
      "type": "github"
    }
  },
  "flake-compat": {
    "flake": false,
    "locked": {
@@ -53,24 +74,6 @@
      "type": "github"
    }
  },
  "flake-utils_2": {
    "inputs": {
      "systems": "systems"
    },
    "locked": {
      "lastModified": 1681202837,
      "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
      "owner": "numtide",
      "repo": "flake-utils",
      "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
      "type": "github"
    },
    "original": {
      "owner": "numtide",
      "repo": "flake-utils",
      "type": "github"
    }
  },
  "gitignore": {
    "inputs": {
      "nixpkgs": [
@@ -197,22 +200,6 @@
      "type": "github"
    }
  },
  "nixpkgs_3": {
    "locked": {
      "lastModified": 1681358109,
      "narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
      "owner": "NixOS",
      "repo": "nixpkgs",
      "rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
      "type": "github"
    },
    "original": {
      "owner": "NixOS",
      "ref": "nixpkgs-unstable",
      "repo": "nixpkgs",
      "type": "github"
    }
  },
  "pre-commit-hooks": {
    "inputs": {
      "flake-compat": [
@@ -244,27 +231,25 @@
  "root": {
    "inputs": {
      "devenv": "devenv",
      "fenix": "fenix",
      "nixpkgs": "nixpkgs_2",
      "rust-overlay": "rust-overlay",
      "systems": "systems_2"
      "systems": "systems"
    }
  },
  "rust-overlay": {
    "inputs": {
      "flake-utils": "flake-utils_2",
      "nixpkgs": "nixpkgs_3"
    },
  "rust-analyzer-src": {
    "flake": false,
    "locked": {
      "lastModified": 1689302058,
      "narHash": "sha256-yD74lcHTrw4niXcE9goJLbzsgyce48rQQoy5jK5ZK40=",
      "owner": "oxalica",
      "repo": "rust-overlay",
      "rev": "7b8dbbf4c67ed05a9bf3d9e658c12d4108bc24c8",
      "lastModified": 1682426789,
      "narHash": "sha256-UqnLmJESRZE0tTEaGbRAw05Hm19TWIPA+R3meqi5I4w=",
      "owner": "rust-lang",
      "repo": "rust-analyzer",
      "rev": "943d2a8a1ca15e8b28a1f51f5a5c135e3728da04",
      "type": "github"
    },
    "original": {
      "owner": "oxalica",
      "repo": "rust-overlay",
      "owner": "rust-lang",
      "ref": "nightly",
      "repo": "rust-analyzer",
      "type": "github"
    }
  },
@@ -282,21 +267,6 @@
      "repo": "default",
      "type": "github"
    }
  },
  "systems_2": {
    "locked": {
      "lastModified": 1681028828,
      "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
      "owner": "nix-systems",
      "repo": "default",
      "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
      "type": "github"
    },
    "original": {
      "owner": "nix-systems",
      "repo": "default",
      "type": "github"
    }
  }
},
"root": "root",

39 flake.nix

@@ -46,20 +46,20 @@
    systems.url = "github:nix-systems/default";
    # A development environment manager built on Nix. See https://devenv.sh.
    devenv.url = "github:cachix/devenv/main";
    # Rust toolchain.
    rust-overlay.url = "github:oxalica/rust-overlay";
    # Rust toolchains and rust-analyzer nightly.
    fenix = {
      url = "github:nix-community/fenix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, devenv, systems, rust-overlay, ... } @ inputs:
  outputs = { self, nixpkgs, devenv, systems, ... } @ inputs:
    let
      forEachSystem = nixpkgs.lib.genAttrs (import systems);
    in {
      devShells = forEachSystem (system:
        let
          overlays = [ (import rust-overlay) ];
          pkgs = import nixpkgs {
            inherit system overlays;
          };
          pkgs = nixpkgs.legacyPackages.${system};
        in {
          # Everything is configured via devenv - a Nix module for creating declarative
          # developer environments. See https://devenv.sh/reference/options/ for a list
@@ -76,20 +76,6 @@
          # Configure packages to install.
          # Search for package names at https://search.nixos.org/packages?channel=unstable
          packages = with pkgs; [
            # The rust toolchain and related tools.
            # This will install the "default" profile of rust components.
            # https://rust-lang.github.io/rustup/concepts/profiles.html
            #
            # NOTE: We currently need to set the Rust version unnecessarily high
            # in order to work around https://github.com/matrix-org/synapse/issues/15939
            (rust-bin.stable."1.70.0".default.override {
              # Additionally install the "rust-src" extension to allow diving into the
              # Rust source code in an IDE (rust-analyzer will also make use of it).
              extensions = [ "rust-src" ];
            })
            # The rust-analyzer language server implementation.
            rust-analyzer

            # Native dependencies for running Synapse.
            icu
            libffi
@@ -109,10 +95,6 @@

            # For building the Synapse documentation website.
            mdbook

            # For releasing Synapse
            debian-devscripts # (`dch` for manipulating the Debian changelog)
            libnotify # (the release script uses `notify-send` to tell you when CI jobs are done)
          ];

          # Install Python and manage a virtualenv with Poetry.
@@ -138,11 +120,12 @@
          # Install dependencies for the additional programming languages
          # involved with Synapse development.
          #
          # * Rust is used for developing and running Synapse.
          # * Golang is needed to run the Complement test suite.
          # * Perl is needed to run the SyTest test suite.
          # * Rust is used for developing and running Synapse.
          #   It is installed manually with `packages` above.
          languages.go.enable = true;
          languages.rust.enable = true;
          languages.rust.version = "stable";
          languages.perl.enable = true;

          # Postgres is needed to run Synapse with postgres support and
@@ -191,7 +174,7 @@
            EOF
          '';
          # Start synapse when `devenv up` is run.
          processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml -c homeserver-config-overrides.d";
          processes.synapse.exec = "poetry run python -m synapse.app.homeserver -c homeserver.yaml --config-directory homeserver-config-overrides.d";

          # Define the perl modules we require to run SyTest.
          #

29 mypy.ini

@@ -2,29 +2,17 @@
namespace_packages = True
plugins = pydantic.mypy, mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal
check_untyped_defs = True
show_error_codes = True
show_traceback = True
mypy_path = stubs
warn_unreachable = True
warn_unused_ignores = True
local_partial_types = True
no_implicit_optional = True

# Strict checks, see mypy --help
warn_unused_configs = True
# disallow_any_generics = True
disallow_subclassing_any = True
# disallow_untyped_calls = True
disallow_untyped_defs = True
disallow_incomplete_defs = True
# check_untyped_defs = True
# disallow_untyped_decorators = True
warn_redundant_casts = True
warn_unused_ignores = True
# warn_return_any = True
# no_implicit_reexport = True
strict_equality = True
strict_concatenate = True

warn_redundant_casts = True
# Run mypy type checking with the minimum supported Python version to catch new usage
# that isn't backwards-compatible (types, overloads, etc).
python_version = 3.8

@@ -43,7 +31,9 @@ warn_unused_ignores = False

[mypy-synapse.util.caches.treecache]
disallow_untyped_defs = False
disallow_incomplete_defs = False

[mypy-tests.util.caches.test_descriptors]
disallow_untyped_defs = False

;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
@@ -53,18 +43,18 @@ disallow_incomplete_defs = False
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
;; `[tool.poetry.dev-dependencies]` list.

# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*]
ignore_missing_imports = True

[mypy-ijson.*]
ignore_missing_imports = True

# https://github.com/msgpack/msgpack-python/issues/448
[mypy-lxml]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

# https://github.com/wolever/parameterized/issues/143
[mypy-parameterized.*]
ignore_missing_imports = True

@@ -86,7 +76,6 @@ ignore_missing_imports = True
[mypy-srvlookup.*]
ignore_missing_imports = True

# https://github.com/twisted/treq/pull/366
[mypy-treq.*]
ignore_missing_imports = True

1202 poetry.lock generated

File diff suppressed because it is too large

@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.89.0"
version = "1.84.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -147,7 +147,7 @@ synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"

[tool.poetry.dependencies]
python = "^3.8.0"
python = "^3.7.1"

# Mandatory Dependencies
# ----------------------
@@ -203,9 +203,11 @@ ijson = ">=3.1.4"
matrix-common = "^1.3.0"
# We need packaging.requirements.Requirement, added in 16.1.
packaging = ">=16.1"
# At the time of writing, we only use functions from the version of `importlib.metadata`
# which shipped in Python 3.8. This corresponds to version 1.4 of the backport.
importlib_metadata = { version = ">=1.4", python = "<3.8" }
# This is the most recent version of Pydantic available on common distros.
# We are currently incompatible with >=2.0.0: (https://github.com/matrix-org/synapse/issues/15858)
pydantic = "^1.7.4"
pydantic = ">=1.7.4"

# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
@@ -309,10 +311,9 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.277"
ruff = "0.0.265"

# Typechecking
lxml-stubs = ">=0.4.0"
mypy = "*"
mypy-zope = "*"
types-bleach = ">=4.1.0"
@@ -373,15 +374,7 @@ build-backend = "poetry.core.masonry.api"

[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - CPython 3.6 and 3.7: EOLed
# - PyPy 3.7: we only support Python 3.8+
# - musllinux i686: excluded to reduce number of wheels we build.
#   c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
# - PyPy on Aarch64 and musllinux on aarch64: too slow to build.
#   c.f. https://github.com/matrix-org/synapse/pull/14259
skip = "cp36* cp37* pp37* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"
skip = "cp36* *-musllinux_i686 pp*aarch64 *-musllinux_aarch64"

# We need a rust compiler
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable -y --profile minimal"

@@ -7,7 +7,7 @@ name = "synapse"
version = "0.1.0"

edition = "2021"
rust-version = "1.60.0"
rust-version = "1.58.1"

[lib]
name = "synapse"

@@ -13,6 +13,8 @@
// limitations under the License.

#![feature(test)]
use std::collections::BTreeSet;

use synapse::push::{
    evaluator::PushRuleEvaluator, Condition, EventMatchCondition, FilteredPushRules, JsonValue,
    PushRules, SimpleJsonValue,
@@ -195,6 +197,7 @@ fn bench_eval_message(b: &mut Bencher) {
        false,
        false,
        false,
        false,
    );

    b.iter(|| eval.run(&rules, Some("bob"), Some("person")));

@@ -142,11 +142,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.is_user_mention"),
        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_user_mention"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[Condition::Known(
            KnownCondition::ExactEventPropertyContainsType(EventPropertyIsTypeCondition {
                key: Cow::Borrowed("content.m\\.mentions.user_ids"),
                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.user_ids"),
                value_type: Cow::Borrowed(&EventMatchPatternType::UserId),
            }),
        )]),
@@ -163,11 +163,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
        default_enabled: true,
    },
    PushRule {
        rule_id: Cow::Borrowed("global/override/.m.rule.is_room_mention"),
        rule_id: Cow::Borrowed(".org.matrix.msc3952.is_room_mention"),
        priority_class: 5,
        conditions: Cow::Borrowed(&[
            Condition::Known(KnownCondition::EventPropertyIs(EventPropertyIsCondition {
                key: Cow::Borrowed("content.m\\.mentions.room"),
                key: Cow::Borrowed("content.org\\.matrix\\.msc3952\\.mentions.room"),
                value: Cow::Borrowed(&SimpleJsonValue::Bool(true)),
            })),
            Condition::Known(KnownCondition::SenderNotificationPermission {
@@ -70,9 +70,7 @@ pub struct PushRuleEvaluator {
    /// The "content.body", if any.
    body: String,

    /// True if the event has an m.mentions property. (Note that this is a separate
    /// flag instead of checking flattened_keys since the m.mentions property
    /// might be an empty map and not appear in flattened_keys.)
    /// True if the event has a mentions property and MSC3952 support is enabled.
    has_mentions: bool,

    /// The number of users in the room.
@@ -157,7 +155,9 @@ impl PushRuleEvaluator {
        let rule_id = &push_rule.rule_id().to_string();

        // For backwards-compatibility the legacy mention rules are disabled
        // if the event contains the 'm.mentions' property.
        // if the event contains the 'm.mentions' property (and if the
        // experimental feature is enabled, both of these are represented
        // by the has_mentions flag).
        if self.has_mentions
            && (rule_id == "global/override/.m.rule.contains_display_name"
                || rule_id == "global/content/.m.rule.contains_user_name"
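The gating above is easy to misread in diff form; a Python paraphrase of the logic (a hypothetical helper, not Synapse's actual API), under the assumption that both branches gate on the same has_mentions flag:

# Paraphrase of the Rust check above: when the event carries a mentions
# property, the legacy mention rules must not fire.
LEGACY_MENTION_RULES = {
    "global/override/.m.rule.contains_display_name",
    "global/content/.m.rule.contains_user_name",
    # The hunk is truncated here; the real condition may cover more rules.
}

def legacy_rule_suppressed(rule_id: str, has_mentions: bool) -> bool:
    """True if this legacy mention rule should be skipped for the event."""
    return has_mentions and rule_id in LEGACY_MENTION_RULES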
@@ -562,7 +562,7 @@ fn test_requires_room_version_supports_condition() {
    };
    let rules = PushRules::new(vec![custom_rule]);
    result = evaluator.run(
        &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false),
        &FilteredPushRules::py_new(rules, BTreeMap::new(), true, false, true, false, false),
        None,
        None,
    );
@@ -527,6 +527,7 @@ pub struct FilteredPushRules {
    msc1767_enabled: bool,
    msc3381_polls_enabled: bool,
    msc3664_enabled: bool,
    msc3952_intentional_mentions: bool,
    msc3958_suppress_edits_enabled: bool,
}

@@ -539,6 +540,7 @@ impl FilteredPushRules {
        msc1767_enabled: bool,
        msc3381_polls_enabled: bool,
        msc3664_enabled: bool,
        msc3952_intentional_mentions: bool,
        msc3958_suppress_edits_enabled: bool,
    ) -> Self {
        Self {
@@ -547,6 +549,7 @@ impl FilteredPushRules {
            msc1767_enabled,
            msc3381_polls_enabled,
            msc3664_enabled,
            msc3952_intentional_mentions,
            msc3958_suppress_edits_enabled,
        }
    }
@@ -584,6 +587,10 @@ impl FilteredPushRules {
                    return false;
                }

                if !self.msc3952_intentional_mentions && rule.rule_id.contains("org.matrix.msc3952")
                {
                    return false;
                }
                if !self.msc3958_suppress_edits_enabled
                    && rule.rule_id == "global/override/.com.beeper.suppress_edits"
                {
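Both checks follow the same shape as the existing per-MSC filtering: a rule shipped behind an experimental flag is silently dropped unless that flag is on. A standalone Python sketch of the pattern (names are stand-ins for the Rust fields above):

def rule_enabled(
    rule_id: str,
    msc3952_intentional_mentions: bool,
    msc3958_suppress_edits_enabled: bool,
) -> bool:
    # Mirrors the Rust checks above: experimental rules are filtered out
    # unless the corresponding feature flag is enabled.
    if not msc3952_intentional_mentions and "org.matrix.msc3952" in rule_id:
        return False
    if (
        not msc3958_suppress_edits_enabled
        and rule_id == "global/override/.com.beeper.suppress_edits"
    ):
        return False
    return True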
@@ -20,21 +20,15 @@ from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Optional, Sequence, Set

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
#
# If an EOL is forced by a Python version and we're dropping support for it, make sure
# to remove references to the distribution across Synapse (search for "bullseye" for
# example)
DISTS = (
    "debian:bullseye",  # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
    "debian:bookworm",  # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
    "debian:sid",  # (EOL not specified yet) (our EOL forced by Python 3.11 is 2027-10-24)
    "ubuntu:focal",  # 20.04 LTS (EOL 2025-04) (our EOL forced by Python 3.8 is 2024-10-14)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
    "ubuntu:kinetic",  # 22.10 (EOL 2023-07-20) (our EOL forced by Python 3.10 is 2026-10-04)
    "ubuntu:lunar",  # 23.04 (EOL 2024-01) (our EOL forced by Python 3.11 is 2027-10-24)
    "debian:trixie",  # (EOL not specified yet)
    "debian:buster",  # oldstable: EOL 2022-08
    "debian:bullseye",
    "debian:bookworm",
    "debian:sid",
    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04)
    "ubuntu:kinetic",  # 22.10 (EOL 2023-07-20)
    "ubuntu:lunar",  # 23.04 (EOL 2024-01)
)

DESC = """\
@@ -214,7 +214,7 @@ fi

extra_test_args=()

test_tags="synapse_blacklist,msc3874,msc3890,msc3391,msc3930,faster_joins"
test_tags="synapse_blacklist,msc3787,msc3874,msc3890,msc3391,msc3930,faster_joins"

# All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.)
@@ -246,6 +246,10 @@ else
  else
    export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
  fi

  # The tests for importing historical messages (MSC2716)
  # only pass with monoliths, currently.
  test_tags="$test_tags,msc2716"
fi

if [[ -n "$ASYNCIO_REACTOR" ]]; then
@@ -253,10 +257,6 @@ if [[ -n "$ASYNCIO_REACTOR" ]]; then
  export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
fi

if [[ -n "$UNIX_SOCKETS" ]]; then
  # Enable full Unix socket mode for Synapse, Redis and PostgreSQL
  export PASS_SYNAPSE_USE_UNIX_SOCKET=1
fi

if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
  # Set the log level to what is desired
@@ -269,10 +269,6 @@ if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
  export PASS_SYNAPSE_LOG_SENSITIVE=1
fi

# Log a few more useful things for a developer attempting to debug something
# particularly tricky.
export PASS_SYNAPSE_LOG_TESTING=1

# Run the tests!
echo "Images built; running complement"
cd "$COMPLEMENT_DIR"
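The PASS_ convention noted above is simple to restate in code. A minimal sketch of the consuming side, assuming the prefix is stripped exactly as the comment describes:

import os

# Collect every PASS_-prefixed variable, dropping the prefix, as the
# comment above says happens before the variables reach the container.
forwarded = {
    name[len("PASS_"):]: value
    for name, value in os.environ.items()
    if name.startswith("PASS_")
}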
@@ -136,11 +136,11 @@ def request(
    authorization_headers.append(header)
    print("Authorization: %s" % header, file=sys.stderr)

    dest = "matrix-federation://%s%s" % (destination, path)
    dest = "matrix://%s%s" % (destination, path)
    print("Requesting %s" % dest, file=sys.stderr)

    s = requests.Session()
    s.mount("matrix-federation://", MatrixConnectionAdapter())
    s.mount("matrix://", MatrixConnectionAdapter())

    headers: Dict[str, str] = {
        "Authorization": authorization_headers[0],
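`Session.mount` routes any URL starting with the given prefix through the supplied transport adapter, which is how this script teaches requests the non-standard matrix-federation:// (formerly matrix://) scheme. A self-contained sketch with a stub adapter standing in for the script's MatrixConnectionAdapter:

import requests

class StubMatrixAdapter(requests.adapters.HTTPAdapter):
    """Stand-in for MatrixConnectionAdapter; the real adapter performs
    Matrix server discovery and opens the federation connection itself."""

s = requests.Session()
# Requests whose URL begins with this prefix are handed to the adapter.
s.mount("matrix-federation://", StubMatrixAdapter())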
@@ -18,11 +18,10 @@ can crop up, e.g. the cache descriptors.

from typing import Callable, Optional, Type

from mypy.erasetype import remove_instance_last_known_values
from mypy.nodes import ARG_NAMED_OPT
from mypy.plugin import MethodSigContext, Plugin
from mypy.typeops import bind_self
from mypy.types import CallableType, Instance, NoneType, UnionType
from mypy.types import CallableType, NoneType, UnionType


class SynapsePlugin(Plugin):
@@ -93,41 +92,10 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
    arg_names.append("on_invalidate")
    arg_kinds.append(ARG_NAMED_OPT)  # Arg is an optional kwarg.

    # Finally we ensure the return type is a Deferred.
    if (
        isinstance(signature.ret_type, Instance)
        and signature.ret_type.type.fullname == "twisted.internet.defer.Deferred"
    ):
        # If it is already a Deferred, nothing to do.
        ret_type = signature.ret_type
    else:
        ret_arg = None
        if isinstance(signature.ret_type, Instance):
            # If a coroutine, wrap the coroutine's return type in a Deferred.
            if signature.ret_type.type.fullname == "typing.Coroutine":
                ret_arg = signature.ret_type.args[2]

            # If an awaitable, wrap the awaitable's final value in a Deferred.
            elif signature.ret_type.type.fullname == "typing.Awaitable":
                ret_arg = signature.ret_type.args[0]

        # Otherwise, wrap the return value in a Deferred.
        if ret_arg is None:
            ret_arg = signature.ret_type

        # This should be able to use ctx.api.named_generic_type, but that doesn't seem
        # to find the correct symbol for anything more than 1 module deep.
        #
        # modules is not part of CheckerPluginInterface. The following is a combination
        # of TypeChecker.named_generic_type and TypeChecker.lookup_typeinfo.
        sym = ctx.api.modules["twisted.internet.defer"].names.get("Deferred")  # type: ignore[attr-defined]
        ret_type = Instance(sym.node, [remove_instance_last_known_values(ret_arg)])

    signature = signature.copy_modified(
        arg_types=arg_types,
        arg_names=arg_names,
        arg_kinds=arg_kinds,
        ret_type=ret_type,
    )

    return signature
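Informally, the deleted block rewrote the declared return type of a cached method into a Deferred at call sites. A comment-only summary of the mapping it implemented (the method name is hypothetical):

# async def get_user(...) -> int        =>  Deferred[int]   (Coroutine: args[2])
# def get_user(...) -> Awaitable[int]   =>  Deferred[int]   (Awaitable: args[0])
# def get_user(...) -> Deferred[int]    =>  Deferred[int]   (already wrapped)
# def get_user(...) -> int              =>  Deferred[int]   (plain value wrapped)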
@@ -27,7 +27,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, List, Match, Optional, Union
from typing import Any, List, Optional

import attr
import click
@@ -233,7 +233,7 @@ def _prepare() -> None:
    subprocess.check_output(["poetry", "version", new_version])

    # Generate changelogs.
    generate_and_write_changelog(synapse_repo, current_version, new_version)
    generate_and_write_changelog(current_version, new_version)

    # Generate debian changelogs
    if parsed_new_version.pre is not None:
@@ -814,7 +814,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:


def generate_and_write_changelog(
    repo: Repo, current_version: version.Version, new_version: str
    current_version: version.Version, new_version: str
) -> None:
    # We do this by getting a draft so that we can edit it before writing to the
    # changelog.
@@ -827,10 +827,6 @@ def generate_and_write_changelog(
    new_changes = new_changes.replace(
        "No significant changes.", f"No significant changes since {current_version}."
    )
    new_changes += build_dependabot_changelog(
        repo,
        current_version,
    )

    # Prepend changes to changelog
    with open("CHANGES.md", "r+") as f:
@@ -845,49 +841,5 @@ def generate_and_write_changelog(
        os.remove(filename)


def build_dependabot_changelog(repo: Repo, current_version: version.Version) -> str:
    """Summarise dependabot commits between `current_version` and `release_branch`.

    Returns an empty string if there have been no such commits; otherwise outputs a
    third-level markdown header followed by an unordered list."""
    last_release_commit = repo.tag("v" + str(current_version)).commit
    rev_spec = f"{last_release_commit.hexsha}.."
    commits = list(git.objects.Commit.iter_items(repo, rev_spec))
    messages = []
    for commit in reversed(commits):
        if commit.author.name == "dependabot[bot]":
            message: Union[str, bytes] = commit.message
            if isinstance(message, bytes):
                message = message.decode("utf-8")
            messages.append(message.split("\n", maxsplit=1)[0])

    if not messages:
        print(f"No dependabot commits in range {rev_spec}", file=sys.stderr)
        return ""

    messages.sort()

    def replacer(match: Match[str]) -> str:
        desc = match.group(1)
        number = match.group(2)
        return f"* {desc}. ([\\#{number}](https://github.com/matrix-org/synapse/issues/{number}))"

    for i, message in enumerate(messages):
        messages[i] = re.sub(r"(.*) \(#(\d+)\)$", replacer, message)
    messages.insert(0, "### Updates to locked dependencies\n")
    # Add an extra blank line to the bottom of the section
    messages.append("")
    return "\n".join(messages)


@cli.command()
@click.argument("since")
def test_dependabot_changelog(since: str) -> None:
    """Test building the dependabot changelog.

    Summarises all dependabot commits between the SINCE tag and the current git HEAD."""
    print(build_dependabot_changelog(git.Repo("."), version.Version(since)))


if __name__ == "__main__":
    cli()
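The `replacer` above turns a squash-merged dependabot commit title into a markdown changelog bullet. A worked example, directly reusing the regex from the code (the input line and PR number are made up for illustration):

import re

def replacer(match: "re.Match[str]") -> str:
    desc, number = match.group(1), match.group(2)
    return f"* {desc}. ([\\#{number}](https://github.com/matrix-org/synapse/issues/{number}))"

line = "Bump serde from 1.0.184 to 1.0.188 (#16033)"  # illustrative input
print(re.sub(r"(.*) \(#(\d+)\)$", replacer, line))
# * Bump serde from 1.0.184 to 1.0.188. ([\#16033](https://github.com/matrix-org/synapse/issues/16033))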
@@ -46,6 +46,7 @@ class FilteredPushRules:
        msc1767_enabled: bool,
        msc3381_polls_enabled: bool,
        msc3664_enabled: bool,
        msc3952_intentional_mentions: bool,
        msc3958_suppress_edits_enabled: bool,
    ): ...
    def rules(self) -> Collection[Tuple[PushRule, bool]]: ...
@@ -61,9 +61,6 @@ def lazyConnection(
# most methods to it via ConnectionHandler.__getattr__.
class ConnectionHandler(RedisProtocol):
    def disconnect(self) -> "Deferred[None]": ...
    def __repr__(self) -> str: ...

class UnixConnectionHandler(ConnectionHandler): ...

class RedisFactory(protocol.ReconnectingClientFactory):
    continueTrying: bool
@@ -25,8 +25,8 @@ from synapse.util.rust import check_rust_lib_up_to_date
from synapse.util.stringutils import strtobool

# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 8):
    print("Synapse requires Python 3.8 or above.")
if sys.version_info < (3, 7):
    print("Synapse requires Python 3.7 or above.")
    sys.exit(1)

# Allow using the asyncio reactor via env var.
@@ -61,7 +61,6 @@ from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpda
from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
from synapse.storage.databases.main.end_to_end_keys import EndToEndKeyBackgroundStore
from synapse.storage.databases.main.event_federation import EventFederationWorkerStore
from synapse.storage.databases.main.event_push_actions import EventPushActionsStore
from synapse.storage.databases.main.events_bg_updates import (
    EventsBackgroundUpdatesStore,
@@ -197,11 +196,6 @@ IGNORED_TABLES = {
    "ui_auth_sessions",
    "ui_auth_sessions_credentials",
    "ui_auth_sessions_ips",
    # Ignore the worker locks table, as a) there shouldn't be any acquired locks
    # after porting, and b) the circular foreign key constraints make it hard to
    # port.
    "worker_read_write_locks_mode",
    "worker_read_write_locks",
}


@@ -245,7 +239,6 @@ class Store(
    PresenceBackgroundUpdateStore,
    ReceiptsBackgroundUpdateStore,
    RelationsWorkerStore,
    EventFederationWorkerStore,
):
    def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
        return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
@@ -810,9 +803,7 @@ class Porter:
        )
        # Map from table name to args passed to `handle_table`, i.e. a tuple
        # of: `postgres_size`, `table_size`, `forward_chunk`, `backward_chunk`.
        tables_to_port_info_map = {
            r[0]: r[1:] for r in setup_res if r[0] not in IGNORED_TABLES
        }
        tables_to_port_info_map = {r[0]: r[1:] for r in setup_res}

        # Step 5. Do the copying.
        #
@@ -1378,9 +1369,6 @@ def main() -> None:
        sys.stderr.write("Database must use the 'psycopg2' connector.\n")
        sys.exit(3)

    # Don't run the background tasks that get started by the data stores.
    hs_config["run_background_tasks_on"] = "some_other_process"

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")
@@ -1,4 +1,4 @@
# Copyright 2023 The Matrix.org Foundation.
# Copyright 2014 - 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,6 +14,7 @@
import logging
from typing import TYPE_CHECKING, Optional, Tuple

import pymacaroons
from netaddr import IPAddress

from twisted.web.server import Request
@@ -23,11 +24,19 @@ from synapse.api.constants import EventTypes, HistoryVisibility, Membership
from synapse.api.errors import (
    AuthError,
    Codes,
    InvalidClientTokenError,
    MissingClientTokenError,
    UnstableSpecAuthError,
)
from synapse.appservice import ApplicationService
from synapse.logging.opentracing import trace
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import (
    active_span,
    force_tracing,
    start_active_span,
    trace,
)
from synapse.types import Requester, create_requester
from synapse.util.cancellation import cancellable
@@ -37,13 +46,26 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


class BaseAuth:
    """Common base class for all auth implementations."""
# guests always get this device id.
GUEST_DEVICE_ID = "guest_device"


class Auth:
    """
    This class contains functions for authenticating users of our client-server API.
    """

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self.clock = hs.get_clock()
        self.store = hs.get_datastores().main
        self._account_validity_handler = hs.get_account_validity_handler()
        self._storage_controllers = hs.get_storage_controllers()
        self._macaroon_generator = hs.get_macaroon_generator()

        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
        self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips
        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

    async def check_user_in_room(
        self,
@@ -97,49 +119,139 @@ class BaseAuth:
                errcode=Codes.NOT_JOINED,
            )

    @trace
    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.
    @cancellable
    async def get_user_by_req(
        self,
        request: SynapseRequest,
        allow_guest: bool = False,
        allow_expired: bool = False,
    ) -> Requester:
        """Get a registered user's ID.

        Args:
            room_id: room to check
            user_id: user to check
            allow_departed_users: if True, accept users that were previously
                members but have now departed
            request: An HTTP request with an access_token query parameter.
            allow_guest: If False, will raise an AuthError if the user making the
                request is a guest.
            allow_expired: If True, allow the request through even if the account
                is expired, or session token lifetime has ended. Note that
                /login will deliver access tokens regardless of expiration.

        Returns:
            Resolves to the current membership of the user in the room and the
            membership event ID of the user. If the user is not in the room and
            never has been, then `(Membership.JOIN, None)` is returned.
            Resolves to the requester
        Raises:
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid.
            AuthError if access is denied for the user in the access token
        """
        parent_span = active_span()
        with start_active_span("get_user_by_req"):
            requester = await self._wrapped_get_user_by_req(
                request, allow_guest, allow_expired
            )

            if parent_span:
                if requester.authenticated_entity in self._force_tracing_for_users:
                    # request tracing is enabled for this user, so we need to force
                    # tracing on for the parent span (which will be the servlet span).
                    #
                    # It's too late for the get_user_by_req span to inherit the setting,
                    # so we also force it on for that.
                    force_tracing()
                    force_tracing(parent_span)
                parent_span.set_tag(
                    "authenticated_entity", requester.authenticated_entity
                )
                parent_span.set_tag("user_id", requester.user.to_string())
                if requester.device_id is not None:
                    parent_span.set_tag("device_id", requester.device_id)
                if requester.app_service is not None:
                    parent_span.set_tag("appservice_id", requester.app_service.id)
            return requester

    @cancellable
    async def _wrapped_get_user_by_req(
        self,
        request: SynapseRequest,
        allow_guest: bool,
        allow_expired: bool,
    ) -> Requester:
        """Helper for get_user_by_req

        Once get_user_by_req has set up the opentracing span, this does the actual work.
        """
        try:
            # check_user_in_room will return the most recent membership
            # event for the user if:
            #  * The user is a non-guest user, and was ever in the room
            #  * The user is a guest user, and has joined the room
            # else it will throw.
            return await self.check_user_in_room(
                room_id, requester, allow_departed_users=allow_departed_users
            )
        except AuthError:
            visibility = await self._storage_controllers.state.get_current_state_event(
                room_id, EventTypes.RoomHistoryVisibility, ""
            )
            if (
                visibility
                and visibility.content.get("history_visibility")
                == HistoryVisibility.WORLD_READABLE
            ip_addr = request.getClientAddress().host
            user_agent = get_request_user_agent(request)

            access_token = self.get_access_token_from_request(request)

            # First check if it could be a request from an appservice
            requester = await self._get_appservice_user(request)
            if not requester:
                # If not, it should be from a regular user
                requester = await self.get_user_by_access_token(
                    access_token, allow_expired=allow_expired
                )

                # Deny the request if the user account has expired.
                # This check is only done for regular users, not appservice ones.
                if not allow_expired:
                    if await self._account_validity_handler.is_user_expired(
                        requester.user.to_string()
                    ):
                        # Raise the error if either an account validity module has determined
                        # the account has expired, or the legacy account validity
                        # implementation is enabled and determined the account has expired
                        raise AuthError(
                            403,
                            "User account has expired",
                            errcode=Codes.EXPIRED_ACCOUNT,
                        )

            if ip_addr and (
                not requester.app_service or self._track_appservice_user_ips
            ):
                return Membership.JOIN, None
            raise AuthError(
                403,
                "User %r not in room %s, and room previews are disabled"
                % (requester.user, room_id),
            )
                # XXX(quenting): I'm 95% confident that we could skip setting the
                # device_id to "dummy-device" for appservices, and that the only impact
                # would be some rows which would not deduplicate in the 'user_ips'
                # table during the transition
                recorded_device_id = (
                    "dummy-device"
                    if requester.device_id is None and requester.app_service is not None
                    else requester.device_id
                )
                await self.store.insert_client_ip(
                    user_id=requester.authenticated_entity,
                    access_token=access_token,
                    ip=ip_addr,
                    user_agent=user_agent,
                    device_id=recorded_device_id,
                )

                # Track also the puppeted user client IP if enabled and the user is puppeting
                if (
                    requester.user.to_string() != requester.authenticated_entity
                    and self._track_puppeted_user_ips
                ):
                    await self.store.insert_client_ip(
                        user_id=requester.user.to_string(),
                        access_token=access_token,
                        ip=ip_addr,
                        user_agent=user_agent,
                        device_id=requester.device_id,
                    )

            if requester.is_guest and not allow_guest:
                raise AuthError(
                    403,
                    "Guest access not allowed",
                    errcode=Codes.GUEST_ACCESS_FORBIDDEN,
                )

            request.requester = requester
            return requester
        except KeyError:
            raise MissingClientTokenError()

    async def validate_appservice_can_control_user_id(
        self, app_service: ApplicationService, user_id: str
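The tracing dance in get_user_by_req above is subtle: whether to trace is only known after authentication, by which time the servlet span already exists, so sampling is forced on retroactively for both spans. A condensed sketch of the pattern, assuming the opentracing helpers imported above behave as their names suggest; `authenticate` and `forced_users` are hypothetical stand-ins:

from synapse.logging.opentracing import active_span, force_tracing, start_active_span

async def traced_get_user_by_req(request, authenticate, forced_users):
    parent_span = active_span()  # the servlet span, started before auth ran
    with start_active_span("get_user_by_req"):
        requester = await authenticate(request)
        if parent_span is not None and requester.authenticated_entity in forced_users:
            # Too late for either span to inherit the sampling decision,
            # so flip tracing on for this span and its parent explicitly.
            force_tracing()
            force_tracing(parent_span)
        return requester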
@@ -172,16 +284,184 @@ class BaseAuth:
            403, "Application service has not registered this user (%s)" % user_id
        )

    @cancellable
    async def _get_appservice_user(self, request: Request) -> Optional[Requester]:
        """
        Given a request, reads the request parameters to determine:
        - whether it's an application service that's making this request
        - what user the application service should be treated as controlling
          (the user_id URI parameter allows an application service to masquerade as
          any applicable user in its namespace)
        - what device the application service should be treated as controlling
          (the device_id[^1] URI parameter allows an application service to masquerade
          as any device that exists for the relevant user)

        [^1] Unstable and provided by MSC3202.
             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.

        Returns:
            the application service `Requester` of that request

        Postconditions:
        - The `app_service` field in the returned `Requester` is set
        - The `user_id` field in the returned `Requester` is either the application
          service sender or the controlled user set by the `user_id` URI parameter
        - The returned application service is permitted to control the returned user ID.
        - The returned device ID, if present, has been checked to be a valid device ID
          for the returned user ID.
        """
        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

        app_service = self.store.get_app_service_by_token(
            self.get_access_token_from_request(request)
        )
        if app_service is None:
            return None

        if app_service.ip_range_whitelist:
            ip_address = IPAddress(request.getClientAddress().host)
            if ip_address not in app_service.ip_range_whitelist:
                return None

        # This will always be set by the time Twisted calls us.
        assert request.args is not None

        if b"user_id" in request.args:
            effective_user_id = request.args[b"user_id"][0].decode("utf8")
            await self.validate_appservice_can_control_user_id(
                app_service, effective_user_id
            )
        else:
            effective_user_id = app_service.sender

        effective_device_id: Optional[str] = None

        if (
            self.hs.config.experimental.msc3202_device_masquerading_enabled
            and DEVICE_ID_ARG_NAME in request.args
        ):
            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
            # We only just set this so it can't be None!
            assert effective_device_id is not None
            device_opt = await self.store.get_device(
                effective_user_id, effective_device_id
            )
            if device_opt is None:
                # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
                # This is an open thread of discussion on MSC3202 as of 2021-12-09.
                raise AuthError(
                    400,
                    f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
                    Codes.EXCLUSIVE,
                )

        return create_requester(
            effective_user_id, app_service=app_service, device_id=effective_device_id
        )

    async def get_user_by_access_token(
        self,
        token: str,
        allow_expired: bool = False,
    ) -> Requester:
        """Validate access token and get user_id from it

        Args:
            token: The access token to get the user by
            allow_expired: If False, raises an InvalidClientTokenError
                if the token is expired

        Raises:
            InvalidClientTokenError if a user by that token exists, but the token is
                expired
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid
        """

        # First look in the database to see if the access token is present
        # as an opaque token.
        user_info = await self.store.get_user_by_access_token(token)
        if user_info:
            valid_until_ms = user_info.valid_until_ms
            if (
                not allow_expired
                and valid_until_ms is not None
                and valid_until_ms < self.clock.time_msec()
            ):
                # there was a valid access token, but it has expired.
                # soft-logout the user.
                raise InvalidClientTokenError(
                    msg="Access token has expired", soft_logout=True
                )

            # Mark the token as used. This is used to invalidate old refresh
            # tokens after some time.
            await self.store.mark_access_token_as_used(user_info.token_id)

            requester = create_requester(
                user_id=user_info.user_id,
                access_token_id=user_info.token_id,
                is_guest=user_info.is_guest,
                shadow_banned=user_info.shadow_banned,
                device_id=user_info.device_id,
                authenticated_entity=user_info.token_owner,
            )

            return requester

        # If the token isn't found in the database, then it could still be a
        # macaroon for a guest, so we check that here.
        try:
            user_id = self._macaroon_generator.verify_guest_token(token)

            # Guest access tokens are not stored in the database (there can
            # only be one access token per guest, anyway).
            #
            # In order to prevent guest access tokens being used as regular
            # user access tokens (and hence getting around the invalidation
            # process), we look up the user id and check that it is indeed
            # a guest user.
            #
            # It would of course be much easier to store guest access
            # tokens in the database as well, but that would break existing
            # guest tokens.
            stored_user = await self.store.get_user_by_id(user_id)
            if not stored_user:
                raise InvalidClientTokenError("Unknown user_id %s" % user_id)
            if not stored_user["is_guest"]:
                raise InvalidClientTokenError(
                    "Guest access token used for regular user"
                )

            return create_requester(
                user_id=user_id,
                is_guest=True,
                # all guests get the same device id
                device_id=GUEST_DEVICE_ID,
                authenticated_entity=user_id,
            )
        except (
            pymacaroons.exceptions.MacaroonException,
            TypeError,
            ValueError,
        ) as e:
            logger.warning(
                "Invalid access token in auth: %s %s.",
                type(e),
                e,
            )
            raise InvalidClientTokenError("Invalid access token passed.")

    async def is_server_admin(self, requester: Requester) -> bool:
        """Check if the given user is a local server admin.

        Args:
            requester: user to check
            requester: The user making the request, according to the access token.

        Returns:
            True if the user is an admin
        """
        raise NotImplementedError()
        return await self.store.is_server_admin(requester.user)

    async def check_can_change_room_list(
        self, room_id: str, requester: Requester
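On the wire, the masquerading inputs read by _get_appservice_user are ordinary query parameters on a client-server request. An illustrative request (homeserver, room, user, device and token are all hypothetical):

import requests

resp = requests.put(
    "https://homeserver.example/_matrix/client/v3/rooms/!room:example/typing/@alice:example",
    params={
        "user_id": "@alice:example",                    # stable masquerading parameter
        "org.matrix.msc3202.device_id": "ALICEDEVICE",  # unstable, MSC3202
    },
    headers={"Authorization": "Bearer <appservice-token>"},
    json={"typing": True, "timeout": 30000},
)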
@@ -190,8 +470,8 @@ class BaseAuth:
        published room list.

        Args:
            room_id
            user
            room_id: The room to check.
            requester: The user making the request, according to the access token.
        """

        is_admin = await self.is_server_admin(requester)
@@ -238,6 +518,7 @@ class BaseAuth:
        return bool(query_params) or bool(auth_headers)

    @staticmethod
    @cancellable
    def get_access_token_from_request(request: Request) -> str:
        """Extracts the access_token from the request.

@@ -275,77 +556,47 @@ class BaseAuth:

        return query_params[0].decode("ascii")

    @cancellable
    async def get_appservice_user(
        self, request: Request, access_token: str
    ) -> Optional[Requester]:
        """
        Given a request, reads the request parameters to determine:
        - whether it's an application service that's making this request
        - what user the application service should be treated as controlling
          (the user_id URI parameter allows an application service to masquerade as
          any applicable user in its namespace)
        - what device the application service should be treated as controlling
          (the device_id[^1] URI parameter allows an application service to masquerade
          as any device that exists for the relevant user)
    @trace
    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.

        [^1] Unstable and provided by MSC3202.
             Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.
        Args:
            room_id: The room to check.
            requester: The user making the request, according to the access token.
            allow_departed_users: If True, accept users that were previously
                members but have now departed.

        Returns:
            the application service `Requester` of that request

        Postconditions:
        - The `app_service` field in the returned `Requester` is set
        - The `user_id` field in the returned `Requester` is either the application
          service sender or the controlled user set by the `user_id` URI parameter
        - The returned application service is permitted to control the returned user ID.
        - The returned device ID, if present, has been checked to be a valid device ID
          for the returned user ID.
            Resolves to the current membership of the user in the room and the
            membership event ID of the user. If the user is not in the room and
            never has been, then `(Membership.JOIN, None)` is returned.
        """
        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

        app_service = self.store.get_app_service_by_token(access_token)
        if app_service is None:
            return None

        if app_service.ip_range_whitelist:
            ip_address = IPAddress(request.getClientAddress().host)
            if ip_address not in app_service.ip_range_whitelist:
                return None

        # This will always be set by the time Twisted calls us.
        assert request.args is not None

        if b"user_id" in request.args:
            effective_user_id = request.args[b"user_id"][0].decode("utf8")
            await self.validate_appservice_can_control_user_id(
                app_service, effective_user_id
        try:
            # check_user_in_room will return the most recent membership
            # event for the user if:
            #  * The user is a non-guest user, and was ever in the room
            #  * The user is a guest user, and has joined the room
            # else it will throw.
            return await self.check_user_in_room(
                room_id, requester, allow_departed_users=allow_departed_users
            )
        else:
            effective_user_id = app_service.sender

        effective_device_id: Optional[str] = None

        if (
            self.hs.config.experimental.msc3202_device_masquerading_enabled
            and DEVICE_ID_ARG_NAME in request.args
        ):
            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
            # We only just set this so it can't be None!
            assert effective_device_id is not None
            device_opt = await self.store.get_device(
                effective_user_id, effective_device_id
        except AuthError:
            visibility = await self._storage_controllers.state.get_current_state_event(
                room_id, EventTypes.RoomHistoryVisibility, ""
            )
            if (
                visibility
                and visibility.content.get("history_visibility")
                == HistoryVisibility.WORLD_READABLE
            ):
                return Membership.JOIN, None
            raise UnstableSpecAuthError(
                403,
                "User %s not in room %s, and room previews are disabled"
                % (requester.user, room_id),
                errcode=Codes.NOT_JOINED,
            )
            if device_opt is None:
                # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
                # This is an open thread of discussion on MSC3202 as of 2021-12-09.
                raise AuthError(
                    400,
                    f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
                    Codes.EXCLUSIVE,
                )

        return create_requester(
            effective_user_id, app_service=app_service, device_id=effective_device_id
        )
@@ -1,175 +0,0 @@
# Copyright 2023 The Matrix.org Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, Tuple

from typing_extensions import Protocol

from twisted.web.server import Request

from synapse.appservice import ApplicationService
from synapse.http.site import SynapseRequest
from synapse.types import Requester

# guests always get this device id.
GUEST_DEVICE_ID = "guest_device"


class Auth(Protocol):
    """The interface that an auth provider must implement."""

    async def check_user_in_room(
        self,
        room_id: str,
        requester: Requester,
        allow_departed_users: bool = False,
    ) -> Tuple[str, Optional[str]]:
        """Check if the user is in the room, or was at some point.
        Args:
            room_id: The room to check.

            user_id: The user to check.

            current_state: Optional map of the current state of the room.
                If provided then that map is used to check whether they are a
                member of the room. Otherwise the current membership is
                loaded from the database.

            allow_departed_users: if True, accept users that were previously
                members but have now departed.

        Raises:
            AuthError if the user is/was not in the room.
        Returns:
            The current membership of the user in the room and the
            membership event ID of the user.
        """

    async def get_user_by_req(
        self,
        request: SynapseRequest,
        allow_guest: bool = False,
        allow_expired: bool = False,
    ) -> Requester:
        """Get a registered user's ID.

        Args:
            request: An HTTP request with an access_token query parameter.
            allow_guest: If False, will raise an AuthError if the user making the
                request is a guest.
            allow_expired: If True, allow the request through even if the account
                is expired, or session token lifetime has ended. Note that
                /login will deliver access tokens regardless of expiration.

        Returns:
            Resolves to the requester
        Raises:
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid.
            AuthError if access is denied for the user in the access token
        """

    async def validate_appservice_can_control_user_id(
        self, app_service: ApplicationService, user_id: str
    ) -> None:
        """Validates that the app service is allowed to control
        the given user.

        Args:
            app_service: The app service that controls the user
            user_id: The author MXID that the app service is controlling

        Raises:
            AuthError: If the application service is not allowed to control the user
                (user namespace regex does not match, wrong homeserver, etc)
                or if the user has not been registered yet.
        """

    async def get_user_by_access_token(
        self,
        token: str,
        allow_expired: bool = False,
    ) -> Requester:
        """Validate access token and get user_id from it

        Args:
            token: The access token to get the user by
            allow_expired: If False, raises an InvalidClientTokenError
                if the token is expired

        Raises:
            InvalidClientTokenError if a user by that token exists, but the token is
                expired
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid
        """

    async def is_server_admin(self, requester: Requester) -> bool:
        """Check if the given user is a local server admin.

        Args:
            requester: user to check

        Returns:
            True if the user is an admin
        """

    async def check_can_change_room_list(
        self, room_id: str, requester: Requester
    ) -> bool:
        """Determine whether the user is allowed to edit the room's entry in the
        published room list.

        Args:
            room_id
            user
        """

    @staticmethod
    def has_access_token(request: Request) -> bool:
        """Checks if the request has an access_token.

        Returns:
            False if no access_token was given, True otherwise.
        """

    @staticmethod
    def get_access_token_from_request(request: Request) -> str:
        """Extracts the access_token from the request.

        Args:
            request: The http request.
        Returns:
            The access_token
        Raises:
            MissingClientTokenError: If there isn't a single access_token in the
                request
        """

    async def check_user_in_room_or_world_readable(
        self, room_id: str, requester: Requester, allow_departed_users: bool = False
    ) -> Tuple[str, Optional[str]]:
        """Checks that the user is or was in the room or the room is world
        readable. If it isn't then an exception is raised.

        Args:
            room_id: room to check
            user_id: user to check
            allow_departed_users: if True, accept users that were previously
                members but have now departed

        Returns:
            Resolves to the current membership of the user in the room and the
            membership event ID of the user. If the user is not in the room and
            never has been, then `(Membership.JOIN, None)` is returned.
        """
@@ -1,291 +0,0 @@
# Copyright 2023 The Matrix.org Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING

import pymacaroons

from synapse.api.errors import (
    AuthError,
    Codes,
    InvalidClientTokenError,
    MissingClientTokenError,
)
from synapse.http import get_request_user_agent
from synapse.http.site import SynapseRequest
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
from synapse.types import Requester, create_requester
from synapse.util.cancellation import cancellable

from . import GUEST_DEVICE_ID
from .base import BaseAuth

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


class InternalAuth(BaseAuth):
    """
    This class contains functions for authenticating users of our client-server API.
    """

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.clock = hs.get_clock()
        self._account_validity_handler = hs.get_account_validity_handler()
        self._macaroon_generator = hs.get_macaroon_generator()

        self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
        self._track_puppeted_user_ips = hs.config.api.track_puppeted_user_ips
        self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

    @cancellable
    async def get_user_by_req(
        self,
        request: SynapseRequest,
        allow_guest: bool = False,
        allow_expired: bool = False,
    ) -> Requester:
        """Get a registered user's ID.

        Args:
            request: An HTTP request with an access_token query parameter.
            allow_guest: If False, will raise an AuthError if the user making the
                request is a guest.
            allow_expired: If True, allow the request through even if the account
                is expired, or session token lifetime has ended. Note that
                /login will deliver access tokens regardless of expiration.

        Returns:
            Resolves to the requester
        Raises:
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid.
            AuthError if access is denied for the user in the access token
        """
        parent_span = active_span()
        with start_active_span("get_user_by_req"):
            requester = await self._wrapped_get_user_by_req(
                request, allow_guest, allow_expired
            )

            if parent_span:
                if requester.authenticated_entity in self._force_tracing_for_users:
                    # request tracing is enabled for this user, so we need to force
                    # tracing on for the parent span (which will be the servlet span).
                    #
                    # It's too late for the get_user_by_req span to inherit the setting,
                    # so we also force it on for that.
                    force_tracing()
                    force_tracing(parent_span)
                parent_span.set_tag(
                    "authenticated_entity", requester.authenticated_entity
                )
                parent_span.set_tag("user_id", requester.user.to_string())
                if requester.device_id is not None:
                    parent_span.set_tag("device_id", requester.device_id)
                if requester.app_service is not None:
                    parent_span.set_tag("appservice_id", requester.app_service.id)
            return requester

    @cancellable
    async def _wrapped_get_user_by_req(
        self,
        request: SynapseRequest,
        allow_guest: bool,
        allow_expired: bool,
    ) -> Requester:
        """Helper for get_user_by_req

        Once get_user_by_req has set up the opentracing span, this does the actual work.
        """
        try:
            ip_addr = request.getClientAddress().host
            user_agent = get_request_user_agent(request)

            access_token = self.get_access_token_from_request(request)

            # First check if it could be a request from an appservice
            requester = await self.get_appservice_user(request, access_token)
            if not requester:
                # If not, it should be from a regular user
                requester = await self.get_user_by_access_token(
                    access_token, allow_expired=allow_expired
                )

                # Deny the request if the user account has expired.
                # This check is only done for regular users, not appservice ones.
                if not allow_expired:
                    if await self._account_validity_handler.is_user_expired(
                        requester.user.to_string()
                    ):
                        # Raise the error if either an account validity module has determined
                        # the account has expired, or the legacy account validity
                        # implementation is enabled and determined the account has expired
                        raise AuthError(
                            403,
                            "User account has expired",
                            errcode=Codes.EXPIRED_ACCOUNT,
                        )

            if ip_addr and (
                not requester.app_service or self._track_appservice_user_ips
            ):
                # XXX(quenting): I'm 95% confident that we could skip setting the
                # device_id to "dummy-device" for appservices, and that the only impact
                # would be some rows which would not deduplicate in the 'user_ips'
                # table during the transition
                recorded_device_id = (
                    "dummy-device"
                    if requester.device_id is None and requester.app_service is not None
                    else requester.device_id
                )
                await self.store.insert_client_ip(
                    user_id=requester.authenticated_entity,
                    access_token=access_token,
                    ip=ip_addr,
                    user_agent=user_agent,
                    device_id=recorded_device_id,
                )

                # Track also the puppeted user client IP if enabled and the user is puppeting
                if (
                    requester.user.to_string() != requester.authenticated_entity
                    and self._track_puppeted_user_ips
                ):
                    await self.store.insert_client_ip(
                        user_id=requester.user.to_string(),
                        access_token=access_token,
                        ip=ip_addr,
                        user_agent=user_agent,
                        device_id=requester.device_id,
                    )

            if requester.is_guest and not allow_guest:
                raise AuthError(
                    403,
                    "Guest access not allowed",
                    errcode=Codes.GUEST_ACCESS_FORBIDDEN,
                )

            request.requester = requester
            return requester
        except KeyError:
            raise MissingClientTokenError()

    async def get_user_by_access_token(
        self,
        token: str,
        allow_expired: bool = False,
    ) -> Requester:
        """Validate access token and get user_id from it

        Args:
            token: The access token to get the user by
            allow_expired: If False, raises an InvalidClientTokenError
                if the token is expired

        Raises:
            InvalidClientTokenError if a user by that token exists, but the token is
                expired
            InvalidClientCredentialsError if no user by that token exists or the token
                is invalid
        """

        # First look in the database to see if the access token is present
        # as an opaque token.
        user_info = await self.store.get_user_by_access_token(token)
        if user_info:
            valid_until_ms = user_info.valid_until_ms
            if (
                not allow_expired
                and valid_until_ms is not None
                and valid_until_ms < self.clock.time_msec()
            ):
                # there was a valid access token, but it has expired.
                # soft-logout the user.
                raise InvalidClientTokenError(
                    msg="Access token has expired", soft_logout=True
                )

            # Mark the token as used. This is used to invalidate old refresh
            # tokens after some time.
            await self.store.mark_access_token_as_used(user_info.token_id)

            requester = create_requester(
                user_id=user_info.user_id,
                access_token_id=user_info.token_id,
                is_guest=user_info.is_guest,
                shadow_banned=user_info.shadow_banned,
                device_id=user_info.device_id,
                authenticated_entity=user_info.token_owner,
            )

            return requester

        # If the token isn't found in the database, then it could still be a
        # macaroon for a guest, so we check that here.
        try:
            user_id = self._macaroon_generator.verify_guest_token(token)

            # Guest access tokens are not stored in the database (there can
            # only be one access token per guest, anyway).
            #
            # In order to prevent guest access tokens being used as regular
            # user access tokens (and hence getting around the invalidation
            # process), we look up the user id and check that it is indeed
            # a guest user.
            #
            # It would of course be much easier to store guest access
            # tokens in the database as well, but that would break existing
            # guest tokens.
            stored_user = await self.store.get_user_by_id(user_id)
            if not stored_user:
                raise InvalidClientTokenError("Unknown user_id %s" % user_id)
            if not stored_user["is_guest"]:
                raise InvalidClientTokenError(
                    "Guest access token used for regular user"
                )

            return create_requester(
                user_id=user_id,
                is_guest=True,
                # all guests get the same device id
                device_id=GUEST_DEVICE_ID,
                authenticated_entity=user_id,
            )
        except (
            pymacaroons.exceptions.MacaroonException,
            TypeError,
            ValueError,
        ) as e:
            logger.warning(
                "Invalid access token in auth: %s %s.",
                type(e),
                e,
            )
            raise InvalidClientTokenError("Invalid access token passed.")

    async def is_server_admin(self, requester: Requester) -> bool:
        """Check if the given user is a local server admin.

        Args:
            requester: The user making the request, according to the access token.

        Returns:
            True if the user is an admin
        """
        return await self.store.is_server_admin(requester.user)