Compare commits
7 Commits
v1.107.0
...
anoa/modul
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3dcc1efc43 | ||
|
|
46c0ab559b | ||
|
|
e8cdfc771b | ||
|
|
1b30b82ac6 | ||
|
|
266f426c50 | ||
|
|
c3c3c6d200 | ||
|
|
9cd8fecdc5 |
@@ -1,23 +1,17 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||||
#
|
#
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
#
|
# you may not use this file except in compliance with the License.
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
# You may obtain a copy of the License at
|
||||||
#
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as
|
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
|
# Wraps `auditwheel repair` to first check if we're repairing a potentially abi3
|
||||||
# compatible wheel, if so rename the wheel before repairing it.
|
# compatible wheel, if so rename the wheel before repairing it.
|
||||||
|
|||||||
@@ -1,23 +1,17 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||||
#
|
#
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
#
|
# you may not use this file except in compliance with the License.
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
# You may obtain a copy of the License at
|
||||||
#
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as
|
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
# Calculate the trial jobs to run based on if we're in a PR or not.
|
# Calculate the trial jobs to run based on if we're in a PR or not.
|
||||||
|
|
||||||
@@ -35,12 +29,11 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
|
|||||||
|
|
||||||
# First calculate the various trial jobs.
|
# First calculate the various trial jobs.
|
||||||
#
|
#
|
||||||
# For PRs, we only run each type of test with the oldest Python version supported (which
|
# For each type of test we only run on Py3.7 on PRs
|
||||||
# is Python 3.8 right now)
|
|
||||||
|
|
||||||
trial_sqlite_tests = [
|
trial_sqlite_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.8",
|
"python-version": "3.7",
|
||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
@@ -53,12 +46,13 @@ if not IS_PR:
|
|||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
for version in ("3.9", "3.10", "3.11", "3.12")
|
for version in ("3.8", "3.9", "3.10", "3.11")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
trial_postgres_tests = [
|
trial_postgres_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.8",
|
"python-version": "3.7",
|
||||||
"database": "postgres",
|
"database": "postgres",
|
||||||
"postgres-version": "11",
|
"postgres-version": "11",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
@@ -68,16 +62,16 @@ trial_postgres_tests = [
|
|||||||
if not IS_PR:
|
if not IS_PR:
|
||||||
trial_postgres_tests.append(
|
trial_postgres_tests.append(
|
||||||
{
|
{
|
||||||
"python-version": "3.12",
|
"python-version": "3.11",
|
||||||
"database": "postgres",
|
"database": "postgres",
|
||||||
"postgres-version": "16",
|
"postgres-version": "15",
|
||||||
"extras": "all",
|
"extras": "all",
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
trial_no_extra_tests = [
|
trial_no_extra_tests = [
|
||||||
{
|
{
|
||||||
"python-version": "3.8",
|
"python-version": "3.7",
|
||||||
"database": "sqlite",
|
"database": "sqlite",
|
||||||
"extras": "",
|
"extras": "",
|
||||||
}
|
}
|
||||||
@@ -139,6 +133,11 @@ if not IS_PR:
|
|||||||
"sytest-tag": "testing",
|
"sytest-tag": "testing",
|
||||||
"postgres": "postgres",
|
"postgres": "postgres",
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"sytest-tag": "buster",
|
||||||
|
"postgres": "multi-postgres",
|
||||||
|
"workers": "workers",
|
||||||
|
},
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ except Exception:
|
|||||||
"""\
|
"""\
|
||||||
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
|
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
|
||||||
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
|
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
|
||||||
https://element-hq.github.io/synapse/develop/development/dependencies.html
|
https://matrix-org.github.io/synapse/develop/development/dependencies.html
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
raise
|
raise
|
||||||
|
|||||||
@@ -31,6 +31,34 @@ sed -i \
|
|||||||
-e '/systemd/d' \
|
-e '/systemd/d' \
|
||||||
pyproject.toml
|
pyproject.toml
|
||||||
|
|
||||||
|
# Use poetry to do the installation. This ensures that the versions are all mutually
|
||||||
|
# compatible (as far the package metadata declares, anyway); pip's package resolver
|
||||||
|
# is more lax.
|
||||||
|
#
|
||||||
|
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
|
||||||
|
# toml file. This means we don't have to ensure compatibility between old deps and
|
||||||
|
# dev tools.
|
||||||
|
|
||||||
|
pip install toml wheel
|
||||||
|
|
||||||
|
REMOVE_DEV_DEPENDENCIES="
|
||||||
|
import toml
|
||||||
|
with open('pyproject.toml', 'r') as f:
|
||||||
|
data = toml.loads(f.read())
|
||||||
|
|
||||||
|
del data['tool']['poetry']['dev-dependencies']
|
||||||
|
|
||||||
|
with open('pyproject.toml', 'w') as f:
|
||||||
|
toml.dump(data, f)
|
||||||
|
"
|
||||||
|
python3 -c "$REMOVE_DEV_DEPENDENCIES"
|
||||||
|
|
||||||
|
pip install poetry==1.3.2
|
||||||
|
poetry lock
|
||||||
|
|
||||||
echo "::group::Patched pyproject.toml"
|
echo "::group::Patched pyproject.toml"
|
||||||
cat pyproject.toml
|
cat pyproject.toml
|
||||||
echo "::endgroup::"
|
echo "::endgroup::"
|
||||||
|
echo "::group::Lockfile after patch"
|
||||||
|
cat poetry.lock
|
||||||
|
echo "::endgroup::"
|
||||||
|
|||||||
@@ -9,6 +9,16 @@ set -eu
|
|||||||
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
|
alias block='{ set +x; } 2>/dev/null; func() { echo "::group::$*"; set -x; }; func'
|
||||||
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
|
alias endblock='{ set +x; } 2>/dev/null; func() { echo "::endgroup::"; set -x; }; func'
|
||||||
|
|
||||||
|
block Set Go Version
|
||||||
|
# The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
|
||||||
|
# See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
|
||||||
|
|
||||||
|
# Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
|
||||||
|
echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
|
||||||
|
# Add the Go path to the PATH: We need this so we can call gotestfmt
|
||||||
|
echo "~/go/bin" >> $GITHUB_PATH
|
||||||
|
endblock
|
||||||
|
|
||||||
block Install Complement Dependencies
|
block Install Complement Dependencies
|
||||||
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
|
sudo apt-get -qq update && sudo apt-get install -qqy libolm3 libolm-dev
|
||||||
go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
go install -v github.com/gotesttools/gotestfmt/v2/cmd/gotestfmt@latest
|
||||||
|
|||||||
@@ -8,21 +8,21 @@
|
|||||||
# If ignoring a pull request that was not squash merged, only the merge
|
# If ignoring a pull request that was not squash merged, only the merge
|
||||||
# commit needs to be put here. Child commits will be resolved from it.
|
# commit needs to be put here. Child commits will be resolved from it.
|
||||||
|
|
||||||
# Run black (https://github.com/matrix-org/synapse/pull/3679).
|
# Run black (#3679).
|
||||||
8b3d9b6b199abb87246f982d5db356f1966db925
|
8b3d9b6b199abb87246f982d5db356f1966db925
|
||||||
|
|
||||||
# Black reformatting (https://github.com/matrix-org/synapse/pull/5482).
|
# Black reformatting (#5482).
|
||||||
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
|
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
|
||||||
|
|
||||||
# Target Python 3.5 with black (https://github.com/matrix-org/synapse/pull/8664).
|
# Target Python 3.5 with black (#8664).
|
||||||
aff1eb7c671b0a3813407321d2702ec46c71fa56
|
aff1eb7c671b0a3813407321d2702ec46c71fa56
|
||||||
|
|
||||||
# Update black to 20.8b1 (https://github.com/matrix-org/synapse/pull/9381).
|
# Update black to 20.8b1 (#9381).
|
||||||
0a00b7ff14890987f09112a2ae696c61001e6cf1
|
0a00b7ff14890987f09112a2ae696c61001e6cf1
|
||||||
|
|
||||||
# Convert tests/rest/admin/test_room.py to unix file endings (https://github.com/matrix-org/synapse/pull/7953).
|
# Convert tests/rest/admin/test_room.py to unix file endings (#7953).
|
||||||
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
|
c4268e3da64f1abb5b31deaeb5769adb6510c0a7
|
||||||
|
|
||||||
# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
|
# Update black to 23.1.0 (#15103)
|
||||||
9bb2eac71962970d02842bca441f4bcdbbf93a11
|
9bb2eac71962970d02842bca441f4bcdbbf93a11
|
||||||
|
|
||||||
|
|||||||
2
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@@ -1,2 +1,2 @@
|
|||||||
# Automatically request reviews from the synapse-core team when a pull request comes in.
|
# Automatically request reviews from the synapse-core team when a pull request comes in.
|
||||||
* @element-hq/synapse-core
|
* @matrix-org/synapse-core
|
||||||
4
.github/FUNDING.yml
vendored
Normal file
4
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# One username per supported platform and one custom link
|
||||||
|
patreon: matrixdotorg
|
||||||
|
liberapay: matrixdotorg
|
||||||
|
custom: https://paypal.me/matrixdotorg
|
||||||
12
.github/ISSUE_TEMPLATE/BUG_REPORT.yml
vendored
12
.github/ISSUE_TEMPLATE/BUG_REPORT.yml
vendored
@@ -86,7 +86,7 @@ body:
|
|||||||
|
|
||||||
If PostgreSQL, please also answer the following:
|
If PostgreSQL, please also answer the following:
|
||||||
- are you using a single PostgreSQL server
|
- are you using a single PostgreSQL server
|
||||||
or [separate servers for `main` and `state`](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#databases)?
|
or [separate servers for `main` and `state`](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#databases)?
|
||||||
- have you previously ported from SQLite using the Synapse "portdb" script?
|
- have you previously ported from SQLite using the Synapse "portdb" script?
|
||||||
- have you previously restored from a backup?
|
- have you previously restored from a backup?
|
||||||
validations:
|
validations:
|
||||||
@@ -97,7 +97,7 @@ body:
|
|||||||
label: Workers
|
label: Workers
|
||||||
description: |
|
description: |
|
||||||
Are you running a single Synapse process, or are you running
|
Are you running a single Synapse process, or are you running
|
||||||
[2 or more workers](https://element-hq.github.io/synapse/latest/workers.html)?
|
[2 or more workers](https://matrix-org.github.io/synapse/latest/workers.html)?
|
||||||
options:
|
options:
|
||||||
- Single process
|
- Single process
|
||||||
- Multiple workers
|
- Multiple workers
|
||||||
@@ -121,15 +121,15 @@ body:
|
|||||||
Do you have any unusual config options turned on? If so, please provide details.
|
Do you have any unusual config options turned on? If so, please provide details.
|
||||||
|
|
||||||
- Experimental or undocumented features
|
- Experimental or undocumented features
|
||||||
- [Presence](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#presence)
|
- [Presence](https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#presence)
|
||||||
- [Message retention](https://element-hq.github.io/synapse/latest/message_retention_policies.html)
|
- [Message retention](https://matrix-org.github.io/synapse/latest/message_retention_policies.html)
|
||||||
- [Synapse modules](https://element-hq.github.io/synapse/latest/modules/index.html)
|
- [Synapse modules](https://matrix-org.github.io/synapse/latest/modules/index.html)
|
||||||
- type: textarea
|
- type: textarea
|
||||||
id: logs
|
id: logs
|
||||||
attributes:
|
attributes:
|
||||||
label: Relevant log output
|
label: Relevant log output
|
||||||
description: |
|
description: |
|
||||||
Please copy and paste any relevant log output as text (not images), ideally at INFO or DEBUG log level.
|
Please copy and paste any relevant log output, ideally at INFO or DEBUG log level.
|
||||||
This will be automatically formatted into code, so there is no need for backticks (`\``).
|
This will be automatically formatted into code, so there is no need for backticks (`\``).
|
||||||
|
|
||||||
Please be careful to remove any personal or private data.
|
Please be careful to remove any personal or private data.
|
||||||
|
|||||||
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
9
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,13 +1,14 @@
|
|||||||
### Pull Request Checklist
|
### Pull Request Checklist
|
||||||
|
|
||||||
<!-- Please read https://element-hq.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
|
<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
|
||||||
|
|
||||||
* [ ] Pull request is based on the develop branch
|
* [ ] Pull request is based on the develop branch
|
||||||
* [ ] Pull request includes a [changelog file](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
|
* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
|
||||||
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
||||||
- Use markdown where necessary, mostly for `code blocks`.
|
- Use markdown where necessary, mostly for `code blocks`.
|
||||||
- End with either a period (.) or an exclamation mark (!).
|
- End with either a period (.) or an exclamation mark (!).
|
||||||
- Start with a capital letter.
|
- Start with a capital letter.
|
||||||
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
|
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
|
||||||
* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
|
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
|
||||||
(run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
|
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
|
||||||
|
(run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
|
||||||
|
|||||||
49
.github/workflows/dependabot_changelog.yml
vendored
Normal file
49
.github/workflows/dependabot_changelog.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
name: Write changelog for dependabot PR
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- reopened # For debugging!
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
# Needed to be able to push the commit. See
|
||||||
|
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
|
||||||
|
# for a similar example
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
add-changelog:
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ github.event.pull_request.head.ref }}
|
||||||
|
- name: Write, commit and push changelog
|
||||||
|
env:
|
||||||
|
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||||
|
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||||
|
run: |
|
||||||
|
echo "${PR_TITLE}." > "changelog.d/${PR_NUMBER}".misc
|
||||||
|
git add changelog.d
|
||||||
|
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git config user.name "GitHub Actions"
|
||||||
|
git commit -m "Changelog"
|
||||||
|
git push
|
||||||
|
shell: bash
|
||||||
|
# The `git push` above does not trigger CI on the dependabot PR.
|
||||||
|
#
|
||||||
|
# By default, workflows can't trigger other workflows when they're just using the
|
||||||
|
# default `GITHUB_TOKEN` access token. (This is intended to stop you from writing
|
||||||
|
# recursive workflow loops by accident, because that'll get very expensive very
|
||||||
|
# quickly.) Instead, you have to manually call out to another workflow, or else
|
||||||
|
# make your changes (i.e. the `git push` above) using a personal access token.
|
||||||
|
# See
|
||||||
|
# https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow
|
||||||
|
#
|
||||||
|
# I have tried and failed to find a way to trigger CI on the "merge ref" of the PR.
|
||||||
|
# See git commit history for previous attempts. If anyone desperately wants to try
|
||||||
|
# again in the future, make a matrix-bot account and use its access token to git push.
|
||||||
|
|
||||||
|
# THIS WORKFLOW HAS WRITE PERMISSIONS---do not add other jobs here unless they
|
||||||
|
# are sufficiently locked down to dependabot only as above.
|
||||||
51
.github/workflows/docker.yml
vendored
51
.github/workflows/docker.yml
vendored
@@ -10,58 +10,35 @@ on:
|
|||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
packages: write
|
|
||||||
id-token: write # needed for signing the images with GitHub OIDC Token
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Set up QEMU
|
- name: Set up QEMU
|
||||||
id: qemu
|
id: qemu
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v2
|
||||||
with:
|
with:
|
||||||
platforms: arm64
|
platforms: arm64
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v2
|
||||||
|
|
||||||
- name: Inspect builder
|
- name: Inspect builder
|
||||||
run: docker buildx inspect
|
run: docker buildx inspect
|
||||||
|
|
||||||
- name: Install Cosign
|
|
||||||
uses: sigstore/cosign-installer@v3.5.0
|
|
||||||
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Extract version from pyproject.toml
|
|
||||||
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
|
|
||||||
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsshell
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Log in to DockerHub
|
- name: Log in to DockerHub
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v2
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Log in to GHCR
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Calculate docker image tag
|
- name: Calculate docker image tag
|
||||||
id: set-tag
|
id: set-tag
|
||||||
uses: docker/metadata-action@master
|
uses: docker/metadata-action@master
|
||||||
with:
|
with:
|
||||||
images: |
|
images: matrixdotorg/synapse
|
||||||
docker.io/matrixdotorg/synapse
|
|
||||||
ghcr.io/element-hq/synapse
|
|
||||||
flavor: |
|
flavor: |
|
||||||
latest=false
|
latest=false
|
||||||
tags: |
|
tags: |
|
||||||
@@ -71,13 +48,10 @@ jobs:
|
|||||||
type=pep440,pattern={{raw}}
|
type=pep440,pattern={{raw}}
|
||||||
|
|
||||||
- name: Build and push all platforms
|
- name: Build and push all platforms
|
||||||
id: build-and-push
|
uses: docker/build-push-action@v4
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
with:
|
||||||
push: true
|
push: true
|
||||||
labels: |
|
labels: "gitsha1=${{ github.sha }}"
|
||||||
gitsha1=${{ github.sha }}
|
|
||||||
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
|
|
||||||
tags: "${{ steps.set-tag.outputs.tags }}"
|
tags: "${{ steps.set-tag.outputs.tags }}"
|
||||||
file: "docker/Dockerfile"
|
file: "docker/Dockerfile"
|
||||||
platforms: linux/amd64,linux/arm64
|
platforms: linux/amd64,linux/arm64
|
||||||
@@ -86,14 +60,3 @@ jobs:
|
|||||||
# https://github.com/rust-lang/cargo/issues/10583
|
# https://github.com/rust-lang/cargo/issues/10583
|
||||||
build-args: |
|
build-args: |
|
||||||
CARGO_NET_GIT_FETCH_WITH_CLI=true
|
CARGO_NET_GIT_FETCH_WITH_CLI=true
|
||||||
|
|
||||||
- name: Sign the images with GitHub OIDC Token
|
|
||||||
env:
|
|
||||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
|
||||||
TAGS: ${{ steps.set-tag.outputs.tags }}
|
|
||||||
run: |
|
|
||||||
images=""
|
|
||||||
for tag in ${TAGS}; do
|
|
||||||
images+="${tag}@${DIGEST} "
|
|
||||||
done
|
|
||||||
cosign sign --yes ${images}
|
|
||||||
|
|||||||
4
.github/workflows/docs-pr-netlify.yaml
vendored
4
.github/workflows/docs-pr-netlify.yaml
vendored
@@ -14,7 +14,7 @@ jobs:
|
|||||||
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
|
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
|
||||||
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
|
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
|
||||||
- name: 📥 Download artifact
|
- name: 📥 Download artifact
|
||||||
uses: dawidd6/action-download-artifact@09f2f74827fd3a8607589e5ad7f9398816f540fe # v3.1.4
|
uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
|
||||||
with:
|
with:
|
||||||
workflow: docs-pr.yaml
|
workflow: docs-pr.yaml
|
||||||
run_id: ${{ github.event.workflow_run.id }}
|
run_id: ${{ github.event.workflow_run.id }}
|
||||||
@@ -22,7 +22,7 @@ jobs:
|
|||||||
path: book
|
path: book
|
||||||
|
|
||||||
- name: 📤 Deploy to Netlify
|
- name: 📤 Deploy to Netlify
|
||||||
uses: matrix-org/netlify-pr-preview@v3
|
uses: matrix-org/netlify-pr-preview@v1
|
||||||
with:
|
with:
|
||||||
path: book
|
path: book
|
||||||
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
|
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
|
||||||
|
|||||||
21
.github/workflows/docs-pr.yaml
vendored
21
.github/workflows/docs-pr.yaml
vendored
@@ -6,30 +6,19 @@ on:
|
|||||||
- docs/**
|
- docs/**
|
||||||
- book.toml
|
- book.toml
|
||||||
- .github/workflows/docs-pr.yaml
|
- .github/workflows/docs-pr.yaml
|
||||||
- scripts-dev/schema_versions.py
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
pages:
|
pages:
|
||||||
name: GitHub Pages
|
name: GitHub Pages
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
with:
|
|
||||||
# Fetch all history so that the schema_versions script works.
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
||||||
with:
|
with:
|
||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
- name: Setup python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
|
|
||||||
- run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"
|
|
||||||
|
|
||||||
- name: Build the documentation
|
- name: Build the documentation
|
||||||
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
||||||
# However, we're using docs/README.md for other purposes and need to pick a new page
|
# However, we're using docs/README.md for other purposes and need to pick a new page
|
||||||
@@ -39,7 +28,7 @@ jobs:
|
|||||||
cp book/welcome_and_overview.html book/index.html
|
cp book/welcome_and_overview.html book/index.html
|
||||||
|
|
||||||
- name: Upload Artifact
|
- name: Upload Artifact
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: book
|
name: book
|
||||||
path: book
|
path: book
|
||||||
@@ -50,10 +39,10 @@ jobs:
|
|||||||
name: Check links in documentation
|
name: Check links in documentation
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Setup mdbook
|
- name: Setup mdbook
|
||||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
||||||
with:
|
with:
|
||||||
mdbook-version: '0.4.17'
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
|
|||||||
60
.github/workflows/docs.yaml
vendored
60
.github/workflows/docs.yaml
vendored
@@ -13,10 +13,25 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
pre:
|
pages:
|
||||||
name: Calculate variables for GitHub Pages deployment
|
name: GitHub Pages
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Setup mdbook
|
||||||
|
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
|
||||||
|
with:
|
||||||
|
mdbook-version: '0.4.17'
|
||||||
|
|
||||||
|
- name: Build the documentation
|
||||||
|
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
||||||
|
# However, we're using docs/README.md for other purposes and need to pick a new page
|
||||||
|
# as the default. Let's opt for the welcome page instead.
|
||||||
|
run: |
|
||||||
|
mdbook build
|
||||||
|
cp book/welcome_and_overview.html book/index.html
|
||||||
|
|
||||||
# Figure out the target directory.
|
# Figure out the target directory.
|
||||||
#
|
#
|
||||||
# The target directory depends on the name of the branch
|
# The target directory depends on the name of the branch
|
||||||
@@ -40,48 +55,11 @@ jobs:
|
|||||||
|
|
||||||
# finally, set the 'branch-version' var.
|
# finally, set the 'branch-version' var.
|
||||||
echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
|
echo "branch-version=$branch" >> "$GITHUB_OUTPUT"
|
||||||
outputs:
|
|
||||||
branch-version: ${{ steps.vars.outputs.branch-version }}
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
pages-docs:
|
|
||||||
name: GitHub Pages
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs:
|
|
||||||
- pre
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
# Fetch all history so that the schema_versions script works.
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Setup mdbook
|
|
||||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0
|
|
||||||
with:
|
|
||||||
mdbook-version: '0.4.17'
|
|
||||||
|
|
||||||
- name: Set version of docs
|
|
||||||
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
|
|
||||||
|
|
||||||
- name: Setup python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
|
|
||||||
- run: "pip install 'packaging>=20.0' 'GitPython>=3.1.20'"
|
|
||||||
|
|
||||||
- name: Build the documentation
|
|
||||||
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
|
||||||
# However, we're using docs/README.md for other purposes and need to pick a new page
|
|
||||||
# as the default. Let's opt for the welcome page instead.
|
|
||||||
run: |
|
|
||||||
mdbook build
|
|
||||||
cp book/welcome_and_overview.html book/index.html
|
|
||||||
|
|
||||||
# Deploy to the target directory.
|
# Deploy to the target directory.
|
||||||
- name: Deploy to gh pages
|
- name: Deploy to gh pages
|
||||||
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
|
uses: peaceiris/actions-gh-pages@bd8c6b06eba6b3d25d72b7a1767993c0aeee42e7 # v3.9.2
|
||||||
with:
|
with:
|
||||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
publish_dir: ./book
|
publish_dir: ./book
|
||||||
destination_dir: ./${{ needs.pre.outputs.branch-version }}
|
destination_dir: ./${{ steps.vars.outputs.branch-version }}
|
||||||
|
|||||||
52
.github/workflows/fix_lint.yaml
vendored
52
.github/workflows/fix_lint.yaml
vendored
@@ -1,52 +0,0 @@
|
|||||||
# A helper workflow to automatically fixup any linting errors on a PR. Must be
|
|
||||||
# triggered manually.
|
|
||||||
|
|
||||||
name: Attempt to automatically fix linting errors
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
fixup:
|
|
||||||
name: Fix up
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@master
|
|
||||||
with:
|
|
||||||
# We use nightly so that `fmt` correctly groups together imports, and
|
|
||||||
# clippy correctly fixes up the benchmarks.
|
|
||||||
toolchain: nightly-2022-12-01
|
|
||||||
components: rustfmt
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Setup Poetry
|
|
||||||
uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
install-project: "false"
|
|
||||||
|
|
||||||
- name: Import order (isort)
|
|
||||||
continue-on-error: true
|
|
||||||
run: poetry run isort .
|
|
||||||
|
|
||||||
- name: Code style (black)
|
|
||||||
continue-on-error: true
|
|
||||||
run: poetry run black .
|
|
||||||
|
|
||||||
- name: Semantic checks (ruff)
|
|
||||||
continue-on-error: true
|
|
||||||
run: poetry run ruff --fix .
|
|
||||||
|
|
||||||
- run: cargo clippy --all-features --fix -- -D warnings
|
|
||||||
continue-on-error: true
|
|
||||||
|
|
||||||
- run: cargo fmt
|
|
||||||
continue-on-error: true
|
|
||||||
|
|
||||||
- uses: stefanzweifel/git-auto-commit-action@v5
|
|
||||||
with:
|
|
||||||
commit_message: "Attempt to fix linting"
|
|
||||||
62
.github/workflows/latest_deps.yml
vendored
62
.github/workflows/latest_deps.yml
vendored
@@ -22,26 +22,14 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_repo:
|
|
||||||
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
|
|
||||||
# only useful to the Synapse core team.
|
|
||||||
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
|
|
||||||
# of the workflow will be skipped as well.
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
|
|
||||||
steps:
|
|
||||||
- id: check_condition
|
|
||||||
run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
mypy:
|
mypy:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
# The dev dependencies aren't exposed in the wheel metadata (at least with current
|
||||||
@@ -57,12 +45,10 @@ jobs:
|
|||||||
# `pip install matrix-synapse[all]` as closely as possible.
|
# `pip install matrix-synapse[all]` as closely as possible.
|
||||||
- run: poetry update --no-dev
|
- run: poetry update --no-dev
|
||||||
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
|
||||||
- name: Remove unhelpful options from mypy config
|
- name: Remove warn_unused_ignores from mypy config
|
||||||
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
|
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||||
- run: poetry run mypy
|
- run: poetry run mypy
|
||||||
trial:
|
trial:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
@@ -72,10 +58,12 @@ jobs:
|
|||||||
postgres-version: "14"
|
postgres-version: "14"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
@@ -86,7 +74,7 @@ jobs:
|
|||||||
-e POSTGRES_PASSWORD=postgres \
|
-e POSTGRES_PASSWORD=postgres \
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||||
postgres:${{ matrix.postgres-version }}
|
postgres:${{ matrix.postgres-version }}
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: pip install .[all,test]
|
- run: pip install .[all,test]
|
||||||
@@ -121,8 +109,6 @@ jobs:
|
|||||||
|
|
||||||
|
|
||||||
sytest:
|
sytest:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
image: matrixdotorg/sytest-synapse:testing
|
image: matrixdotorg/sytest-synapse:testing
|
||||||
@@ -145,10 +131,12 @@ jobs:
|
|||||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Ensure sytest runs `pip install`
|
- name: Ensure sytest runs `pip install`
|
||||||
@@ -164,7 +152,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
@@ -174,8 +162,7 @@ jobs:
|
|||||||
|
|
||||||
|
|
||||||
complement:
|
complement:
|
||||||
needs: check_repo
|
if: "${{ !failure() && !cancelled() }}"
|
||||||
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
@@ -192,19 +179,14 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v4 for synapse
|
- name: Run actions/checkout@v3 for synapse
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
cache-dependency-path: complement/go.sum
|
|
||||||
go-version-file: complement/go.mod
|
|
||||||
|
|
||||||
- run: |
|
- run: |
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
TEST_ONLY_IGNORE_POETRY_LOCKFILE=1 POSTGRES=${{ (matrix.database == 'Postgres') && 1 || '' }} WORKERS=${{ (matrix.arrangement == 'workers') && 1 || '' }} COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
||||||
@@ -214,7 +196,7 @@ jobs:
|
|||||||
# Open an issue if the build fails, so we know about it.
|
# Open an issue if the build fails, so we know about it.
|
||||||
# Only do this if we're not experimenting with this action in a PR.
|
# Only do this if we're not experimenting with this action in a PR.
|
||||||
open-issue:
|
open-issue:
|
||||||
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request' && needs.check_repo.outputs.should_run_workflow == 'true'"
|
if: "failure() && github.event_name != 'push' && github.event_name != 'pull_request'"
|
||||||
needs:
|
needs:
|
||||||
# TODO: should mypy be included here? It feels more brittle than the others.
|
# TODO: should mypy be included here? It feels more brittle than the others.
|
||||||
- mypy
|
- mypy
|
||||||
@@ -225,8 +207,8 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
|
|||||||
4
.github/workflows/poetry_lockfile.yaml
vendored
4
.github/workflows/poetry_lockfile.yaml
vendored
@@ -16,8 +16,8 @@ jobs:
|
|||||||
name: "Check locked dependencies have sdists"
|
name: "Check locked dependencies have sdists"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
- run: pip install tomli
|
- run: pip install tomli
|
||||||
|
|||||||
10
.github/workflows/push_complement_image.yml
vendored
10
.github/workflows/push_complement_image.yml
vendored
@@ -33,29 +33,29 @@ jobs:
|
|||||||
packages: write
|
packages: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout specific branch (debug build)
|
- name: Checkout specific branch (debug build)
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
if: github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'workflow_dispatch'
|
||||||
with:
|
with:
|
||||||
ref: ${{ inputs.branch }}
|
ref: ${{ inputs.branch }}
|
||||||
- name: Checkout clean copy of develop (scheduled build)
|
- name: Checkout clean copy of develop (scheduled build)
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
if: github.event_name == 'schedule'
|
if: github.event_name == 'schedule'
|
||||||
with:
|
with:
|
||||||
ref: develop
|
ref: develop
|
||||||
- name: Checkout clean copy of master (on-push)
|
- name: Checkout clean copy of master (on-push)
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
if: github.event_name == 'push'
|
if: github.event_name == 'push'
|
||||||
with:
|
with:
|
||||||
ref: master
|
ref: master
|
||||||
- name: Login to registry
|
- name: Login to registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v2
|
||||||
with:
|
with:
|
||||||
registry: ghcr.io
|
registry: ghcr.io
|
||||||
username: ${{ github.actor }}
|
username: ${{ github.actor }}
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
password: ${{ secrets.GITHUB_TOKEN }}
|
||||||
- name: Work out labels for complement image
|
- name: Work out labels for complement image
|
||||||
id: meta
|
id: meta
|
||||||
uses: docker/metadata-action@v5
|
uses: docker/metadata-action@v4
|
||||||
with:
|
with:
|
||||||
images: ghcr.io/${{ github.repository }}/complement-synapse
|
images: ghcr.io/${{ github.repository }}/complement-synapse
|
||||||
tags: |
|
tags: |
|
||||||
|
|||||||
39
.github/workflows/release-artifacts.yml
vendored
39
.github/workflows/release-artifacts.yml
vendored
@@ -4,15 +4,13 @@ name: Build release artifacts
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
# we build on PRs and develop to (hopefully) get early warning
|
# we build on PRs and develop to (hopefully) get early warning
|
||||||
# of things breaking (but only build one set of debs). PRs skip
|
# of things breaking (but only build one set of debs)
|
||||||
# building wheels on macOS & ARM.
|
|
||||||
pull_request:
|
pull_request:
|
||||||
push:
|
push:
|
||||||
branches: ["develop", "release-*"]
|
branches: ["develop", "release-*"]
|
||||||
|
|
||||||
# we do the full build on tags.
|
# we do the full build on tags.
|
||||||
tags: ["v*"]
|
tags: ["v*"]
|
||||||
merge_group:
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
@@ -27,14 +25,13 @@ jobs:
|
|||||||
name: "Calculate list of debian distros"
|
name: "Calculate list of debian distros"
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
- id: set-distros
|
- id: set-distros
|
||||||
run: |
|
run: |
|
||||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
||||||
# NOTE: inside the actual Dockerfile-dhvirtualenv, the image name is expanded into its full image path
|
|
||||||
dists='["debian:sid"]'
|
dists='["debian:sid"]'
|
||||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||||
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
|
dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
|
||||||
@@ -55,18 +52,18 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
path: src
|
path: src
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
id: buildx
|
id: buildx
|
||||||
uses: docker/setup-buildx-action@v3
|
uses: docker/setup-buildx-action@v2
|
||||||
with:
|
with:
|
||||||
install: true
|
install: true
|
||||||
|
|
||||||
- name: Set up docker layer caching
|
- name: Set up docker layer caching
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: /tmp/.buildx-cache
|
path: /tmp/.buildx-cache
|
||||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||||
@@ -74,7 +71,7 @@ jobs:
|
|||||||
${{ runner.os }}-buildx-
|
${{ runner.os }}-buildx-
|
||||||
|
|
||||||
- name: Set up python
|
- name: Set up python
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: '3.x'
|
python-version: '3.x'
|
||||||
|
|
||||||
@@ -92,7 +89,7 @@ jobs:
|
|||||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
||||||
|
|
||||||
- name: Upload debs as artifacts
|
- name: Upload debs as artifacts
|
||||||
uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: debs
|
name: debs
|
||||||
path: debs/*
|
path: debs/*
|
||||||
@@ -121,20 +118,20 @@ jobs:
|
|||||||
arch: aarch64
|
arch: aarch64
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
|
||||||
# here, because `python` on osx points to Python 2.7.
|
# here, because `python` on osx points to Python 2.7.
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
|
|
||||||
- name: Install cibuildwheel
|
- name: Install cibuildwheel
|
||||||
run: python -m pip install cibuildwheel==2.16.2
|
run: python -m pip install cibuildwheel==2.9.0
|
||||||
|
|
||||||
- name: Set up QEMU to emulate aarch64
|
- name: Set up QEMU to emulate aarch64
|
||||||
if: matrix.arch == 'aarch64'
|
if: matrix.arch == 'aarch64'
|
||||||
uses: docker/setup-qemu-action@v3
|
uses: docker/setup-qemu-action@v2
|
||||||
with:
|
with:
|
||||||
platforms: arm64
|
platforms: arm64
|
||||||
|
|
||||||
@@ -144,7 +141,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Only build a single wheel on PR
|
- name: Only build a single wheel on PR
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
run: echo "CIBW_BUILD="cp37-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Build wheels
|
- name: Build wheels
|
||||||
run: python -m cibuildwheel --output-dir wheelhouse
|
run: python -m cibuildwheel --output-dir wheelhouse
|
||||||
@@ -156,7 +153,7 @@ jobs:
|
|||||||
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
CARGO_NET_GIT_FETCH_WITH_CLI: true
|
||||||
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
- uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: Wheel
|
name: Wheel
|
||||||
path: ./wheelhouse/*.whl
|
path: ./wheelhouse/*.whl
|
||||||
@@ -167,8 +164,8 @@ jobs:
|
|||||||
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.10'
|
||||||
|
|
||||||
@@ -177,7 +174,7 @@ jobs:
|
|||||||
- name: Build sdist
|
- name: Build sdist
|
||||||
run: python -m build --sdist
|
run: python -m build --sdist
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
|
- uses: actions/upload-artifact@v3
|
||||||
with:
|
with:
|
||||||
name: Sdist
|
name: Sdist
|
||||||
path: dist/*.tar.gz
|
path: dist/*.tar.gz
|
||||||
@@ -194,7 +191,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Download all workflow run artifacts
|
- name: Download all workflow run artifacts
|
||||||
uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
|
uses: actions/download-artifact@v3
|
||||||
- name: Build a tarball for the debs
|
- name: Build a tarball for the debs
|
||||||
run: tar -cvJf debs.tar.xz debs
|
run: tar -cvJf debs.tar.xz debs
|
||||||
- name: Attach to release
|
- name: Attach to release
|
||||||
|
|||||||
358
.github/workflows/tests.yml
vendored
358
.github/workflows/tests.yml
vendored
@@ -4,7 +4,6 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches: ["develop", "release-*"]
|
branches: ["develop", "release-*"]
|
||||||
pull_request:
|
pull_request:
|
||||||
merge_group:
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
@@ -18,11 +17,8 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
outputs:
|
outputs:
|
||||||
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
rust: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.rust }}
|
||||||
trial: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.trial }}
|
|
||||||
integration: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.integration }}
|
|
||||||
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
|
|
||||||
steps:
|
steps:
|
||||||
- uses: dorny/paths-filter@v3
|
- uses: dorny/paths-filter@v2
|
||||||
id: filter
|
id: filter
|
||||||
# We only check on PRs
|
# We only check on PRs
|
||||||
if: startsWith(github.ref, 'refs/pull/')
|
if: startsWith(github.ref, 'refs/pull/')
|
||||||
@@ -32,57 +28,11 @@ jobs:
|
|||||||
- 'rust/**'
|
- 'rust/**'
|
||||||
- 'Cargo.toml'
|
- 'Cargo.toml'
|
||||||
- 'Cargo.lock'
|
- 'Cargo.lock'
|
||||||
- '.rustfmt.toml'
|
|
||||||
- '.github/workflows/tests.yml'
|
|
||||||
|
|
||||||
trial:
|
|
||||||
- 'synapse/**'
|
|
||||||
- 'tests/**'
|
|
||||||
- 'rust/**'
|
|
||||||
- '.ci/scripts/calculate_jobs.py'
|
|
||||||
- 'Cargo.toml'
|
|
||||||
- 'Cargo.lock'
|
|
||||||
- 'pyproject.toml'
|
|
||||||
- 'poetry.lock'
|
|
||||||
- '.github/workflows/tests.yml'
|
|
||||||
|
|
||||||
integration:
|
|
||||||
- 'synapse/**'
|
|
||||||
- 'rust/**'
|
|
||||||
- 'docker/**'
|
|
||||||
- 'Cargo.toml'
|
|
||||||
- 'Cargo.lock'
|
|
||||||
- 'pyproject.toml'
|
|
||||||
- 'poetry.lock'
|
|
||||||
- 'docker/**'
|
|
||||||
- '.ci/**'
|
|
||||||
- 'scripts-dev/complement.sh'
|
|
||||||
- '.github/workflows/tests.yml'
|
|
||||||
|
|
||||||
linting:
|
|
||||||
- 'synapse/**'
|
|
||||||
- 'docker/**'
|
|
||||||
- 'tests/**'
|
|
||||||
- 'scripts-dev/**'
|
|
||||||
- 'contrib/**'
|
|
||||||
- 'synmark/**'
|
|
||||||
- 'stubs/**'
|
|
||||||
- '.ci/**'
|
|
||||||
- 'mypy.ini'
|
|
||||||
- 'pyproject.toml'
|
|
||||||
- 'poetry.lock'
|
|
||||||
- '.github/workflows/tests.yml'
|
|
||||||
|
|
||||||
check-sampleconfig:
|
check-sampleconfig:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
@@ -93,12 +43,9 @@ jobs:
|
|||||||
|
|
||||||
check-schema-delta:
|
check-schema-delta:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
|
||||||
@@ -107,78 +54,21 @@ jobs:
|
|||||||
check-lockfile:
|
check-lockfile:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: .ci/scripts/check_lockfile.py
|
- run: .ci/scripts/check_lockfile.py
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
runs-on: ubuntu-latest
|
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v2"
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Poetry
|
|
||||||
uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
with:
|
||||||
install-project: "false"
|
typechecking-extras: "all"
|
||||||
|
|
||||||
- name: Import order (isort)
|
|
||||||
run: poetry run isort --check --diff .
|
|
||||||
|
|
||||||
- name: Code style (black)
|
|
||||||
run: poetry run black --check --diff .
|
|
||||||
|
|
||||||
- name: Semantic checks (ruff)
|
|
||||||
# --quiet suppresses the update check.
|
|
||||||
run: poetry run ruff --quiet .
|
|
||||||
|
|
||||||
lint-mypy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
name: Typechecking
|
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
|
|
||||||
- name: Setup Poetry
|
|
||||||
uses: matrix-org/setup-python-poetry@v1
|
|
||||||
with:
|
|
||||||
# We want to make use of type hints in optional dependencies too.
|
|
||||||
extras: all
|
|
||||||
# We have seen odd mypy failures that were resolved when we started
|
|
||||||
# installing the project again:
|
|
||||||
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
|
|
||||||
# To make CI green, err towards caution and install the project.
|
|
||||||
install-project: "true"
|
|
||||||
|
|
||||||
# Cribbed from
|
|
||||||
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
|
|
||||||
- name: Restore/persist mypy's cache
|
|
||||||
uses: actions/cache@v4
|
|
||||||
with:
|
|
||||||
path: |
|
|
||||||
.mypy_cache
|
|
||||||
key: mypy-cache-${{ github.context.sha }}
|
|
||||||
restore-keys: mypy-cache-
|
|
||||||
|
|
||||||
- name: Run mypy
|
|
||||||
run: poetry run mypy
|
|
||||||
|
|
||||||
lint-crlf:
|
lint-crlf:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- name: Check line endings
|
- name: Check line endings
|
||||||
run: scripts-dev/check_line_terminators.sh
|
run: scripts-dev/check_line_terminators.sh
|
||||||
|
|
||||||
@@ -186,11 +76,11 @@ jobs:
|
|||||||
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- run: "pip install 'towncrier>=18.6.0rc1'"
|
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||||
@@ -200,16 +90,10 @@ jobs:
|
|||||||
|
|
||||||
lint-pydantic:
|
lint-pydantic:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: changes
|
|
||||||
if: ${{ needs.changes.outputs.linting == 'true' }}
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
|
||||||
- uses: Swatinem/rust-cache@v2
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
poetry-version: "1.3.2"
|
poetry-version: "1.3.2"
|
||||||
@@ -222,11 +106,15 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
with:
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
components: clippy
|
components: clippy
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
@@ -240,10 +128,13 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@master
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
components: clippy
|
components: clippy
|
||||||
@@ -257,10 +148,13 @@ jobs:
|
|||||||
if: ${{ needs.changes.outputs.rust == 'true' }}
|
if: ${{ needs.changes.outputs.rust == 'true' }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@master
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
with:
|
with:
|
||||||
# We use nightly so that it correctly groups together imports
|
# We use nightly so that it correctly groups together imports
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
@@ -274,7 +168,6 @@ jobs:
|
|||||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||||
needs:
|
needs:
|
||||||
- lint
|
- lint
|
||||||
- lint-mypy
|
|
||||||
- lint-crlf
|
- lint-crlf
|
||||||
- lint-newsfile
|
- lint-newsfile
|
||||||
- lint-pydantic
|
- lint-pydantic
|
||||||
@@ -282,34 +175,18 @@ jobs:
|
|||||||
- check-schema-delta
|
- check-schema-delta
|
||||||
- check-lockfile
|
- check-lockfile
|
||||||
- lint-clippy
|
- lint-clippy
|
||||||
- lint-clippy-nightly
|
|
||||||
- lint-rustfmt
|
- lint-rustfmt
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@v2
|
- run: "true"
|
||||||
with:
|
|
||||||
needs: ${{ toJSON(needs) }}
|
|
||||||
|
|
||||||
# Various bits are skipped if there was no applicable changes.
|
|
||||||
skippable: |
|
|
||||||
check-sampleconfig
|
|
||||||
check-schema-delta
|
|
||||||
lint
|
|
||||||
lint-mypy
|
|
||||||
lint-newsfile
|
|
||||||
lint-pydantic
|
|
||||||
lint-clippy
|
|
||||||
lint-clippy-nightly
|
|
||||||
lint-rustfmt
|
|
||||||
|
|
||||||
|
|
||||||
calculate-test-jobs:
|
calculate-test-jobs:
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: "3.x"
|
python-version: "3.x"
|
||||||
- id: get-matrix
|
- id: get-matrix
|
||||||
@@ -319,17 +196,15 @@ jobs:
|
|||||||
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
sytest_test_matrix: ${{ steps.get-matrix.outputs.sytest_test_matrix }}
|
||||||
|
|
||||||
trial:
|
trial:
|
||||||
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs:
|
needs: calculate-test-jobs
|
||||||
- calculate-test-jobs
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
|
||||||
if: ${{ matrix.job.postgres-version }}
|
if: ${{ matrix.job.postgres-version }}
|
||||||
@@ -344,7 +219,12 @@ jobs:
|
|||||||
postgres:${{ matrix.job.postgres-version }}
|
postgres:${{ matrix.job.postgres-version }}
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
@@ -377,48 +257,59 @@ jobs:
|
|||||||
|
|
||||||
trial-olddeps:
|
trial-olddeps:
|
||||||
# Note: sqlite only; no postgres
|
# Note: sqlite only; no postgres
|
||||||
if: ${{ !cancelled() && !failure() && needs.changes.outputs.trial == 'true' }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs:
|
needs: linting-done
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-20.04
|
runs-on: ubuntu-20.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
# There aren't wheels for some of the older deps, so we need to install
|
# There aren't wheels for some of the older deps, so we need to install
|
||||||
# their build dependencies
|
# their build dependencies
|
||||||
- run: |
|
- run: |
|
||||||
sudo apt-get -qq update
|
|
||||||
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
sudo apt-get -qq install build-essential libffi-dev python-dev \
|
||||||
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v4
|
||||||
with:
|
with:
|
||||||
python-version: '3.8'
|
python-version: '3.7'
|
||||||
|
|
||||||
|
# Calculating the old-deps actually takes a bunch of time, so we cache the
|
||||||
|
# pyproject.toml / poetry.lock. We need to cache pyproject.toml as
|
||||||
|
# otherwise the `poetry install` step will error due to the poetry.lock
|
||||||
|
# file being outdated.
|
||||||
|
#
|
||||||
|
# This caches the output of `Prepare old deps`, which should generate the
|
||||||
|
# same `pyproject.toml` and `poetry.lock` for a given `pyproject.toml` input.
|
||||||
|
- uses: actions/cache@v3
|
||||||
|
id: cache-poetry-old-deps
|
||||||
|
name: Cache poetry.lock
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
poetry.lock
|
||||||
|
pyproject.toml
|
||||||
|
key: poetry-old-deps2-${{ hashFiles('pyproject.toml') }}
|
||||||
- name: Prepare old deps
|
- name: Prepare old deps
|
||||||
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
|
||||||
run: .ci/scripts/prepare_old_deps.sh
|
run: .ci/scripts/prepare_old_deps.sh
|
||||||
|
|
||||||
# Note: we install using `pip` here, not poetry. `poetry install` ignores the
|
# We only now install poetry so that `setup-python-poetry` caches the
|
||||||
# build-system section (https://github.com/python-poetry/poetry/issues/6154), but
|
# right poetry.lock's dependencies.
|
||||||
# we explicitly want to test that you can `pip install` using the oldest version
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
# of poetry-core and setuptools-rust.
|
with:
|
||||||
- run: pip install .[all,test]
|
python-version: '3.7'
|
||||||
|
poetry-version: "1.3.2"
|
||||||
|
extras: "all test"
|
||||||
|
|
||||||
# We nuke the local copy, as we've installed synapse into the virtualenv
|
- run: poetry run trial -j6 tests
|
||||||
# (rather than use an editable install, which we no longer support). If we
|
|
||||||
# don't do this then python can't find the native lib.
|
|
||||||
- run: rm -rf synapse/
|
|
||||||
|
|
||||||
# Sanity check we can import/run Synapse
|
|
||||||
- run: python -m synapse.app.homeserver --help
|
|
||||||
|
|
||||||
- run: python -m twisted.trial -j6 tests
|
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
@@ -435,18 +326,16 @@ jobs:
|
|||||||
trial-pypy:
|
trial-pypy:
|
||||||
# Very slow; only run if the branch name includes 'pypy'
|
# Very slow; only run if the branch name includes 'pypy'
|
||||||
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
||||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() && needs.changes.outputs.trial == 'true' }}
|
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||||
needs:
|
needs: linting-done
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["pypy-3.8"]
|
python-version: ["pypy-3.7"]
|
||||||
extras: ["all"]
|
extras: ["all"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
# Install libs necessary for PyPy to build binary wheels for dependencies
|
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
@@ -469,10 +358,8 @@ jobs:
|
|||||||
|| true
|
|| true
|
||||||
|
|
||||||
sytest:
|
sytest:
|
||||||
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}
|
if: ${{ !failure() && !cancelled() }}
|
||||||
needs:
|
needs: calculate-test-jobs
|
||||||
- calculate-test-jobs
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
|
image: matrixdotorg/sytest-synapse:${{ matrix.job.sytest-tag }}
|
||||||
@@ -481,8 +368,8 @@ jobs:
|
|||||||
env:
|
env:
|
||||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
SYTEST_BRANCH: ${{ github.head_ref }}
|
||||||
POSTGRES: ${{ matrix.job.postgres && 1}}
|
POSTGRES: ${{ matrix.job.postgres && 1}}
|
||||||
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
|
MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
|
||||||
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
|
ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
|
||||||
WORKERS: ${{ matrix.job.workers && 1 }}
|
WORKERS: ${{ matrix.job.workers && 1 }}
|
||||||
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
|
BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
|
||||||
TOP: ${{ github.workspace }}
|
TOP: ${{ github.workspace }}
|
||||||
@@ -493,12 +380,17 @@ jobs:
|
|||||||
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- name: Prepare test blacklist
|
- name: Prepare test blacklist
|
||||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Run SyTest
|
- name: Run SyTest
|
||||||
@@ -508,7 +400,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
|
||||||
@@ -517,8 +409,8 @@ jobs:
|
|||||||
/logs/**/*.log*
|
/logs/**/*.log*
|
||||||
|
|
||||||
export-data:
|
export-data:
|
||||||
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
|
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs: [linting-done, portdb, changes]
|
needs: [linting-done, portdb]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
env:
|
env:
|
||||||
TOP: ${{ github.workspace }}
|
TOP: ${{ github.workspace }}
|
||||||
@@ -538,7 +430,7 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
- run: sudo apt-get -qq install xmlsec1 postgresql-client
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
@@ -553,15 +445,13 @@ jobs:
|
|||||||
|
|
||||||
|
|
||||||
portdb:
|
portdb:
|
||||||
if: ${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true'}} # Allow previous steps to be skipped, but not fail
|
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs:
|
needs: linting-done
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- python-version: "3.8"
|
- python-version: "3.7"
|
||||||
postgres-version: "11"
|
postgres-version: "11"
|
||||||
|
|
||||||
- python-version: "3.11"
|
- python-version: "3.11"
|
||||||
@@ -582,7 +472,7 @@ jobs:
|
|||||||
--health-retries 5
|
--health-retries 5
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- name: Add PostgreSQL apt repository
|
- name: Add PostgreSQL apt repository
|
||||||
# We need a version of pg_dump that can handle the version of
|
# We need a version of pg_dump that can handle the version of
|
||||||
# PostgreSQL being tested against. The Ubuntu package repository lags
|
# PostgreSQL being tested against. The Ubuntu package repository lags
|
||||||
@@ -606,7 +496,7 @@ jobs:
|
|||||||
PGPASSWORD: postgres
|
PGPASSWORD: postgres
|
||||||
PGDATABASE: postgres
|
PGDATABASE: postgres
|
||||||
- name: "Upload schema differences"
|
- name: "Upload schema differences"
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
|
||||||
with:
|
with:
|
||||||
name: Schema dumps
|
name: Schema dumps
|
||||||
@@ -616,10 +506,8 @@ jobs:
|
|||||||
schema_diff
|
schema_diff
|
||||||
|
|
||||||
complement:
|
complement:
|
||||||
if: "${{ !failure() && !cancelled() && needs.changes.outputs.integration == 'true' }}"
|
if: "${{ !failure() && !cancelled() }}"
|
||||||
needs:
|
needs: linting-done
|
||||||
- linting-done
|
|
||||||
- changes
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
@@ -636,27 +524,26 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v4 for synapse
|
- name: Run actions/checkout@v3 for synapse
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
cache-dependency-path: complement/go.sum
|
|
||||||
go-version-file: complement/go.mod
|
|
||||||
|
|
||||||
# use p=1 concurrency as GHA boxes are underpowered and don't like running tons of synapses at once.
|
|
||||||
- run: |
|
- run: |
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -p 1 -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | synapse/.ci/scripts/gotestfmt
|
||||||
shell: bash
|
shell: bash
|
||||||
env:
|
env:
|
||||||
POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
|
POSTGRES: ${{ (matrix.database == 'Postgres') && 1 || '' }}
|
||||||
@@ -671,10 +558,15 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@1.66.0
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: 1.58.1
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- run: cargo test
|
- run: cargo test
|
||||||
@@ -689,10 +581,13 @@ jobs:
|
|||||||
- changes
|
- changes
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@master
|
# There don't seem to be versioned releases of this action per se: for each rust
|
||||||
|
# version there is a branch which gets constantly rebased on top of master.
|
||||||
|
# We pin to a specific commit for paranoia's sake.
|
||||||
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
with:
|
with:
|
||||||
toolchain: nightly-2022-12-01
|
toolchain: nightly-2022-12-01
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
@@ -711,22 +606,15 @@ jobs:
|
|||||||
- complement
|
- complement
|
||||||
- cargo-test
|
- cargo-test
|
||||||
- cargo-bench
|
- cargo-bench
|
||||||
- linting-done
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: matrix-org/done-action@v2
|
- uses: matrix-org/done-action@v2
|
||||||
with:
|
with:
|
||||||
needs: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
|
|
||||||
# Various bits are skipped if there was no applicable changes.
|
# The newsfile lint may be skipped on non PR builds
|
||||||
# The newsfile lint may be skipped on non PR builds.
|
# Cargo test is skipped if there is no changes on Rust code
|
||||||
skippable: |
|
skippable: |
|
||||||
trial
|
|
||||||
trial-olddeps
|
|
||||||
sytest
|
|
||||||
portdb
|
|
||||||
export-data
|
|
||||||
complement
|
|
||||||
lint-newsfile
|
lint-newsfile
|
||||||
cargo-test
|
cargo-test
|
||||||
cargo-bench
|
cargo-bench
|
||||||
|
|||||||
1
.github/workflows/triage-incoming.yml
vendored
1
.github/workflows/triage-incoming.yml
vendored
@@ -12,3 +12,4 @@ jobs:
|
|||||||
content_id: ${{ github.event.issue.node_id }}
|
content_id: ${{ github.event.issue.node_id }}
|
||||||
secrets:
|
secrets:
|
||||||
github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
github_access_token: ${{ secrets.ELEMENT_BOT_TOKEN }}
|
||||||
|
|
||||||
|
|||||||
79
.github/workflows/twisted_trunk.yml
vendored
79
.github/workflows/twisted_trunk.yml
vendored
@@ -5,45 +5,22 @@ on:
|
|||||||
- cron: 0 8 * * *
|
- cron: 0 8 * * *
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
# NB: inputs are only present when this workflow is dispatched manually.
|
|
||||||
# (The default below is the default field value in the form to trigger
|
|
||||||
# a manual dispatch). Otherwise the inputs will evaluate to null.
|
|
||||||
inputs:
|
|
||||||
twisted_ref:
|
|
||||||
description: Commit, branch or tag to checkout from upstream Twisted.
|
|
||||||
required: false
|
|
||||||
default: 'trunk'
|
|
||||||
type: string
|
|
||||||
|
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.ref }}
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_repo:
|
|
||||||
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
|
|
||||||
# only useful to the Synapse core team.
|
|
||||||
# All other workflow steps depend on this one, thus if 'should_run_workflow' is not 'true', the rest
|
|
||||||
# of the workflow will be skipped as well.
|
|
||||||
if: github.repository == 'element-hq/synapse'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
should_run_workflow: ${{ steps.check_condition.outputs.should_run_workflow }}
|
|
||||||
steps:
|
|
||||||
- id: check_condition
|
|
||||||
run: echo "should_run_workflow=${{ github.repository == 'element-hq/synapse' }}" >> "$GITHUB_OUTPUT"
|
|
||||||
|
|
||||||
mypy:
|
mypy:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
@@ -52,23 +29,23 @@ jobs:
|
|||||||
extras: "all"
|
extras: "all"
|
||||||
- run: |
|
- run: |
|
||||||
poetry remove twisted
|
poetry remove twisted
|
||||||
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
|
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
|
||||||
poetry install --no-interaction --extras "all test"
|
poetry install --no-interaction --extras "all test"
|
||||||
- name: Remove unhelpful options from mypy config
|
- name: Remove warn_unused_ignores from mypy config
|
||||||
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
|
run: sed '/warn_unused_ignores = True/d' -i mypy.ini
|
||||||
- run: poetry run mypy
|
- run: poetry run mypy
|
||||||
|
|
||||||
trial:
|
trial:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- uses: matrix-org/setup-python-poetry@v1
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
@@ -95,23 +72,19 @@ jobs:
|
|||||||
|| true
|
|| true
|
||||||
|
|
||||||
sytest:
|
sytest:
|
||||||
needs: check_repo
|
|
||||||
if: needs.check_repo.outputs.should_run_workflow == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
# We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
|
image: matrixdotorg/sytest-synapse:buster
|
||||||
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
|
|
||||||
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
|
|
||||||
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
|
|
||||||
image: matrixdotorg/sytest-synapse:focal
|
|
||||||
volumes:
|
volumes:
|
||||||
- ${{ github.workspace }}:/src
|
- ${{ github.workspace }}:/src
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Rust
|
- name: Install Rust
|
||||||
uses: dtolnay/rust-toolchain@stable
|
uses: dtolnay/rust-toolchain@e12eda571dc9a5ee5d58eecf4738ec291c66f295
|
||||||
|
with:
|
||||||
|
toolchain: stable
|
||||||
- uses: Swatinem/rust-cache@v2
|
- uses: Swatinem/rust-cache@v2
|
||||||
|
|
||||||
- name: Patch dependencies
|
- name: Patch dependencies
|
||||||
@@ -136,7 +109,7 @@ jobs:
|
|||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||||
- name: Upload SyTest logs
|
- name: Upload SyTest logs
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v3
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
with:
|
with:
|
||||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||||
@@ -145,8 +118,7 @@ jobs:
|
|||||||
/logs/**/*.log*
|
/logs/**/*.log*
|
||||||
|
|
||||||
complement:
|
complement:
|
||||||
needs: check_repo
|
if: "${{ !failure() && !cancelled() }}"
|
||||||
if: "!failure() && !cancelled() && needs.check_repo.outputs.should_run_workflow == 'true'"
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
@@ -163,19 +135,14 @@ jobs:
|
|||||||
database: Postgres
|
database: Postgres
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Run actions/checkout@v4 for synapse
|
- name: Run actions/checkout@v3 for synapse
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
- name: Prepare Complement's Prerequisites
|
- name: Prepare Complement's Prerequisites
|
||||||
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
|
||||||
|
|
||||||
- uses: actions/setup-go@v5
|
|
||||||
with:
|
|
||||||
cache-dependency-path: complement/go.sum
|
|
||||||
go-version-file: complement/go.mod
|
|
||||||
|
|
||||||
# This step is specific to the 'Twisted trunk' test run:
|
# This step is specific to the 'Twisted trunk' test run:
|
||||||
- name: Patch dependencies
|
- name: Patch dependencies
|
||||||
run: |
|
run: |
|
||||||
@@ -196,7 +163,7 @@ jobs:
|
|||||||
|
|
||||||
# open an issue if the build fails, so we know about it.
|
# open an issue if the build fails, so we know about it.
|
||||||
open-issue:
|
open-issue:
|
||||||
if: failure() && needs.check_repo.outputs.should_run_workflow == 'true'
|
if: failure()
|
||||||
needs:
|
needs:
|
||||||
- mypy
|
- mypy
|
||||||
- trial
|
- trial
|
||||||
@@ -206,8 +173,8 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v3
|
||||||
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
|
- uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # v2.9.1
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
|
|||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -15,10 +15,9 @@ _trial_temp*/
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
__pycache__/
|
__pycache__/
|
||||||
|
|
||||||
# We do want poetry, cargo and flake lockfiles.
|
# We do want the poetry and cargo lockfile.
|
||||||
!poetry.lock
|
!poetry.lock
|
||||||
!Cargo.lock
|
!Cargo.lock
|
||||||
!flake.lock
|
|
||||||
|
|
||||||
# stuff that is likely to exist when you run a server locally
|
# stuff that is likely to exist when you run a server locally
|
||||||
/*.db
|
/*.db
|
||||||
@@ -34,15 +33,11 @@ __pycache__/
|
|||||||
/logs
|
/logs
|
||||||
/media_store/
|
/media_store/
|
||||||
/uploads
|
/uploads
|
||||||
/homeserver-config-overrides.d
|
|
||||||
|
|
||||||
# For direnv users
|
# For direnv users
|
||||||
/.envrc
|
/.envrc
|
||||||
.direnv/
|
.direnv/
|
||||||
|
|
||||||
# For nix/devenv users
|
|
||||||
.devenv/
|
|
||||||
|
|
||||||
# IDEs
|
# IDEs
|
||||||
/.idea/
|
/.idea/
|
||||||
/.ropeproject/
|
/.ropeproject/
|
||||||
@@ -58,7 +53,6 @@ __pycache__/
|
|||||||
/coverage.*
|
/coverage.*
|
||||||
/dist/
|
/dist/
|
||||||
/docs/build/
|
/docs/build/
|
||||||
/dev-docs/_build/
|
|
||||||
/htmlcov
|
/htmlcov
|
||||||
/pip-wheel-metadata/
|
/pip-wheel-metadata/
|
||||||
|
|
||||||
@@ -67,7 +61,7 @@ book/
|
|||||||
|
|
||||||
# complement
|
# complement
|
||||||
/complement-*
|
/complement-*
|
||||||
/main.tar.gz
|
/master.tar.gz
|
||||||
|
|
||||||
# rust
|
# rust
|
||||||
/target/
|
/target/
|
||||||
|
|||||||
4945
CHANGES.md
4945
CHANGES.md
File diff suppressed because it is too large
Load Diff
@@ -1,3 +1,3 @@
|
|||||||
# Welcome to Synapse
|
# Welcome to Synapse
|
||||||
|
|
||||||
Please see the [contributors' guide](https://element-hq.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
||||||
|
|||||||
386
Cargo.lock
generated
386
Cargo.lock
generated
@@ -4,18 +4,18 @@ version = 3
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "aho-corasick"
|
name = "aho-corasick"
|
||||||
version = "1.0.2"
|
version = "0.7.19"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41"
|
checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"memchr",
|
"memchr",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "anyhow"
|
name = "anyhow"
|
||||||
version = "1.0.82"
|
version = "1.0.69"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519"
|
checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "arc-swap"
|
name = "arc-swap"
|
||||||
@@ -29,12 +29,6 @@ version = "1.1.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "base64"
|
|
||||||
version = "0.21.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bitflags"
|
name = "bitflags"
|
||||||
version = "1.3.2"
|
version = "1.3.2"
|
||||||
@@ -59,33 +53,12 @@ dependencies = [
|
|||||||
"generic-array",
|
"generic-array",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bumpalo"
|
|
||||||
version = "3.16.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "bytes"
|
|
||||||
version = "1.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cfg-if"
|
name = "cfg-if"
|
||||||
version = "1.0.0"
|
version = "1.0.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "cpufeatures"
|
|
||||||
version = "0.2.12"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "crypto-common"
|
name = "crypto-common"
|
||||||
version = "0.1.6"
|
version = "0.1.6"
|
||||||
@@ -98,21 +71,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "digest"
|
name = "digest"
|
||||||
version = "0.10.7"
|
version = "0.10.5"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
|
checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"block-buffer",
|
"block-buffer",
|
||||||
"crypto-common",
|
"crypto-common",
|
||||||
"subtle",
|
"subtle",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "fnv"
|
|
||||||
version = "1.0.7"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "generic-array"
|
name = "generic-array"
|
||||||
version = "0.14.6"
|
version = "0.14.6"
|
||||||
@@ -123,77 +90,17 @@ dependencies = [
|
|||||||
"version_check",
|
"version_check",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "getrandom"
|
|
||||||
version = "0.2.14"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"js-sys",
|
|
||||||
"libc",
|
|
||||||
"wasi",
|
|
||||||
"wasm-bindgen",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "headers"
|
|
||||||
version = "0.4.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9"
|
|
||||||
dependencies = [
|
|
||||||
"base64",
|
|
||||||
"bytes",
|
|
||||||
"headers-core",
|
|
||||||
"http",
|
|
||||||
"httpdate",
|
|
||||||
"mime",
|
|
||||||
"sha1",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "headers-core"
|
|
||||||
version = "0.3.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4"
|
|
||||||
dependencies = [
|
|
||||||
"http",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "heck"
|
|
||||||
version = "0.4.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "hex"
|
name = "hex"
|
||||||
version = "0.4.3"
|
version = "0.4.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "http"
|
|
||||||
version = "1.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258"
|
|
||||||
dependencies = [
|
|
||||||
"bytes",
|
|
||||||
"fnv",
|
|
||||||
"itoa",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "httpdate"
|
|
||||||
version = "1.0.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "indoc"
|
name = "indoc"
|
||||||
version = "2.0.4"
|
version = "1.0.7"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
|
checksum = "adab1eaa3408fb7f0c777a73e7465fd5656136fc93b670eb6df3c88c2c1344e3"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
@@ -201,15 +108,6 @@ version = "1.0.4"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "js-sys"
|
|
||||||
version = "0.3.69"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d"
|
|
||||||
dependencies = [
|
|
||||||
"wasm-bindgen",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lazy_static"
|
name = "lazy_static"
|
||||||
version = "1.4.0"
|
version = "1.4.0"
|
||||||
@@ -218,9 +116,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "libc"
|
name = "libc"
|
||||||
version = "0.2.153"
|
version = "0.2.135"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lock_api"
|
name = "lock_api"
|
||||||
@@ -234,30 +132,27 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "log"
|
name = "log"
|
||||||
version = "0.4.21"
|
version = "0.4.17"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
|
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "memchr"
|
|
||||||
version = "2.6.3"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "memoffset"
|
|
||||||
version = "0.9.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"autocfg",
|
"cfg-if",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mime"
|
name = "memchr"
|
||||||
version = "0.3.17"
|
version = "2.5.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
|
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "memoffset"
|
||||||
|
version = "0.6.5"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
|
||||||
|
dependencies = [
|
||||||
|
"autocfg",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
@@ -288,32 +183,20 @@ dependencies = [
|
|||||||
"windows-sys",
|
"windows-sys",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "portable-atomic"
|
|
||||||
version = "1.6.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ppv-lite86"
|
|
||||||
version = "0.2.17"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.76"
|
version = "1.0.46"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c"
|
checksum = "94e2ef8dbfc347b10c094890f778ee2e36ca9bb4262e86dc99cd217e35f3470b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3"
|
name = "pyo3"
|
||||||
version = "0.20.3"
|
version = "0.17.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
|
checksum = "268be0c73583c183f2b14052337465768c07726936a260f480f0857cb95ba543"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
@@ -321,7 +204,6 @@ dependencies = [
|
|||||||
"libc",
|
"libc",
|
||||||
"memoffset",
|
"memoffset",
|
||||||
"parking_lot",
|
"parking_lot",
|
||||||
"portable-atomic",
|
|
||||||
"pyo3-build-config",
|
"pyo3-build-config",
|
||||||
"pyo3-ffi",
|
"pyo3-ffi",
|
||||||
"pyo3-macros",
|
"pyo3-macros",
|
||||||
@@ -330,9 +212,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-build-config"
|
name = "pyo3-build-config"
|
||||||
version = "0.20.3"
|
version = "0.17.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
|
checksum = "28fcd1e73f06ec85bf3280c48c67e731d8290ad3d730f8be9dc07946923005c8"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"target-lexicon",
|
"target-lexicon",
|
||||||
@@ -340,9 +222,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-ffi"
|
name = "pyo3-ffi"
|
||||||
version = "0.20.3"
|
version = "0.17.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
|
checksum = "0f6cb136e222e49115b3c51c32792886defbfb0adead26a688142b346a0b9ffc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"pyo3-build-config",
|
"pyo3-build-config",
|
||||||
@@ -350,9 +232,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-log"
|
name = "pyo3-log"
|
||||||
version = "0.9.0"
|
version = "0.8.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd"
|
checksum = "f9c8b57fe71fb5dcf38970ebedc2b1531cf1c14b1b9b4c560a182a57e115575c"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"arc-swap",
|
"arc-swap",
|
||||||
"log",
|
"log",
|
||||||
@@ -361,9 +243,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-macros"
|
name = "pyo3-macros"
|
||||||
version = "0.20.3"
|
version = "0.17.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
|
checksum = "94144a1266e236b1c932682136dc35a9dee8d3589728f68130c7c3861ef96b28"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"pyo3-macros-backend",
|
"pyo3-macros-backend",
|
||||||
@@ -373,22 +255,20 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyo3-macros-backend"
|
name = "pyo3-macros-backend"
|
||||||
version = "0.20.3"
|
version = "0.17.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
|
checksum = "c8df9be978a2d2f0cdebabb03206ed73b11314701a5bfe71b0d753b81997777f"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heck",
|
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"pyo3-build-config",
|
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pythonize"
|
name = "pythonize"
|
||||||
version = "0.20.0"
|
version = "0.17.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ffd1c3ef39c725d63db5f9bc455461bafd80540cb7824c61afb823501921a850"
|
checksum = "0f7f0c136f5fbc01868185eef462800e49659eb23acca83b9e884367a006acb6"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"pyo3",
|
"pyo3",
|
||||||
"serde",
|
"serde",
|
||||||
@@ -396,43 +276,13 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quote"
|
name = "quote"
|
||||||
version = "1.0.35"
|
version = "1.0.21"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
|
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand"
|
|
||||||
version = "0.8.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
|
||||||
dependencies = [
|
|
||||||
"libc",
|
|
||||||
"rand_chacha",
|
|
||||||
"rand_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_chacha"
|
|
||||||
version = "0.3.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
|
|
||||||
dependencies = [
|
|
||||||
"ppv-lite86",
|
|
||||||
"rand_core",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "rand_core"
|
|
||||||
version = "0.6.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
|
||||||
dependencies = [
|
|
||||||
"getrandom",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "redox_syscall"
|
name = "redox_syscall"
|
||||||
version = "0.2.16"
|
version = "0.2.16"
|
||||||
@@ -444,21 +294,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.10.4"
|
version = "1.7.1"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
|
checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
|
||||||
dependencies = [
|
|
||||||
"aho-corasick",
|
|
||||||
"memchr",
|
|
||||||
"regex-automata",
|
|
||||||
"regex-syntax",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "regex-automata"
|
|
||||||
version = "0.4.4"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "3b7fa1134405e2ec9353fd416b17f8dacd46c473d7d3fd1cf202706a14eb792a"
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick",
|
"aho-corasick",
|
||||||
"memchr",
|
"memchr",
|
||||||
@@ -467,9 +305,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.8.2"
|
version = "0.6.27"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
|
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ryu"
|
name = "ryu"
|
||||||
@@ -485,18 +323,18 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde"
|
name = "serde"
|
||||||
version = "1.0.200"
|
version = "1.0.152"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ddc6f9cc94d67c0e21aaf7eda3a010fd3af78ebf6e096aa6e2e13c79749cce4f"
|
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_derive"
|
name = "serde_derive"
|
||||||
version = "1.0.200"
|
version = "1.0.152"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "856f046b9400cee3c8c94ed572ecdb752444c24528c035cd35882aad6f492bcb"
|
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -505,37 +343,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "serde_json"
|
name = "serde_json"
|
||||||
version = "1.0.116"
|
version = "1.0.94"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
|
checksum = "1c533a59c9d8a93a09c6ab31f0fd5e5f4dd1b8fc9434804029839884765d04ea"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa",
|
||||||
"ryu",
|
"ryu",
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "sha1"
|
|
||||||
version = "0.10.5"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"cpufeatures",
|
|
||||||
"digest",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "sha2"
|
|
||||||
version = "0.10.8"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"cpufeatures",
|
|
||||||
"digest",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "smallvec"
|
name = "smallvec"
|
||||||
version = "1.10.0"
|
version = "1.10.0"
|
||||||
@@ -550,9 +366,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "syn"
|
name = "syn"
|
||||||
version = "2.0.48"
|
version = "1.0.104"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f"
|
checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
@@ -564,23 +380,16 @@ name = "synapse"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"base64",
|
|
||||||
"blake2",
|
"blake2",
|
||||||
"bytes",
|
|
||||||
"headers",
|
|
||||||
"hex",
|
"hex",
|
||||||
"http",
|
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"log",
|
"log",
|
||||||
"mime",
|
|
||||||
"pyo3",
|
"pyo3",
|
||||||
"pyo3-log",
|
"pyo3-log",
|
||||||
"pythonize",
|
"pythonize",
|
||||||
"regex",
|
"regex",
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"sha2",
|
|
||||||
"ulid",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -595,17 +404,6 @@ version = "1.15.0"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
|
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ulid"
|
|
||||||
version = "1.1.2"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "34778c17965aa2a08913b57e1f34db9b4a63f5de31768b55bf20d2795f921259"
|
|
||||||
dependencies = [
|
|
||||||
"getrandom",
|
|
||||||
"rand",
|
|
||||||
"web-time",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unicode-ident"
|
name = "unicode-ident"
|
||||||
version = "1.0.5"
|
version = "1.0.5"
|
||||||
@@ -614,9 +412,9 @@ checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "unindent"
|
name = "unindent"
|
||||||
version = "0.2.3"
|
version = "0.1.10"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
|
checksum = "58ee9362deb4a96cef4d437d1ad49cffc9b9e92d202b6995674e928ce684f112"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "version_check"
|
name = "version_check"
|
||||||
@@ -624,76 +422,6 @@ version = "0.9.4"
|
|||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasi"
|
|
||||||
version = "0.11.0+wasi-snapshot-preview1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen"
|
|
||||||
version = "0.2.92"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
|
|
||||||
dependencies = [
|
|
||||||
"cfg-if",
|
|
||||||
"wasm-bindgen-macro",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen-backend"
|
|
||||||
version = "0.2.92"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
|
|
||||||
dependencies = [
|
|
||||||
"bumpalo",
|
|
||||||
"log",
|
|
||||||
"once_cell",
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
"wasm-bindgen-shared",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen-macro"
|
|
||||||
version = "0.2.92"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
|
|
||||||
dependencies = [
|
|
||||||
"quote",
|
|
||||||
"wasm-bindgen-macro-support",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen-macro-support"
|
|
||||||
version = "0.2.92"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
|
|
||||||
dependencies = [
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
"wasm-bindgen-backend",
|
|
||||||
"wasm-bindgen-shared",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen-shared"
|
|
||||||
version = "0.2.92"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "web-time"
|
|
||||||
version = "1.1.0"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
|
|
||||||
dependencies = [
|
|
||||||
"js-sys",
|
|
||||||
"wasm-bindgen",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "windows-sys"
|
name = "windows-sys"
|
||||||
version = "0.36.1"
|
version = "0.36.1"
|
||||||
|
|||||||
@@ -3,4 +3,3 @@
|
|||||||
|
|
||||||
[workspace]
|
[workspace]
|
||||||
members = ["rust"]
|
members = ["rust"]
|
||||||
resolver = "2"
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Installation Instructions
|
# Installation Instructions
|
||||||
|
|
||||||
This document has moved to the
|
This document has moved to the
|
||||||
[Synapse documentation website](https://element-hq.github.io/synapse/latest/setup/installation.html).
|
[Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
||||||
Please update your links.
|
Please update your links.
|
||||||
|
|
||||||
The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
|
The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
|
||||||
|
|||||||
832
LICENSE
832
LICENSE
@@ -1,661 +1,177 @@
|
|||||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
|
||||||
Version 3, 19 November 2007
|
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
Apache License
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
Version 2.0, January 2004
|
||||||
of this license document, but changing it is not allowed.
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
Preamble
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
The GNU Affero General Public License is a free, copyleft license for
|
1. Definitions.
|
||||||
software and other kinds of works, specifically designed to ensure
|
|
||||||
cooperation with the community in the case of network server software.
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
The licenses for most software and other practical works are designed
|
|
||||||
to take away your freedom to share and change the works. By contrast,
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
our General Public Licenses are intended to guarantee your freedom to
|
the copyright owner that is granting the License.
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users.
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
When we speak of free software, we are referring to freedom, not
|
control with that entity. For the purposes of this definition,
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
have the freedom to distribute copies of free software (and charge for
|
direction or management of such entity, whether by contract or
|
||||||
them if you wish), that you receive source code or can get it if you
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
want it, that you can change the software or use pieces of it in new
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
Developers that use our General Public Licenses protect your rights
|
exercising permissions granted by this License.
|
||||||
with two steps: (1) assert copyright on the software, and (2) offer
|
|
||||||
you this License which gives you legal permission to copy, distribute
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
and/or modify the software.
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
A secondary benefit of defending all users' freedom is that
|
|
||||||
improvements made in alternate versions of the program, if they
|
"Object" form shall mean any form resulting from mechanical
|
||||||
receive widespread use, become available for other developers to
|
transformation or translation of a Source form, including but
|
||||||
incorporate. Many developers of free software are heartened and
|
not limited to compiled object code, generated documentation,
|
||||||
encouraged by the resulting cooperation. However, in the case of
|
and conversions to other media types.
|
||||||
software used on network servers, this result may fail to come about.
|
|
||||||
The GNU General Public License permits making a modified version and
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
letting the public access it on a server without ever releasing its
|
Object form, made available under the License, as indicated by a
|
||||||
source code to the public.
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
The GNU Affero General Public License is designed specifically to
|
|
||||||
ensure that, in such cases, the modified source code becomes available
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
to the community. It requires the operator of a network server to
|
form, that is based on (or derived from) the Work and for which the
|
||||||
provide the source code of the modified version running there to the
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
users of that server. Therefore, public use of a modified version, on
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
a publicly accessible server, gives the public access to the source
|
of this License, Derivative Works shall not include works that remain
|
||||||
code of the modified version.
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
An older license, called the Affero General Public License and
|
|
||||||
published by Affero, was designed to accomplish similar goals. This is
|
"Contribution" shall mean any work of authorship, including
|
||||||
a different license, not a version of the Affero GPL, but Affero has
|
the original version of the Work and any modifications or additions
|
||||||
released a new version of the Affero GPL which permits relicensing under
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
this license.
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
The precise terms and conditions for copying, distribution and
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
modification follow.
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
TERMS AND CONDITIONS
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
0. Definitions.
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
works, such as semiconductor masks.
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
"recipients" may be individuals or organizations.
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
in a fashion requiring copyright permission, other than the making of an
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
exact copy. The resulting work is called a "modified version" of the
|
Work and such Derivative Works in Source or Object form.
|
||||||
earlier work or a work "based on" the earlier work.
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
A "covered work" means either the unmodified Program or a work based
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
on the Program.
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
To "propagate" a work means to do anything with it that, without
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
permission, would make you directly or secondarily liable for
|
where such license applies only to those patent claims licensable
|
||||||
infringement under applicable copyright law, except executing it on a
|
by such Contributor that are necessarily infringed by their
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
distribution (with or without modification), making available to the
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
public, and in some countries other activities as well.
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
To "convey" a work means any kind of propagation that enables other
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
parties to make or receive copies. Mere interaction with a user through
|
or contributory patent infringement, then any patent licenses
|
||||||
a computer network, with no transfer of a copy, is not conveying.
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
An interactive user interface displays "Appropriate Legal Notices"
|
|
||||||
to the extent that it includes a convenient and prominently visible
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
tells the user that there is no warranty for the work (except to the
|
modifications, and in Source or Object form, provided that You
|
||||||
extent that warranties are provided), that licensees may convey the
|
meet the following conditions:
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
(a) You must give any other recipients of the Work or
|
||||||
menu, a prominent item in the list meets this criterion.
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
1. Source Code.
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
The "source code" for a work means the preferred form of the work
|
|
||||||
for making modifications to it. "Object code" means any non-source
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
form of a work.
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
A "Standard Interface" means an interface that either is an official
|
excluding those notices that do not pertain to any part of
|
||||||
standard defined by a recognized standards body, or, in the case of
|
the Derivative Works; and
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
The "System Libraries" of an executable work include anything, other
|
include a readable copy of the attribution notices contained
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
within such NOTICE file, excluding those notices that do not
|
||||||
packaging a Major Component, but which is not part of that Major
|
pertain to any part of the Derivative Works, in at least one
|
||||||
Component, and (b) serves only to enable use of the work with that
|
of the following places: within a NOTICE text file distributed
|
||||||
Major Component, or to implement a Standard Interface for which an
|
as part of the Derivative Works; within the Source form or
|
||||||
implementation is available to the public in source code form. A
|
documentation, if provided along with the Derivative Works; or,
|
||||||
"Major Component", in this context, means a major essential component
|
within a display generated by the Derivative Works, if and
|
||||||
(kernel, window system, and so on) of the specific operating system
|
wherever such third-party notices normally appear. The contents
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
of the NOTICE file are for informational purposes only and
|
||||||
produce the work, or an object code interpreter used to run it.
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
The "Corresponding Source" for a work in object code form means all
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
the source code needed to generate, install, and (for an executable
|
that such additional attribution notices cannot be construed
|
||||||
work) run the object code and to modify the work, including scripts to
|
as modifying the License.
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
You may add Your own copyright statement to Your modifications and
|
||||||
programs which are used unmodified in performing those activities but
|
may provide additional or different license terms and conditions
|
||||||
which are not part of the work. For example, Corresponding Source
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
includes interface definition files associated with source files for
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
the work, and the source code for shared libraries and dynamically
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
linked subprograms that the work is specifically designed to require,
|
the conditions stated in this License.
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
The Corresponding Source need not include anything that users
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
can regenerate automatically from other parts of the Corresponding
|
this License, without any additional terms or conditions.
|
||||||
Source.
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
The Corresponding Source for a work in source code form is that
|
with Licensor regarding such Contributions.
|
||||||
same work.
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
2. Basic Permissions.
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
All rights granted under this License are granted for the term of
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
permission to run the unmodified Program. The output from running a
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
covered work is covered by this License only if the output, given its
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
content, constitutes a covered work. This License acknowledges your
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
You may make, run and propagate covered works that you do not
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
convey, without conditions so long as your license otherwise remains
|
appropriateness of using or redistributing the Work and assume any
|
||||||
in force. You may convey covered works to others for the sole purpose
|
risks associated with Your exercise of permissions under this License.
|
||||||
of having them make modifications exclusively for you, or provide you
|
|
||||||
with facilities for running those works, provided that you comply with
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
the terms of this License in conveying all material for which you do
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
not control copyright. Those thus making or running the covered works
|
unless required by applicable law (such as deliberate and grossly
|
||||||
for you must do so exclusively on your behalf, under your direction
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
and control, on terms that prohibit them from making any copies of
|
liable to You for damages, including any direct, indirect, special,
|
||||||
your copyrighted material outside their relationship with you.
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
Conveying under any other circumstances is permitted solely under
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
the conditions stated below. Sublicensing is not allowed; section 10
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
makes it unnecessary.
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
No covered work shall be deemed part of an effective technological
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
measure under any applicable law fulfilling obligations under article
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
or other liability obligations and/or rights consistent with this
|
||||||
similar laws prohibiting or restricting circumvention of such
|
License. However, in accepting such obligations, You may act only
|
||||||
measures.
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
defend, and hold each Contributor harmless for any liability
|
||||||
circumvention of technological measures to the extent such circumvention
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
is effected by exercising rights under this License with respect to
|
of your accepting any such warranty or additional liability.
|
||||||
the covered work, and you disclaim any intention to limit operation or
|
|
||||||
modification of the work as a means of enforcing, against the work's
|
|
||||||
users, your or third parties' legal rights to forbid circumvention of
|
|
||||||
technological measures.
|
|
||||||
|
|
||||||
4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these conditions:
|
|
||||||
|
|
||||||
a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
|
|
||||||
b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under section
|
|
||||||
7. This requirement modifies the requirement in section 4 to
|
|
||||||
"keep intact all notices".
|
|
||||||
|
|
||||||
c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
|
|
||||||
d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms
|
|
||||||
of sections 4 and 5, provided that you also convey the
|
|
||||||
machine-readable Corresponding Source under the terms of this License,
|
|
||||||
in one of these ways:
|
|
||||||
|
|
||||||
a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
|
|
||||||
b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the
|
|
||||||
Corresponding Source from a network server at no charge.
|
|
||||||
|
|
||||||
c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
|
|
||||||
d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
|
|
||||||
e) Convey the object code using peer-to-peer transmission, provided
|
|
||||||
you inform other peers where the object code and Corresponding
|
|
||||||
Source of the work are being offered to the general public at no
|
|
||||||
charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal, family,
|
|
||||||
or household purposes, or (2) anything designed or sold for incorporation
|
|
||||||
into a dwelling. In determining whether a product is a consumer product,
|
|
||||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, if you modify the
|
|
||||||
Program, your modified version must prominently offer all users
|
|
||||||
interacting with it remotely through a computer network (if your version
|
|
||||||
supports such interaction) an opportunity to receive the Corresponding
|
|
||||||
Source of your version by providing access to the Corresponding Source
|
|
||||||
from a network server at no charge, through some standard or customary
|
|
||||||
means of facilitating copying of software. This Corresponding Source
|
|
||||||
shall include the Corresponding Source for any work covered by version 3
|
|
||||||
of the GNU General Public License that is incorporated pursuant to the
|
|
||||||
following paragraph.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the work with which it is combined will remain governed by version
|
|
||||||
3 of the GNU General Public License.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU Affero General Public License from time to time. Such new versions
|
|
||||||
will be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU Affero General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU Affero General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU Affero General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License as published by
|
|
||||||
the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If your software can interact with users remotely through a computer
|
|
||||||
network, you should also make sure that it provides a way for users to
|
|
||||||
get its source. For example, if your program is a web application, its
|
|
||||||
interface could display a "Source" link that leads users to an archive
|
|
||||||
of the code. There are many ways you could offer source, and different
|
|
||||||
solutions will be better for different programs; see section 13 for the
|
|
||||||
specific requirements.
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
|
||||||
<https://www.gnu.org/licenses/>.
|
|
||||||
|
|||||||
42
README.rst
42
README.rst
@@ -17,17 +17,17 @@ Matrix project <https://matrix.org/docs/guides/introduction>`_, and the `formal
|
|||||||
Installing and configuration
|
Installing and configuration
|
||||||
============================
|
============================
|
||||||
|
|
||||||
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
The Synapse documentation describes `how to install Synapse <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_. We recommend using
|
||||||
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
`Docker images <https://matrix-org.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
|
||||||
<https://element-hq.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
<https://matrix-org.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
|
||||||
|
|
||||||
.. _federation:
|
.. _federation:
|
||||||
|
|
||||||
Synapse has a variety of `config options
|
Synapse has a variety of `config options
|
||||||
<https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
<https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
|
||||||
which can be used to customise its behaviour after installation.
|
which can be used to customise its behaviour after installation.
|
||||||
There are additional details on how to `configure Synapse for federation here
|
There are additional details on how to `configure Synapse for federation here
|
||||||
<https://element-hq.github.io/synapse/latest/federate.html>`_.
|
<https://matrix-org.github.io/synapse/latest/federate.html>`_.
|
||||||
|
|
||||||
.. _reverse-proxy:
|
.. _reverse-proxy:
|
||||||
|
|
||||||
@@ -43,7 +43,7 @@ It is recommended to put a reverse proxy such as
|
|||||||
doing so is that it means that you can expose the default https port (443) to
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
Matrix clients without needing to run Synapse with root privileges.
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
For information on configuring one, see `the reverse proxy docs
|
For information on configuring one, see `the reverse proxy docs
|
||||||
<https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_.
|
<https://matrix-org.github.io/synapse/latest/reverse_proxy.html>`_.
|
||||||
|
|
||||||
Upgrading an existing Synapse
|
Upgrading an existing Synapse
|
||||||
-----------------------------
|
-----------------------------
|
||||||
@@ -52,7 +52,7 @@ The instructions for upgrading Synapse are in `the upgrade notes`_.
|
|||||||
Please check these instructions as upgrading may require extra steps for some
|
Please check these instructions as upgrading may require extra steps for some
|
||||||
versions of Synapse.
|
versions of Synapse.
|
||||||
|
|
||||||
.. _the upgrade notes: https://element-hq.github.io/synapse/develop/upgrade.html
|
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
|
||||||
|
|
||||||
|
|
||||||
Platform dependencies
|
Platform dependencies
|
||||||
@@ -60,7 +60,7 @@ Platform dependencies
|
|||||||
|
|
||||||
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||||
and aims to follow supported upstream versions. See the
|
and aims to follow supported upstream versions. See the
|
||||||
`deprecation policy <https://element-hq.github.io/synapse/latest/deprecation_policy.html>`_
|
`deprecation policy <https://matrix-org.github.io/synapse/latest/deprecation_policy.html>`_
|
||||||
for more details.
|
for more details.
|
||||||
|
|
||||||
|
|
||||||
@@ -114,7 +114,7 @@ from a web client.
|
|||||||
Unless you are running a test instance of Synapse on your local machine, in
|
Unless you are running a test instance of Synapse on your local machine, in
|
||||||
general, you will need to enable TLS support before you can successfully
|
general, you will need to enable TLS support before you can successfully
|
||||||
connect from a client: see
|
connect from a client: see
|
||||||
`TLS certificates <https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
||||||
|
|
||||||
An easy way to get started is to login or register via Element at
|
An easy way to get started is to login or register via Element at
|
||||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||||
@@ -122,7 +122,7 @@ You will need to change the server you are logging into from ``matrix.org``
|
|||||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
If you prefer to use another client, refer to our
|
If you prefer to use another client, refer to our
|
||||||
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
|
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
||||||
|
|
||||||
If all goes well you should at least be able to log in, create a room, and
|
If all goes well you should at least be able to log in, create a room, and
|
||||||
start sending messages.
|
start sending messages.
|
||||||
@@ -136,11 +136,11 @@ By default, registration of new users via Matrix clients is disabled. To enable
|
|||||||
it:
|
it:
|
||||||
|
|
||||||
1. In the
|
1. In the
|
||||||
`registration config section <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
`registration config section <https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
|
||||||
set ``enable_registration: true`` in ``homeserver.yaml``.
|
set ``enable_registration: true`` in ``homeserver.yaml``.
|
||||||
2. Then **either**:
|
2. Then **either**:
|
||||||
|
|
||||||
a. set up a `CAPTCHA <https://element-hq.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
a. set up a `CAPTCHA <https://matrix-org.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
|
||||||
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
|
||||||
|
|
||||||
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
|
||||||
@@ -162,9 +162,9 @@ desired ``localpart`` in the 'User name' box.
|
|||||||
Troubleshooting and support
|
Troubleshooting and support
|
||||||
===========================
|
===========================
|
||||||
|
|
||||||
The `Admin FAQ <https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
The `Admin FAQ <https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html>`_
|
||||||
includes tips on dealing with some common problems. For more details, see
|
includes tips on dealing with some common problems. For more details, see
|
||||||
`Synapse's wider documentation <https://element-hq.github.io/synapse/latest/>`_.
|
`Synapse's wider documentation <https://matrix-org.github.io/synapse/latest/>`_.
|
||||||
|
|
||||||
For additional support installing or managing Synapse, please ask in the community
|
For additional support installing or managing Synapse, please ask in the community
|
||||||
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
|
support room |room|_ (from a matrix.org account if necessary). We do not use GitHub
|
||||||
@@ -211,15 +211,15 @@ Development
|
|||||||
|
|
||||||
We welcome contributions to Synapse from the community!
|
We welcome contributions to Synapse from the community!
|
||||||
The best place to get started is our
|
The best place to get started is our
|
||||||
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
|
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||||
This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
|
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
||||||
|
|
||||||
information for Synapse developers as well as Synapse administrators.
|
information for Synapse developers as well as Synapse administrators.
|
||||||
Developers might be particularly interested in:
|
Developers might be particularly interested in:
|
||||||
|
|
||||||
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
|
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
||||||
* `notes on Synapse's implementation details <https://element-hq.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
|
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
|
||||||
* `how we use git <https://element-hq.github.io/synapse/latest/development/git.html>`_.
|
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.
|
||||||
|
|
||||||
Alongside all that, join our developer community on Matrix:
|
Alongside all that, join our developer community on Matrix:
|
||||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||||
@@ -235,9 +235,9 @@ Alongside all that, join our developer community on Matrix:
|
|||||||
|
|
||||||
.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
|
.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
|
||||||
:alt: (Rendered documentation on GitHub Pages)
|
:alt: (Rendered documentation on GitHub Pages)
|
||||||
:target: https://element-hq.github.io/synapse/latest/
|
:target: https://matrix-org.github.io/synapse/latest/
|
||||||
|
|
||||||
.. |license| image:: https://img.shields.io/github/license/element-hq/synapse
|
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
||||||
:alt: (check license in LICENSE file)
|
:alt: (check license in LICENSE file)
|
||||||
:target: LICENSE
|
:target: LICENSE
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
Upgrading Synapse
|
Upgrading Synapse
|
||||||
=================
|
=================
|
||||||
|
|
||||||
This document has moved to the `Synapse documentation website <https://element-hq.github.io/synapse/latest/upgrade>`_.
|
This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrade>`_.
|
||||||
Please update your links.
|
Please update your links.
|
||||||
|
|
||||||
The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
|
The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
|
||||||
|
|||||||
14
book.toml
14
book.toml
@@ -16,14 +16,14 @@ create-missing = false
|
|||||||
|
|
||||||
[output.html]
|
[output.html]
|
||||||
# The URL visitors will be directed to when they try to edit a page
|
# The URL visitors will be directed to when they try to edit a page
|
||||||
edit-url-template = "https://github.com/element-hq/synapse/edit/develop/{path}"
|
edit-url-template = "https://github.com/matrix-org/synapse/edit/develop/{path}"
|
||||||
|
|
||||||
# Remove the numbers that appear before each item in the sidebar, as they can
|
# Remove the numbers that appear before each item in the sidebar, as they can
|
||||||
# get quite messy as we nest deeper
|
# get quite messy as we nest deeper
|
||||||
no-section-label = true
|
no-section-label = true
|
||||||
|
|
||||||
# The source code URL of the repository
|
# The source code URL of the repository
|
||||||
git-repository-url = "https://github.com/element-hq/synapse"
|
git-repository-url = "https://github.com/matrix-org/synapse"
|
||||||
|
|
||||||
# The path that the docs are hosted on
|
# The path that the docs are hosted on
|
||||||
site-url = "/synapse/"
|
site-url = "/synapse/"
|
||||||
@@ -34,14 +34,6 @@ additional-css = [
|
|||||||
"docs/website_files/table-of-contents.css",
|
"docs/website_files/table-of-contents.css",
|
||||||
"docs/website_files/remove-nav-buttons.css",
|
"docs/website_files/remove-nav-buttons.css",
|
||||||
"docs/website_files/indent-section-headers.css",
|
"docs/website_files/indent-section-headers.css",
|
||||||
"docs/website_files/version-picker.css",
|
|
||||||
]
|
|
||||||
additional-js = [
|
|
||||||
"docs/website_files/table-of-contents.js",
|
|
||||||
"docs/website_files/version-picker.js",
|
|
||||||
"docs/website_files/version.js",
|
|
||||||
]
|
]
|
||||||
|
additional-js = ["docs/website_files/table-of-contents.js"]
|
||||||
theme = "docs/website_files/theme"
|
theme = "docs/website_files/theme"
|
||||||
|
|
||||||
[preprocessor.schema_versions]
|
|
||||||
command = "./scripts-dev/schema_versions.py"
|
|
||||||
|
|||||||
1
changelog.d/15187.feature
Normal file
1
changelog.d/15187.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Stabilise support for [MSC3966](https://github.com/matrix-org/matrix-spec-proposals/pull/3966): `event_property_contains` push condition.
|
||||||
1
changelog.d/15190.bugfix
Normal file
1
changelog.d/15190.bugfix
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Implement [MSC3873](https://github.com/matrix-org/matrix-spec-proposals/pull/3873) to fix a long-standing bug where properties with dots were handled ambiguously in push rules.
|
||||||
1
changelog.d/15195.misc
Normal file
1
changelog.d/15195.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Improve performance of creating and authenticating events.
|
||||||
1
changelog.d/15200.misc
Normal file
1
changelog.d/15200.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Make the `HttpTransactionCache` use the `Requester` in addition of the just the `Request` to build the transaction key.
|
||||||
1
changelog.d/15223.doc
Normal file
1
changelog.d/15223.doc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Add a missing endpoint to the workers documentation.
|
||||||
@@ -1,25 +1,18 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
""" Starts a synapse client console. """
|
""" Starts a synapse client console. """
|
||||||
import argparse
|
import argparse
|
||||||
@@ -776,7 +769,7 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
global CONFIG_JSON
|
global CONFIG_JSON
|
||||||
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
CONFIG_JSON = config_path # bit cheeky, but just overwrite the global
|
||||||
try:
|
try:
|
||||||
with open(config_path) as config:
|
with open(config_path, "r") as config:
|
||||||
syn_cmd.config = json.load(config)
|
syn_cmd.config = json.load(config)
|
||||||
try:
|
try:
|
||||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||||
|
|||||||
@@ -1,23 +1,16 @@
|
|||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import urllib
|
import urllib
|
||||||
@@ -44,6 +37,7 @@ class HttpClient:
|
|||||||
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
||||||
will be the decoded JSON body.
|
will be the decoded JSON body.
|
||||||
"""
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
def get_json(self, url, args=None):
|
def get_json(self, url, args=None):
|
||||||
"""Gets some json from the given host homeserver and path
|
"""Gets some json from the given host homeserver and path
|
||||||
@@ -59,6 +53,7 @@ class HttpClient:
|
|||||||
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
Deferred: Succeeds when we get a 2xx HTTP response. The result
|
||||||
will be the decoded JSON body.
|
will be the decoded JSON body.
|
||||||
"""
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class TwistedHttpClient(HttpClient):
|
class TwistedHttpClient(HttpClient):
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ services:
|
|||||||
- POSTGRES_USER=synapse
|
- POSTGRES_USER=synapse
|
||||||
- POSTGRES_PASSWORD=changeme
|
- POSTGRES_PASSWORD=changeme
|
||||||
# ensure the database gets created correctly
|
# ensure the database gets created correctly
|
||||||
# https://element-hq.github.io/synapse/latest/postgres.html#set-up-database
|
# https://matrix-org.github.io/synapse/latest/postgres.html#set-up-database
|
||||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
||||||
volumes:
|
volumes:
|
||||||
# You may store the database tables in a local folder..
|
# You may store the database tables in a local folder..
|
||||||
|
|||||||
@@ -70,10 +70,6 @@ redis:
|
|||||||
port: 6379
|
port: 6379
|
||||||
# dbid: <redis_logical_db_id>
|
# dbid: <redis_logical_db_id>
|
||||||
# password: <secret_password>
|
# password: <secret_password>
|
||||||
# use_tls: True
|
|
||||||
# certificate_file: <path_to_certificate>
|
|
||||||
# private_key_file: <path_to_private_key>
|
|
||||||
# ca_file: <path_to_ca_certificate>
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This assumes that your Redis service is called `redis` in your Docker Compose file.
|
This assumes that your Redis service is called `redis` in your Docker Compose file.
|
||||||
@@ -113,4 +109,4 @@ federation_sender_instances:
|
|||||||
|
|
||||||
## Other Worker types
|
## Other Worker types
|
||||||
|
|
||||||
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://element-hq.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
|
Using the concepts shown here it is possible to create other worker types in Docker Compose. See the [Workers](https://matrix-org.github.io/synapse/latest/workers.html#available-worker-applications) documentation for a list of available workers.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Using the Synapse Grafana dashboard
|
# Using the Synapse Grafana dashboard
|
||||||
|
|
||||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||||
1. Have your Prometheus scrape your Synapse. https://element-hq.github.io/synapse/latest/metrics-howto.html
|
1. Have your Prometheus scrape your Synapse. https://matrix-org.github.io/synapse/latest/metrics-howto.html
|
||||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||||
3. Set up required recording rules. [contrib/prometheus](../prometheus)
|
3. Set up required recording rules. [contrib/prometheus](../prometheus)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,23 +1,16 @@
|
|||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import cgi
|
import cgi
|
||||||
|
|||||||
@@ -1,23 +1,16 @@
|
|||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
|||||||
@@ -1,23 +1,16 @@
|
|||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import datetime
|
import datetime
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ This directory contains an `lnav` [log format definition](
|
|||||||
https://docs.lnav.org/en/v0.10.1/formats.html#defining-a-new-format
|
https://docs.lnav.org/en/v0.10.1/formats.html#defining-a-new-format
|
||||||
) for Synapse logs as
|
) for Synapse logs as
|
||||||
emitted by Synapse with the default [logging configuration](
|
emitted by Synapse with the default [logging configuration](
|
||||||
https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#log_config
|
https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html#log_config
|
||||||
). It supports lnav 0.10.1 because that's what's packaged by my distribution.
|
). It supports lnav 0.10.1 because that's what's packaged by my distribution.
|
||||||
|
|
||||||
This should allow lnav:
|
This should allow lnav:
|
||||||
@@ -40,7 +40,7 @@ Within lnav itself:
|
|||||||
down and up.
|
down and up.
|
||||||
- Use `o` and `O` to skip through logs based on the request ID (`POST-1234`, or
|
- Use `o` and `O` to skip through logs based on the request ID (`POST-1234`, or
|
||||||
else the value of the [`request_id_header`](
|
else the value of the [`request_id_header`](
|
||||||
https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html?highlight=request_id_header#listeners
|
https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html?highlight=request_id_header#listeners
|
||||||
) header). This may get confused if the same request ID is repeated among
|
) header). This may get confused if the same request ID is repeated among
|
||||||
multiple files or process restarts.
|
multiple files or process restarts.
|
||||||
- ???
|
- ???
|
||||||
|
|||||||
@@ -29,7 +29,7 @@
|
|||||||
"level": "error"
|
"level": "error"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix-federation://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
|
"line": "my-matrix-server-federation-sender-1 | 2023-01-25 20:56:20,995 - synapse.http.matrixfederationclient - 709 - WARNING - federation_transaction_transmission_loop-3 - {PUT-O-3} [example.com] Request failed: PUT matrix://example.com/_matrix/federation/v1/send/1674680155797: HttpResponseException('403: Forbidden')",
|
||||||
"level": "warning"
|
"level": "warning"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ Add a new job to the main prometheus.yml file:
|
|||||||
```
|
```
|
||||||
|
|
||||||
An example of a Prometheus configuration with workers can be found in
|
An example of a Prometheus configuration with workers can be found in
|
||||||
[metrics-howto.md](https://element-hq.github.io/synapse/latest/metrics-howto.html).
|
[metrics-howto.md](https://matrix-org.github.io/synapse/latest/metrics-howto.html).
|
||||||
|
|
||||||
To use `synapse.rules` add
|
To use `synapse.rules` add
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ Purge history API examples
|
|||||||
# `purge_history.sh`
|
# `purge_history.sh`
|
||||||
|
|
||||||
A bash file, that uses the
|
A bash file, that uses the
|
||||||
[purge history API](https://element-hq.github.io/synapse/latest/admin_api/purge_history_api.html)
|
[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
|
||||||
to purge all messages in a list of rooms up to a certain event. You can select a
|
to purge all messages in a list of rooms up to a certain event. You can select a
|
||||||
timeframe or a number of messages that you want to keep in the room.
|
timeframe or a number of messages that you want to keep in the room.
|
||||||
|
|
||||||
@@ -14,5 +14,5 @@ the script.
|
|||||||
# `purge_remote_media.sh`
|
# `purge_remote_media.sh`
|
||||||
|
|
||||||
A bash file, that uses the
|
A bash file, that uses the
|
||||||
[purge history API](https://element-hq.github.io/synapse/latest/admin_api/purge_history_api.html)
|
[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
|
||||||
to purge all old cached remote media.
|
to purge all old cached remote media.
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
# this script will use the api:
|
# this script will use the api:
|
||||||
# https://element-hq.github.io/synapse/latest/admin_api/purge_history_api.html
|
# https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html
|
||||||
#
|
#
|
||||||
# It will purge all messages in a list of rooms up to a cetrain event
|
# It will purge all messages in a list of rooms up to a cetrain event
|
||||||
|
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
The documentation for using systemd to manage synapse workers is now part of
|
The documentation for using systemd to manage synapse workers is now part of
|
||||||
the main synapse distribution. See
|
the main synapse distribution. See
|
||||||
[docs/systemd-with-workers](https://element-hq.github.io/synapse/latest/systemd-with-workers/index.html).
|
[docs/systemd-with-workers](https://matrix-org.github.io/synapse/latest/systemd-with-workers/index.html).
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ This is a setup for managing synapse with a user contributed systemd unit
|
|||||||
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
||||||
to accommodate your installation in accordance with the installation
|
to accommodate your installation in accordance with the installation
|
||||||
instructions provided in
|
instructions provided in
|
||||||
[installation instructions](https://element-hq.github.io/synapse/latest/setup/installation.html).
|
[installation instructions](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
||||||
|
|
||||||
## Setup
|
## Setup
|
||||||
1. Under the service section, ensure the `User` variable matches which user
|
1. Under the service section, ensure the `User` variable matches which user
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Creating multiple stream writers with a bash script
|
# Creating multiple stream writers with a bash script
|
||||||
|
|
||||||
This script creates multiple [stream writer](https://github.com/element-hq/synapse/blob/develop/docs/workers.md#stream-writers) workers.
|
This script creates multiple [stream writer](https://github.com/matrix-org/synapse/blob/develop/docs/workers.md#stream-writers) workers.
|
||||||
|
|
||||||
Stream writers require both replication and HTTP listeners.
|
Stream writers require both replication and HTTP listeners.
|
||||||
|
|
||||||
@@ -71,7 +71,7 @@ cat << EXAMPLECONFIG
|
|||||||
# Don't forget to configure your reverse proxy and
|
# Don't forget to configure your reverse proxy and
|
||||||
# necessary endpoints to their respective worker.
|
# necessary endpoints to their respective worker.
|
||||||
|
|
||||||
# See https://github.com/element-hq/synapse/blob/develop/docs/workers.md
|
# See https://github.com/matrix-org/synapse/blob/develop/docs/workers.md
|
||||||
# for more information.
|
# for more information.
|
||||||
|
|
||||||
# Remember: Under NO circumstances should the replication
|
# Remember: Under NO circumstances should the replication
|
||||||
@@ -102,7 +102,7 @@ You should receive an output similar to the following:
|
|||||||
# Don't forget to configure your reverse proxy and
|
# Don't forget to configure your reverse proxy and
|
||||||
# necessary endpoints to their respective worker.
|
# necessary endpoints to their respective worker.
|
||||||
|
|
||||||
# See https://github.com/element-hq/synapse/blob/develop/docs/workers.md
|
# See https://github.com/matrix-org/synapse/blob/develop/docs/workers.md
|
||||||
# for more information
|
# for more information
|
||||||
|
|
||||||
# Remember: Under NO circumstances should the replication
|
# Remember: Under NO circumstances should the replication
|
||||||
|
|||||||
453
debian/changelog
vendored
453
debian/changelog
vendored
@@ -1,454 +1,3 @@
|
|||||||
matrix-synapse-py3 (1.107.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.107.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 May 2024 14:15:34 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.107.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.107.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 07 May 2024 16:26:26 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.106.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.106.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Apr 2024 11:51:43 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.106.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.106.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Apr 2024 15:54:59 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.105.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.105.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Apr 2024 15:56:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.105.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.105.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Apr 2024 15:53:23 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.105.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.105.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 11 Apr 2024 12:15:49 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.104.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.104.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Apr 2024 17:15:45 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.104.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.104.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Mar 2024 11:48:58 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.103.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.103.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Mar 2024 12:24:36 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.103.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.103.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Mar 2024 15:02:56 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.102.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.102.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Mar 2024 14:47:03 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.102.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.102.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Feb 2024 15:50:36 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.101.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.101.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Feb 2024 10:45:35 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.101.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.101.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Feb 2024 16:02:02 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.100.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.100.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Jan 2024 16:58:19 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.100.0~rc3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.100.0rc3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 24 Jan 2024 14:18:15 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.100.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.100.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 24 Jan 2024 11:59:51 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.100.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.100.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Jan 2024 14:24:16 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.99.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* Fix copyright file with new licensing
|
|
||||||
* New Synapse release 1.99.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Jan 2024 11:58:34 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.99.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.99.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 09 Jan 2024 13:43:56 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.98.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.98.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Dec 2023 15:04:31 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.98.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.98.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Dec 2023 13:08:42 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.97.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.97.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Nov 2023 14:08:58 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.97.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.97.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Nov 2023 12:32:03 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.96.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.96.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 17 Nov 2023 12:48:45 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.96.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.96.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 16 Nov 2023 17:54:26 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.96.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.96.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:09:09 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.95.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.95.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Oct 2023 14:00:00 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.95.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.95.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Oct 2023 13:00:46 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.95.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.95.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Oct 2023 15:50:17 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.94.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.94.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Oct 2023 10:57:41 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.94.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.94.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Oct 2023 11:48:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.93.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.93.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Sep 2023 15:54:40 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.93.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.93.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Sep 2023 11:55:00 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.92.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.92.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 18 Sep 2023 15:05:04 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.92.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.92.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 15 Sep 2023 13:17:41 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.92.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.92.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 13:19:42 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.92.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.92.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Sep 2023 11:59:23 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.91.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.91.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Sep 2023 14:59:30 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.92.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.92.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Sep 2023 11:21:43 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.91.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.91.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 04 Sep 2023 14:03:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.91.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.91.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Aug 2023 11:18:10 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.91.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.91.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 23 Aug 2023 09:47:18 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.90.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.90.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Aug 2023 11:17:34 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.90.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.90.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Aug 2023 15:29:34 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.89.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.89.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Aug 2023 11:07:15 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.89.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.89.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Jul 2023 14:31:07 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.88.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.88.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Jul 2023 13:59:28 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.88.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.88.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Jul 2023 10:20:19 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.87.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.87.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Jul 2023 16:24:00 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.87.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Jun 2023 15:27:04 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.86.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.86.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Jun 2023 17:22:46 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.86.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 14 Jun 2023 12:16:27 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.86.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.86.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Jun 2023 14:30:45 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.85.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.85.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 08 Jun 2023 13:04:18 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.85.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.85.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 07 Jun 2023 10:51:12 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.85.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.85.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Jun 2023 09:39:29 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.85.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.85.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 01 Jun 2023 09:16:18 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.85.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.85.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 30 May 2023 13:56:54 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.84.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.84.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 26 May 2023 16:15:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.84.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.84.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 23 May 2023 10:57:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.84.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.84.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 May 2023 11:12:02 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.83.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.83.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 09 May 2023 18:13:37 +0200
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.83.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.83.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 02 May 2023 15:56:38 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.82.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.82.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Apr 2023 11:56:06 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.82.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Apr 2023 09:47:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.81.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.81.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Apr 2023 14:18:35 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.81.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.81.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 06 Apr 2023 16:07:54 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.81.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.81.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Apr 2023 14:29:03 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.80.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.80.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Mar 2023 11:10:33 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.80.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.80.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 22 Mar 2023 08:30:16 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.80.0~rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.80.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Mar 2023 10:56:08 -0700
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.79.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.79.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Mar 2023 16:14:50 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.79.0~rc2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New Synapse release 1.79.0rc2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 13 Mar 2023 12:54:21 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium
|
matrix-synapse-py3 (1.79.0~rc1) stable; urgency=medium
|
||||||
|
|
||||||
* New Synapse release 1.79.0rc1.
|
* New Synapse release 1.79.0rc1.
|
||||||
@@ -1800,7 +1349,7 @@ matrix-synapse-py3 (0.99.3.1) stable; urgency=medium
|
|||||||
matrix-synapse-py3 (0.99.3) stable; urgency=medium
|
matrix-synapse-py3 (0.99.3) stable; urgency=medium
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
[ Richard van der Hoff ]
|
||||||
* Fix warning during preconfiguration. (Fixes: https://github.com/matrix-org/synapse/issues/4819)
|
* Fix warning during preconfiguration. (Fixes: #4819)
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
[ Synapse Packaging team ]
|
||||||
* New synapse release 0.99.3.
|
* New synapse release 0.99.3.
|
||||||
|
|||||||
2
debian/control
vendored
2
debian/control
vendored
@@ -18,7 +18,7 @@ Build-Depends:
|
|||||||
python3-venv,
|
python3-venv,
|
||||||
tar,
|
tar,
|
||||||
Standards-Version: 3.9.8
|
Standards-Version: 3.9.8
|
||||||
Homepage: https://github.com/element-hq/synapse
|
Homepage: https://github.com/matrix-org/synapse
|
||||||
|
|
||||||
Package: matrix-synapse-py3
|
Package: matrix-synapse-py3
|
||||||
Architecture: any
|
Architecture: any
|
||||||
|
|||||||
6
debian/copyright
vendored
6
debian/copyright
vendored
@@ -1,15 +1,11 @@
|
|||||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||||
Upstream-Name: synapse
|
Upstream-Name: synapse
|
||||||
Source: https://github.com/element-hq/synapse
|
Source: https://github.com/matrix-org/synapse
|
||||||
|
|
||||||
Files: *
|
Files: *
|
||||||
Copyright: 2014-2017, OpenMarket Ltd, 2017-2018 New Vector Ltd
|
Copyright: 2014-2017, OpenMarket Ltd, 2017-2018 New Vector Ltd
|
||||||
License: Apache-2.0
|
License: Apache-2.0
|
||||||
|
|
||||||
Files: *
|
|
||||||
Copyright: 2023 New Vector Ltd
|
|
||||||
License: AGPL-3.0-or-later
|
|
||||||
|
|
||||||
Files: synapse/config/saml2.py
|
Files: synapse/config/saml2.py
|
||||||
Copyright: 2015, Ericsson
|
Copyright: 2015, Ericsson
|
||||||
License: Apache-2.0
|
License: Apache-2.0
|
||||||
|
|||||||
2
debian/po/templates.pot
vendored
2
debian/po/templates.pot
vendored
@@ -30,7 +30,7 @@ msgid ""
|
|||||||
"The name that this homeserver will appear as, to clients and other servers "
|
"The name that this homeserver will appear as, to clients and other servers "
|
||||||
"via federation. This is normally the public hostname of the server running "
|
"via federation. This is normally the public hostname of the server running "
|
||||||
"synapse, but can be different if you set up delegation. Please refer to the "
|
"synapse, but can be different if you set up delegation. Please refer to the "
|
||||||
"delegation documentation in this case: https://github.com/element-hq/synapse/"
|
"delegation documentation in this case: https://github.com/matrix-org/synapse/"
|
||||||
"blob/master/docs/delegate.md."
|
"blob/master/docs/delegate.md."
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
|
|||||||
7
debian/rules
vendored
7
debian/rules
vendored
@@ -40,9 +40,9 @@ override_dh_shlibdeps:
|
|||||||
# to be self-contained, but they have interdependencies and
|
# to be self-contained, but they have interdependencies and
|
||||||
# dpkg-shlibdeps doesn't know how to resolve them.
|
# dpkg-shlibdeps doesn't know how to resolve them.
|
||||||
#
|
#
|
||||||
# As of Pillow 7.1.0, these libraries are in site-packages/Pillow.libs.
|
# As of Pillow 7.1.0, these libraries are in
|
||||||
# Previously, they were in site-packages/PIL/.libs. As of Pillow 10.2.0
|
# site-packages/Pillow.libs. Previously, they were in
|
||||||
# the package name is lowercased to site-packages/pillow.libs.
|
# site-packages/PIL/.libs.
|
||||||
#
|
#
|
||||||
# (we also need to exclude psycopg2, of course, since we've already
|
# (we also need to exclude psycopg2, of course, since we've already
|
||||||
# dealt with that.)
|
# dealt with that.)
|
||||||
@@ -50,7 +50,6 @@ override_dh_shlibdeps:
|
|||||||
dh_shlibdeps \
|
dh_shlibdeps \
|
||||||
-X site-packages/PIL/.libs \
|
-X site-packages/PIL/.libs \
|
||||||
-X site-packages/Pillow.libs \
|
-X site-packages/Pillow.libs \
|
||||||
-X site-packages/pillow.libs \
|
|
||||||
-X site-packages/psycopg2
|
-X site-packages/psycopg2
|
||||||
|
|
||||||
override_dh_virtualenv:
|
override_dh_virtualenv:
|
||||||
|
|||||||
2
debian/templates
vendored
2
debian/templates
vendored
@@ -5,7 +5,7 @@ _Description: Name of the server:
|
|||||||
servers via federation. This is normally the public hostname of the
|
servers via federation. This is normally the public hostname of the
|
||||||
server running synapse, but can be different if you set up delegation.
|
server running synapse, but can be different if you set up delegation.
|
||||||
Please refer to the delegation documentation in this case:
|
Please refer to the delegation documentation in this case:
|
||||||
https://github.com/element-hq/synapse/blob/master/docs/delegate.md.
|
https://github.com/matrix-org/synapse/blob/master/docs/delegate.md.
|
||||||
|
|
||||||
Template: matrix-synapse/report-stats
|
Template: matrix-synapse/report-stats
|
||||||
Type: boolean
|
Type: boolean
|
||||||
|
|||||||
@@ -46,7 +46,7 @@ for port in 8080 8081 8082; do
|
|||||||
echo ''
|
echo ''
|
||||||
|
|
||||||
# Warning, this heredoc depends on the interaction of tabs and spaces.
|
# Warning, this heredoc depends on the interaction of tabs and spaces.
|
||||||
# Please don't accidentally bork me with your fancy settings.
|
# Please don't accidentaly bork me with your fancy settings.
|
||||||
listeners=$(cat <<-PORTLISTENERS
|
listeners=$(cat <<-PORTLISTENERS
|
||||||
# Configure server to listen on both $https_port and $port
|
# Configure server to listen on both $https_port and $port
|
||||||
# This overides some of the default settings above
|
# This overides some of the default settings above
|
||||||
@@ -80,8 +80,12 @@ for port in 8080 8081 8082; do
|
|||||||
echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
|
echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
|
||||||
echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
|
echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
|
||||||
|
|
||||||
# Request keys directly from servers contacted over federation
|
# Ignore keys from the trusted keys server
|
||||||
echo 'trusted_key_servers: []'
|
echo '# Ignore keys from the trusted keys server'
|
||||||
|
echo 'trusted_key_servers:'
|
||||||
|
echo ' - server_name: "matrix.org"'
|
||||||
|
echo ' accept_keys_insecurely: true'
|
||||||
|
echo ''
|
||||||
|
|
||||||
# Allow the servers to communicate over localhost.
|
# Allow the servers to communicate over localhost.
|
||||||
allow_list=$(cat <<-ALLOW_LIST
|
allow_list=$(cat <<-ALLOW_LIST
|
||||||
|
|||||||
@@ -25,9 +25,9 @@ ARG PYTHON_VERSION=3.11
|
|||||||
###
|
###
|
||||||
### Stage 0: generate requirements.txt
|
### Stage 0: generate requirements.txt
|
||||||
###
|
###
|
||||||
# We hardcode the use of Debian bookworm here because this could change upstream
|
# We hardcode the use of Debian bullseye here because this could change upstream
|
||||||
# and other Dockerfiles used for testing are expecting bookworm.
|
# and other Dockerfiles used for testing are expecting bullseye.
|
||||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as requirements
|
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as requirements
|
||||||
|
|
||||||
# RUN --mount is specific to buildkit and is documented at
|
# RUN --mount is specific to buildkit and is documented at
|
||||||
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
|
||||||
@@ -37,24 +37,9 @@ RUN \
|
|||||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
apt-get update -qq && apt-get install -yqq \
|
apt-get update -qq && apt-get install -yqq \
|
||||||
build-essential curl git libffi-dev libssl-dev pkg-config \
|
build-essential git libffi-dev libssl-dev \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Install rust and ensure its in the PATH.
|
|
||||||
# (Rust may be needed to compile `cryptography`---which is one of poetry's
|
|
||||||
# dependencies---on platforms that don't have a `cryptography` wheel.
|
|
||||||
ENV RUSTUP_HOME=/rust
|
|
||||||
ENV CARGO_HOME=/cargo
|
|
||||||
ENV PATH=/cargo/bin:/rust/bin:$PATH
|
|
||||||
RUN mkdir /rust /cargo
|
|
||||||
|
|
||||||
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
|
|
||||||
|
|
||||||
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
|
|
||||||
# set to true, so we expose it as a build-arg.
|
|
||||||
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
|
|
||||||
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
|
|
||||||
|
|
||||||
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
# We install poetry in its own build stage to avoid its dependencies conflicting with
|
||||||
# synapse's dependencies.
|
# synapse's dependencies.
|
||||||
RUN --mount=type=cache,target=/root/.cache/pip \
|
RUN --mount=type=cache,target=/root/.cache/pip \
|
||||||
@@ -87,7 +72,7 @@ RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
|
|||||||
###
|
###
|
||||||
### Stage 1: builder
|
### Stage 1: builder
|
||||||
###
|
###
|
||||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm as builder
|
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye as builder
|
||||||
|
|
||||||
# install the OS build deps
|
# install the OS build deps
|
||||||
RUN \
|
RUN \
|
||||||
@@ -158,12 +143,12 @@ RUN --mount=type=cache,target=/synapse/target,sharing=locked \
|
|||||||
### Stage 2: runtime
|
### Stage 2: runtime
|
||||||
###
|
###
|
||||||
|
|
||||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
|
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
|
||||||
|
|
||||||
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
||||||
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
|
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
|
||||||
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
|
LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
|
||||||
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
|
LABEL org.opencontainers.image.licenses='Apache-2.0'
|
||||||
|
|
||||||
RUN \
|
RUN \
|
||||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
@@ -173,10 +158,10 @@ RUN \
|
|||||||
gosu \
|
gosu \
|
||||||
libjpeg62-turbo \
|
libjpeg62-turbo \
|
||||||
libpq5 \
|
libpq5 \
|
||||||
libwebp7 \
|
libwebp6 \
|
||||||
xmlsec1 \
|
xmlsec1 \
|
||||||
libjemalloc2 \
|
libjemalloc2 \
|
||||||
libicu72 \
|
libicu67 \
|
||||||
libssl-dev \
|
libssl-dev \
|
||||||
openssl \
|
openssl \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ ARG distro=""
|
|||||||
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
|
# https://launchpad.net/~jyrki-pulliainen/+archive/ubuntu/dh-virtualenv, but
|
||||||
# it's not obviously easier to use that than to build our own.)
|
# it's not obviously easier to use that than to build our own.)
|
||||||
|
|
||||||
FROM docker.io/library/${distro} as builder
|
FROM ${distro} as builder
|
||||||
|
|
||||||
RUN apt-get update -qq -o Acquire::Languages=none
|
RUN apt-get update -qq -o Acquire::Languages=none
|
||||||
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||||
@@ -55,13 +55,17 @@ RUN cd /dh-virtualenv && DEB_BUILD_OPTIONS=nodoc dpkg-buildpackage -us -uc -b
|
|||||||
###
|
###
|
||||||
### Stage 1
|
### Stage 1
|
||||||
###
|
###
|
||||||
FROM docker.io/library/${distro}
|
FROM ${distro}
|
||||||
|
|
||||||
# Get the distro we want to pull from as a dynamic build variable
|
# Get the distro we want to pull from as a dynamic build variable
|
||||||
# (We need to define it in each build stage)
|
# (We need to define it in each build stage)
|
||||||
ARG distro=""
|
ARG distro=""
|
||||||
ENV distro ${distro}
|
ENV distro ${distro}
|
||||||
|
|
||||||
|
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
|
||||||
|
# http://bugs.python.org/issue19846
|
||||||
|
ENV LANG C.UTF-8
|
||||||
|
|
||||||
# Install the build dependencies
|
# Install the build dependencies
|
||||||
#
|
#
|
||||||
# NB: keep this list in sync with the list of build-deps in debian/control
|
# NB: keep this list in sync with the list of build-deps in debian/control
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
|
|||||||
# target image. For repeated rebuilds, this is much faster than apt installing
|
# target image. For repeated rebuilds, this is much faster than apt installing
|
||||||
# each time.
|
# each time.
|
||||||
|
|
||||||
FROM docker.io/library/debian:bookworm-slim AS deps_base
|
FROM debian:bullseye-slim AS deps_base
|
||||||
RUN \
|
RUN \
|
||||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
@@ -21,7 +21,7 @@ FROM docker.io/library/debian:bookworm-slim AS deps_base
|
|||||||
# which makes it much easier to copy (but we need to make sure we use an image
|
# which makes it much easier to copy (but we need to make sure we use an image
|
||||||
# based on the same debian version as the synapse image, to make sure we get
|
# based on the same debian version as the synapse image, to make sure we get
|
||||||
# the expected version of libc.
|
# the expected version of libc.
|
||||||
FROM docker.io/library/redis:7-bookworm AS redis_base
|
FROM redis:6-bullseye AS redis_base
|
||||||
|
|
||||||
# now build the final image, based on the the regular Synapse docker image
|
# now build the final image, based on the the regular Synapse docker image
|
||||||
FROM $FROM
|
FROM $FROM
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ and run Synapse against Complement.
|
|||||||
Consult the [contributing guide][guideComplementSh] for instructions on how to use it.
|
Consult the [contributing guide][guideComplementSh] for instructions on how to use it.
|
||||||
|
|
||||||
|
|
||||||
[guideComplementSh]: https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-integration-tests-complement
|
[guideComplementSh]: https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-integration-tests-complement
|
||||||
|
|
||||||
## Building and running the images manually
|
## Building and running the images manually
|
||||||
|
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ docker run -it --rm \
|
|||||||
```
|
```
|
||||||
|
|
||||||
For information on picking a suitable server name, see
|
For information on picking a suitable server name, see
|
||||||
https://element-hq.github.io/synapse/latest/setup/installation.html.
|
https://matrix-org.github.io/synapse/latest/setup/installation.html.
|
||||||
|
|
||||||
The above command will generate a `homeserver.yaml` in (typically)
|
The above command will generate a `homeserver.yaml` in (typically)
|
||||||
`/var/lib/docker/volumes/synapse-data/_data`. You should check this file, and
|
`/var/lib/docker/volumes/synapse-data/_data`. You should check this file, and
|
||||||
@@ -73,12 +73,11 @@ The following environment variables are supported in `generate` mode:
|
|||||||
will log sensitive information such as access tokens.
|
will log sensitive information such as access tokens.
|
||||||
This should not be needed unless you are a developer attempting to debug something
|
This should not be needed unless you are a developer attempting to debug something
|
||||||
particularly tricky.
|
particularly tricky.
|
||||||
* `SYNAPSE_LOG_TESTING`: if set, Synapse will log additional information useful
|
|
||||||
for testing.
|
|
||||||
|
|
||||||
## Postgres
|
## Postgres
|
||||||
|
|
||||||
By default the config will use SQLite. See the [docs on using Postgres](https://github.com/element-hq/synapse/blob/develop/docs/postgres.md) for more info on how to use Postgres. Until this section is improved [this issue](https://github.com/element-hq/synapse/issues/8304) may provide useful information.
|
By default the config will use SQLite. See the [docs on using Postgres](https://github.com/matrix-org/synapse/blob/develop/docs/postgres.md) for more info on how to use Postgres. Until this section is improved [this issue](https://github.com/matrix-org/synapse/issues/8304) may provide useful information.
|
||||||
|
|
||||||
## Running synapse
|
## Running synapse
|
||||||
|
|
||||||
@@ -151,10 +150,10 @@ is suitable for local testing, but for any practical use, you will either need
|
|||||||
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||||
|
|
||||||
For documentation on using a reverse proxy, see
|
For documentation on using a reverse proxy, see
|
||||||
https://github.com/element-hq/synapse/blob/master/docs/reverse_proxy.md.
|
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
|
||||||
|
|
||||||
For more information on enabling TLS support in synapse itself, see
|
For more information on enabling TLS support in synapse itself, see
|
||||||
https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates. Of
|
https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates. Of
|
||||||
course, you will need to expose the TLS port from the container with a `-p`
|
course, you will need to expose the TLS port from the container with a `-p`
|
||||||
argument to `docker run`.
|
argument to `docker run`.
|
||||||
|
|
||||||
@@ -242,4 +241,4 @@ healthcheck:
|
|||||||
|
|
||||||
Jemalloc is embedded in the image and will be used instead of the default allocator.
|
Jemalloc is embedded in the image and will be used instead of the default allocator.
|
||||||
You can read about jemalloc by reading the Synapse
|
You can read about jemalloc by reading the Synapse
|
||||||
[Admin FAQ](https://element-hq.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).
|
[Admin FAQ](https://matrix-org.github.io/synapse/latest/usage/administration/admin_faq.html#help-synapse-is-slow-and-eats-all-my-ramcpu).
|
||||||
|
|||||||
@@ -1,58 +1,57 @@
|
|||||||
# syntax=docker/dockerfile:1
|
# syntax=docker/dockerfile:1
|
||||||
# This dockerfile builds on top of 'docker/Dockerfile-workers' in element-hq/synapse
|
# This dockerfile builds on top of 'docker/Dockerfile-workers' in matrix-org/synapse
|
||||||
# by including a built-in postgres instance, as well as setting up the homeserver so
|
# by including a built-in postgres instance, as well as setting up the homeserver so
|
||||||
# that it is ready for testing via Complement.
|
# that it is ready for testing via Complement.
|
||||||
#
|
#
|
||||||
# Instructions for building this image from those it depends on is detailed in this guide:
|
# Instructions for building this image from those it depends on is detailed in this guide:
|
||||||
# https://github.com/element-hq/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
|
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
|
||||||
|
|
||||||
ARG SYNAPSE_VERSION=latest
|
ARG SYNAPSE_VERSION=latest
|
||||||
# This is an intermediate image, to be built locally (not pulled from a registry).
|
|
||||||
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
|
||||||
|
|
||||||
FROM $FROM
|
FROM $FROM
|
||||||
# First of all, we copy postgres server from the official postgres image,
|
# First of all, we copy postgres server from the official postgres image,
|
||||||
# since for repeated rebuilds, this is much faster than apt installing
|
# since for repeated rebuilds, this is much faster than apt installing
|
||||||
# postgres each time.
|
# postgres each time.
|
||||||
|
|
||||||
# This trick only works because (a) the Synapse image happens to have all the
|
# This trick only works because (a) the Synapse image happens to have all the
|
||||||
# shared libraries that postgres wants, (b) we use a postgres image based on
|
# shared libraries that postgres wants, (b) we use a postgres image based on
|
||||||
# the same debian version as Synapse's docker image (so the versions of the
|
# the same debian version as Synapse's docker image (so the versions of the
|
||||||
# shared libraries match).
|
# shared libraries match).
|
||||||
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
|
||||||
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
|
COPY --from=postgres:13-bullseye /usr/lib/postgresql /usr/lib/postgresql
|
||||||
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
|
COPY --from=postgres:13-bullseye /usr/share/postgresql /usr/share/postgresql
|
||||||
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
|
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
|
||||||
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
|
||||||
ENV PGDATA=/var/lib/postgresql/data
|
ENV PGDATA=/var/lib/postgresql/data
|
||||||
|
|
||||||
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
|
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
|
||||||
RUN gosu postgres initdb --locale=C --encoding=UTF-8 --auth-host password
|
RUN gosu postgres initdb --locale=C --encoding=UTF-8 --auth-host password
|
||||||
|
|
||||||
# Configure a password and create a database for Synapse
|
# Configure a password and create a database for Synapse
|
||||||
RUN echo "ALTER USER postgres PASSWORD 'somesecret'" | gosu postgres postgres --single
|
RUN echo "ALTER USER postgres PASSWORD 'somesecret'" | gosu postgres postgres --single
|
||||||
RUN echo "CREATE DATABASE synapse" | gosu postgres postgres --single
|
RUN echo "CREATE DATABASE synapse" | gosu postgres postgres --single
|
||||||
|
|
||||||
# Extend the shared homeserver config to disable rate-limiting,
|
# Extend the shared homeserver config to disable rate-limiting,
|
||||||
# set Complement's static shared secret, enable registration, amongst other
|
# set Complement's static shared secret, enable registration, amongst other
|
||||||
# tweaks to get Synapse ready for testing.
|
# tweaks to get Synapse ready for testing.
|
||||||
# To do this, we copy the old template out of the way and then include it
|
# To do this, we copy the old template out of the way and then include it
|
||||||
# with Jinja2.
|
# with Jinja2.
|
||||||
RUN mv /conf/shared.yaml.j2 /conf/shared-orig.yaml.j2
|
RUN mv /conf/shared.yaml.j2 /conf/shared-orig.yaml.j2
|
||||||
COPY conf/workers-shared-extra.yaml.j2 /conf/shared.yaml.j2
|
COPY conf/workers-shared-extra.yaml.j2 /conf/shared.yaml.j2
|
||||||
|
|
||||||
WORKDIR /data
|
WORKDIR /data
|
||||||
|
|
||||||
COPY conf/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
|
COPY conf/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
|
||||||
|
|
||||||
# Copy the entrypoint
|
# Copy the entrypoint
|
||||||
COPY conf/start_for_complement.sh /
|
COPY conf/start_for_complement.sh /
|
||||||
|
|
||||||
# Expose nginx's listener ports
|
# Expose nginx's listener ports
|
||||||
EXPOSE 8008 8448
|
EXPOSE 8008 8448
|
||||||
|
|
||||||
ENTRYPOINT ["/start_for_complement.sh"]
|
ENTRYPOINT ["/start_for_complement.sh"]
|
||||||
|
|
||||||
# Update the healthcheck to have a shorter check interval
|
# Update the healthcheck to have a shorter check interval
|
||||||
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
|
||||||
CMD /bin/sh /healthcheck.sh
|
CMD /bin/sh /healthcheck.sh
|
||||||
|
|||||||
@@ -30,14 +30,3 @@ Consult `scripts-dev/complement.sh` in the repository root for a real example.
|
|||||||
|
|
||||||
[complement]: https://github.com/matrix-org/complement
|
[complement]: https://github.com/matrix-org/complement
|
||||||
[complementEnv]: https://github.com/matrix-org/complement/pull/382
|
[complementEnv]: https://github.com/matrix-org/complement/pull/382
|
||||||
|
|
||||||
## How to modify homeserver.yaml for Complement tests
|
|
||||||
|
|
||||||
It's common for MSCs to be gated behind a feature flag like this:
|
|
||||||
```yaml
|
|
||||||
experimental_features:
|
|
||||||
faster_joins: true
|
|
||||||
```
|
|
||||||
To modify this for the Complement image, modify `./conf/workers-shared-extra.yaml.j2`. Despite the name,
|
|
||||||
this will affect non-worker mode as well. Remember to _rebuild_ the image (so don't use `-e` if using
|
|
||||||
`complement.sh`).
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
[program:postgres]
|
[program:postgres]
|
||||||
command=/usr/local/bin/prefix-log gosu postgres postgres
|
command=/usr/local/bin/prefix-log gosu postgres postgres
|
||||||
|
|
||||||
# Only start if START_POSTGRES=true
|
# Only start if START_POSTGRES=1
|
||||||
autostart=%(ENV_START_POSTGRES)s
|
autostart=%(ENV_START_POSTGRES)s
|
||||||
|
|
||||||
# Lower priority number = starts first
|
# Lower priority number = starts first
|
||||||
|
|||||||
@@ -32,9 +32,8 @@ case "$SYNAPSE_COMPLEMENT_DATABASE" in
|
|||||||
;;
|
;;
|
||||||
|
|
||||||
sqlite|"")
|
sqlite|"")
|
||||||
# Set START_POSTGRES to false unless it has already been set
|
# Configure supervisord not to start Postgres, as we don't need it
|
||||||
# (i.e. by another container image inheriting our own).
|
export START_POSTGRES=false
|
||||||
export START_POSTGRES=${START_POSTGRES:-false}
|
|
||||||
;;
|
;;
|
||||||
|
|
||||||
*)
|
*)
|
||||||
@@ -52,7 +51,8 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
|
|||||||
# -z True if the length of string is zero.
|
# -z True if the length of string is zero.
|
||||||
if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
|
if [[ -z "$SYNAPSE_WORKER_TYPES" ]]; then
|
||||||
export SYNAPSE_WORKER_TYPES="\
|
export SYNAPSE_WORKER_TYPES="\
|
||||||
event_persister:2, \
|
event_persister, \
|
||||||
|
event_persister, \
|
||||||
background_worker, \
|
background_worker, \
|
||||||
frontend_proxy, \
|
frontend_proxy, \
|
||||||
event_creator, \
|
event_creator, \
|
||||||
@@ -64,16 +64,10 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
|
|||||||
synchrotron, \
|
synchrotron, \
|
||||||
client_reader, \
|
client_reader, \
|
||||||
appservice, \
|
appservice, \
|
||||||
pusher, \
|
pusher"
|
||||||
stream_writers=account_data+presence+receipts+to_device+typing"
|
|
||||||
|
|
||||||
fi
|
fi
|
||||||
log "Workers requested: $SYNAPSE_WORKER_TYPES"
|
log "Workers requested: $SYNAPSE_WORKER_TYPES"
|
||||||
# adjust connection pool limits on worker mode as otherwise running lots of worker synapses
|
|
||||||
# can make docker unhappy (in GHA)
|
|
||||||
export POSTGRES_CP_MIN=1
|
|
||||||
export POSTGRES_CP_MAX=3
|
|
||||||
echo "using reduced connection pool limits for worker mode"
|
|
||||||
# Improve startup times by using a launcher based on fork()
|
# Improve startup times by using a launcher based on fork()
|
||||||
export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1
|
export SYNAPSE_USE_EXPERIMENTAL_FORKING_LAUNCHER=1
|
||||||
else
|
else
|
||||||
|
|||||||
@@ -92,6 +92,10 @@ allow_device_name_lookup_over_federation: true
|
|||||||
## Experimental Features ##
|
## Experimental Features ##
|
||||||
|
|
||||||
experimental_features:
|
experimental_features:
|
||||||
|
# Enable history backfilling support
|
||||||
|
msc2716_enabled: true
|
||||||
|
# client-side support for partial state in /send_join responses
|
||||||
|
faster_joins: true
|
||||||
# Enable support for polls
|
# Enable support for polls
|
||||||
msc3381_polls_enabled: true
|
msc3381_polls_enabled: true
|
||||||
# Enable deleting device-specific notification settings stored in account data
|
# Enable deleting device-specific notification settings stored in account data
|
||||||
@@ -100,12 +104,6 @@ experimental_features:
|
|||||||
msc3391_enabled: true
|
msc3391_enabled: true
|
||||||
# Filtering /messages by relation type.
|
# Filtering /messages by relation type.
|
||||||
msc3874_enabled: true
|
msc3874_enabled: true
|
||||||
# no UIA for x-signing upload for the first time
|
|
||||||
msc3967_enabled: true
|
|
||||||
# Expose a room summary for public rooms
|
|
||||||
msc3266_enabled: true
|
|
||||||
|
|
||||||
msc4115_membership_on_events: true
|
|
||||||
|
|
||||||
server_notices:
|
server_notices:
|
||||||
system_mxid_localpart: _server
|
system_mxid_localpart: _server
|
||||||
|
|||||||
@@ -35,11 +35,7 @@ server {
|
|||||||
|
|
||||||
# Send all other traffic to the main process
|
# Send all other traffic to the main process
|
||||||
location ~* ^(\\/_matrix|\\/_synapse) {
|
location ~* ^(\\/_matrix|\\/_synapse) {
|
||||||
{% if using_unix_sockets %}
|
|
||||||
proxy_pass http://unix:/run/main_public.sock;
|
|
||||||
{% else %}
|
|
||||||
proxy_pass http://localhost:8080;
|
proxy_pass http://localhost:8080;
|
||||||
{% endif %}
|
|
||||||
proxy_set_header X-Forwarded-For $remote_addr;
|
proxy_set_header X-Forwarded-For $remote_addr;
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
proxy_set_header Host $host;
|
proxy_set_header Host $host;
|
||||||
|
|||||||
@@ -6,9 +6,6 @@
|
|||||||
{% if enable_redis %}
|
{% if enable_redis %}
|
||||||
redis:
|
redis:
|
||||||
enabled: true
|
enabled: true
|
||||||
{% if using_unix_sockets %}
|
|
||||||
path: /tmp/redis.sock
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if appservice_registrations is not none %}
|
{% if appservice_registrations is not none %}
|
||||||
|
|||||||
@@ -19,11 +19,7 @@ username=www-data
|
|||||||
autorestart=true
|
autorestart=true
|
||||||
|
|
||||||
[program:redis]
|
[program:redis]
|
||||||
{% if using_unix_sockets %}
|
|
||||||
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server --unixsocket /tmp/redis.sock
|
|
||||||
{% else %}
|
|
||||||
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
|
command=/usr/local/bin/prefix-log /usr/local/bin/redis-server
|
||||||
{% endif %}
|
|
||||||
priority=1
|
priority=1
|
||||||
stdout_logfile=/dev/stdout
|
stdout_logfile=/dev/stdout
|
||||||
stdout_logfile_maxbytes=0
|
stdout_logfile_maxbytes=0
|
||||||
|
|||||||
@@ -6,13 +6,13 @@
|
|||||||
worker_app: "{{ app }}"
|
worker_app: "{{ app }}"
|
||||||
worker_name: "{{ name }}"
|
worker_name: "{{ name }}"
|
||||||
|
|
||||||
|
# The replication listener on the main synapse process.
|
||||||
|
worker_replication_host: 127.0.0.1
|
||||||
|
worker_replication_http_port: 9093
|
||||||
|
|
||||||
worker_listeners:
|
worker_listeners:
|
||||||
- type: http
|
- type: http
|
||||||
{% if using_unix_sockets %}
|
|
||||||
path: "/run/worker.{{ port }}"
|
|
||||||
{% else %}
|
|
||||||
port: {{ port }}
|
port: {{ port }}
|
||||||
{% endif %}
|
|
||||||
{% if listener_resources %}
|
{% if listener_resources %}
|
||||||
resources:
|
resources:
|
||||||
- names:
|
- names:
|
||||||
|
|||||||
@@ -36,17 +36,12 @@ listeners:
|
|||||||
|
|
||||||
# Allow configuring in case we want to reverse proxy 8008
|
# Allow configuring in case we want to reverse proxy 8008
|
||||||
# using another process in the same container
|
# using another process in the same container
|
||||||
{% if SYNAPSE_USE_UNIX_SOCKET %}
|
|
||||||
# Unix sockets don't care about TLS or IP addresses or ports
|
|
||||||
- path: '/run/main_public.sock'
|
|
||||||
type: http
|
|
||||||
{% else %}
|
|
||||||
- port: {{ SYNAPSE_HTTP_PORT or 8008 }}
|
- port: {{ SYNAPSE_HTTP_PORT or 8008 }}
|
||||||
tls: false
|
tls: false
|
||||||
bind_addresses: ['::']
|
bind_addresses: ['::']
|
||||||
type: http
|
type: http
|
||||||
x_forwarded: false
|
x_forwarded: false
|
||||||
{% endif %}
|
|
||||||
resources:
|
resources:
|
||||||
- names: [client]
|
- names: [client]
|
||||||
compress: true
|
compress: true
|
||||||
@@ -62,13 +57,10 @@ database:
|
|||||||
user: "{{ POSTGRES_USER or "synapse" }}"
|
user: "{{ POSTGRES_USER or "synapse" }}"
|
||||||
password: "{{ POSTGRES_PASSWORD }}"
|
password: "{{ POSTGRES_PASSWORD }}"
|
||||||
database: "{{ POSTGRES_DB or "synapse" }}"
|
database: "{{ POSTGRES_DB or "synapse" }}"
|
||||||
{% if not SYNAPSE_USE_UNIX_SOCKET %}
|
|
||||||
{# Synapse will use a default unix socket for Postgres when host/port is not specified (behavior from `psycopg2`). #}
|
|
||||||
host: "{{ POSTGRES_HOST or "db" }}"
|
host: "{{ POSTGRES_HOST or "db" }}"
|
||||||
port: "{{ POSTGRES_PORT or "5432" }}"
|
port: "{{ POSTGRES_PORT or "5432" }}"
|
||||||
{% endif %}
|
cp_min: 5
|
||||||
cp_min: {{ POSTGRES_CP_MIN or 5 }}
|
cp_max: 10
|
||||||
cp_max: {{ POSTGRES_CP_MAX or 10 }}
|
|
||||||
{% else %}
|
{% else %}
|
||||||
database:
|
database:
|
||||||
name: "sqlite3"
|
name: "sqlite3"
|
||||||
|
|||||||
@@ -49,35 +49,17 @@ handlers:
|
|||||||
class: logging.StreamHandler
|
class: logging.StreamHandler
|
||||||
formatter: precise
|
formatter: precise
|
||||||
|
|
||||||
loggers:
|
{% if not SYNAPSE_LOG_SENSITIVE %}
|
||||||
# This is just here so we can leave `loggers` in the config regardless of whether
|
{#
|
||||||
# we configure other loggers below (avoid empty yaml dict error).
|
|
||||||
_placeholder:
|
|
||||||
level: "INFO"
|
|
||||||
|
|
||||||
{% if not SYNAPSE_LOG_SENSITIVE %}
|
|
||||||
{#
|
|
||||||
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
|
If SYNAPSE_LOG_SENSITIVE is unset, then override synapse.storage.SQL to INFO
|
||||||
so that DEBUG entries (containing sensitive information) are not emitted.
|
so that DEBUG entries (containing sensitive information) are not emitted.
|
||||||
#}
|
#}
|
||||||
|
loggers:
|
||||||
synapse.storage.SQL:
|
synapse.storage.SQL:
|
||||||
# beware: increasing this to DEBUG will make synapse log sensitive
|
# beware: increasing this to DEBUG will make synapse log sensitive
|
||||||
# information such as access tokens.
|
# information such as access tokens.
|
||||||
level: INFO
|
level: INFO
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if SYNAPSE_LOG_TESTING %}
|
|
||||||
{#
|
|
||||||
If Synapse is under test, log a few more useful things for a developer
|
|
||||||
attempting to debug something particularly tricky.
|
|
||||||
|
|
||||||
With `synapse.visibility.filtered_event_debug`, it logs when events are (maybe
|
|
||||||
unexpectedly) filtered out of responses in tests. It's just nice to be able to
|
|
||||||
look at the CI log and figure out why an event isn't being returned.
|
|
||||||
#}
|
|
||||||
synapse.visibility.filtered_event_debug:
|
|
||||||
level: DEBUG
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
root:
|
root:
|
||||||
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
||||||
|
|||||||
@@ -1,24 +1,17 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
#
|
|
||||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
||||||
#
|
|
||||||
# Copyright 2021 The Matrix.org Foundation C.I.C.
|
# Copyright 2021 The Matrix.org Foundation C.I.C.
|
||||||
# Copyright (C) 2023 New Vector, Ltd
|
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# it under the terms of the GNU Affero General Public License as
|
# you may not use this file except in compliance with the License.
|
||||||
# published by the Free Software Foundation, either version 3 of the
|
# You may obtain a copy of the License at
|
||||||
# License, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# See the GNU Affero General Public License for more details:
|
|
||||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
||||||
#
|
|
||||||
# Originally licensed under the Apache License, Version 2.0:
|
|
||||||
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
||||||
#
|
|
||||||
# [This file includes modifications made by New Vector Limited]
|
|
||||||
#
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
# This script reads environment variables and generates a shared Synapse worker,
|
# This script reads environment variables and generates a shared Synapse worker,
|
||||||
# nginx and supervisord configs depending on the workers requested.
|
# nginx and supervisord configs depending on the workers requested.
|
||||||
@@ -26,15 +19,8 @@
|
|||||||
# The environment variables it reads are:
|
# The environment variables it reads are:
|
||||||
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
|
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
|
||||||
# * SYNAPSE_REPORT_STATS: Whether to report stats.
|
# * SYNAPSE_REPORT_STATS: Whether to report stats.
|
||||||
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKERS_CONFIG
|
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
|
||||||
# below. Leave empty for no workers. Add a ':' and a number at the end to
|
# below. Leave empty for no workers.
|
||||||
# multiply that worker. Append multiple worker types with '+' to merge the
|
|
||||||
# worker types into a single worker. Add a name and a '=' to the front of a
|
|
||||||
# worker type to give this instance a name in logs and nginx.
|
|
||||||
# Examples:
|
|
||||||
# SYNAPSE_WORKER_TYPES='event_persister, federation_sender, client_reader'
|
|
||||||
# SYNAPSE_WORKER_TYPES='event_persister:2, federation_sender:2, client_reader'
|
|
||||||
# SYNAPSE_WORKER_TYPES='stream_writers=account_data+presence+typing'
|
|
||||||
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
|
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
|
||||||
# will be treated as Application Service registration files.
|
# will be treated as Application Service registration files.
|
||||||
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
|
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
|
||||||
@@ -47,8 +33,6 @@
|
|||||||
# log level. INFO is the default.
|
# log level. INFO is the default.
|
||||||
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
|
# * SYNAPSE_LOG_SENSITIVE: If unset, SQL and SQL values won't be logged,
|
||||||
# regardless of the SYNAPSE_LOG_LEVEL setting.
|
# regardless of the SYNAPSE_LOG_LEVEL setting.
|
||||||
# * SYNAPSE_LOG_TESTING: if set, Synapse will log additional information useful
|
|
||||||
# for testing.
|
|
||||||
#
|
#
|
||||||
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
|
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
|
||||||
# in the project's README), this script may be run multiple times, and functionality should
|
# in the project's README), this script may be run multiple times, and functionality should
|
||||||
@@ -56,39 +40,15 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
import re
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
from argparse import ArgumentParser
|
|
||||||
from collections import defaultdict
|
|
||||||
from itertools import chain
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional, Set
|
||||||
Any,
|
|
||||||
Dict,
|
|
||||||
List,
|
|
||||||
Mapping,
|
|
||||||
MutableMapping,
|
|
||||||
NoReturn,
|
|
||||||
Optional,
|
|
||||||
Set,
|
|
||||||
SupportsIndex,
|
|
||||||
)
|
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
from jinja2 import Environment, FileSystemLoader
|
from jinja2 import Environment, FileSystemLoader
|
||||||
|
|
||||||
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
|
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
|
||||||
MAIN_PROCESS_INSTANCE_NAME = "main"
|
|
||||||
MAIN_PROCESS_LOCALHOST_ADDRESS = "127.0.0.1"
|
|
||||||
MAIN_PROCESS_REPLICATION_PORT = 9093
|
|
||||||
# Obviously, these would only be used with the UNIX socket option
|
|
||||||
MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH = "/run/main_public.sock"
|
|
||||||
MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH = "/run/main_private.sock"
|
|
||||||
|
|
||||||
# A simple name used as a placeholder in the WORKERS_CONFIG below. This will be replaced
|
|
||||||
# during processing with the name of the worker.
|
|
||||||
WORKER_PLACEHOLDER_NAME = "placeholder_name"
|
|
||||||
|
|
||||||
# Workers with exposed endpoints needs either "client", "federation", or "media" listener_resources
|
# Workers with exposed endpoints needs either "client", "federation", or "media" listener_resources
|
||||||
# Watching /_matrix/client needs a "client" listener
|
# Watching /_matrix/client needs a "client" listener
|
||||||
@@ -110,13 +70,11 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"endpoint_patterns": [
|
"endpoint_patterns": [
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
|
||||||
],
|
],
|
||||||
"shared_extra_conf": {
|
"shared_extra_conf": {"update_user_directory_from_worker": "user_dir1"},
|
||||||
"update_user_directory_from_worker": WORKER_PLACEHOLDER_NAME
|
|
||||||
},
|
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
},
|
},
|
||||||
"media_repository": {
|
"media_repository": {
|
||||||
"app": "synapse.app.generic_worker",
|
"app": "synapse.app.media_repository",
|
||||||
"listener_resources": ["media"],
|
"listener_resources": ["media"],
|
||||||
"endpoint_patterns": [
|
"endpoint_patterns": [
|
||||||
"^/_matrix/media/",
|
"^/_matrix/media/",
|
||||||
@@ -129,7 +87,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
# The first configured media worker will run the media background jobs
|
# The first configured media worker will run the media background jobs
|
||||||
"shared_extra_conf": {
|
"shared_extra_conf": {
|
||||||
"enable_media_repo": False,
|
"enable_media_repo": False,
|
||||||
"media_instance_running_background_jobs": WORKER_PLACEHOLDER_NAME,
|
"media_instance_running_background_jobs": "media_repository1",
|
||||||
},
|
},
|
||||||
"worker_extra_conf": "enable_media_repo: true",
|
"worker_extra_conf": "enable_media_repo: true",
|
||||||
},
|
},
|
||||||
@@ -137,9 +95,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"app": "synapse.app.generic_worker",
|
"app": "synapse.app.generic_worker",
|
||||||
"listener_resources": [],
|
"listener_resources": [],
|
||||||
"endpoint_patterns": [],
|
"endpoint_patterns": [],
|
||||||
"shared_extra_conf": {
|
"shared_extra_conf": {"notify_appservices_from_worker": "appservice1"},
|
||||||
"notify_appservices_from_worker": WORKER_PLACEHOLDER_NAME
|
|
||||||
},
|
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
},
|
},
|
||||||
"federation_sender": {
|
"federation_sender": {
|
||||||
@@ -179,7 +135,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"^/_matrix/client/versions$",
|
"^/_matrix/client/versions$",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
|
||||||
"^/_matrix/client/(r0|v3|unstable)/register$",
|
"^/_matrix/client/(r0|v3|unstable)/register$",
|
||||||
"^/_matrix/client/(r0|v3|unstable)/register/available$",
|
|
||||||
"^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
|
"^/_matrix/client/(r0|v3|unstable)/auth/.*/fallback/web$",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/messages$",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/event",
|
||||||
@@ -188,10 +143,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
|
"^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/search",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/search",
|
||||||
"^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
|
"^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
|
||||||
"^/_matrix/client/(r0|v3|unstable)/password_policy$",
|
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
|
|
||||||
"^/_matrix/client/(r0|v3|unstable)/capabilities$",
|
|
||||||
"^/_matrix/client/(r0|v3|unstable)/notifications$",
|
|
||||||
],
|
],
|
||||||
"shared_extra_conf": {},
|
"shared_extra_conf": {},
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
@@ -241,9 +192,9 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"app": "synapse.app.generic_worker",
|
"app": "synapse.app.generic_worker",
|
||||||
"listener_resources": [],
|
"listener_resources": [],
|
||||||
"endpoint_patterns": [],
|
"endpoint_patterns": [],
|
||||||
# This worker cannot be sharded. Therefore, there should only ever be one
|
# This worker cannot be sharded. Therefore there should only ever be one background
|
||||||
# background worker. This is enforced for the safety of your database.
|
# worker, and it should be named background_worker1
|
||||||
"shared_extra_conf": {"run_background_tasks_on": WORKER_PLACEHOLDER_NAME},
|
"shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
},
|
},
|
||||||
"event_creator": {
|
"event_creator": {
|
||||||
@@ -256,6 +207,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/knock/",
|
||||||
"^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
|
"^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
|
||||||
|
"^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
|
||||||
],
|
],
|
||||||
"shared_extra_conf": {},
|
"shared_extra_conf": {},
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
@@ -310,13 +262,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
|||||||
"shared_extra_conf": {},
|
"shared_extra_conf": {},
|
||||||
"worker_extra_conf": "",
|
"worker_extra_conf": "",
|
||||||
},
|
},
|
||||||
"push_rules": {
|
|
||||||
"app": "synapse.app.generic_worker",
|
|
||||||
"listener_resources": ["client", "replication"],
|
|
||||||
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/"],
|
|
||||||
"shared_extra_conf": {},
|
|
||||||
"worker_extra_conf": "",
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# Templates for sections that may be inserted multiple times in config files
|
# Templates for sections that may be inserted multiple times in config files
|
||||||
@@ -330,7 +275,7 @@ NGINX_LOCATION_CONFIG_BLOCK = """
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
NGINX_UPSTREAM_CONFIG_BLOCK = """
|
NGINX_UPSTREAM_CONFIG_BLOCK = """
|
||||||
upstream {upstream_worker_base_name} {{
|
upstream {upstream_worker_type} {{
|
||||||
{body}
|
{body}
|
||||||
}}
|
}}
|
||||||
"""
|
"""
|
||||||
@@ -381,7 +326,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
|
|||||||
|
|
||||||
def add_worker_roles_to_shared_config(
|
def add_worker_roles_to_shared_config(
|
||||||
shared_config: dict,
|
shared_config: dict,
|
||||||
worker_types_set: Set[str],
|
worker_type: str,
|
||||||
worker_name: str,
|
worker_name: str,
|
||||||
worker_port: int,
|
worker_port: int,
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -389,37 +334,22 @@ def add_worker_roles_to_shared_config(
|
|||||||
append appropriate worker information to it for the current worker_type instance.
|
append appropriate worker information to it for the current worker_type instance.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
shared_config: The config dict that all worker instances share (after being
|
shared_config: The config dict that all worker instances share (after being converted to YAML)
|
||||||
converted to YAML)
|
worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
|
||||||
worker_types_set: The type of worker (one of those defined in WORKERS_CONFIG).
|
|
||||||
This list can be a single worker type or multiple.
|
|
||||||
worker_name: The name of the worker instance.
|
worker_name: The name of the worker instance.
|
||||||
worker_port: The HTTP replication port that the worker instance is listening on.
|
worker_port: The HTTP replication port that the worker instance is listening on.
|
||||||
"""
|
"""
|
||||||
# The instance_map config field marks the workers that write to various replication
|
# The instance_map config field marks the workers that write to various replication streams
|
||||||
# streams
|
|
||||||
instance_map = shared_config.setdefault("instance_map", {})
|
instance_map = shared_config.setdefault("instance_map", {})
|
||||||
|
|
||||||
# This is a list of the stream_writers that there can be only one of. Events can be
|
# Worker-type specific sharding config
|
||||||
# sharded, and therefore doesn't belong here.
|
if worker_type == "pusher":
|
||||||
singular_stream_writers = [
|
|
||||||
"account_data",
|
|
||||||
"presence",
|
|
||||||
"receipts",
|
|
||||||
"to_device",
|
|
||||||
"typing",
|
|
||||||
"push_rules",
|
|
||||||
]
|
|
||||||
|
|
||||||
# Worker-type specific sharding config. Now a single worker can fulfill multiple
|
|
||||||
# roles, check each.
|
|
||||||
if "pusher" in worker_types_set:
|
|
||||||
shared_config.setdefault("pusher_instances", []).append(worker_name)
|
shared_config.setdefault("pusher_instances", []).append(worker_name)
|
||||||
|
|
||||||
if "federation_sender" in worker_types_set:
|
elif worker_type == "federation_sender":
|
||||||
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
|
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
|
||||||
|
|
||||||
if "event_persister" in worker_types_set:
|
elif worker_type == "event_persister":
|
||||||
# Event persisters write to the events stream, so we need to update
|
# Event persisters write to the events stream, so we need to update
|
||||||
# the list of event stream writers
|
# the list of event stream writers
|
||||||
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
|
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
|
||||||
@@ -427,170 +357,26 @@ def add_worker_roles_to_shared_config(
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Map of stream writer instance names to host/ports combos
|
# Map of stream writer instance names to host/ports combos
|
||||||
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
|
|
||||||
instance_map[worker_name] = {
|
|
||||||
"path": f"/run/worker.{worker_port}",
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
instance_map[worker_name] = {
|
instance_map[worker_name] = {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": worker_port,
|
"port": worker_port,
|
||||||
}
|
}
|
||||||
# Update the list of stream writers. It's convenient that the name of the worker
|
|
||||||
# type is the same as the stream to write. Iterate over the whole list in case there
|
elif worker_type in ["account_data", "presence", "receipts", "to_device", "typing"]:
|
||||||
# is more than one.
|
# Update the list of stream writers
|
||||||
for worker in worker_types_set:
|
# It's convenient that the name of the worker type is the same as the stream to write
|
||||||
if worker in singular_stream_writers:
|
|
||||||
shared_config.setdefault("stream_writers", {}).setdefault(
|
shared_config.setdefault("stream_writers", {}).setdefault(
|
||||||
worker, []
|
worker_type, []
|
||||||
).append(worker_name)
|
).append(worker_name)
|
||||||
|
|
||||||
# Map of stream writer instance names to host/ports combos
|
# Map of stream writer instance names to host/ports combos
|
||||||
# For now, all stream writers need http replication ports
|
# For now, all stream writers need http replication ports
|
||||||
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
|
|
||||||
instance_map[worker_name] = {
|
|
||||||
"path": f"/run/worker.{worker_port}",
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
instance_map[worker_name] = {
|
instance_map[worker_name] = {
|
||||||
"host": "localhost",
|
"host": "localhost",
|
||||||
"port": worker_port,
|
"port": worker_port,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def merge_worker_template_configs(
|
|
||||||
existing_dict: Optional[Dict[str, Any]],
|
|
||||||
to_be_merged_dict: Dict[str, Any],
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""When given an existing dict of worker template configuration consisting with both
|
|
||||||
dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
|
|
||||||
return new dict.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
existing_dict: Either an existing worker template or a fresh blank one.
|
|
||||||
to_be_merged_dict: The template from WORKERS_CONFIGS to be merged into
|
|
||||||
existing_dict.
|
|
||||||
Returns: The newly merged together dict values.
|
|
||||||
"""
|
|
||||||
new_dict: Dict[str, Any] = {}
|
|
||||||
if not existing_dict:
|
|
||||||
# It doesn't exist yet, just use the new dict(but take a copy not a reference)
|
|
||||||
new_dict = to_be_merged_dict.copy()
|
|
||||||
else:
|
|
||||||
for i in to_be_merged_dict.keys():
|
|
||||||
if (i == "endpoint_patterns") or (i == "listener_resources"):
|
|
||||||
# merge the two lists, remove duplicates
|
|
||||||
new_dict[i] = list(set(existing_dict[i] + to_be_merged_dict[i]))
|
|
||||||
elif i == "shared_extra_conf":
|
|
||||||
# merge dictionary's, the worker name will be replaced later
|
|
||||||
new_dict[i] = {**existing_dict[i], **to_be_merged_dict[i]}
|
|
||||||
elif i == "worker_extra_conf":
|
|
||||||
# There is only one worker type that has a 'worker_extra_conf' and it is
|
|
||||||
# the media_repo. Since duplicate worker types on the same worker don't
|
|
||||||
# work, this is fine.
|
|
||||||
new_dict[i] = existing_dict[i] + to_be_merged_dict[i]
|
|
||||||
else:
|
|
||||||
# Everything else should be identical, like "app", which only works
|
|
||||||
# because all apps are now generic_workers.
|
|
||||||
new_dict[i] = to_be_merged_dict[i]
|
|
||||||
return new_dict
|
|
||||||
|
|
||||||
|
|
||||||
def insert_worker_name_for_worker_config(
|
|
||||||
existing_dict: Dict[str, Any], worker_name: str
|
|
||||||
) -> Dict[str, Any]:
|
|
||||||
"""Insert a given worker name into the worker's configuration dict.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
existing_dict: The worker_config dict that is imported into shared_config.
|
|
||||||
worker_name: The name of the worker to insert.
|
|
||||||
Returns: Copy of the dict with newly inserted worker name
|
|
||||||
"""
|
|
||||||
dict_to_edit = existing_dict.copy()
|
|
||||||
for k, v in dict_to_edit["shared_extra_conf"].items():
|
|
||||||
# Only proceed if it's the placeholder name string
|
|
||||||
if v == WORKER_PLACEHOLDER_NAME:
|
|
||||||
dict_to_edit["shared_extra_conf"][k] = worker_name
|
|
||||||
return dict_to_edit
|
|
||||||
|
|
||||||
|
|
||||||
def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
|
|
||||||
"""
|
|
||||||
Apply multiplier(if found) by returning a new expanded list with some basic error
|
|
||||||
checking.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
worker_types: The unprocessed List of requested workers
|
|
||||||
Returns:
|
|
||||||
A new list with all requested workers expanded.
|
|
||||||
"""
|
|
||||||
# Checking performed:
|
|
||||||
# 1. if worker:2 or more is declared, it will create additional workers up to number
|
|
||||||
# 2. if worker:1, it will create a single copy of this worker as if no number was
|
|
||||||
# given
|
|
||||||
# 3. if worker:0 is declared, this worker will be ignored. This is to allow for
|
|
||||||
# scripting and automated expansion and is intended behaviour.
|
|
||||||
# 4. if worker:NaN or is a negative number, it will error and log it.
|
|
||||||
new_worker_types = []
|
|
||||||
for worker_type in worker_types:
|
|
||||||
if ":" in worker_type:
|
|
||||||
worker_type_components = split_and_strip_string(worker_type, ":", 1)
|
|
||||||
worker_count = 0
|
|
||||||
# Should only be 2 components, a type of worker(s) and an integer as a
|
|
||||||
# string. Cast the number as an int then it can be used as a counter.
|
|
||||||
try:
|
|
||||||
worker_count = int(worker_type_components[1])
|
|
||||||
except ValueError:
|
|
||||||
error(
|
|
||||||
f"Bad number in worker count for '{worker_type}': "
|
|
||||||
f"'{worker_type_components[1]}' is not an integer"
|
|
||||||
)
|
|
||||||
|
|
||||||
# As long as there are more than 0, we add one to the list to make below.
|
|
||||||
for _ in range(worker_count):
|
|
||||||
new_worker_types.append(worker_type_components[0])
|
|
||||||
|
|
||||||
else:
|
|
||||||
# If it's not a real worker_type, it will error out later.
|
|
||||||
new_worker_types.append(worker_type)
|
|
||||||
return new_worker_types
|
|
||||||
|
|
||||||
|
|
||||||
def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
|
|
||||||
"""Helper to check to make sure worker types that cannot have multiples do not.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
worker_type: The type of worker to check against.
|
|
||||||
Returns: True if allowed, False if not
|
|
||||||
"""
|
|
||||||
return worker_type not in [
|
|
||||||
"background_worker",
|
|
||||||
"account_data",
|
|
||||||
"presence",
|
|
||||||
"receipts",
|
|
||||||
"typing",
|
|
||||||
"to_device",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def split_and_strip_string(
|
|
||||||
given_string: str, split_char: str, max_split: SupportsIndex = -1
|
|
||||||
) -> List[str]:
|
|
||||||
"""
|
|
||||||
Helper to split a string on split_char and strip whitespace from each end of each
|
|
||||||
element.
|
|
||||||
Args:
|
|
||||||
given_string: The string to split
|
|
||||||
split_char: The character to split the string on
|
|
||||||
max_split: kwarg for split() to limit how many times the split() happens
|
|
||||||
Returns:
|
|
||||||
A List of strings
|
|
||||||
"""
|
|
||||||
# Removes whitespace from ends of result strings before adding to list. Allow for
|
|
||||||
# overriding 'maxsplit' kwarg, default being -1 to signify no maximum.
|
|
||||||
return [x.strip() for x in given_string.split(split_char, maxsplit=max_split)]
|
|
||||||
|
|
||||||
|
|
||||||
def generate_base_homeserver_config() -> None:
|
def generate_base_homeserver_config() -> None:
|
||||||
"""Starts Synapse and generates a basic homeserver config, which will later be
|
"""Starts Synapse and generates a basic homeserver config, which will later be
|
||||||
modified for worker support.
|
modified for worker support.
|
||||||
@@ -603,169 +389,33 @@ def generate_base_homeserver_config() -> None:
|
|||||||
subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
|
subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
|
||||||
|
|
||||||
|
|
||||||
def parse_worker_types(
|
|
||||||
requested_worker_types: List[str],
|
|
||||||
) -> Dict[str, Set[str]]:
|
|
||||||
"""Read the desired list of requested workers and prepare the data for use in
|
|
||||||
generating worker config files while also checking for potential gotchas.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
requested_worker_types: The list formed from the split environment variable
|
|
||||||
containing the unprocessed requests for workers.
|
|
||||||
|
|
||||||
Returns: A dict of worker names to set of worker types. Format:
|
|
||||||
{'worker_name':
|
|
||||||
{'worker_type', 'worker_type2'}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
# A counter of worker_base_name -> int. Used for determining the name for a given
|
|
||||||
# worker when generating its config file, as each worker's name is just
|
|
||||||
# worker_base_name followed by instance number
|
|
||||||
worker_base_name_counter: Dict[str, int] = defaultdict(int)
|
|
||||||
|
|
||||||
# Similar to above, but more finely grained. This is used to determine we don't have
|
|
||||||
# more than a single worker for cases where multiples would be bad(e.g. presence).
|
|
||||||
worker_type_shard_counter: Dict[str, int] = defaultdict(int)
|
|
||||||
|
|
||||||
# The final result of all this processing
|
|
||||||
dict_to_return: Dict[str, Set[str]] = {}
|
|
||||||
|
|
||||||
# Handle any multipliers requested for given workers.
|
|
||||||
multiple_processed_worker_types = apply_requested_multiplier_for_worker(
|
|
||||||
requested_worker_types
|
|
||||||
)
|
|
||||||
|
|
||||||
# Process each worker_type_string
|
|
||||||
# Examples of expected formats:
|
|
||||||
# - requested_name=type1+type2+type3
|
|
||||||
# - synchrotron
|
|
||||||
# - event_creator+event_persister
|
|
||||||
for worker_type_string in multiple_processed_worker_types:
|
|
||||||
# First, if a name is requested, use that — otherwise generate one.
|
|
||||||
worker_base_name: str = ""
|
|
||||||
if "=" in worker_type_string:
|
|
||||||
# Split on "=", remove extra whitespace from ends then make list
|
|
||||||
worker_type_split = split_and_strip_string(worker_type_string, "=")
|
|
||||||
if len(worker_type_split) > 2:
|
|
||||||
error(
|
|
||||||
"There should only be one '=' in the worker type string. "
|
|
||||||
f"Please fix: {worker_type_string}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Assign the name
|
|
||||||
worker_base_name = worker_type_split[0]
|
|
||||||
|
|
||||||
if not re.match(r"^[a-zA-Z0-9_+-]*[a-zA-Z_+-]$", worker_base_name):
|
|
||||||
# Apply a fairly narrow regex to the worker names. Some characters
|
|
||||||
# aren't safe for use in file paths or nginx configurations.
|
|
||||||
# Don't allow to end with a number because we'll add a number
|
|
||||||
# ourselves in a moment.
|
|
||||||
error(
|
|
||||||
"Invalid worker name; please choose a name consisting of "
|
|
||||||
"alphanumeric letters, _ + -, but not ending with a digit: "
|
|
||||||
f"{worker_base_name!r}"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Continue processing the remainder of the worker_type string
|
|
||||||
# with the name override removed.
|
|
||||||
worker_type_string = worker_type_split[1]
|
|
||||||
|
|
||||||
# Split the worker_type_string on "+", remove whitespace from ends then make
|
|
||||||
# the list a set so it's deduplicated.
|
|
||||||
worker_types_set: Set[str] = set(
|
|
||||||
split_and_strip_string(worker_type_string, "+")
|
|
||||||
)
|
|
||||||
|
|
||||||
if not worker_base_name:
|
|
||||||
# No base name specified: generate one deterministically from set of
|
|
||||||
# types
|
|
||||||
worker_base_name = "+".join(sorted(worker_types_set))
|
|
||||||
|
|
||||||
# At this point, we have:
|
|
||||||
# worker_base_name which is the name for the worker, without counter.
|
|
||||||
# worker_types_set which is the set of worker types for this worker.
|
|
||||||
|
|
||||||
# Validate worker_type and make sure we don't allow sharding for a worker type
|
|
||||||
# that doesn't support it. Will error and stop if it is a problem,
|
|
||||||
# e.g. 'background_worker'.
|
|
||||||
for worker_type in worker_types_set:
|
|
||||||
# Verify this is a real defined worker type. If it's not, stop everything so
|
|
||||||
# it can be fixed.
|
|
||||||
if worker_type not in WORKERS_CONFIG:
|
|
||||||
error(
|
|
||||||
f"{worker_type} is an unknown worker type! Was found in "
|
|
||||||
f"'{worker_type_string}'. Please fix!"
|
|
||||||
)
|
|
||||||
|
|
||||||
if worker_type in worker_type_shard_counter:
|
|
||||||
if not is_sharding_allowed_for_worker_type(worker_type):
|
|
||||||
error(
|
|
||||||
f"There can be only a single worker with {worker_type} "
|
|
||||||
"type. Please recount and remove."
|
|
||||||
)
|
|
||||||
# Not in shard counter, must not have seen it yet, add it.
|
|
||||||
worker_type_shard_counter[worker_type] += 1
|
|
||||||
|
|
||||||
# Generate the number for the worker using incrementing counter
|
|
||||||
worker_base_name_counter[worker_base_name] += 1
|
|
||||||
worker_number = worker_base_name_counter[worker_base_name]
|
|
||||||
worker_name = f"{worker_base_name}{worker_number}"
|
|
||||||
|
|
||||||
if worker_number > 1:
|
|
||||||
# If this isn't the first worker, check that we don't have a confusing
|
|
||||||
# mixture of worker types with the same base name.
|
|
||||||
first_worker_with_base_name = dict_to_return[f"{worker_base_name}1"]
|
|
||||||
if first_worker_with_base_name != worker_types_set:
|
|
||||||
error(
|
|
||||||
f"Can not use worker_name: '{worker_name}' for worker_type(s): "
|
|
||||||
f"{worker_types_set!r}. It is already in use by "
|
|
||||||
f"worker_type(s): {first_worker_with_base_name!r}"
|
|
||||||
)
|
|
||||||
|
|
||||||
dict_to_return[worker_name] = worker_types_set
|
|
||||||
|
|
||||||
return dict_to_return
|
|
||||||
|
|
||||||
|
|
||||||
def generate_worker_files(
|
def generate_worker_files(
|
||||||
environ: Mapping[str, str],
|
environ: Mapping[str, str], config_path: str, data_dir: str
|
||||||
config_path: str,
|
|
||||||
data_dir: str,
|
|
||||||
requested_worker_types: Dict[str, Set[str]],
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Read the desired workers(if any) that is passed in and generate shared
|
"""Read the desired list of workers from environment variables and generate
|
||||||
homeserver, nginx and supervisord configs.
|
shared homeserver, nginx and supervisord configs.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
environ: os.environ instance.
|
environ: os.environ instance.
|
||||||
config_path: The location of the generated Synapse main worker config file.
|
config_path: The location of the generated Synapse main worker config file.
|
||||||
data_dir: The location of the synapse data directory. Where log and
|
data_dir: The location of the synapse data directory. Where log and
|
||||||
user-facing config files live.
|
user-facing config files live.
|
||||||
requested_worker_types: A Dict containing requested workers in the format of
|
|
||||||
{'worker_name1': {'worker_type', ...}}
|
|
||||||
"""
|
"""
|
||||||
# Note that yaml cares about indentation, so care should be taken to insert lines
|
# Note that yaml cares about indentation, so care should be taken to insert lines
|
||||||
# into files at the correct indentation below.
|
# into files at the correct indentation below.
|
||||||
|
|
||||||
# Convenience helper for if using unix sockets instead of host:port
|
# shared_config is the contents of a Synapse config file that will be shared amongst
|
||||||
using_unix_sockets = environ.get("SYNAPSE_USE_UNIX_SOCKET", False)
|
# the main Synapse process as well as all workers.
|
||||||
# First read the original config file and extract the listeners block. Then we'll
|
# It is intended mainly for disabling functionality when certain workers are spun up,
|
||||||
# add another listener for replication. Later we'll write out the result to the
|
# and adding a replication listener.
|
||||||
# shared config file.
|
|
||||||
listeners: List[Any]
|
# First read the original config file and extract the listeners block. Then we'll add
|
||||||
if using_unix_sockets:
|
# another listener for replication. Later we'll write out the result to the shared
|
||||||
|
# config file.
|
||||||
listeners = [
|
listeners = [
|
||||||
{
|
{
|
||||||
"path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
|
"port": 9093,
|
||||||
"type": "http",
|
"bind_address": "127.0.0.1",
|
||||||
"resources": [{"names": ["replication"]}],
|
|
||||||
}
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
listeners = [
|
|
||||||
{
|
|
||||||
"port": MAIN_PROCESS_REPLICATION_PORT,
|
|
||||||
"bind_address": MAIN_PROCESS_LOCALHOST_ADDRESS,
|
|
||||||
"type": "http",
|
"type": "http",
|
||||||
"resources": [{"names": ["replication"]}],
|
"resources": [{"names": ["replication"]}],
|
||||||
}
|
}
|
||||||
@@ -777,9 +427,9 @@ def generate_worker_files(
|
|||||||
listeners += original_listeners
|
listeners += original_listeners
|
||||||
|
|
||||||
# The shared homeserver config. The contents of which will be inserted into the
|
# The shared homeserver config. The contents of which will be inserted into the
|
||||||
# base shared worker jinja2 template. This config file will be passed to all
|
# base shared worker jinja2 template.
|
||||||
# workers, included Synapse's main process. It is intended mainly for disabling
|
#
|
||||||
# functionality when certain workers are spun up, and adding a replication listener.
|
# This config file will be passed to all workers, included Synapse's main process.
|
||||||
shared_config: Dict[str, Any] = {"listeners": listeners}
|
shared_config: Dict[str, Any] = {"listeners": listeners}
|
||||||
|
|
||||||
# List of dicts that describe workers.
|
# List of dicts that describe workers.
|
||||||
@@ -787,20 +437,31 @@ def generate_worker_files(
|
|||||||
# program blocks.
|
# program blocks.
|
||||||
worker_descriptors: List[Dict[str, Any]] = []
|
worker_descriptors: List[Dict[str, Any]] = []
|
||||||
|
|
||||||
# Upstreams for load-balancing purposes. This dict takes the form of the worker
|
# Upstreams for load-balancing purposes. This dict takes the form of a worker type to the
|
||||||
# type to the ports of each worker. For example:
|
# ports of each worker. For example:
|
||||||
# {
|
# {
|
||||||
# worker_type: {1234, 1235, ...}}
|
# worker_type: {1234, 1235, ...}}
|
||||||
# }
|
# }
|
||||||
# and will be used to construct 'upstream' nginx directives.
|
# and will be used to construct 'upstream' nginx directives.
|
||||||
nginx_upstreams: Dict[str, Set[int]] = {}
|
nginx_upstreams: Dict[str, Set[int]] = {}
|
||||||
|
|
||||||
# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
|
# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
|
||||||
# will be placed after the proxy_pass directive. The main benefit to representing
|
# placed after the proxy_pass directive. The main benefit to representing this data as a
|
||||||
# this data as a dict over a str is that we can easily deduplicate endpoints
|
# dict over a str is that we can easily deduplicate endpoints across multiple instances
|
||||||
# across multiple instances of the same worker. The final rendering will be combined
|
# of the same worker.
|
||||||
# with nginx_upstreams and placed in /etc/nginx/conf.d.
|
#
|
||||||
nginx_locations: Dict[str, str] = {}
|
# An nginx site config that will be amended to depending on the workers that are
|
||||||
|
# spun up. To be placed in /etc/nginx/conf.d.
|
||||||
|
nginx_locations = {}
|
||||||
|
|
||||||
|
# Read the desired worker configuration from the environment
|
||||||
|
worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
|
||||||
|
if not worker_types_env:
|
||||||
|
# No workers, just the main process
|
||||||
|
worker_types = []
|
||||||
|
else:
|
||||||
|
# Split type names by comma, ignoring whitespace.
|
||||||
|
worker_types = [x.strip() for x in worker_types_env.split(",")]
|
||||||
|
|
||||||
# Create the worker configuration directory if it doesn't already exist
|
# Create the worker configuration directory if it doesn't already exist
|
||||||
os.makedirs("/conf/workers", exist_ok=True)
|
os.makedirs("/conf/workers", exist_ok=True)
|
||||||
@@ -808,86 +469,76 @@ def generate_worker_files(
|
|||||||
# Start worker ports from this arbitrary port
|
# Start worker ports from this arbitrary port
|
||||||
worker_port = 18009
|
worker_port = 18009
|
||||||
|
|
||||||
|
# A counter of worker_type -> int. Used for determining the name for a given
|
||||||
|
# worker type when generating its config file, as each worker's name is just
|
||||||
|
# worker_type + instance #
|
||||||
|
worker_type_counter: Dict[str, int] = {}
|
||||||
|
|
||||||
# A list of internal endpoints to healthcheck, starting with the main process
|
# A list of internal endpoints to healthcheck, starting with the main process
|
||||||
# which exists even if no workers do.
|
# which exists even if no workers do.
|
||||||
# This list ends up being part of the command line to curl, (curl added support for
|
|
||||||
# Unix sockets in version 7.40).
|
|
||||||
if using_unix_sockets:
|
|
||||||
healthcheck_urls = [
|
|
||||||
f"--unix-socket {MAIN_PROCESS_UNIX_SOCKET_PUBLIC_PATH} "
|
|
||||||
# The scheme and hostname from the following URL are ignored.
|
|
||||||
# The only thing that matters is the path `/health`
|
|
||||||
"http://localhost/health"
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
healthcheck_urls = ["http://localhost:8080/health"]
|
healthcheck_urls = ["http://localhost:8080/health"]
|
||||||
|
|
||||||
# Get the set of all worker types that we have configured
|
# For each worker type specified by the user, create config values
|
||||||
all_worker_types_in_use = set(chain(*requested_worker_types.values()))
|
for worker_type in worker_types:
|
||||||
# Map locations to upstreams (corresponding to worker types) in Nginx
|
worker_config = WORKERS_CONFIG.get(worker_type)
|
||||||
# but only if we use the appropriate worker type
|
if worker_config:
|
||||||
for worker_type in all_worker_types_in_use:
|
worker_config = worker_config.copy()
|
||||||
for endpoint_pattern in WORKERS_CONFIG[worker_type]["endpoint_patterns"]:
|
else:
|
||||||
nginx_locations[endpoint_pattern] = f"http://{worker_type}"
|
error(worker_type + " is an unknown worker type! Please fix!")
|
||||||
|
|
||||||
# For each worker type specified by the user, create config values and write it's
|
new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
|
||||||
# yaml config file
|
worker_type_counter[worker_type] = new_worker_count
|
||||||
for worker_name, worker_types_set in requested_worker_types.items():
|
|
||||||
# The collected and processed data will live here.
|
|
||||||
worker_config: Dict[str, Any] = {}
|
|
||||||
|
|
||||||
# Merge all worker config templates for this worker into a single config
|
|
||||||
for worker_type in worker_types_set:
|
|
||||||
copy_of_template_config = WORKERS_CONFIG[worker_type].copy()
|
|
||||||
|
|
||||||
# Merge worker type template configuration data. It's a combination of lists
|
|
||||||
# and dicts, so use this helper.
|
|
||||||
worker_config = merge_worker_template_configs(
|
|
||||||
worker_config, copy_of_template_config
|
|
||||||
)
|
|
||||||
|
|
||||||
# Replace placeholder names in the config template with the actual worker name.
|
|
||||||
worker_config = insert_worker_name_for_worker_config(worker_config, worker_name)
|
|
||||||
|
|
||||||
|
# Name workers by their type concatenated with an incrementing number
|
||||||
|
# e.g. federation_reader1
|
||||||
|
worker_name = worker_type + str(new_worker_count)
|
||||||
worker_config.update(
|
worker_config.update(
|
||||||
{"name": worker_name, "port": str(worker_port), "config_path": config_path}
|
{"name": worker_name, "port": str(worker_port), "config_path": config_path}
|
||||||
)
|
)
|
||||||
|
|
||||||
# Update the shared config with any worker_type specific options. The first of a
|
# Update the shared config with any worker-type specific options
|
||||||
# given worker_type needs to stay assigned and not be replaced.
|
shared_config.update(worker_config["shared_extra_conf"])
|
||||||
worker_config["shared_extra_conf"].update(shared_config)
|
|
||||||
shared_config = worker_config["shared_extra_conf"]
|
|
||||||
if using_unix_sockets:
|
|
||||||
healthcheck_urls.append(
|
|
||||||
f"--unix-socket /run/worker.{worker_port} http://localhost/health"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
|
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
|
||||||
|
|
||||||
|
# Check if more than one instance of this worker type has been specified
|
||||||
|
worker_type_total_count = worker_types.count(worker_type)
|
||||||
|
|
||||||
# Update the shared config with sharding-related options if necessary
|
# Update the shared config with sharding-related options if necessary
|
||||||
add_worker_roles_to_shared_config(
|
add_worker_roles_to_shared_config(
|
||||||
shared_config, worker_types_set, worker_name, worker_port
|
shared_config, worker_type, worker_name, worker_port
|
||||||
)
|
)
|
||||||
|
|
||||||
# Enable the worker in supervisord
|
# Enable the worker in supervisord
|
||||||
worker_descriptors.append(worker_config)
|
worker_descriptors.append(worker_config)
|
||||||
|
|
||||||
|
# Add nginx location blocks for this worker's endpoints (if any are defined)
|
||||||
|
for pattern in worker_config["endpoint_patterns"]:
|
||||||
|
# Determine whether we need to load-balance this worker
|
||||||
|
if worker_type_total_count > 1:
|
||||||
|
# Create or add to a load-balanced upstream for this worker
|
||||||
|
nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
|
||||||
|
|
||||||
|
# Upstreams are named after the worker_type
|
||||||
|
upstream = "http://" + worker_type
|
||||||
|
else:
|
||||||
|
upstream = "http://localhost:%d" % (worker_port,)
|
||||||
|
|
||||||
|
# Note that this endpoint should proxy to this upstream
|
||||||
|
nginx_locations[pattern] = upstream
|
||||||
|
|
||||||
# Write out the worker's logging config file
|
# Write out the worker's logging config file
|
||||||
|
|
||||||
log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
|
log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
|
||||||
|
|
||||||
# Then a worker config file
|
# Then a worker config file
|
||||||
convert(
|
convert(
|
||||||
"/conf/worker.yaml.j2",
|
"/conf/worker.yaml.j2",
|
||||||
f"/conf/workers/{worker_name}.yaml",
|
"/conf/workers/{name}.yaml".format(name=worker_name),
|
||||||
**worker_config,
|
**worker_config,
|
||||||
worker_log_config_filepath=log_config_filepath,
|
worker_log_config_filepath=log_config_filepath,
|
||||||
using_unix_sockets=using_unix_sockets,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Save this worker's port number to the correct nginx upstreams
|
|
||||||
for worker_type in worker_types_set:
|
|
||||||
nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
|
|
||||||
|
|
||||||
worker_port += 1
|
worker_port += 1
|
||||||
|
|
||||||
# Build the nginx location config blocks
|
# Build the nginx location config blocks
|
||||||
@@ -900,19 +551,15 @@ def generate_worker_files(
|
|||||||
|
|
||||||
# Determine the load-balancing upstreams to configure
|
# Determine the load-balancing upstreams to configure
|
||||||
nginx_upstream_config = ""
|
nginx_upstream_config = ""
|
||||||
for upstream_worker_base_name, upstream_worker_ports in nginx_upstreams.items():
|
|
||||||
body = ""
|
|
||||||
if using_unix_sockets:
|
|
||||||
for port in upstream_worker_ports:
|
|
||||||
body += f" server unix:/run/worker.{port};\n"
|
|
||||||
|
|
||||||
else:
|
for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
|
||||||
|
body = ""
|
||||||
for port in upstream_worker_ports:
|
for port in upstream_worker_ports:
|
||||||
body += f" server localhost:{port};\n"
|
body += " server localhost:%d;\n" % (port,)
|
||||||
|
|
||||||
# Add to the list of configured upstreams
|
# Add to the list of configured upstreams
|
||||||
nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
|
nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
|
||||||
upstream_worker_base_name=upstream_worker_base_name,
|
upstream_worker_type=upstream_worker_type,
|
||||||
body=body,
|
body=body,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -933,20 +580,7 @@ def generate_worker_files(
|
|||||||
if reg_path.suffix.lower() in (".yaml", ".yml")
|
if reg_path.suffix.lower() in (".yaml", ".yml")
|
||||||
]
|
]
|
||||||
|
|
||||||
workers_in_use = len(requested_worker_types) > 0
|
workers_in_use = len(worker_types) > 0
|
||||||
|
|
||||||
# If there are workers, add the main process to the instance_map too.
|
|
||||||
if workers_in_use:
|
|
||||||
instance_map = shared_config.setdefault("instance_map", {})
|
|
||||||
if using_unix_sockets:
|
|
||||||
instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
|
|
||||||
"path": MAIN_PROCESS_UNIX_SOCKET_PRIVATE_PATH,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
instance_map[MAIN_PROCESS_INSTANCE_NAME] = {
|
|
||||||
"host": MAIN_PROCESS_LOCALHOST_ADDRESS,
|
|
||||||
"port": MAIN_PROCESS_REPLICATION_PORT,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Shared homeserver config
|
# Shared homeserver config
|
||||||
convert(
|
convert(
|
||||||
@@ -956,7 +590,6 @@ def generate_worker_files(
|
|||||||
appservice_registrations=appservice_registrations,
|
appservice_registrations=appservice_registrations,
|
||||||
enable_redis=workers_in_use,
|
enable_redis=workers_in_use,
|
||||||
workers_in_use=workers_in_use,
|
workers_in_use=workers_in_use,
|
||||||
using_unix_sockets=using_unix_sockets,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Nginx config
|
# Nginx config
|
||||||
@@ -967,7 +600,6 @@ def generate_worker_files(
|
|||||||
upstream_directives=nginx_upstream_config,
|
upstream_directives=nginx_upstream_config,
|
||||||
tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
|
tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
|
||||||
tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
|
tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
|
||||||
using_unix_sockets=using_unix_sockets,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Supervisord config
|
# Supervisord config
|
||||||
@@ -977,7 +609,6 @@ def generate_worker_files(
|
|||||||
"/etc/supervisor/supervisord.conf",
|
"/etc/supervisor/supervisord.conf",
|
||||||
main_config_path=config_path,
|
main_config_path=config_path,
|
||||||
enable_redis=workers_in_use,
|
enable_redis=workers_in_use,
|
||||||
using_unix_sockets=using_unix_sockets,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
convert(
|
convert(
|
||||||
@@ -1017,7 +648,6 @@ def generate_worker_log_config(
|
|||||||
extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
|
extra_log_template_args["SYNAPSE_LOG_SENSITIVE"] = environ.get(
|
||||||
"SYNAPSE_LOG_SENSITIVE"
|
"SYNAPSE_LOG_SENSITIVE"
|
||||||
)
|
)
|
||||||
extra_log_template_args["SYNAPSE_LOG_TESTING"] = environ.get("SYNAPSE_LOG_TESTING")
|
|
||||||
|
|
||||||
# Render and write the file
|
# Render and write the file
|
||||||
log_config_filepath = f"/conf/workers/{worker_name}.log.config"
|
log_config_filepath = f"/conf/workers/{worker_name}.log.config"
|
||||||
@@ -1034,14 +664,6 @@ def generate_worker_log_config(
|
|||||||
|
|
||||||
|
|
||||||
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
||||||
parser = ArgumentParser()
|
|
||||||
parser.add_argument(
|
|
||||||
"--generate-only",
|
|
||||||
action="store_true",
|
|
||||||
help="Only generate configuration; don't run Synapse.",
|
|
||||||
)
|
|
||||||
opts = parser.parse_args(args)
|
|
||||||
|
|
||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||||
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||||
@@ -1056,26 +678,13 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
generate_base_homeserver_config()
|
generate_base_homeserver_config()
|
||||||
else:
|
else:
|
||||||
log("Base homeserver config exists—not regenerating")
|
log("Base homeserver config exists—not regenerating")
|
||||||
# This script may be run multiple times (mostly by Complement, see note at top of
|
# This script may be run multiple times (mostly by Complement, see note at top of file).
|
||||||
# file). Don't re-configure workers in this instance.
|
# Don't re-configure workers in this instance.
|
||||||
mark_filepath = "/conf/workers_have_been_configured"
|
mark_filepath = "/conf/workers_have_been_configured"
|
||||||
if not os.path.exists(mark_filepath):
|
if not os.path.exists(mark_filepath):
|
||||||
# Collect and validate worker_type requests
|
|
||||||
# Read the desired worker configuration from the environment
|
|
||||||
worker_types_env = environ.get("SYNAPSE_WORKER_TYPES", "").strip()
|
|
||||||
# Only process worker_types if they exist
|
|
||||||
if not worker_types_env:
|
|
||||||
# No workers, just the main process
|
|
||||||
worker_types = []
|
|
||||||
requested_worker_types: Dict[str, Any] = {}
|
|
||||||
else:
|
|
||||||
# Split type names by comma, ignoring whitespace.
|
|
||||||
worker_types = split_and_strip_string(worker_types_env, ",")
|
|
||||||
requested_worker_types = parse_worker_types(worker_types)
|
|
||||||
|
|
||||||
# Always regenerate all other config files
|
# Always regenerate all other config files
|
||||||
log("Generating worker config files")
|
log("Generating worker config files")
|
||||||
generate_worker_files(environ, config_path, data_dir, requested_worker_types)
|
generate_worker_files(environ, config_path, data_dir)
|
||||||
|
|
||||||
# Mark workers as being configured
|
# Mark workers as being configured
|
||||||
with open(mark_filepath, "w") as f:
|
with open(mark_filepath, "w") as f:
|
||||||
@@ -1083,10 +692,6 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
else:
|
else:
|
||||||
log("Worker config exists—not regenerating")
|
log("Worker config exists—not regenerating")
|
||||||
|
|
||||||
if opts.generate_only:
|
|
||||||
log("--generate-only: won't run Synapse")
|
|
||||||
return
|
|
||||||
|
|
||||||
# Lifted right out of start.py
|
# Lifted right out of start.py
|
||||||
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
|
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
|
||||||
|
|
||||||
@@ -1109,4 +714,4 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main(sys.argv[1:], os.environ)
|
main(sys.argv, os.environ)
|
||||||
|
|||||||
@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
|
|||||||
###
|
###
|
||||||
### Stage 0: generate requirements.txt
|
### Stage 0: generate requirements.txt
|
||||||
###
|
###
|
||||||
# We hardcode the use of Debian bookworm here because this could change upstream
|
# We hardcode the use of Debian bullseye here because this could change upstream
|
||||||
# and other Dockerfiles used for testing are expecting bookworm.
|
# and other Dockerfiles used for testing are expecting bullseye.
|
||||||
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
|
FROM docker.io/python:${PYTHON_VERSION}-slim-bullseye
|
||||||
|
|
||||||
# Install Rust and other dependencies (stolen from normal Dockerfile)
|
# Install Rust and other dependencies (stolen from normal Dockerfile)
|
||||||
# install the OS build deps
|
# install the OS build deps
|
||||||
@@ -33,7 +33,7 @@ RUN \
|
|||||||
gosu \
|
gosu \
|
||||||
libjpeg62-turbo \
|
libjpeg62-turbo \
|
||||||
libpq5 \
|
libpq5 \
|
||||||
libwebp7 \
|
libwebp6 \
|
||||||
xmlsec1 \
|
xmlsec1 \
|
||||||
libjemalloc2 \
|
libjemalloc2 \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|||||||
@@ -7,9 +7,6 @@
|
|||||||
# prefix-log command [args...]
|
# prefix-log command [args...]
|
||||||
#
|
#
|
||||||
|
|
||||||
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
|
exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
|
||||||
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
|
exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
|
||||||
# confusion due to to interleaving of the different processes.
|
|
||||||
exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
|
|
||||||
exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
|
|
||||||
exec "$@"
|
exec "$@"
|
||||||
|
|||||||
@@ -82,7 +82,7 @@ def generate_config_from_template(
|
|||||||
with open(filename) as handle:
|
with open(filename) as handle:
|
||||||
value = handle.read()
|
value = handle.read()
|
||||||
else:
|
else:
|
||||||
log(f"Generating a random secret for {secret}")
|
log("Generating a random secret for {}".format(secret))
|
||||||
value = codecs.encode(os.urandom(32), "hex").decode()
|
value = codecs.encode(os.urandom(32), "hex").decode()
|
||||||
with open(filename, "w") as handle:
|
with open(filename, "w") as handle:
|
||||||
handle.write(value)
|
handle.write(value)
|
||||||
@@ -160,6 +160,11 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
|
|||||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||||
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
|
||||||
|
|
||||||
|
if ownership is not None:
|
||||||
|
# make sure that synapse has perms to write to the data dir.
|
||||||
|
log(f"Setting ownership on {data_dir} to {ownership}")
|
||||||
|
subprocess.run(["chown", ownership, data_dir], check=True)
|
||||||
|
|
||||||
# create a suitable log config from our template
|
# create a suitable log config from our template
|
||||||
log_config_file = "%s/%s.log.config" % (config_dir, server_name)
|
log_config_file = "%s/%s.log.config" % (config_dir, server_name)
|
||||||
if not os.path.exists(log_config_file):
|
if not os.path.exists(log_config_file):
|
||||||
@@ -184,15 +189,9 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
|
|||||||
"--generate-config",
|
"--generate-config",
|
||||||
"--open-private-ports",
|
"--open-private-ports",
|
||||||
]
|
]
|
||||||
|
|
||||||
if ownership is not None:
|
|
||||||
# make sure that synapse has perms to write to the data dir.
|
|
||||||
log(f"Setting ownership on {data_dir} to {ownership}")
|
|
||||||
subprocess.run(["chown", ownership, data_dir], check=True)
|
|
||||||
args = ["gosu", ownership] + args
|
|
||||||
|
|
||||||
# log("running %s" % (args, ))
|
# log("running %s" % (args, ))
|
||||||
subprocess.run(args, check=True)
|
flush_buffers()
|
||||||
|
os.execv(sys.executable, args)
|
||||||
|
|
||||||
|
|
||||||
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
||||||
@@ -240,7 +239,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
|
|||||||
log("Could not find %s, will not use" % (jemallocpath,))
|
log("Could not find %s, will not use" % (jemallocpath,))
|
||||||
|
|
||||||
# if there are no config files passed to synapse, try adding the default file
|
# if there are no config files passed to synapse, try adding the default file
|
||||||
if not any(p.startswith(("--config-path", "-c")) for p in args):
|
if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
|
||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get(
|
config_path = environ.get(
|
||||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
# This file is maintained as an up-to-date snapshot of the default
|
# This file is maintained as an up-to-date snapshot of the default
|
||||||
# homeserver.yaml configuration generated by Synapse. You can find a
|
# homeserver.yaml configuration generated by Synapse. You can find a
|
||||||
# complete accounting of possible configuration options at
|
# complete accounting of possible configuration options at
|
||||||
# https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html
|
# https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html
|
||||||
#
|
#
|
||||||
# It is *not* intended to be copied and used as the basis for a real
|
# It is *not* intended to be copied and used as the basis for a real
|
||||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||||
# a fresh config using Synapse by following the instructions in
|
# a fresh config using Synapse by following the instructions in
|
||||||
# https://element-hq.github.io/synapse/latest/setup/installation.html.
|
# https://matrix-org.github.io/synapse/latest/setup/installation.html.
|
||||||
#
|
#
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
# Synapse Documentation
|
# Synapse Documentation
|
||||||
|
|
||||||
**The documentation is currently hosted [here](https://element-hq.github.io/synapse).**
|
**The documentation is currently hosted [here](https://matrix-org.github.io/synapse).**
|
||||||
Please update any links to point to the new website instead.
|
Please update any links to point to the new website instead.
|
||||||
|
|
||||||
## About
|
## About
|
||||||
|
|
||||||
This directory currently holds a series of markdown files documenting how to install, use
|
This directory currently holds a series of markdown files documenting how to install, use
|
||||||
and develop Synapse. The documentation is readable directly from this repository, but it is
|
and develop Synapse. The documentation is readable directly from this repository, but it is
|
||||||
recommended to instead browse through the [website](https://element-hq.github.io/synapse) for
|
recommended to instead browse through the [website](https://matrix-org.github.io/synapse) for
|
||||||
easier discoverability.
|
easier discoverability.
|
||||||
|
|
||||||
## Adding to the documentation
|
## Adding to the documentation
|
||||||
|
|||||||
@@ -48,7 +48,6 @@
|
|||||||
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
|
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
|
||||||
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
|
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
|
||||||
- [Account data callbacks](modules/account_data_callbacks.md)
|
- [Account data callbacks](modules/account_data_callbacks.md)
|
||||||
- [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
|
|
||||||
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
|
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
|
||||||
- [Workers](workers.md)
|
- [Workers](workers.md)
|
||||||
- [Using `synctl` with Workers](synctl_workers.md)
|
- [Using `synctl` with Workers](synctl_workers.md)
|
||||||
@@ -58,7 +57,6 @@
|
|||||||
- [Account Validity](admin_api/account_validity.md)
|
- [Account Validity](admin_api/account_validity.md)
|
||||||
- [Background Updates](usage/administration/admin_api/background_updates.md)
|
- [Background Updates](usage/administration/admin_api/background_updates.md)
|
||||||
- [Event Reports](admin_api/event_reports.md)
|
- [Event Reports](admin_api/event_reports.md)
|
||||||
- [Experimental Features](admin_api/experimental_features.md)
|
|
||||||
- [Media](admin_api/media_admin_api.md)
|
- [Media](admin_api/media_admin_api.md)
|
||||||
- [Purge History](admin_api/purge_history_api.md)
|
- [Purge History](admin_api/purge_history_api.md)
|
||||||
- [Register Users](admin_api/register_api.md)
|
- [Register Users](admin_api/register_api.md)
|
||||||
@@ -98,7 +96,6 @@
|
|||||||
- [Cancellation](development/synapse_architecture/cancellation.md)
|
- [Cancellation](development/synapse_architecture/cancellation.md)
|
||||||
- [Log Contexts](log_contexts.md)
|
- [Log Contexts](log_contexts.md)
|
||||||
- [Replication](replication.md)
|
- [Replication](replication.md)
|
||||||
- [Streams](development/synapse_architecture/streams.md)
|
|
||||||
- [TCP Replication](tcp_replication.md)
|
- [TCP Replication](tcp_replication.md)
|
||||||
- [Faster remote joins](development/synapse_architecture/faster_joins.md)
|
- [Faster remote joins](development/synapse_architecture/faster_joins.md)
|
||||||
- [Internal Documentation](development/internal_documentation/README.md)
|
- [Internal Documentation](development/internal_documentation/README.md)
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
Admin APIs
|
Admin APIs
|
||||||
==========
|
==========
|
||||||
|
|
||||||
**Note**: The latest documentation can be viewed `here <https://element-hq.github.io/synapse>`_.
|
**Note**: The latest documentation can be viewed `here <https://matrix-org.github.io/synapse>`_.
|
||||||
See `docs/README.md <../README.md>`_ for more information.
|
See `docs/README.md <../README.md>`_ for more information.
|
||||||
|
|
||||||
**Please update links to point to the website instead.** Existing files in this directory
|
**Please update links to point to the website instead.** Existing files in this directory
|
||||||
@@ -11,3 +11,4 @@ This directory includes documentation for the various synapse specific admin
|
|||||||
APIs available. Updates to the existing Admin API documentation should still
|
APIs available. Updates to the existing Admin API documentation should still
|
||||||
be made to these files, but any new documentation files should instead be placed under
|
be made to these files, but any new documentation files should instead be placed under
|
||||||
`docs/usage/administration/admin_api <../usage/administration/admin_api>`_.
|
`docs/usage/administration/admin_api <../usage/administration/admin_api>`_.
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,5 @@
|
|||||||
# Account validity API
|
# Account validity API
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
This API allows a server administrator to manage the validity of an account. To
|
This API allows a server administrator to manage the validity of an account. To
|
||||||
use it, you must enable the account validity feature (under
|
use it, you must enable the account validity feature (under
|
||||||
`account_validity`) in Synapse's configuration.
|
`account_validity`) in Synapse's configuration.
|
||||||
|
|||||||
@@ -1,55 +0,0 @@
|
|||||||
# Experimental Features API
|
|
||||||
|
|
||||||
This API allows a server administrator to enable or disable some experimental features on a per-user
|
|
||||||
basis. The currently supported features are:
|
|
||||||
- [MSC3026](https://github.com/matrix-org/matrix-spec-proposals/pull/3026): busy
|
|
||||||
presence state enabled
|
|
||||||
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
|
|
||||||
for another client
|
|
||||||
- [MSC3967](https://github.com/matrix-org/matrix-spec-proposals/pull/3967): do not require
|
|
||||||
UIA when first uploading cross-signing keys.
|
|
||||||
|
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an `access_token`
|
|
||||||
for a server admin: see [Admin API](../usage/administration/admin_api/).
|
|
||||||
|
|
||||||
## Enabling/Disabling Features
|
|
||||||
|
|
||||||
This API allows a server administrator to enable experimental features for a given user. The request must
|
|
||||||
provide a body containing the user id and listing the features to enable/disable in the following format:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"features": {
|
|
||||||
"msc3026":true,
|
|
||||||
"msc3881":true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
where true is used to enable the feature, and false is used to disable the feature.
|
|
||||||
|
|
||||||
|
|
||||||
The API is:
|
|
||||||
|
|
||||||
```
|
|
||||||
PUT /_synapse/admin/v1/experimental_features/<user_id>
|
|
||||||
```
|
|
||||||
|
|
||||||
## Listing Enabled Features
|
|
||||||
|
|
||||||
To list which features are enabled/disabled for a given user send a request to the following API:
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/experimental_features/<user_id>
|
|
||||||
```
|
|
||||||
|
|
||||||
It will return a list of possible features and indicate whether they are enabled or disabled for the
|
|
||||||
user like so:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"features": {
|
|
||||||
"msc3026": true,
|
|
||||||
"msc3881": false,
|
|
||||||
"msc3967": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
# Shared-Secret Registration
|
# Shared-Secret Registration
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
This API allows for the creation of users in an administrative and
|
This API allows for the creation of users in an administrative and
|
||||||
non-interactive way. This is generally used for bootstrapping a Synapse
|
non-interactive way. This is generally used for bootstrapping a Synapse
|
||||||
instance with administrator accounts.
|
instance with administrator accounts.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Edit Room Membership API
|
# Edit Room Membership API
|
||||||
|
|
||||||
This API allows an administrator to join a user account with a given `user_id`
|
This API allows an administrator to join an user account with a given `user_id`
|
||||||
to a room with a given `room_id_or_alias`. You can only modify the membership of
|
to a room with a given `room_id_or_alias`. You can only modify the membership of
|
||||||
local users. The server administrator must be in the room and have permission to
|
local users. The server administrator must be in the room and have permission to
|
||||||
invite users.
|
invite users.
|
||||||
|
|||||||
@@ -419,7 +419,7 @@ The following query parameters are available:
|
|||||||
|
|
||||||
* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
|
* `from` (required) - The token to start returning events from. This token can be obtained from a prev_batch
|
||||||
or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
|
or next_batch token returned by the /sync endpoint, or from an end token returned by a previous request to this endpoint.
|
||||||
* `to` - The token to stop returning events at.
|
* `to` - The token to spot returning events at.
|
||||||
* `limit` - The maximum number of events to return. Defaults to `10`.
|
* `limit` - The maximum number of events to return. Defaults to `10`.
|
||||||
* `filter` - A JSON RoomEventFilter to filter returned events with.
|
* `filter` - A JSON RoomEventFilter to filter returned events with.
|
||||||
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
|
* `dir` - The direction to return events from. Either `f` for forwards or `b` for backwards. Setting
|
||||||
@@ -536,8 +536,7 @@ The following query parameters are available:
|
|||||||
|
|
||||||
**Response**
|
**Response**
|
||||||
|
|
||||||
* `event_id` - The event ID closest to the given timestamp.
|
* `event_id` - converted from timestamp
|
||||||
* `origin_server_ts` - The timestamp of the event in milliseconds since the Unix epoch.
|
|
||||||
|
|
||||||
# Block Room API
|
# Block Room API
|
||||||
The Block Room admin API allows server admins to block and unblock rooms,
|
The Block Room admin API allows server admins to block and unblock rooms,
|
||||||
@@ -913,7 +912,7 @@ With all that being said, if you still want to try and recover the room:
|
|||||||
them handle rejoining themselves.
|
them handle rejoining themselves.
|
||||||
|
|
||||||
4. If `new_room_user_id` was given, a 'Content Violation' will have been
|
4. If `new_room_user_id` was given, a 'Content Violation' will have been
|
||||||
created. Consider whether you want to delete that room.
|
created. Consider whether you want to delete that roomm.
|
||||||
|
|
||||||
# Make Room Admin API
|
# Make Room Admin API
|
||||||
|
|
||||||
|
|||||||
@@ -81,52 +81,3 @@ The following fields are returned in the JSON response body:
|
|||||||
- `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`).
|
- `user_id` - string - Fully-qualified user ID (ex. `@user:server.com`).
|
||||||
* `next_token` - integer - Opaque value used for pagination. See above.
|
* `next_token` - integer - Opaque value used for pagination. See above.
|
||||||
* `total` - integer - Total number of users after filtering.
|
* `total` - integer - Total number of users after filtering.
|
||||||
|
|
||||||
|
|
||||||
# Get largest rooms by size in database
|
|
||||||
|
|
||||||
Returns the 10 largest rooms and an estimate of how much space in the database
|
|
||||||
they are taking.
|
|
||||||
|
|
||||||
This does not include the size of any associated media associated with the room.
|
|
||||||
|
|
||||||
Returns an error on SQLite.
|
|
||||||
|
|
||||||
*Note:* This uses the planner statistics from PostgreSQL to do the estimates,
|
|
||||||
which means that the returned information can vary widely from reality. However,
|
|
||||||
it should be enough to get a rough idea of where database disk space is going.
|
|
||||||
|
|
||||||
|
|
||||||
The API is:
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/statistics/database/rooms
|
|
||||||
```
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"rooms": [
|
|
||||||
{
|
|
||||||
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
|
|
||||||
"estimated_size": 47325417353
|
|
||||||
}
|
|
||||||
],
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
**Response**
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `rooms` - An array of objects, sorted by largest room first. Objects contain
|
|
||||||
the following fields:
|
|
||||||
- `room_id` - string - The room ID.
|
|
||||||
- `estimated_size` - integer - Estimated disk space used in bytes by the room
|
|
||||||
in the database.
|
|
||||||
|
|
||||||
|
|
||||||
*Added in Synapse 1.83.0*
|
|
||||||
|
|||||||
@@ -54,8 +54,7 @@ It returns a JSON body like the following:
|
|||||||
"external_id": "<user_id_provider_2>"
|
"external_id": "<user_id_provider_2>"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"user_type": null,
|
"user_type": null
|
||||||
"locked": false
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -63,7 +62,7 @@ URL parameters:
|
|||||||
|
|
||||||
- `user_id`: fully-qualified user id: for example, `@user:server.com`.
|
- `user_id`: fully-qualified user id: for example, `@user:server.com`.
|
||||||
|
|
||||||
## Create or modify account
|
## Create or modify Account
|
||||||
|
|
||||||
This API allows an administrator to create or modify a user account with a
|
This API allows an administrator to create or modify a user account with a
|
||||||
specific `user_id`.
|
specific `user_id`.
|
||||||
@@ -79,33 +78,31 @@ with a body of:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"password": "user_password",
|
"password": "user_password",
|
||||||
"logout_devices": false,
|
"displayname": "User",
|
||||||
"displayname": "Alice Marigold",
|
|
||||||
"avatar_url": "mxc://example.com/abcde12345",
|
|
||||||
"threepids": [
|
"threepids": [
|
||||||
{
|
{
|
||||||
"medium": "email",
|
"medium": "email",
|
||||||
"address": "alice@example.com"
|
"address": "<user_mail_1>"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"medium": "email",
|
"medium": "email",
|
||||||
"address": "alice@domain.org"
|
"address": "<user_mail_2>"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"external_ids": [
|
"external_ids": [
|
||||||
{
|
{
|
||||||
"auth_provider": "example",
|
"auth_provider": "<provider1>",
|
||||||
"external_id": "12345"
|
"external_id": "<user_id_provider_1>"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"auth_provider": "example2",
|
"auth_provider": "<provider2>",
|
||||||
"external_id": "abc54321"
|
"external_id": "<user_id_provider_2>"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"avatar_url": "<avatar_url>",
|
||||||
"admin": false,
|
"admin": false,
|
||||||
"deactivated": false,
|
"deactivated": false,
|
||||||
"user_type": null,
|
"user_type": null
|
||||||
"locked": false
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -115,56 +112,43 @@ Returns HTTP status code:
|
|||||||
|
|
||||||
URL parameters:
|
URL parameters:
|
||||||
|
|
||||||
- `user_id` - A fully-qualified user id. For example, `@user:server.com`.
|
- `user_id`: fully-qualified user id: for example, `@user:server.com`.
|
||||||
|
|
||||||
Body parameters:
|
Body parameters:
|
||||||
|
|
||||||
- `password` - **string**, optional. If provided, the user's password is updated and all
|
- `password` - string, optional. If provided, the user's password is updated and all
|
||||||
devices are logged out, unless `logout_devices` is set to `false`.
|
devices are logged out, unless `logout_devices` is set to `false`.
|
||||||
- `logout_devices` - **bool**, optional, defaults to `true`. If set to `false`, devices aren't
|
- `logout_devices` - bool, optional, defaults to `true`. If set to false, devices aren't
|
||||||
logged out even when `password` is provided.
|
logged out even when `password` is provided.
|
||||||
- `displayname` - **string**, optional. If set to an empty string (`""`), the user's display name
|
- `displayname` - string, optional, defaults to the value of `user_id`.
|
||||||
will be removed.
|
- `threepids` - array, optional, allows setting the third-party IDs (email, msisdn)
|
||||||
- `avatar_url` - **string**, optional. Must be a
|
- `medium` - string. Kind of third-party ID, either `email` or `msisdn`.
|
||||||
[MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
|
- `address` - string. Value of third-party ID.
|
||||||
If set to an empty string (`""`), the user's avatar is removed.
|
belonging to a user.
|
||||||
- `threepids` - **array**, optional. If provided, the user's third-party IDs (email, msisdn) are
|
- `external_ids` - array, optional. Allow setting the identifier of the external identity
|
||||||
entirely replaced with the given list. Each item in the array is an object with the following
|
provider for SSO (Single sign-on). Details in the configuration manual under the
|
||||||
fields:
|
|
||||||
- `medium` - **string**, required. The type of third-party ID, either `email` or `msisdn` (phone number).
|
|
||||||
- `address` - **string**, required. The third-party ID itself, e.g. `alice@example.com` for `email` or
|
|
||||||
`447470274584` (for a phone number with country code "44") and `19254857364` (for a phone number
|
|
||||||
with country code "1") for `msisdn`.
|
|
||||||
Note: If a threepid is removed from a user via this option, Synapse will also attempt to remove
|
|
||||||
that threepid from any identity servers it is aware has a binding for it.
|
|
||||||
- `external_ids` - **array**, optional. Allow setting the identifier of the external identity
|
|
||||||
provider for SSO (Single sign-on). More details are in the configuration manual under the
|
|
||||||
sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
|
sections [sso](../usage/configuration/config_documentation.md#sso) and [oidc_providers](../usage/configuration/config_documentation.md#oidc_providers).
|
||||||
- `auth_provider` - **string**, required. The unique, internal ID of the external identity provider.
|
- `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
|
||||||
The same as `idp_id` from the homeserver configuration. Note that no error is raised if the
|
in the homeserver configuration. Note that no error is raised if the provided
|
||||||
provided value is not in the homeserver configuration.
|
value is not in the homeserver configuration.
|
||||||
- `external_id` - **string**, required. An identifier for the user in the external identity provider.
|
- `external_id` - string, user ID in the external identity provider.
|
||||||
When the user logs in to the identity provider, this must be the unique ID that they map to.
|
- `avatar_url` - string, optional, must be a
|
||||||
- `admin` - **bool**, optional, defaults to `false`. Whether the user is a homeserver administrator,
|
[MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
|
||||||
granting them access to the Admin API, among other things.
|
- `admin` - bool, optional, defaults to `false`.
|
||||||
- `deactivated` - **bool**, optional. If unspecified, deactivation state will be left unchanged.
|
- `deactivated` - bool, optional. If unspecified, deactivation state will be left
|
||||||
|
unchanged on existing accounts and set to `false` for new accounts.
|
||||||
|
A user cannot be erased by deactivating with this API. For details on
|
||||||
|
deactivating users see [Deactivate Account](#deactivate-account).
|
||||||
|
- `user_type` - string or null, optional. If provided, the user type will be
|
||||||
|
adjusted. If `null` given, the user type will be cleared. Other
|
||||||
|
allowed options are: `bot` and `support`.
|
||||||
|
|
||||||
Note:
|
If the user already exists then optional parameters default to the current value.
|
||||||
- For the password field there is no strict check of the necessity for its presence.
|
|
||||||
It is possible to have active users without a password, e.g. when authenticating with OIDC is configured.
|
|
||||||
You must check yourself whether a password is required when reactivating a user or not.
|
|
||||||
- It is not possible to set a password if the config option `password_config.localdb_enabled` is set `false`.
|
|
||||||
Users' passwords are wiped upon account deactivation, hence the need to set a new one here.
|
|
||||||
|
|
||||||
Note: a user cannot be erased with this API. For more details on
|
In order to re-activate an account `deactivated` must be set to `false`. If
|
||||||
deactivating and erasing users see [Deactivate Account](#deactivate-account).
|
users do not login via single-sign-on, a new `password` must be provided.
|
||||||
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
|
|
||||||
- `user_type` - **string** or null, optional. If not provided, the user type will be
|
|
||||||
not be changed. If `null` is given, the user type will be cleared.
|
|
||||||
Other allowed options are: `bot` and `support`.
|
|
||||||
|
|
||||||
## List Accounts
|
## List Accounts
|
||||||
### List Accounts (V2)
|
|
||||||
|
|
||||||
This API returns all local user accounts.
|
This API returns all local user accounts.
|
||||||
By default, the response is ordered by ascending user ID.
|
By default, the response is ordered by ascending user ID.
|
||||||
@@ -188,8 +172,7 @@ A response body like the following is returned:
|
|||||||
"shadow_banned": 0,
|
"shadow_banned": 0,
|
||||||
"displayname": "<User One>",
|
"displayname": "<User One>",
|
||||||
"avatar_url": null,
|
"avatar_url": null,
|
||||||
"creation_ts": 1560432668000,
|
"creation_ts": 1560432668000
|
||||||
"locked": false
|
|
||||||
}, {
|
}, {
|
||||||
"name": "<user_id2>",
|
"name": "<user_id2>",
|
||||||
"is_guest": 0,
|
"is_guest": 0,
|
||||||
@@ -200,8 +183,7 @@ A response body like the following is returned:
|
|||||||
"shadow_banned": 0,
|
"shadow_banned": 0,
|
||||||
"displayname": "<User Two>",
|
"displayname": "<User Two>",
|
||||||
"avatar_url": "<avatar_url>",
|
"avatar_url": "<avatar_url>",
|
||||||
"creation_ts": 1561550621000,
|
"creation_ts": 1561550621000
|
||||||
"locked": false
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"next_token": "100",
|
"next_token": "100",
|
||||||
@@ -224,9 +206,7 @@ The following parameters should be set in the URL:
|
|||||||
- `name` - Is optional and filters to only return users with user ID localparts
|
- `name` - Is optional and filters to only return users with user ID localparts
|
||||||
**or** displaynames that contain this value.
|
**or** displaynames that contain this value.
|
||||||
- `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users.
|
- `guests` - string representing a bool - Is optional and if `false` will **exclude** guest users.
|
||||||
Defaults to `true` to include guest users. This parameter is not supported when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
Defaults to `true` to include guest users.
|
||||||
- `admins` - Optional flag to filter admins. If `true`, only admins are queried. If `false`, admins are excluded from
|
|
||||||
the query. When the flag is absent (the default), **both** admins and non-admins are included in the search results.
|
|
||||||
- `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users.
|
- `deactivated` - string representing a bool - Is optional and if `true` will **include** deactivated users.
|
||||||
Defaults to `false` to exclude deactivated users.
|
Defaults to `false` to exclude deactivated users.
|
||||||
- `limit` - string representing a positive integer - Is optional but is used for pagination,
|
- `limit` - string representing a positive integer - Is optional but is used for pagination,
|
||||||
@@ -248,15 +228,9 @@ The following parameters should be set in the URL:
|
|||||||
- `displayname` - Users are ordered alphabetically by `displayname`.
|
- `displayname` - Users are ordered alphabetically by `displayname`.
|
||||||
- `avatar_url` - Users are ordered alphabetically by avatar URL.
|
- `avatar_url` - Users are ordered alphabetically by avatar URL.
|
||||||
- `creation_ts` - Users are ordered by when the users was created in ms.
|
- `creation_ts` - Users are ordered by when the users was created in ms.
|
||||||
- `last_seen_ts` - Users are ordered by when the user was lastly seen in ms.
|
|
||||||
|
|
||||||
- `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
|
- `dir` - Direction of media order. Either `f` for forwards or `b` for backwards.
|
||||||
Setting this value to `b` will reverse the above sort order. Defaults to `f`.
|
Setting this value to `b` will reverse the above sort order. Defaults to `f`.
|
||||||
- `not_user_type` - Exclude certain user types, such as bot users, from the request.
|
|
||||||
Can be provided multiple times. Possible values are `bot`, `support` or "empty string".
|
|
||||||
"empty string" here means to exclude users without a type.
|
|
||||||
- `locked` - string representing a bool - Is optional and if `true` will **include** locked users.
|
|
||||||
Defaults to `false` to exclude locked users. Note: Introduced in v1.93.
|
|
||||||
|
|
||||||
Caution. The database only has indexes on the columns `name` and `creation_ts`.
|
Caution. The database only has indexes on the columns `name` and `creation_ts`.
|
||||||
This means that if a different sort order is used (`is_guest`, `admin`,
|
This means that if a different sort order is used (`is_guest`, `admin`,
|
||||||
@@ -281,25 +255,10 @@ The following fields are returned in the JSON response body:
|
|||||||
- `displayname` - string - The user's display name if they have set one.
|
- `displayname` - string - The user's display name if they have set one.
|
||||||
- `avatar_url` - string - The user's avatar URL if they have set one.
|
- `avatar_url` - string - The user's avatar URL if they have set one.
|
||||||
- `creation_ts` - integer - The user's creation timestamp in ms.
|
- `creation_ts` - integer - The user's creation timestamp in ms.
|
||||||
- `last_seen_ts` - integer - The user's last activity timestamp in ms.
|
|
||||||
- `locked` - bool - Status if that user has been marked as locked. Note: Introduced in v1.93.
|
|
||||||
- `next_token`: string representing a positive integer - Indication for pagination. See above.
|
- `next_token`: string representing a positive integer - Indication for pagination. See above.
|
||||||
- `total` - integer - Total number of media.
|
- `total` - integer - Total number of media.
|
||||||
|
|
||||||
*Added in Synapse 1.93:* the `locked` query parameter and response field.
|
|
||||||
|
|
||||||
### List Accounts (V3)
|
|
||||||
|
|
||||||
This API returns all local user accounts (see v2). In contrast to v2, the query parameter `deactivated` is handled differently.
|
|
||||||
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v3/users
|
|
||||||
```
|
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
- `deactivated` - Optional flag to filter deactivated users. If `true`, only deactivated users are returned.
|
|
||||||
If `false`, deactivated users are excluded from the query. When the flag is absent (the default),
|
|
||||||
users are not filtered by deactivation status.
|
|
||||||
|
|
||||||
## Query current sessions for a user
|
## Query current sessions for a user
|
||||||
|
|
||||||
@@ -414,8 +373,6 @@ The following actions are **NOT** performed. The list may be incomplete.
|
|||||||
|
|
||||||
## Reset password
|
## Reset password
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
Changes the password of another user. This will automatically log the user out of all their devices.
|
Changes the password of another user. This will automatically log the user out of all their devices.
|
||||||
|
|
||||||
The api is:
|
The api is:
|
||||||
@@ -439,8 +396,6 @@ The parameter `logout_devices` is optional and defaults to `true`.
|
|||||||
|
|
||||||
## Get whether a user is a server administrator or not
|
## Get whether a user is a server administrator or not
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
The api is:
|
The api is:
|
||||||
|
|
||||||
```
|
```
|
||||||
@@ -458,8 +413,6 @@ A response body like the following is returned:
|
|||||||
|
|
||||||
## Change whether a user is a server administrator or not
|
## Change whether a user is a server administrator or not
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
Note that you cannot demote yourself.
|
Note that you cannot demote yourself.
|
||||||
|
|
||||||
The api is:
|
The api is:
|
||||||
@@ -633,16 +586,6 @@ A response body like the following is returned:
|
|||||||
"quarantined_by": null,
|
"quarantined_by": null,
|
||||||
"safe_from_quarantine": false,
|
"safe_from_quarantine": false,
|
||||||
"upload_name": "test2.png"
|
"upload_name": "test2.png"
|
||||||
},
|
|
||||||
{
|
|
||||||
"created_ts": 300400,
|
|
||||||
"last_access_ts": 300700,
|
|
||||||
"media_id": "BzYNLRUgGHphBkdKGbzXwbjX",
|
|
||||||
"media_length": 1337,
|
|
||||||
"media_type": "application/octet-stream",
|
|
||||||
"quarantined_by": null,
|
|
||||||
"safe_from_quarantine": false,
|
|
||||||
"upload_name": null
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"next_token": 3,
|
"next_token": 3,
|
||||||
@@ -704,17 +647,16 @@ The following fields are returned in the JSON response body:
|
|||||||
- `media` - An array of objects, each containing information about a media.
|
- `media` - An array of objects, each containing information about a media.
|
||||||
Media objects contain the following fields:
|
Media objects contain the following fields:
|
||||||
- `created_ts` - integer - Timestamp when the content was uploaded in ms.
|
- `created_ts` - integer - Timestamp when the content was uploaded in ms.
|
||||||
- `last_access_ts` - integer or null - Timestamp when the content was last accessed in ms.
|
- `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
|
||||||
Null if there was no access, yet.
|
|
||||||
- `media_id` - string - The id used to refer to the media. Details about the format
|
- `media_id` - string - The id used to refer to the media. Details about the format
|
||||||
are documented under
|
are documented under
|
||||||
[media repository](../media_repository.md).
|
[media repository](../media_repository.md).
|
||||||
- `media_length` - integer - Length of the media in bytes.
|
- `media_length` - integer - Length of the media in bytes.
|
||||||
- `media_type` - string - The MIME-type of the media.
|
- `media_type` - string - The MIME-type of the media.
|
||||||
- `quarantined_by` - string or null - The user ID that initiated the quarantine request
|
- `quarantined_by` - string - The user ID that initiated the quarantine request
|
||||||
for this media. Null if not quarantined.
|
for this media.
|
||||||
- `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
|
- `safe_from_quarantine` - bool - Status if this media is safe from quarantining.
|
||||||
- `upload_name` - string or null - The name the media was uploaded with. Null if not provided during upload.
|
- `upload_name` - string - The name the media was uploaded with.
|
||||||
- `next_token`: integer - Indication for pagination. See above.
|
- `next_token`: integer - Indication for pagination. See above.
|
||||||
- `total` - integer - Total number of media.
|
- `total` - integer - Total number of media.
|
||||||
|
|
||||||
@@ -764,8 +706,6 @@ delete largest/smallest or newest/oldest files first.
|
|||||||
|
|
||||||
## Login as a user
|
## Login as a user
|
||||||
|
|
||||||
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
|
|
||||||
|
|
||||||
Get an access token that can be used to authenticate as that user. Useful for
|
Get an access token that can be used to authenticate as that user. Useful for
|
||||||
when admins wish to do actions on behalf of a user.
|
when admins wish to do actions on behalf of a user.
|
||||||
|
|
||||||
@@ -778,8 +718,7 @@ POST /_synapse/admin/v1/users/<user_id>/login
|
|||||||
|
|
||||||
An optional `valid_until_ms` field can be specified in the request body as an
|
An optional `valid_until_ms` field can be specified in the request body as an
|
||||||
integer timestamp that specifies when the token should expire. By default tokens
|
integer timestamp that specifies when the token should expire. By default tokens
|
||||||
do not expire. Note that this API does not allow a user to login as themselves
|
do not expire.
|
||||||
(to create more tokens).
|
|
||||||
|
|
||||||
A response body like the following is returned:
|
A response body like the following is returned:
|
||||||
|
|
||||||
@@ -799,43 +738,6 @@ Note: The token will expire if the *admin* user calls `/logout/all` from any
|
|||||||
of their devices, but the token will *not* expire if the target user does the
|
of their devices, but the token will *not* expire if the target user does the
|
||||||
same.
|
same.
|
||||||
|
|
||||||
## Allow replacing master cross-signing key without User-Interactive Auth
|
|
||||||
|
|
||||||
This endpoint is not intended for server administrator usage;
|
|
||||||
we describe it here for completeness.
|
|
||||||
|
|
||||||
This API temporarily permits a user to replace their master cross-signing key
|
|
||||||
without going through
|
|
||||||
[user-interactive authentication](https://spec.matrix.org/v1.8/client-server-api/#user-interactive-authentication-api) (UIA).
|
|
||||||
This is useful when Synapse has delegated its authentication to the
|
|
||||||
[Matrix Authentication Service](https://github.com/matrix-org/matrix-authentication-service/);
|
|
||||||
as Synapse cannot perform UIA is not possible in these circumstances.
|
|
||||||
|
|
||||||
The API is
|
|
||||||
|
|
||||||
```http request
|
|
||||||
POST /_synapse/admin/v1/users/<user_id>/_allow_cross_signing_replacement_without_uia
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
If the user does not exist, or does exist but has no master cross-signing key,
|
|
||||||
this will return with status code `404 Not Found`.
|
|
||||||
|
|
||||||
Otherwise, a response body like the following is returned, with status `200 OK`:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"updatable_without_uia_before_ms": 1234567890
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The response body is a JSON object with a single field:
|
|
||||||
|
|
||||||
- `updatable_without_uia_before_ms`: integer. The timestamp in milliseconds
|
|
||||||
before which the user is permitted to replace their cross-signing key without
|
|
||||||
going through UIA.
|
|
||||||
|
|
||||||
_Added in Synapse 1.97.0._
|
|
||||||
|
|
||||||
## User devices
|
## User devices
|
||||||
|
|
||||||
@@ -900,33 +802,6 @@ The following fields are returned in the JSON response body:
|
|||||||
|
|
||||||
- `total` - Total number of user's devices.
|
- `total` - Total number of user's devices.
|
||||||
|
|
||||||
### Create a device
|
|
||||||
|
|
||||||
Creates a new device for a specific `user_id` and `device_id`. Does nothing if the `device_id`
|
|
||||||
exists already.
|
|
||||||
|
|
||||||
The API is:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v2/users/<user_id>/devices
|
|
||||||
|
|
||||||
{
|
|
||||||
"device_id": "QBUAZIFURK"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
An empty JSON dict is returned.
|
|
||||||
|
|
||||||
**Parameters**
|
|
||||||
|
|
||||||
The following parameters should be set in the URL:
|
|
||||||
|
|
||||||
- `user_id` - fully qualified: for example, `@user:server.com`.
|
|
||||||
|
|
||||||
The following fields are required in the JSON request body:
|
|
||||||
|
|
||||||
- `device_id` - The device ID to create.
|
|
||||||
|
|
||||||
### Delete multiple devices
|
### Delete multiple devices
|
||||||
Deletes the given devices for a specific `user_id`, and invalidates
|
Deletes the given devices for a specific `user_id`, and invalidates
|
||||||
any access token associated with them.
|
any access token associated with them.
|
||||||
@@ -1267,7 +1142,7 @@ The following parameters should be set in the URL:
|
|||||||
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
|
- `user_id` - The fully qualified MXID: for example, `@user:server.com`. The user must
|
||||||
be local.
|
be local.
|
||||||
|
|
||||||
## Check username availability
|
### Check username availability
|
||||||
|
|
||||||
Checks to see if a username is available, and valid, for the server. See [the client-server
|
Checks to see if a username is available, and valid, for the server. See [the client-server
|
||||||
API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)
|
API](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available)
|
||||||
@@ -1285,7 +1160,7 @@ GET /_synapse/admin/v1/username_available?username=$localpart
|
|||||||
The request and response format is the same as the
|
The request and response format is the same as the
|
||||||
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
|
[/_matrix/client/r0/register/available](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-register-available) API.
|
||||||
|
|
||||||
## Find a user based on their ID in an auth provider
|
### Find a user based on their ID in an auth provider
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
|
|
||||||
@@ -1324,7 +1199,7 @@ Returns a `404` HTTP status code if no user was found, with a response body like
|
|||||||
_Added in Synapse 1.68.0._
|
_Added in Synapse 1.68.0._
|
||||||
|
|
||||||
|
|
||||||
## Find a user based on their Third Party ID (ThreePID or 3PID)
|
### Find a user based on their Third Party ID (ThreePID or 3PID)
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
# Version API
|
# Version API
|
||||||
|
|
||||||
This API returns the running Synapse version.
|
This API returns the running Synapse version and the Python version
|
||||||
This is useful when a Synapse instance
|
on which Synapse is being run. This is useful when a Synapse instance
|
||||||
is behind a proxy that does not forward the 'Server' header (which also
|
is behind a proxy that does not forward the 'Server' header (which also
|
||||||
contains Synapse version information).
|
contains Synapse version information).
|
||||||
|
|
||||||
@@ -15,9 +15,7 @@ It returns a JSON body like the following:
|
|||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"server_version": "0.99.2rc1 (b=develop, abcdef123)"
|
"server_version": "0.99.2rc1 (b=develop, abcdef123)",
|
||||||
|
"python_version": "3.7.8"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
*Changed in Synapse 1.94.0:* The `python_version` key was removed from the
|
|
||||||
response body.
|
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ Server with a domain specific API.
|
|||||||
1. **Messaging Layer**
|
1. **Messaging Layer**
|
||||||
|
|
||||||
This is what the rest of the homeserver hits to send messages, join rooms,
|
This is what the rest of the homeserver hits to send messages, join rooms,
|
||||||
etc. It also allows you to register callbacks for when it gets notified by
|
etc. It also allows you to register callbacks for when it get's notified by
|
||||||
lower levels that e.g. a new message has been received.
|
lower levels that e.g. a new message has been received.
|
||||||
|
|
||||||
It is responsible for serializing requests to send to the data
|
It is responsible for serializing requests to send to the data
|
||||||
|
|||||||
@@ -164,7 +164,7 @@ Synapse 1.6.0rc2 (2019-11-25)
|
|||||||
Bugfixes
|
Bugfixes
|
||||||
--------
|
--------
|
||||||
|
|
||||||
- Fix a bug which could cause the background database update handler for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))
|
- Fix a bug which could cause the background database update hander for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))
|
||||||
|
|
||||||
|
|
||||||
Synapse 1.6.0rc1 (2019-11-20)
|
Synapse 1.6.0rc1 (2019-11-20)
|
||||||
@@ -191,7 +191,7 @@ Bugfixes
|
|||||||
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
|
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
|
||||||
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
|
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
|
||||||
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
|
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
|
||||||
- Fix bug which caused rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
|
- Fix bug which casued rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
|
||||||
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
|
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
|
||||||
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
|
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
|
||||||
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
|
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
|
||||||
@@ -232,7 +232,7 @@ Internal Changes
|
|||||||
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
|
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
|
||||||
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
|
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
|
||||||
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
|
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
|
||||||
- Add optional python dependencies and dependent binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
|
- Add optional python dependencies and dependant binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
|
||||||
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
|
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
|
||||||
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
|
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
|
||||||
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
|
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
|
||||||
@@ -653,7 +653,7 @@ Internal Changes
|
|||||||
- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
|
- Return 502 not 500 when failing to reach any remote server. ([\#5810](https://github.com/matrix-org/synapse/issues/5810))
|
||||||
- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
|
- Reduce global pauses in the events stream caused by expensive state resolution during persistence. ([\#5826](https://github.com/matrix-org/synapse/issues/5826))
|
||||||
- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
|
- Add a lower bound to well-known lookup cache time to avoid repeated lookups. ([\#5836](https://github.com/matrix-org/synapse/issues/5836))
|
||||||
- Whitelist history visibility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
|
- Whitelist history visbility sytests in worker mode tests. ([\#5843](https://github.com/matrix-org/synapse/issues/5843))
|
||||||
|
|
||||||
|
|
||||||
Synapse 1.2.1 (2019-07-26)
|
Synapse 1.2.1 (2019-07-26)
|
||||||
@@ -817,7 +817,7 @@ See the [upgrade notes](docs/upgrade.md#upgrading-to-v110) for more details.
|
|||||||
Features
|
Features
|
||||||
--------
|
--------
|
||||||
|
|
||||||
- Added possibility to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
|
- Added possibilty to disable local password authentication. Contributed by Daniel Hoffend. ([\#5092](https://github.com/matrix-org/synapse/issues/5092))
|
||||||
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
|
- Add monthly active users to phonehome stats. ([\#5252](https://github.com/matrix-org/synapse/issues/5252))
|
||||||
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
|
- Allow expired user to trigger renewal email sending manually. ([\#5363](https://github.com/matrix-org/synapse/issues/5363))
|
||||||
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
|
- Statistics on forward extremities per room are now exposed via Prometheus. ([\#5384](https://github.com/matrix-org/synapse/issues/5384), [\#5458](https://github.com/matrix-org/synapse/issues/5458), [\#5461](https://github.com/matrix-org/synapse/issues/5461))
|
||||||
@@ -850,7 +850,7 @@ Bugfixes
|
|||||||
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
|
- Fix bug where clients could tight loop calling `/sync` for a period. ([\#5507](https://github.com/matrix-org/synapse/issues/5507))
|
||||||
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
|
- Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`. ([\#5514](https://github.com/matrix-org/synapse/issues/5514))
|
||||||
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
|
- Fix a regression where homeservers on private IP addresses were incorrectly blacklisted. ([\#5523](https://github.com/matrix-org/synapse/issues/5523))
|
||||||
- Fixed m.login.jwt using unregistered user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
|
- Fixed m.login.jwt using unregistered user_id and added pyjwt>=1.6.4 as jwt conditional dependencies. Contributed by Pau Rodriguez-Estivill. ([\#5555](https://github.com/matrix-org/synapse/issues/5555), [\#5586](https://github.com/matrix-org/synapse/issues/5586))
|
||||||
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
|
- Fix a bug that would cause invited users to receive several emails for a single 3PID invite in case the inviter is rate limited. ([\#5576](https://github.com/matrix-org/synapse/issues/5576))
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -251,7 +251,7 @@ Internal Changes
|
|||||||
|
|
||||||
- Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559))
|
- Optimise `/createRoom` with multiple invited users. ([\#8559](https://github.com/matrix-org/synapse/issues/8559))
|
||||||
- Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595))
|
- Implement and use an `@lru_cache` decorator. ([\#8595](https://github.com/matrix-org/synapse/issues/8595))
|
||||||
- Don't instantiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
|
- Don't instantiate Requester directly. ([\#8614](https://github.com/matrix-org/synapse/issues/8614))
|
||||||
- Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615))
|
- Type hints for `RegistrationStore`. ([\#8615](https://github.com/matrix-org/synapse/issues/8615))
|
||||||
- Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616))
|
- Change schema to support access tokens belonging to one user but granting access to another. ([\#8616](https://github.com/matrix-org/synapse/issues/8616))
|
||||||
- Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621))
|
- Remove unused OPTIONS handlers. ([\#8621](https://github.com/matrix-org/synapse/issues/8621))
|
||||||
@@ -518,7 +518,7 @@ Bugfixes
|
|||||||
- Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278))
|
- Fix a bug which cause the logging system to report errors, if `DEBUG` was enabled and no `context` filter was applied. ([\#8278](https://github.com/matrix-org/synapse/issues/8278))
|
||||||
- Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287))
|
- Fix edge case where push could get delayed for a user until a later event was pushed. ([\#8287](https://github.com/matrix-org/synapse/issues/8287))
|
||||||
- Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))
|
- Fix fetching malformed events from remote servers. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))
|
||||||
- Fix `UnboundLocalError` from occurring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
|
- Fix `UnboundLocalError` from occurring when appservices send a malformed register request. ([\#8329](https://github.com/matrix-org/synapse/issues/8329))
|
||||||
- Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353))
|
- Don't send push notifications to expired user accounts. ([\#8353](https://github.com/matrix-org/synapse/issues/8353))
|
||||||
- Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362))
|
- Fix a regression in v1.19.0 with reactivating users through the admin API. ([\#8362](https://github.com/matrix-org/synapse/issues/8362))
|
||||||
- Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364))
|
- Fix a bug where during device registration the length of the device name wasn't limited. ([\#8364](https://github.com/matrix-org/synapse/issues/8364))
|
||||||
@@ -815,7 +815,7 @@ Bugfixes
|
|||||||
- Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977))
|
- Fix a bug introduced in Synapse v1.7.2 which caused inaccurate membership counts in the room directory. ([\#7977](https://github.com/matrix-org/synapse/issues/7977))
|
||||||
- Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978))
|
- Fix a long standing bug: 'Duplicate key value violates unique constraint "event_relations_id"' when message retention is configured. ([\#7978](https://github.com/matrix-org/synapse/issues/7978))
|
||||||
- Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980))
|
- Fix "no create event in auth events" when trying to reject invitation after inviter leaves. Bug introduced in Synapse v1.10.0. ([\#7980](https://github.com/matrix-org/synapse/issues/7980))
|
||||||
- Fix various comments and minor discrepancies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
|
- Fix various comments and minor discrepancies in server notices code. ([\#7996](https://github.com/matrix-org/synapse/issues/7996))
|
||||||
- Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999))
|
- Fix a long standing bug where HTTP HEAD requests resulted in a 400 error. ([\#7999](https://github.com/matrix-org/synapse/issues/7999))
|
||||||
- Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012))
|
- Fix a long-standing bug which caused two copies of some log lines to be written when synctl was used along with a MemoryHandler logger. ([\#8011](https://github.com/matrix-org/synapse/issues/8011), [\#8012](https://github.com/matrix-org/synapse/issues/8012))
|
||||||
|
|
||||||
@@ -1460,7 +1460,7 @@ Bugfixes
|
|||||||
- Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946))
|
- Transfer alias mappings on room upgrade. ([\#6946](https://github.com/matrix-org/synapse/issues/6946))
|
||||||
- Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455))
|
- Ensure that a user interactive authentication session is tied to a single request. ([\#7068](https://github.com/matrix-org/synapse/issues/7068), [\#7455](https://github.com/matrix-org/synapse/issues/7455))
|
||||||
- Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089))
|
- Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors. ([\#7089](https://github.com/matrix-org/synapse/issues/7089))
|
||||||
- Return the proper error (`M_BAD_ALIAS`) when a non-existent canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
|
- Return the proper error (`M_BAD_ALIAS`) when a non-existent canonical alias is provided. ([\#7109](https://github.com/matrix-org/synapse/issues/7109))
|
||||||
- Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117))
|
- Fix a bug which meant that groups updates were not correctly replicated between workers. ([\#7117](https://github.com/matrix-org/synapse/issues/7117))
|
||||||
- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133))
|
- Fix starting workers when federation sending not split out. ([\#7133](https://github.com/matrix-org/synapse/issues/7133))
|
||||||
- Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150))
|
- Ensure `is_verified` is a boolean in responses to `GET /_matrix/client/r0/room_keys/keys`. Also warn the user if they forgot the `version` query param. ([\#7150](https://github.com/matrix-org/synapse/issues/7150))
|
||||||
@@ -1482,7 +1482,7 @@ Bugfixes
|
|||||||
- Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
|
- Fix bad error handling that would cause Synapse to crash if it's provided with a YAML configuration file that's either empty or doesn't parse into a key-value map. ([\#7341](https://github.com/matrix-org/synapse/issues/7341))
|
||||||
- Fix incorrect metrics reporting for `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
|
- Fix incorrect metrics reporting for `renew_attestations` background task. ([\#7344](https://github.com/matrix-org/synapse/issues/7344))
|
||||||
- Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
|
- Prevent non-federating rooms from appearing in responses to federated `POST /publicRoom` requests when a filter was included. ([\#7367](https://github.com/matrix-org/synapse/issues/7367))
|
||||||
- Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
|
- Fix a bug which would cause the room directory to be incorrectly populated if Synapse was upgraded directly from v1.2.1 or earlier to v1.4.0 or later. Note that this fix does not apply retrospectively; see the [upgrade notes](docs/upgrade.md#upgrading-to-v1130) for more information. ([\#7387](https://github.com/matrix-org/synapse/issues/7387))
|
||||||
- Fix bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
|
- Fix bug in `EventContext.deserialize`. ([\#7393](https://github.com/matrix-org/synapse/issues/7393))
|
||||||
|
|
||||||
|
|
||||||
@@ -1638,7 +1638,7 @@ Security advisory
|
|||||||
-----------------
|
-----------------
|
||||||
|
|
||||||
Synapse may be vulnerable to request-smuggling attacks when it is used with a
|
Synapse may be vulnerable to request-smuggling attacks when it is used with a
|
||||||
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
|
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
|
||||||
described in
|
described in
|
||||||
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
|
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
|
||||||
and
|
and
|
||||||
@@ -1748,7 +1748,7 @@ Internal Changes
|
|||||||
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
|
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
|
||||||
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
|
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
|
||||||
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
|
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
|
||||||
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
|
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
|
||||||
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
|
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
|
||||||
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
|
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
|
||||||
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
|
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
|
||||||
@@ -1809,7 +1809,7 @@ Bugfixes
|
|||||||
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
|
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
|
||||||
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
|
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
|
||||||
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
|
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
|
||||||
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))
|
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))
|
||||||
|
|
||||||
|
|
||||||
Updates to the Docker image
|
Updates to the Docker image
|
||||||
@@ -1889,7 +1889,7 @@ Bugfixes
|
|||||||
Synapse 1.10.0rc4 (2020-02-11)
|
Synapse 1.10.0rc4 (2020-02-11)
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
This release candidate was built incorrectly and is superseded by 1.10.0rc5.
|
This release candidate was built incorrectly and is superseded by 1.10.0rc5.
|
||||||
|
|
||||||
Synapse 1.10.0rc3 (2020-02-10)
|
Synapse 1.10.0rc3 (2020-02-10)
|
||||||
==============================
|
==============================
|
||||||
|
|||||||
@@ -2270,7 +2270,7 @@ Features
|
|||||||
Bugfixes
|
Bugfixes
|
||||||
--------
|
--------
|
||||||
|
|
||||||
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
|
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
|
||||||
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
|
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
|
||||||
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
|
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
|
||||||
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
|
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
|
||||||
@@ -2522,7 +2522,7 @@ Bugfixes
|
|||||||
- Fix a long-standing bug where a `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
|
- Fix a long-standing bug where a `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
|
||||||
- Fix a small bug in v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
|
- Fix a small bug in v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
|
||||||
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
|
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
|
||||||
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))
|
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))
|
||||||
|
|
||||||
|
|
||||||
Improved Documentation
|
Improved Documentation
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user