Compare commits

1 commit: erikj/cach ... erikj/rele

| Author | SHA1 | Date |
|---|---|---|
| | 0eaa6dd30e | |
@@ -15,7 +15,6 @@
# limitations under the License.

import logging

from synapse.storage.engines import create_engine

logger = logging.getLogger("create_postgres_db")

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash

# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.
@@ -6,11 +6,8 @@
set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

exec tox -e py35-old,combine

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL

Binary file not shown.
@@ -5,29 +5,30 @@ jobs:
- image: docker:git
steps:
- checkout
- setup_remote_docker
- docker_prepare
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
# for release builds, we want to get the amd64 image out asap, so first
# we do an amd64-only build, before following up with a multiarch build.
- docker_build:
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
platforms: linux/amd64
- docker_build:
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm/v7,linux/arm64

dockerhubuploadlatest:
docker:
- image: docker:git
steps:
- checkout
- setup_remote_docker
- docker_prepare
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
# for `latest`, we don't want the arm images to disappear, so don't update the tag
# until all of the platforms are built.
- docker_build:
tag: -t matrixdotorg/synapse:latest
platforms: linux/amd64,linux/arm64
platforms: linux/amd64
- docker_build:
tag: -t matrixdotorg/synapse:latest
platforms: linux/amd64,linux/arm/v7,linux/arm64

workflows:
build:
@@ -45,16 +46,12 @@ workflows:

commands:
docker_prepare:
description: Sets up a remote docker server, downloads the buildx cli plugin, and enables multiarch images
description: Downloads the buildx cli plugin and enables multiarch images
parameters:
buildx_version:
type: string
default: "v0.4.1"
steps:
- setup_remote_docker:
# 19.03.13 was the most recent available on circleci at the time of
# writing.
version: 19.03.13
- run: apk add --no-cache curl
- run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
- run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx

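The `docker_build` steps above drive multi-arch image builds through the buildx plugin. For anyone wanting to reproduce that locally, here is a hedged sketch; the builder name, image tag and platform list are illustrative, not taken from this config:

```sh
# create a builder instance that can target multiple platforms (Docker 19.03+)
docker buildx create --name multiarch --use
# build for several architectures and push in one step
docker buildx build \
  --platform linux/amd64,linux/arm/v7,linux/arm64 \
  -t example/synapse:latest \
  --push .
```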
@@ -1,8 +0,0 @@
# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3

# Target Python 3.5 with black (#8664).
aff1eb7c671b0a3813407321d2702ec46c71fa56

# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1
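The hashes above come from a `.git-blame-ignore-revs` file listing bulk-reformatting commits. To have `git blame` skip them locally, something like the following works (a sketch; needs git 2.23 or newer, and the file path shown is only an example):

```sh
# one-off, for a single file
git blame --ignore-revs-file .git-blame-ignore-revs synapse/handlers/message.py
# or persistently for this clone
git config blame.ignoreRevsFile .git-blame-ignore-revs
```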
.gitignore (vendored, 5 changes)
@@ -6,19 +6,16 @@
*.egg
*.egg-info
*.lock
*.py[cod]
*.pyc
*.snap
*.tac
_trial_temp/
_trial_temp*/
/out
.DS_Store
__pycache__/

# stuff that is likely to exist when you run a server locally
/*.db
/*.log
/*.log.*
/*.log.config
/*.pid
/.python-version

CHANGES.md (1280 changes): file diff suppressed because it is too large.
CONTRIBUTING.md (253 changes)
@@ -1,31 +1,4 @@
|
||||
Welcome to Synapse
|
||||
|
||||
This document aims to get you started with contributing to this repo!
|
||||
|
||||
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
|
||||
- [2. What do I need?](#2-what-do-i-need)
|
||||
- [3. Get the source.](#3-get-the-source)
|
||||
- [4. Install the dependencies](#4-install-the-dependencies)
|
||||
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
|
||||
* [Under Windows](#under-windows)
|
||||
- [5. Get in touch.](#5-get-in-touch)
|
||||
- [6. Pick an issue.](#6-pick-an-issue)
|
||||
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
|
||||
- [8. Test, test, test!](#8-test-test-test)
|
||||
* [Run the linters.](#run-the-linters)
|
||||
* [Run the unit tests.](#run-the-unit-tests)
|
||||
* [Run the integration tests.](#run-the-integration-tests)
|
||||
- [9. Submit your patch.](#9-submit-your-patch)
|
||||
* [Changelog](#changelog)
|
||||
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
|
||||
+ [Debian changelog](#debian-changelog)
|
||||
* [Sign off](#sign-off)
|
||||
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
|
||||
- [11. Find a new issue.](#11-find-a-new-issue)
|
||||
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
|
||||
- [Conclusion](#conclusion)
|
||||
|
||||
# 1. Who can contribute to Synapse?
|
||||
# Contributing code to Synapse
|
||||
|
||||
Everyone is welcome to contribute code to [matrix.org
|
||||
projects](https://github.com/matrix-org), provided that they are willing to
|
||||
@@ -36,179 +9,70 @@ license the code under the same terms as the project's overall 'outbound'
|
||||
license - in our case, this is almost always Apache Software License v2 (see
|
||||
[LICENSE](LICENSE)).
|
||||
|
||||
# 2. What do I need?
|
||||
|
||||
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
|
||||
|
||||
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
|
||||
|
||||
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
|
||||
|
||||
|
||||
# 3. Get the source.
## How to contribute

The preferred and easiest way to contribute changes is to fork the relevant
project on GitHub, and then [create a pull request](
project on github, and then [create a pull request](
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
changes into our repo.

Please base your changes on the `develop` branch.
Some other points to follow:

```sh
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
git checkout develop
```
* Please base your changes on the `develop` branch.

If you need help getting started with git, this is beyond the scope of the document, but you
can find many good git tutorials on the web.
* Please follow the [code style requirements](#code-style).

# 4. Install the dependencies
* Please include a [changelog entry](#changelog) with each PR.

## Under Unix (macOS, Linux, BSD, ...)
* Please [sign off](#sign-off) your contribution.

Once you have installed Python 3 and added the source, please open a terminal and
setup a *virtualenv*, as follows:
* Please keep an eye on the pull request for feedback from the [continuous
integration system](#continuous-integration-and-testing) and try to fix any
errors that come up.

```sh
cd path/where/you/have/cloned/the/repository
python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,lint,mypy,test]"
pip install tox
```
* If you need to [update your PR](#updating-your-pull-request), just add new
commits to your branch rather than rebasing.

This will install the developer dependencies for the project.
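A quick way to confirm the editable install above worked, while the virtualenv is still active (a minimal sketch):

```sh
python -c "import synapse; print(synapse.__version__)"
```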

## Under Windows

TBD

# 5. Get in touch.
|
||||
|
||||
Join our developer community on Matrix: #synapse-dev:matrix.org !
|
||||
|
||||
|
||||
# 6. Pick an issue.
|
||||
|
||||
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
|
||||
to work on.
|
||||
|
||||
|
||||
# 7. Turn coffee and documentation into code and documentation!
|
||||
## Code style
|
||||
|
||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
||||
it, including the conventions for the [sample configuration
|
||||
file](docs/code_style.md#configuration-file-format).
|
||||
|
||||
There is a growing amount of documentation located in the [docs](docs)
|
||||
directory. This documentation is intended primarily for sysadmins running their
|
||||
own Synapse instance, as well as developers interacting externally with
|
||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
||||
service developers.
|
||||
Many of the conventions are enforced by scripts which are run as part of the
|
||||
[continuous integration system](#continuous-integration-and-testing). To help
|
||||
check if you have followed the code style, you can run `scripts-dev/lint.sh`
|
||||
locally. You'll need python 3.6 or later, and to install a number of tools:
|
||||
|
||||
If you add new files added to either of these folders, please use [GitHub-Flavoured
|
||||
Markdown](https://guides.github.com/features/mastering-markdown/).
|
||||
```
|
||||
# Install the dependencies
|
||||
pip install -e ".[lint,mypy]"
|
||||
|
||||
Some documentation also exists in [Synapse's GitHub
|
||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
||||
contributed to by community authors.
|
||||
|
||||
|
||||
# 8. Test, test, test!
|
||||
<a name="test-test-test"></a>
|
||||
|
||||
While you're developing and before submitting a patch, you'll
|
||||
want to test your code.
|
||||
|
||||
## Run the linters.
|
||||
|
||||
The linters look at your code and do two things:
|
||||
|
||||
- ensure that your code follows the coding style adopted by the project;
|
||||
- catch a number of errors in your code.
|
||||
|
||||
They're pretty fast, don't hesitate!
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
# Run the linter script
|
||||
./scripts-dev/lint.sh
|
||||
```
|
||||
|
||||
Note that this script *will modify your files* to fix styling errors.
|
||||
Make sure that you have saved all your files.
|
||||
**Note that the script does not just test/check, but also reformats code, so you
|
||||
may wish to ensure any new code is committed first**.
|
||||
|
||||
If you wish to restrict the linters to only the files changed since the last commit
|
||||
(much faster!), you can instead run:
|
||||
By default, this script checks all files and can take some time; if you alter
|
||||
only certain files, you might wish to specify paths as arguments to reduce the
|
||||
run-time:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh -d
|
||||
```
|
||||
|
||||
Or if you know exactly which files you wish to lint, you can instead run:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||
```
|
||||
|
||||
## Run the unit tests.
|
||||
You can also provide the `-d` option, which will lint the files that have been
|
||||
changed since the last git commit. This will often be significantly faster than
|
||||
linting the whole codebase.
|
||||
|
||||
The unit tests run parts of Synapse, including your changes, to see if anything
|
||||
was broken. They are slower than the linters but will typically catch more errors.
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests
|
||||
```
|
||||
|
||||
If you wish to only run *some* unit tests, you may specify
|
||||
another module instead of `tests` - or a test class or a method:
|
||||
|
||||
```sh
|
||||
source ./env/bin/activate
|
||||
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
|
||||
```
|
||||
|
||||
If your tests fail, you may wish to look at the logs:
|
||||
|
||||
```sh
|
||||
less _trial_temp/test.log
|
||||
```
|
||||
|
||||
## Run the integration tests.
|
||||
|
||||
The integration tests are a more comprehensive suite of tests. They
|
||||
run a full version of Synapse, including your changes, to check if
|
||||
anything was broken. They are slower than the unit tests but will
|
||||
typically catch more errors.
|
||||
|
||||
The following command will let you run the integration test with the most common
|
||||
configuration:
|
||||
|
||||
```sh
|
||||
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:py37
|
||||
```
|
||||
|
||||
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
|
||||
|
||||
|
||||
# 9. Submit your patch.
|
||||
|
||||
Once you're happy with your patch, it's time to prepare a Pull Request.
|
||||
|
||||
To prepare a Pull Request, please:
|
||||
|
||||
1. verify that [all the tests pass](#test-test-test), including the coding style;
|
||||
2. [sign off](#sign-off) your contribution;
|
||||
3. `git push` your commit to your fork of Synapse;
|
||||
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
|
||||
5. add a [changelog entry](#changelog) and push it to your Pull Request;
|
||||
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
|
||||
Before pushing new changes, ensure they don't produce linting errors. Commit any
|
||||
files that were corrected.
|
||||
|
||||
Please ensure your changes match the cosmetic style of the existing project,
|
||||
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||
makes it horribly hard to review otherwise.
|
||||
|
||||
## Changelog
|
||||
|
||||
@@ -358,36 +222,47 @@ Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.

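For example (a sketch; the name, email and commit message are placeholders):

```sh
git config user.name "Your Name"
git config user.email "you@example.org"
git commit -s -m "Fix a thing"
# the -s flag appends a trailer like:
#   Signed-off-by: Your Name <you@example.org>
```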
## Continuous integration and testing
|
||||
|
||||
# 10. Turn feedback into better code.
|
||||
[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
|
||||
run a series of checks and tests against any PR which is opened against the
|
||||
project; if your change breaks the build, this will be shown in GitHub, with
|
||||
links to the build results. If your build fails, please try to fix the errors
|
||||
and update your branch.
|
||||
|
||||
Once the Pull Request is opened, you will see a few things:
|
||||
To run unit tests in a local development environment, you can use:
|
||||
|
||||
1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
|
||||
2. one or more of the developers will take a look at your Pull Request and offer feedback.
|
||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||
for SQLite-backed Synapse on Python 3.5.
|
||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||
(requires a running local PostgreSQL with access to create databases).
|
||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||
set up PostgreSQL yourself.
|
||||
|
||||
From this point, you should:
|
||||
Docker images are available for running the integration tests (SyTest) locally,
|
||||
see the [documentation in the SyTest repo](
|
||||
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
|
||||
information.
|
||||
|
||||
1. Look at the results of the CI pipeline.
|
||||
- If there is any error, fix the error.
|
||||
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
|
||||
3. Create a new commit with the changes.
|
||||
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
|
||||
- Push this commits to your Pull Request.
|
||||
4. Back to 1.
|
||||
## Updating your pull request
|
||||
|
||||
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
|
||||
If you decide to make changes to your pull request - perhaps to address issues
|
||||
raised in a review, or to fix problems highlighted by [continuous
|
||||
integration](#continuous-integration-and-testing) - just add new commits to your
|
||||
branch, and push to GitHub. The pull request will automatically be updated.
|
||||
|
||||
# 11. Find a new issue.
|
||||
Please **avoid** rebasing your branch, especially once the PR has been
|
||||
reviewed: doing so makes it very difficult for a reviewer to see what has
|
||||
changed since a previous review.
|
||||
|
||||
By now, you know the drill!
|
||||
|
||||
# Notes for maintainers on merging PRs etc
|
||||
## Notes for maintainers on merging PRs etc
|
||||
|
||||
There are some notes for those with commit access to the project on how we
|
||||
manage git [here](docs/dev/git.md).
|
||||
|
||||
# Conclusion
|
||||
## Conclusion
|
||||
|
||||
That's it! Matrix is a very open and collaborative project as you might expect
|
||||
given our obsession with open communication. If we're going to successfully
|
||||
|
||||
INSTALL.md (320 changes)
@@ -1,44 +1,19 @@
|
||||
# Installation Instructions
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Picking a database engine](#picking-a-database-engine)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Client Well-Known URI](#client-well-known-uri)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
|
||||
There are 3 steps to follow under **Installation Instructions**.
|
||||
|
||||
- [Installation Instructions](#installation-instructions)
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-specific prerequisites](#platform-specific-prerequisites)
|
||||
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
||||
- [ArchLinux](#archlinux)
|
||||
- [CentOS/Fedora](#centosfedora)
|
||||
- [macOS](#macos)
|
||||
- [OpenSUSE](#opensuse)
|
||||
- [OpenBSD](#openbsd)
|
||||
- [Windows](#windows)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
||||
- [Debian/Ubuntu](#debianubuntu)
|
||||
- [Matrix.org packages](#matrixorg-packages)
|
||||
- [Downstream Debian packages](#downstream-debian-packages)
|
||||
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
||||
- [Fedora](#fedora)
|
||||
- [OpenSUSE](#opensuse-1)
|
||||
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
||||
- [ArchLinux](#archlinux-1)
|
||||
- [Void Linux](#void-linux)
|
||||
- [FreeBSD](#freebsd)
|
||||
- [OpenBSD](#openbsd-1)
|
||||
- [NixOS](#nixos)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [Using PostgreSQL](#using-postgresql)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Client Well-Known URI](#client-well-known-uri)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
|
||||
## Choosing your server name
|
||||
# Choosing your server name
|
||||
|
||||
It is important to choose the name for your server before you install Synapse,
|
||||
because it cannot be changed later.
|
||||
@@ -54,24 +29,46 @@ that your email address is probably `user@example.com` rather than
|
||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||
[Setting up Federation](docs/federate.md).
|
||||
|
||||
## Installing Synapse
# Picking a database engine

### Installing from source
Synapse offers two database engines:
* [PostgreSQL](https://www.postgresql.org)
* [SQLite](https://sqlite.org/)

Almost all installations should opt to use PostgreSQL. Advantages include:

* significant performance improvements due to the superior threading and
caching model, smarter query optimiser
* allowing the DB to be run on separate hardware

For information on how to install and use PostgreSQL, please see
[docs/postgres.md](docs/postgres.md)

By default Synapse uses SQLite and in doing so trades performance for convenience.
SQLite is only recommended in Synapse for testing purposes or for servers with
light workloads.

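If you pick PostgreSQL, the database itself must be created before Synapse can use it. A minimal sketch, assuming a local PostgreSQL server and placeholder names; see docs/postgres.md for the authoritative steps:

```sh
sudo -u postgres createuser --pwprompt synapse_user
# Synapse expects a C collation, hence --locale=C and --template=template0
sudo -u postgres createdb --encoding=UTF8 --locale=C --template=template0 \
  --owner=synapse_user synapse
```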
# Installing Synapse
|
||||
|
||||
## Installing from source
|
||||
|
||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||
|
||||
When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
|
||||
|
||||
System requirements:
|
||||
|
||||
- POSIX-compliant system (tested on Linux & OS X)
|
||||
- Python 3.5.2 or later, up to Python 3.9.
|
||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||
|
||||
Synapse is written in Python but some of the libraries it uses are written in
|
||||
C. So before we can install Synapse itself we need a working C compiler and the
|
||||
header files for Python C extensions. See [Platform-Specific
|
||||
Instructions](#platform-specific-instructions) for information on installing
|
||||
these on various platforms.
|
||||
|
||||
To install the Synapse homeserver run:
|
||||
|
||||
```sh
|
||||
```
|
||||
mkdir -p ~/synapse
|
||||
virtualenv -p python3 ~/synapse/env
|
||||
source ~/synapse/env/bin/activate
|
||||
@@ -88,7 +85,7 @@ prefer.
|
||||
This Synapse installation can then be later upgraded by using pip again with the
|
||||
update flag:
|
||||
|
||||
```sh
|
||||
```
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install -U matrix-synapse
|
||||
```
|
||||
@@ -96,7 +93,7 @@ pip install -U matrix-synapse
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
file. To do this, run (in your virtualenv, as before):
|
||||
|
||||
```sh
|
||||
```
|
||||
cd ~/synapse
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
@@ -114,58 +111,70 @@ wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||
change your homeserver's keys, you may find that other homeserver have the
|
||||
old key cached. If you update the signing key, you should change the name of the
|
||||
key in the `<server name>.signing.key` file (the second word) to something
|
||||
different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
|
||||
different. See the
|
||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||
for more information on key management).
|
||||
|
||||
To actually run your new homeserver, pick a working directory for Synapse to
run (e.g. `~/synapse`), and:

```sh
```
cd ~/synapse
source env/bin/activate
synctl start
```

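Once `synctl start` has returned, a quick check that the listener is answering (a sketch; assumes the default local port 8008):

```sh
curl http://localhost:8008/_matrix/client/versions
```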
#### Platform-specific prerequisites
|
||||
### Platform-Specific Instructions
|
||||
|
||||
Synapse is written in Python but some of the libraries it uses are written in
|
||||
C. So before we can install Synapse itself we need a working C compiler and the
|
||||
header files for Python C extensions.
|
||||
|
||||
##### Debian/Ubuntu/Raspbian
|
||||
#### Debian/Ubuntu/Raspbian
|
||||
|
||||
Installing prerequisites on Ubuntu or Debian:
|
||||
|
||||
```sh
|
||||
sudo apt install build-essential python3-dev libffi-dev \
|
||||
```
|
||||
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||
python3-pip python3-setuptools sqlite3 \
|
||||
libssl-dev virtualenv libjpeg-dev libxslt1-dev
|
||||
```
|
||||
|
||||
##### ArchLinux
|
||||
#### ArchLinux
|
||||
|
||||
Installing prerequisites on ArchLinux:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo pacman -S base-devel python python-pip \
|
||||
python-setuptools python-virtualenv sqlite3
|
||||
```
|
||||
|
||||
##### CentOS/Fedora
|
||||
#### CentOS/Fedora
|
||||
|
||||
Installing prerequisites on CentOS or Fedora Linux:
|
||||
Installing prerequisites on CentOS 8 or Fedora>26:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
|
||||
python3-virtualenv libffi-devel openssl-devel python3-devel
|
||||
libwebp-devel tk-devel redhat-rpm-config \
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
sudo dnf groupinstall "Development Tools"
|
||||
```
|
||||
|
||||
##### macOS
|
||||
Installing prerequisites on CentOS 7 or Fedora<=25:
|
||||
|
||||
```
|
||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
sudo yum groupinstall "Development Tools"
|
||||
```
|
||||
|
||||
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
|
||||
uses SQLite 3.7. You may be able to work around this by installing a more
|
||||
recent SQLite version, but it is recommended that you instead use a Postgres
|
||||
database: see [docs/postgres.md](docs/postgres.md).
|
||||
|
||||
#### macOS
|
||||
|
||||
Installing prerequisites on macOS:
|
||||
|
||||
```sh
|
||||
```
|
||||
xcode-select --install
|
||||
sudo easy_install pip
|
||||
sudo pip install virtualenv
|
||||
@@ -175,23 +184,22 @@ brew install pkg-config libffi
|
||||
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
||||
via brew and inform `pip` about it so that `psycopg2` builds:
|
||||
|
||||
```sh
|
||||
```
|
||||
brew install openssl@1.1
|
||||
export LDFLAGS="-L/usr/local/opt/openssl/lib"
|
||||
export CPPFLAGS="-I/usr/local/opt/openssl/include"
|
||||
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
|
||||
```
|
||||
|
||||
##### OpenSUSE
|
||||
#### OpenSUSE
|
||||
|
||||
Installing prerequisites on openSUSE:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo zypper in -t pattern devel_basis
|
||||
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||
```
|
||||
|
||||
##### OpenBSD
|
||||
#### OpenBSD
|
||||
|
||||
A port of Synapse is available under `net/synapse`. The filesystem
|
||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||
@@ -205,72 +213,73 @@ mounted with `wxallowed` (cf. `mount(8)`).
|
||||
Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
|
||||
default OpenBSD installation is mounted with `wxallowed`):
|
||||
|
||||
```sh
|
||||
```
|
||||
doas mkdir /usr/local/pobj_wxallowed
|
||||
```
|
||||
|
||||
Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
|
||||
configured in `/etc/mk.conf`:
|
||||
|
||||
```sh
|
||||
```
|
||||
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
|
||||
```
|
||||
|
||||
Setting the `WRKOBJDIR` for building python:
|
||||
|
||||
```sh
|
||||
```
|
||||
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
|
||||
```
|
||||
|
||||
Building Synapse:
|
||||
|
||||
```sh
|
||||
```
|
||||
cd /usr/ports/net/synapse
|
||||
make install
|
||||
```
|
||||
|
||||
##### Windows
|
||||
#### Windows
|
||||
|
||||
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||
Debian, Fedora, or source installation methods. More information about WSL can
|
||||
be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
|
||||
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
|
||||
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||
for Windows Server.
|
||||
|
||||
### Prebuilt packages
|
||||
## Prebuilt packages
|
||||
|
||||
As an alternative to installing from source, prebuilt packages are available
|
||||
for a number of platforms.
|
||||
|
||||
#### Docker images and Ansible playbooks
|
||||
### Docker images and Ansible playbooks
|
||||
|
||||
There is an official synapse image available at
|
||||
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
||||
There is an offical synapse image available at
|
||||
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
|
||||
the docker-compose file available at [contrib/docker](contrib/docker). Further
|
||||
information on this including configuration options is available in the README
|
||||
on hub.docker.com.
|
||||
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||
Dockerfile to automate a synapse server in a single Docker image, at
|
||||
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
||||
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||
|
||||
Slavi Pantaleev has created an Ansible playbook,
|
||||
which installs the offical Docker image of Matrix Synapse
|
||||
along with many other Matrix-related services (Postgres database, Element, coturn,
|
||||
ma1sd, SSL support, etc.).
|
||||
For more details, see
|
||||
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
||||
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||
|
||||
#### Debian/Ubuntu
|
||||
|
||||
##### Matrix.org packages
|
||||
### Debian/Ubuntu
|
||||
|
||||
#### Matrix.org packages
|
||||
|
||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||
Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
|
||||
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo apt install -y lsb-release wget apt-transport-https
|
||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||
@@ -290,7 +299,7 @@ The fingerprint of the repository signing key (as shown by `gpg
|
||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||
|
||||
##### Downstream Debian packages
|
||||
#### Downstream Debian packages
|
||||
|
||||
We do not recommend using the packages from the default Debian `buster`
|
||||
repository at this time, as they are old and suffer from known security
|
||||
@@ -302,49 +311,49 @@ for information on how to use backports.
|
||||
If you are using Debian `sid` or testing, Synapse is available in the default
|
||||
repositories and it should be possible to install it simply with:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo apt install matrix-synapse
|
||||
```
|
||||
|
||||
##### Downstream Ubuntu packages
|
||||
#### Downstream Ubuntu packages
|
||||
|
||||
We do not recommend using the packages in the default Ubuntu repository
|
||||
at this time, as they are old and suffer from known security vulnerabilities.
|
||||
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
|
||||
|
||||
#### Fedora
|
||||
### Fedora
|
||||
|
||||
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo dnf install matrix-synapse
|
||||
```
|
||||
|
||||
Oleg Girko provides Fedora RPMs at
|
||||
<https://obs.infoserver.lv/project/monitor/matrix-synapse>
|
||||
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||
|
||||
#### OpenSUSE
|
||||
### OpenSUSE
|
||||
|
||||
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo zypper install matrix-synapse
|
||||
```
|
||||
|
||||
#### SUSE Linux Enterprise Server
|
||||
### SUSE Linux Enterprise Server
|
||||
|
||||
Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
|
||||
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||
|
||||
#### ArchLinux
|
||||
### ArchLinux
|
||||
|
||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
||||
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
|
||||
the necessary dependencies.
|
||||
|
||||
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo pip install --upgrade pip
|
||||
```
|
||||
|
||||
@@ -353,28 +362,28 @@ ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||
compile it under the right architecture. (This should not be needed if
|
||||
installing under virtualenv):
|
||||
|
||||
```sh
|
||||
```
|
||||
sudo pip uninstall py-bcrypt
|
||||
sudo pip install py-bcrypt
|
||||
```
|
||||
|
||||
#### Void Linux
|
||||
### Void Linux
|
||||
|
||||
Synapse can be found in the void repositories as 'synapse':
|
||||
|
||||
```sh
|
||||
```
|
||||
xbps-install -Su
|
||||
xbps-install -S synapse
|
||||
```
|
||||
|
||||
#### FreeBSD
|
||||
### FreeBSD
|
||||
|
||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py37-matrix-synapse`
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py37-matrix-synapse`
|
||||
|
||||
#### OpenBSD
|
||||
### OpenBSD
|
||||
|
||||
As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
|
||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||
@@ -383,35 +392,20 @@ and mounting it to `/var/synapse` should be taken into consideration.
|
||||
|
||||
Installing Synapse:
|
||||
|
||||
```sh
|
||||
```
|
||||
doas pkg_add synapse
|
||||
```
|
||||
|
||||
#### NixOS
|
||||
### NixOS
|
||||
|
||||
Robin Lambertz has packaged Synapse for NixOS at:
|
||||
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
|
||||
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||
|
||||
## Setting up Synapse
|
||||
# Setting up Synapse
|
||||
|
||||
Once you have installed synapse as above, you will need to configure it.
|
||||
|
||||
### Using PostgreSQL
|
||||
|
||||
By default Synapse uses [SQLite](https://sqlite.org/) and in doing so trades performance for convenience.
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
very light workloads.
|
||||
|
||||
Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org). Advantages include:
|
||||
|
||||
- significant performance improvements due to the superior threading and
|
||||
caching model, smarter query optimiser
|
||||
- allowing the DB to be run on separate hardware
|
||||
|
||||
For information on how to install and use PostgreSQL in Synapse, please see
|
||||
[docs/postgres.md](docs/postgres.md)
|
||||
|
||||
### TLS certificates
|
||||
## TLS certificates
|
||||
|
||||
The default configuration exposes a single HTTP port on the local
|
||||
interface: `http://localhost:8008`. It is suitable for local testing,
|
||||
@@ -425,19 +419,19 @@ The recommended way to do so is to set up a reverse proxy on port
|
||||
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
||||
so, you will need to edit `homeserver.yaml`, as follows:
|
||||
|
||||
- First, under the `listeners` section, uncomment the configuration for the
|
||||
* First, under the `listeners` section, uncomment the configuration for the
|
||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||
each line). The relevant lines are like this:
|
||||
|
||||
```yaml
|
||||
- port: 8448
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```
|
||||
- port: 8448
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```
|
||||
|
||||
- You will also need to uncomment the `tls_certificate_path` and
|
||||
* You will also need to uncomment the `tls_certificate_path` and
|
||||
`tls_private_key_path` lines under the `TLS` section. You will need to manage
|
||||
provisioning of these certificates yourself — Synapse had built-in ACME
|
||||
support, but the ACMEv1 protocol Synapse implements is deprecated, not
|
||||
@@ -452,7 +446,7 @@ so, you will need to edit `homeserver.yaml`, as follows:
|
||||
For a more detailed guide to configuring your server for federation, see
|
||||
[federate.md](docs/federate.md).
|
||||
|
||||
### Client Well-Known URI
|
||||
## Client Well-Known URI
|
||||
|
||||
Setting up the client Well-Known URI is optional but if you set it up, it will
|
||||
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
|
||||
@@ -463,7 +457,7 @@ about the actual homeserver URL you are using.
|
||||
The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
|
||||
the following format.
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"m.homeserver": {
|
||||
"base_url": "https://<matrix.example.com>"
|
||||
@@ -473,7 +467,7 @@ the following format.
|
||||
|
||||
It can optionally contain identity server information as well.
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"m.homeserver": {
|
||||
"base_url": "https://<matrix.example.com>"
|
||||
@@ -490,11 +484,10 @@ Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
view it.

In nginx this would be something like:

```nginx
```
location /.well-known/matrix/client {
return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
default_type application/json;
add_header Content-Type application/json;
add_header Access-Control-Allow-Origin *;
}
```
@@ -504,11 +497,11 @@ correctly. `public_baseurl` should be set to the URL that clients will use to
connect to your server. This is the same URL you put for the `m.homeserver`
`base_url` above.

```yaml
```
public_baseurl: "https://<matrix.example.com>"
```

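A simple way to verify the well-known response once the reverse proxy is in place (a sketch; the domain is a placeholder):

```sh
curl -s https://matrix.example.com/.well-known/matrix/client
# expected: {"m.homeserver": {"base_url": "https://matrix.example.com"}}
```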
### Email
|
||||
## Email
|
||||
|
||||
It is desirable for Synapse to have the capability to send email. This allows
|
||||
Synapse to send password reset emails, send verifications when an email address
|
||||
@@ -523,28 +516,18 @@ and `notif_from` fields filled out. You may also need to set `smtp_user`,
|
||||
If email is not configured, password reset, registration and notifications via
|
||||
email will be disabled.
|
||||
|
||||
### Registering a user
## Registering a user

The easiest way to create a new user is to do so from a client like [Element](https://element.io/).

Alternatively, you can do so from the command line. This can be done as follows:
Alternatively you can do so from the command line if you have installed via pip.

1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
installed via a prebuilt package, `register_new_matrix_user` should already be
on the search path):
```sh
cd ~/synapse
source env/bin/activate
synctl start # if not already running
```
2. Run the following command:
```sh
register_new_matrix_user -c homeserver.yaml http://localhost:8008
```
This can be done as follows:

This will prompt you to add details for the new user, and will then connect to
the running Synapse to create the new user. For example:
```
$ source ~/synapse/env/bin/activate
$ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
New user localpart: erikj
Password:
Confirm password:
@@ -559,12 +542,12 @@ value is generated by `--generate-config`), but it should be kept secret, as
anyone with knowledge of it can register users, including admin accounts,
on your server even if `enable_registration` is `false`.

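For scripted setups, `register_new_matrix_user` can also be run non-interactively. The flags below are assumptions based on the tool's usual options rather than anything stated here, so check `register_new_matrix_user --help` first:

```sh
# hypothetical non-interactive registration; verify the flags against --help
register_new_matrix_user -c homeserver.yaml \
  -u alice -p 'a-strong-password' --no-admin \
  http://localhost:8008
```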
### Setting up a TURN server
|
||||
## Setting up a TURN server
|
||||
|
||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
|
||||
|
||||
### URL previews
## URL previews

Synapse includes support for previewing URLs, which is disabled by default. To
turn it on you must enable the `url_preview_enabled: True` config parameter
@@ -574,18 +557,19 @@ This is critical from a security perspective to stop arbitrary Matrix users
spidering 'internal' URLs on your network. At the very least we recommend that
your loopback and RFC1918 IP addresses are blacklisted.

This also requires the optional `lxml` python dependency to be installed. This
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
means `apt-get install libxml2-dev`, or equivalent for your OS.
This also requires the optional `lxml` and `netaddr` python dependencies to be
installed. This in turn requires the `libxml2` library to be available - on
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
your OS.

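Putting the dependency notes above together, a Debian/Ubuntu-flavoured sketch for enabling URL previews (run the `pip` step inside Synapse's virtualenv; `libxslt1-dev` is the package already listed in the Debian prerequisites earlier):

```sh
sudo apt-get install libxml2-dev libxslt1-dev
pip install lxml netaddr
```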
### Troubleshooting Installation
|
||||
# Troubleshooting Installation
|
||||
|
||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.:
|
||||
|
||||
```sh
|
||||
```
|
||||
pip install twisted
|
||||
```
|
||||
|
||||
|
||||
@@ -20,10 +20,9 @@ recursive-include scripts *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.py
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.p8
|
||||
recursive-include tests *.crt
|
||||
recursive-include tests *.key
|
||||
include tests/http/ca.crt
|
||||
include tests/http/ca.key
|
||||
include tests/http/server.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
|
||||
README.rst (44 changes)
@@ -183,9 +183,8 @@ Using a reverse proxy with Synapse
|
||||
It is recommended to put a reverse proxy such as
|
||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
||||
`HAProxy <https://www.haproxy.org/>`_ or
|
||||
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
|
||||
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
|
||||
doing so is that it means that you can expose the default https port (443) to
|
||||
Matrix clients without needing to run Synapse with root privileges.
|
||||
|
||||
@@ -244,8 +243,6 @@ Then update the ``users`` table in the database::
|
||||
Synapse Development
|
||||
===================
|
||||
|
||||
Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
|
||||
|
||||
Before setting up a development environment for synapse, make sure you have the
|
||||
system dependencies (such as the python header files) installed - see
|
||||
`Installing from source <INSTALL.md#installing-from-source>`_.
|
||||
@@ -264,43 +261,18 @@ to install using pip and a virtualenv::
|
||||
pip install -e ".[all,test]"
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env. If any dependencies fail to install,
|
||||
try installing the failing modules individually::
|
||||
dependencies into a virtual env.
|
||||
|
||||
pip install -e "module-name"
|
||||
|
||||
Once this is done, you may wish to run Synapse's unit tests to
|
||||
check that everything is installed correctly::
|
||||
Once this is done, you may wish to run Synapse's unit tests, to
|
||||
check that everything is installed as it should be::
|
||||
|
||||
python -m twisted.trial tests
|
||||
|
||||
This should end with a 'PASSED' result (note that exact numbers will
|
||||
differ)::
|
||||
|
||||
Ran 1337 tests in 716.064s
|
||||
|
||||
PASSED (skips=15, successes=1322)
|
||||
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
||||
|
||||
./demo/start.sh
|
||||
|
||||
(to stop, you can use `./demo/stop.sh`)
|
||||
|
||||
If you just want to start a single instance of the app and run it directly::
|
||||
|
||||
# Create the homeserver.yaml config once
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
--config-path homeserver.yaml \
|
||||
--generate-config \
|
||||
--report-stats=[yes|no]
|
||||
|
||||
# Start the app
|
||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||
|
||||
This should end with a 'PASSED' result::
|
||||
|
||||
Ran 1266 tests in 643.930s
|
||||
|
||||
PASSED (skips=15, successes=1251)
|
||||
|
||||
Running the Integration Tests
|
||||
=============================
|
||||
|
||||
UPGRADE.rst (230 changes)
@@ -5,16 +5,6 @@ Before upgrading check if any special steps are required to upgrade from the
version you currently have installed to the current version of Synapse. The extra
instructions that may be required are listed later in this document.

* Check that your versions of Python and PostgreSQL are still supported.

Synapse follows upstream lifecycles for `Python`_ and `PostgreSQL`_, and
removes support for versions which are no longer maintained.

The website https://endoflife.date also offers convenient summaries.

.. _Python: https://devguide.python.org/devcycle/#end-of-life-branches
.. _PostgreSQL: https://www.postgresql.org/support/versioning/

* If Synapse was installed using `prebuilt packages
<INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
for upgrading those packages.
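A quick way to carry out the version check from the first bullet above (a
sketch; assumes shell access to the host running Synapse and to the PostgreSQL
server):

.. code:: bash

   python3 --version
   sudo -u postgres psql -c 'SELECT version();'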
@@ -85,224 +75,6 @@ for example:
|
||||
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
|
||||
Upgrading to v1.29.0
|
||||
====================
|
||||
|
||||
Requirement for X-Forwarded-Proto header
|
||||
----------------------------------------
|
||||
|
||||
When using Synapse with a reverse proxy (in particular, when using the
|
||||
`x_forwarded` option on an HTTP listener), Synapse now expects to receive an
|
||||
`X-Forwarded-Proto` header on incoming HTTP requests. If it is not set, Synapse
|
||||
will log a warning on each received request.
|
||||
|
||||
To avoid the warning, administrators using a reverse proxy should ensure that
|
||||
the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
|
||||
indicate the protocol used by the client.
|
||||
|
||||
Synapse also requires the `Host` header to be preserved.
|
||||
|
||||
See the `reverse proxy documentation <docs/reverse_proxy.md>`_, where the
|
||||
example configurations have been updated to show how to set these headers.
|
||||
|
||||
(Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
|
||||
sets `X-Forwarded-Proto` by default.)
|
||||
|
||||
Upgrading to v1.27.0
|
||||
====================
|
||||
|
||||
Changes to callback URI for OAuth2 / OpenID Connect and SAML2
|
||||
-------------------------------------------------------------
|
||||
|
||||
This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers:
|
||||
|
||||
* If your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
|
||||
provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
|
||||
to the list of permitted "redirect URIs" at the identity provider.
|
||||
|
||||
See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
|
||||
Connect.
|
||||
|
||||
* If your server is configured for single sign-on via a SAML2 identity provider, you will
|
||||
need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
|
||||
"ACS location" (also known as "allowed callback URLs") at the identity provider.
|
||||
|
||||
The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
|
||||
``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
|
||||
provider uses this property to validate or otherwise identify Synapse, its configuration
|
||||
will need to be updated to use the new URL. Alternatively you could create a new, separate
|
||||
"EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
|
||||
the existing "EntityDescriptor" as they were.
|
||||
|
||||
Changes to HTML templates
|
||||
-------------------------
|
||||
|
||||
The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
|
||||
enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
|
||||
these templates and see issues when viewing them you might need to update them.
|
||||
It is expected that most configurations will need no changes.
|
||||
|
||||
If you have customised the templates *names* for these templates, it is recommended
|
||||
to verify they end in ``.html`` to ensure autoescape is enabled.
|
||||
|
||||
The above applies to the following templates:
|
||||
|
||||
* ``add_threepid.html``
|
||||
* ``add_threepid_failure.html``
|
||||
* ``add_threepid_success.html``
|
||||
* ``notice_expiry.html``
|
||||
* ``notice_expiry.html``
|
||||
* ``notif_mail.html`` (which, by default, includes ``room.html`` and ``notif.html``)
|
||||
* ``password_reset.html``
|
||||
* ``password_reset_confirmation.html``
|
||||
* ``password_reset_failure.html``
|
||||
* ``password_reset_success.html``
|
||||
* ``registration.html``
|
||||
* ``registration_failure.html``
|
||||
* ``registration_success.html``
|
||||
* ``sso_account_deactivated.html``
|
||||
* ``sso_auth_bad_user.html``
|
||||
* ``sso_auth_confirm.html``
|
||||
* ``sso_auth_success.html``
|
||||
* ``sso_error.html``
|
||||
* ``sso_login_idp_picker.html``
|
||||
* ``sso_redirect_confirm.html``
|
||||
|
||||
Upgrading to v1.26.0
|
||||
====================
|
||||
|
||||
Rolling back to v1.25.0 after a failed upgrade
|
||||
----------------------------------------------
|
||||
|
||||
v1.26.0 includes a lot of large changes. If something problematic occurs, you
|
||||
may want to roll-back to a previous version of Synapse. Because v1.26.0 also
|
||||
includes a new database schema version, reverting that version is also required
|
||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
||||
back to v1.25.0 you need to:
|
||||
|
||||
1. Stop the server
|
||||
2. Decrease the schema version in the database:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
UPDATE schema_version SET version = 58;
|
||||
|
||||
3. Delete the ignored users & chain cover data:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DROP TABLE IF EXISTS ignored_users;
|
||||
UPDATE rooms SET has_auth_chain_index = false;
|
||||
|
||||
For PostgreSQL run:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
TRUNCATE event_auth_chain_links;
|
||||
TRUNCATE event_auth_chains;
|
||||
|
||||
For SQLite run:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DELETE FROM event_auth_chain_links;
|
||||
DELETE FROM event_auth_chains;
|
||||
|
||||
4. Mark the deltas as not run (so they will re-run on upgrade).
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/01ignored_user.py";
|
||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/06chain_cover_index.sql";
|
||||
|
||||
5. Downgrade Synapse by following the instructions for your installation method
|
||||
in the "Rolling back to older versions" section above.
|
||||
|
||||
Upgrading to v1.25.0
====================

Last release supporting Python 3.5
----------------------------------

This is the last release of Synapse which guarantees support for Python 3.5,
which passed its upstream End of Life date several months ago.

We will attempt to maintain support through March 2021, but without guarantees.

In the future, Synapse will follow upstream schedules for ending support of
older versions of Python and PostgreSQL. Please upgrade to at least Python 3.6
and PostgreSQL 9.6 as soon as possible.

Blacklisting IP ranges
----------------------

Synapse v1.25.0 includes new settings, ``ip_range_blacklist`` and
``ip_range_whitelist``, for controlling outgoing requests from Synapse for federation,
identity servers, push, and for checking key validity for third-party invite events.
The previous setting, ``federation_ip_range_blacklist``, is deprecated. The new
``ip_range_blacklist`` defaults to private IP ranges if it is not defined.

If you have never customised ``federation_ip_range_blacklist`` it is recommended
that you remove that setting.

If you have customised ``federation_ip_range_blacklist`` you should update the
setting name to ``ip_range_blacklist``.

If you have a custom push server that is reached via private IP space you may
need to customise ``ip_range_blacklist`` or ``ip_range_whitelist``.
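
For example, if you previously carried a custom ``federation_ip_range_blacklist``,
the migration is a straight rename; a minimal sketch is shown below (the CIDR
ranges are illustrative placeholders, not a recommendation):

.. code:: yaml

   # Deprecated spelling (pre-1.25.0):
   #
   # federation_ip_range_blacklist:
   #   - '10.0.0.0/8'
   #   - '192.168.0.0/16'

   # New spelling, which now also covers identity servers, push servers and
   # third-party invite key checks:
   ip_range_blacklist:
     - '10.0.0.0/8'
     - '192.168.0.0/16'
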
Upgrading to v1.24.0
====================

Custom OpenID Connect mapping provider breaking change
--------------------------------------------------------

This release allows the OpenID Connect mapping provider to perform normalisation
of the localpart of the Matrix ID. This allows for the mapping provider to
specify different algorithms, instead of the `default way <https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets>`_.

If your Synapse configuration uses a custom mapping provider
(``oidc_config.user_mapping_provider.module`` is specified and not equal to
``synapse.handlers.oidc_handler.JinjaOidcMappingProvider``) then you *must* ensure
that ``map_user_attributes`` of the mapping provider performs some normalisation
of the ``localpart`` returned. To match previous behaviour you can use the
``map_username_to_mxid_localpart`` function provided by Synapse. An example is
shown below:

.. code-block:: python

   from synapse.types import map_username_to_mxid_localpart


   class MyMappingProvider:
       def map_user_attributes(self, userinfo, token):
           # ... your custom logic ...
           sso_user_id = ...
           localpart = map_username_to_mxid_localpart(sso_user_id)

           return {"localpart": localpart}

Removal of historical Synapse Admin API
----------------------------------------

Historically, the Synapse Admin API has been accessible under:

* ``/_matrix/client/api/v1/admin``
* ``/_matrix/client/unstable/admin``
* ``/_matrix/client/r0/admin``
* ``/_synapse/admin/v1``

The endpoints with ``/_matrix/client/*`` prefixes have been removed as of v1.24.0.
The Admin API is now only accessible under:

* ``/_synapse/admin/v1``

The only exception is the ``/admin/whois`` endpoint, which is
`also available via the client-server API <https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_.

The deprecation of the old endpoints was announced with Synapse 1.20.0 (released
on 2020-09-22) and makes it easier for homeserver admins to lock down external
access to the Admin API endpoints.

Upgrading to v1.23.0
====================

@@ -315,7 +87,7 @@ then it should be modified based on the `structured logging documentation
<https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md>`_.

The ``structured`` and ``drains`` logging options are now deprecated and should
be replaced by standard logging configuration of ``handlers`` and ``formatters``.

A future release of Synapse will make using ``structured: true`` an error.

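As a rough illustration of that replacement (the handler class, formatter class
and file path below are assumptions for the sketch, not prescribed values;
consult the structured logging documentation linked above), a standard logging
configuration might declare explicit handlers and formatters like this:

.. code:: yaml

   version: 1

   formatters:
     structured:
       # JSON formatter shipped with Synapse for machine-readable logs
       class: synapse.logging.TerseJsonFormatter

   handlers:
     file:
       class: logging.handlers.TimedRotatingFileHandler
       formatter: structured
       filename: /var/log/matrix-synapse/homeserver.log
       when: midnight
       backupCount: 3

   loggers:
     synapse:
       level: INFO

   root:
     level: INFO
     handlers: [file]
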
1
changelog.d/8455.bugfix
Normal file
1
changelog.d/8455.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix fetching of E2E cross signing keys over federation when only one of the master key and device signing key is cached already.
|
||||
1
changelog.d/8519.feature
Normal file
1
changelog.d/8519.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add an admin API to delete a single file, or files that were not used for a defined time, from the server. Contributed by @dklimpel.
|
||||
1
changelog.d/8539.feature
Normal file
1
changelog.d/8539.feature
Normal file
@@ -0,0 +1 @@
|
||||
Split admin API for reported events (`GET /_synapse/admin/v1/event_reports`) into detail and list endpoints. This is a breaking change to #8217 which was introduced in Synapse v1.21.0. Those who already use this API should check their scripts. Contributed by @dklimpel.
|
||||
1
changelog.d/8559.misc
Normal file
1
changelog.d/8559.misc
Normal file
@@ -0,0 +1 @@
|
||||
Optimise `/createRoom` with multiple invited users.
|
||||
1
changelog.d/8580.bugfix
Normal file
1
changelog.d/8580.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix a bug where Synapse would blindly forward bad responses from federation to clients when retrieving profile information.
|
||||
1
changelog.d/8582.doc
Normal file
1
changelog.d/8582.doc
Normal file
@@ -0,0 +1 @@
|
||||
Instructions for Azure AD in the OpenID Connect documentation. Contributed by peterk.
|
||||
1
changelog.d/8595.misc
Normal file
1
changelog.d/8595.misc
Normal file
@@ -0,0 +1 @@
|
||||
Implement and use an @lru_cache decorator.
|
||||
1
changelog.d/8607.feature
Normal file
1
changelog.d/8607.feature
Normal file
@@ -0,0 +1 @@
|
||||
Support generating structured logs via the standard logging configuration.
|
||||
1
changelog.d/8610.feature
Normal file
1
changelog.d/8610.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add an admin API to allow server admins to list users' pushers. Contributed by @dklimpel.
|
||||
1
changelog.d/8614.misc
Normal file
1
changelog.d/8614.misc
Normal file
@@ -0,0 +1 @@
|
||||
Don't instantiate Requester directly.
|
||||
1
changelog.d/8615.misc
Normal file
1
changelog.d/8615.misc
Normal file
@@ -0,0 +1 @@
|
||||
Type hints for `RegistrationStore`.
|
||||
1
changelog.d/8616.misc
Normal file
1
changelog.d/8616.misc
Normal file
@@ -0,0 +1 @@
|
||||
Change schema to support access tokens belonging to one user but granting access to another.
|
||||
1
changelog.d/8620.bugfix
Normal file
1
changelog.d/8620.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix a bug where the account validity endpoint would silently fail if the user ID did not have an expiration time. It now returns a 400 error.
|
||||
1
changelog.d/8621.misc
Normal file
1
changelog.d/8621.misc
Normal file
@@ -0,0 +1 @@
|
||||
Remove unused OPTIONS handlers.
|
||||
1
changelog.d/8627.bugfix
Normal file
1
changelog.d/8627.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix email notifications for invites without local state.
|
||||
1
changelog.d/8628.bugfix
Normal file
1
changelog.d/8628.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix handling of invalid group IDs to return a 400 rather than log an exception and return a 500.
|
||||
1
changelog.d/8632.bugfix
Normal file
1
changelog.d/8632.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix handling of User-Agent headers that are invalid UTF-8, which caused user agents of users to not get correctly recorded.
|
||||
1
changelog.d/8633.misc
Normal file
1
changelog.d/8633.misc
Normal file
@@ -0,0 +1 @@
|
||||
Run `mypy` as part of the lint.sh script.
|
||||
1
changelog.d/8634.misc
Normal file
1
changelog.d/8634.misc
Normal file
@@ -0,0 +1 @@
|
||||
Correct Synapse's PyPI package name in the OpenID Connect installation instructions.
|
||||
1
changelog.d/8635.doc
Normal file
1
changelog.d/8635.doc
Normal file
@@ -0,0 +1 @@
|
||||
Improve the sample configuration for single sign-on providers.
|
||||
1
changelog.d/8639.misc
Normal file
1
changelog.d/8639.misc
Normal file
@@ -0,0 +1 @@
|
||||
Fix typos and spelling errors in the code.
|
||||
1
changelog.d/8640.misc
Normal file
1
changelog.d/8640.misc
Normal file
@@ -0,0 +1 @@
|
||||
Reduce number of OpenTracing spans started.
|
||||
1
changelog.d/8643.bugfix
Normal file
1
changelog.d/8643.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix a bug in the `joined_rooms` admin API if the user has never joined any rooms. The bug was introduced, along with the API, in v1.21.0.
|
||||
1
changelog.d/8644.misc
Normal file
1
changelog.d/8644.misc
Normal file
@@ -0,0 +1 @@
|
||||
Add field `total` to device list in admin API.
|
||||
1
changelog.d/8647.feature
Normal file
1
changelog.d/8647.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add an admin API `GET /_synapse/admin/v1/users/<user_id>/media` to get information about uploaded media. Contributed by @dklimpel.
|
||||
1
changelog.d/8655.misc
Normal file
1
changelog.d/8655.misc
Normal file
@@ -0,0 +1 @@
|
||||
Add more type hints to the application services code.
|
||||
1
changelog.d/8657.doc
Normal file
1
changelog.d/8657.doc
Normal file
@@ -0,0 +1 @@
|
||||
Fix the filepath of Dex's example config and the link to Dex's Getting Started guide in the OpenID Connect docs.
|
||||
1
changelog.d/8664.misc
Normal file
1
changelog.d/8664.misc
Normal file
@@ -0,0 +1 @@
|
||||
Tell Black to format code for Python 3.5.
|
||||
1
changelog.d/8665.doc
Normal file
1
changelog.d/8665.doc
Normal file
@@ -0,0 +1 @@
|
||||
Note support for Python 3.9.
|
||||
1
changelog.d/8666.doc
Normal file
1
changelog.d/8666.doc
Normal file
@@ -0,0 +1 @@
|
||||
Minor updates to docs on running tests.
|
||||
1
changelog.d/8667.doc
Normal file
1
changelog.d/8667.doc
Normal file
@@ -0,0 +1 @@
|
||||
Interlink prometheus/grafana documentation.
|
||||
1
changelog.d/8668.misc
Normal file
1
changelog.d/8668.misc
Normal file
@@ -0,0 +1 @@
|
||||
Reduce number of OpenTracing spans started.
|
||||
1
changelog.d/8669.misc
Normal file
1
changelog.d/8669.misc
Normal file
@@ -0,0 +1 @@
|
||||
Don't pull event from DB when handling replication traffic.
|
||||
1
changelog.d/8670.misc
Normal file
1
changelog.d/8670.misc
Normal file
@@ -0,0 +1 @@
|
||||
Reduce number of OpenTracing spans started.
|
||||
1
changelog.d/8671.misc
Normal file
1
changelog.d/8671.misc
Normal file
@@ -0,0 +1 @@
|
||||
Abstract some invite-related code in preparation for landing knocking.
|
||||
1
changelog.d/8679.misc
Normal file
1
changelog.d/8679.misc
Normal file
@@ -0,0 +1 @@
|
||||
Clarify representation of events in logfiles.
|
||||
1
changelog.d/8680.misc
Normal file
1
changelog.d/8680.misc
Normal file
@@ -0,0 +1 @@
|
||||
Don't require `hiredis` package to be installed to run unit tests.
|
||||
1
changelog.d/8682.bugfix
Normal file
1
changelog.d/8682.bugfix
Normal file
@@ -0,0 +1 @@
|
||||
Fix exception during handling multiple concurrent requests for remote media when using multiple media repositories.
|
||||
1
changelog.d/8684.misc
Normal file
1
changelog.d/8684.misc
Normal file
@@ -0,0 +1 @@
|
||||
Fix typing info on cache call signature to accept `on_invalidate`.
|
||||
1
changelog.d/8685.feature
Normal file
1
changelog.d/8685.feature
Normal file
@@ -0,0 +1 @@
|
||||
Support generating structured logs via the standard logging configuration.
|
||||
1
changelog.d/8688.misc
Normal file
1
changelog.d/8688.misc
Normal file
@@ -0,0 +1 @@
|
||||
Abstract some invite-related code in preparation for landing knocking.
|
||||
1
changelog.d/8689.feature
Normal file
1
changelog.d/8689.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add an admin API to allow server admins to list users' pushers. Contributed by @dklimpel.
|
||||
1
changelog.d/8690.misc
Normal file
1
changelog.d/8690.misc
Normal file
@@ -0,0 +1 @@
|
||||
Fail tests if they do not await coroutines.
|
||||
1
changelog.d/8693.misc
Normal file
1
changelog.d/8693.misc
Normal file
@@ -0,0 +1 @@
|
||||
Add more type hints to the application services code.
|
||||
@@ -1 +0,0 @@
|
||||
Update `scripts-dev/complement.sh` to use a local checkout of Complement, allow running a subset of tests and have it use Synapse's Complement test blacklist.
|
||||
@@ -1 +0,0 @@
|
||||
Add `order_by` to the admin API `GET /_synapse/admin/v2/users`. Contributed by @dklimpel.
|
||||
@@ -1 +0,0 @@
|
||||
Replace the `room_invite_state_types` configuration setting with `room_prejoin_state`.
|
||||
@@ -1 +0,0 @@
|
||||
Experimental Spaces support: include `m.room.create` in the room state sent with room-invites.
|
||||
@@ -1 +0,0 @@
|
||||
Fix recently added ratelimits to correctly honour the application service `rate_limited` flag.
|
||||
@@ -1 +0,0 @@
|
||||
Add experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership.
|
||||
@@ -1 +0,0 @@
|
||||
Replace deprecated `imp` module with successor `importlib`. Contributed by Cristina Muñoz.
|
||||
@@ -1 +0,0 @@
|
||||
Make the allowed_local_3pids regex example in the sample config stricter.
|
||||
@@ -1 +0,0 @@
|
||||
Revert using `dmypy run` in lint script.
|
||||
@@ -92,7 +92,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
return self.config["user"].split(":")[1]
|
||||
|
||||
def do_config(self, line):
|
||||
"""Show the config for this client: "config"
|
||||
""" Show the config for this client: "config"
|
||||
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
||||
Config variables:
|
||||
user: The username to auth with.
|
||||
@@ -360,7 +360,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
print(e)
|
||||
|
||||
def do_topic(self, line):
|
||||
""" "topic [set|get] <roomid> [<newtopic>]"
|
||||
""""topic [set|get] <roomid> [<newtopic>]"
|
||||
Set the topic for a room: topic set <roomid> <newtopic>
|
||||
Get the topic for a room: topic get <roomid>
|
||||
"""
|
||||
@@ -690,7 +690,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
self._do_presence_state(2, line)
|
||||
|
||||
def _parse(self, line, keys, force_keys=False):
|
||||
"""Parses the given line.
|
||||
""" Parses the given line.
|
||||
|
||||
Args:
|
||||
line : The line to parse
|
||||
@@ -721,7 +721,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
query_params={"access_token": None},
|
||||
alt_text=None,
|
||||
):
|
||||
"""Runs an HTTP request and pretty prints the output.
|
||||
""" Runs an HTTP request and pretty prints the output.
|
||||
|
||||
Args:
|
||||
method: HTTP method
|
||||
|
||||
@@ -23,10 +23,11 @@ from twisted.web.http_headers import Headers
|
||||
|
||||
|
||||
class HttpClient:
|
||||
"""Interface for talking json over http"""
|
||||
""" Interface for talking json over http
|
||||
"""
|
||||
|
||||
def put_json(self, url, data):
|
||||
"""Sends the specifed json data using PUT
|
||||
""" Sends the specifed json data using PUT
|
||||
|
||||
Args:
|
||||
url (str): The URL to PUT data to.
|
||||
@@ -40,7 +41,7 @@ class HttpClient:
|
||||
pass
|
||||
|
||||
def get_json(self, url, args=None):
|
||||
"""Gets some json from the given host homeserver and path
|
||||
""" Gets some json from the given host homeserver and path
|
||||
|
||||
Args:
|
||||
url (str): The URL to GET data from.
|
||||
@@ -57,7 +58,7 @@ class HttpClient:
|
||||
|
||||
|
||||
class TwistedHttpClient(HttpClient):
|
||||
"""Wrapper around the twisted HTTP client api.
|
||||
""" Wrapper around the twisted HTTP client api.
|
||||
|
||||
Attributes:
|
||||
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
||||
@@ -86,7 +87,8 @@ class TwistedHttpClient(HttpClient):
|
||||
defer.returnValue(json.loads(body))
|
||||
|
||||
def _create_put_request(self, url, json_data, headers_dict={}):
|
||||
"""Wrapper of _create_request to issue a PUT request"""
|
||||
""" Wrapper of _create_request to issue a PUT request
|
||||
"""
|
||||
|
||||
if "Content-Type" not in headers_dict:
|
||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||
@@ -96,7 +98,8 @@ class TwistedHttpClient(HttpClient):
|
||||
)
|
||||
|
||||
def _create_get_request(self, url, headers_dict={}):
|
||||
"""Wrapper of _create_request to issue a GET request"""
|
||||
""" Wrapper of _create_request to issue a GET request
|
||||
"""
|
||||
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@@ -124,7 +127,8 @@ class TwistedHttpClient(HttpClient):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _create_request(self, method, url, producer=None, headers_dict={}):
|
||||
"""Creates and sends a request to the given url"""
|
||||
""" Creates and sends a request to the given url
|
||||
"""
|
||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||
|
||||
retries_left = 5
|
||||
@@ -181,7 +185,8 @@ class _RawProducer:
|
||||
|
||||
|
||||
class _JsonProducer:
|
||||
"""Used by the twisted http client to create the HTTP body from json"""
|
||||
""" Used by the twisted http client to create the HTTP body from json
|
||||
"""
|
||||
|
||||
def __init__(self, jsn):
|
||||
self.data = jsn
|
||||
|
||||
@@ -63,7 +63,8 @@ class CursesStdIO:
|
||||
self.redraw()
|
||||
|
||||
def redraw(self):
|
||||
"""method for redisplaying lines based on internal list of lines"""
|
||||
""" method for redisplaying lines
|
||||
based on internal list of lines """
|
||||
|
||||
self.stdscr.clear()
|
||||
self.paintStatus(self.statusText)
|
||||
|
||||
@@ -56,7 +56,7 @@ def excpetion_errback(failure):
|
||||
|
||||
|
||||
class InputOutput:
|
||||
"""This is responsible for basic I/O so that a user can interact with
|
||||
""" This is responsible for basic I/O so that a user can interact with
|
||||
the example app.
|
||||
"""
|
||||
|
||||
@@ -68,7 +68,8 @@ class InputOutput:
|
||||
self.server = server
|
||||
|
||||
def on_line(self, line):
|
||||
"""This is where we process commands."""
|
||||
""" This is where we process commands.
|
||||
"""
|
||||
|
||||
try:
|
||||
m = re.match(r"^join (\S+)$", line)
|
||||
@@ -132,7 +133,7 @@ class IOLoggerHandler(logging.Handler):
|
||||
|
||||
|
||||
class Room:
|
||||
"""Used to store (in memory) the current membership state of a room, and
|
||||
""" Used to store (in memory) the current membership state of a room, and
|
||||
which home servers we should send PDUs associated with the room to.
|
||||
"""
|
||||
|
||||
@@ -147,7 +148,8 @@ class Room:
|
||||
self.have_got_metadata = False
|
||||
|
||||
def add_participant(self, participant):
|
||||
"""Someone has joined the room"""
|
||||
""" Someone has joined the room
|
||||
"""
|
||||
self.participants.add(participant)
|
||||
self.invited.discard(participant)
|
||||
|
||||
@@ -158,13 +160,14 @@ class Room:
|
||||
self.oldest_server = server
|
||||
|
||||
def add_invited(self, invitee):
|
||||
"""Someone has been invited to the room"""
|
||||
""" Someone has been invited to the room
|
||||
"""
|
||||
self.invited.add(invitee)
|
||||
self.servers.add(origin_from_ucid(invitee))
|
||||
|
||||
|
||||
class HomeServer(ReplicationHandler):
|
||||
"""A very basic home server implentation that allows people to join a
|
||||
""" A very basic home server implentation that allows people to join a
|
||||
room and then invite other people.
|
||||
"""
|
||||
|
||||
@@ -178,7 +181,8 @@ class HomeServer(ReplicationHandler):
|
||||
self.output = output
|
||||
|
||||
def on_receive_pdu(self, pdu):
|
||||
"""We just received a PDU"""
|
||||
""" We just received a PDU
|
||||
"""
|
||||
pdu_type = pdu.pdu_type
|
||||
|
||||
if pdu_type == "sy.room.message":
|
||||
@@ -195,20 +199,23 @@ class HomeServer(ReplicationHandler):
|
||||
)
|
||||
|
||||
def _on_message(self, pdu):
|
||||
"""We received a message"""
|
||||
""" We received a message
|
||||
"""
|
||||
self.output.print_line(
|
||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||
)
|
||||
|
||||
def _on_join(self, context, joinee):
|
||||
"""Someone has joined a room, either a remote user or a local user"""
|
||||
""" Someone has joined a room, either a remote user or a local user
|
||||
"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_participant(joinee)
|
||||
|
||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||
|
||||
def _on_invite(self, origin, context, invitee):
|
||||
"""Someone has been invited"""
|
||||
""" Someone has been invited
|
||||
"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_invited(invitee)
|
||||
|
||||
@@ -221,7 +228,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send_message(self, room_name, sender, body):
|
||||
"""Send a message to a room!"""
|
||||
""" Send a message to a room!
|
||||
"""
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
try:
|
||||
@@ -239,7 +247,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def join_room(self, room_name, sender, joinee):
|
||||
"""Join a room!"""
|
||||
""" Join a room!
|
||||
"""
|
||||
self._on_join(room_name, joinee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
@@ -260,7 +269,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def invite_to_room(self, room_name, sender, invitee):
|
||||
"""Invite someone to a room!"""
|
||||
""" Invite someone to a room!
|
||||
"""
|
||||
self._on_invite(self.server_name, room_name, invitee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
@@ -193,12 +193,15 @@ class TrivialXmppClient:
|
||||
time.sleep(7)
|
||||
print("SSRC spammer started")
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
ssrcMsg = (
|
||||
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||
% {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
)
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print("reply from ssrc announce: ", res)
|
||||
time.sleep(10)
|
||||
|
||||
@@ -20,7 +20,6 @@ Add a new job to the main prometheus.conf file:
|
||||
```
|
||||
|
||||
### for Prometheus v2
|
||||
|
||||
Add a new job to the main prometheus.yml file:
|
||||
|
||||
```yaml
|
||||
@@ -30,17 +29,14 @@ Add a new job to the main prometheus.yml file:
|
||||
scheme: "https"
|
||||
|
||||
static_configs:
|
||||
- targets: ["my.server.here:port"]
|
||||
- targets: ['SERVER.LOCATION:PORT']
|
||||
```
|
||||
|
||||
An example of a Prometheus configuration with workers can be found in
|
||||
[metrics-howto.md](https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md).
|
||||
|
||||
To use `synapse.rules` add
|
||||
|
||||
```yaml
|
||||
rule_files:
|
||||
- "/PATH/TO/synapse-v2.rules"
|
||||
rule_files:
|
||||
- "/PATH/TO/synapse-v2.rules"
|
||||
```
|
||||
|
||||
Metrics are disabled by default when running synapse; they must be enabled
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_resource_utime"),
|
||||
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
||||
name: "[[job]]-[[index]]",
|
||||
name: "[[job]]",
|
||||
min: 0,
|
||||
max: 100,
|
||||
renderer: "line",
|
||||
@@ -22,12 +22,12 @@ new PromConsole.Graph({
|
||||
</script>
|
||||
|
||||
<h3>Memory</h3>
|
||||
<div id="process_resident_memory_bytes"></div>
|
||||
<div id="process_resource_maxrss"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_resident_memory_bytes"),
|
||||
expr: "process_resident_memory_bytes",
|
||||
name: "[[job]]-[[index]]",
|
||||
node: document.querySelector("#process_resource_maxrss"),
|
||||
expr: "process_psutil_rss:max",
|
||||
name: "Maxrss",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -43,8 +43,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_fds"),
|
||||
expr: "process_open_fds",
|
||||
name: "[[job]]-[[index]]",
|
||||
expr: "process_open_fds{job='synapse'}",
|
||||
name: "FDs",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -62,8 +62,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_total_time"),
|
||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / 1000",
|
||||
name: "time",
|
||||
max: 1,
|
||||
min: 0,
|
||||
renderer: "area",
|
||||
@@ -80,8 +80,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_average_time"),
|
||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / rate(python_twisted_reactor_tick_time:count[2m]) / 1000",
|
||||
name: "time",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -97,14 +97,14 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_pending_calls"),
|
||||
expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
|
||||
name: "[[job]]-[[index]]",
|
||||
expr: "rate(python_twisted_reactor_pending_calls:total[30s])/rate(python_twisted_reactor_pending_calls:count[30s])",
|
||||
name: "calls",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yTitle: "Pending Calls"
|
||||
yTitle: "Pending Cals"
|
||||
})
|
||||
</script>
|
||||
|
||||
@@ -115,7 +115,7 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_query_time"),
|
||||
expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
|
||||
expr: "rate(synapse_storage_query_time:count[2m])",
|
||||
name: "[[verb]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -129,8 +129,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_transaction_time"),
|
||||
expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
|
||||
name: "[[job]]-[[index]] [[desc]]",
|
||||
expr: "rate(synapse_storage_transaction_time:count[2m])",
|
||||
name: "[[desc]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -140,12 +140,12 @@ new PromConsole.Graph({
|
||||
</script>
|
||||
|
||||
<h3>Transaction execution time</h3>
|
||||
<div id="synapse_storage_transactions_time_sec"></div>
|
||||
<div id="synapse_storage_transactions_time_msec"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_transactions_time_sec"),
|
||||
expr: "rate(synapse_storage_transaction_time_sum[2m])",
|
||||
name: "[[job]]-[[index]] [[desc]]",
|
||||
node: document.querySelector("#synapse_storage_transactions_time_msec"),
|
||||
expr: "rate(synapse_storage_transaction_time:total[2m]) / 1000",
|
||||
name: "[[desc]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
@@ -154,33 +154,34 @@ new PromConsole.Graph({
|
||||
})
|
||||
</script>
|
||||
|
||||
<h3>Average time waiting for database connection</h3>
|
||||
<div id="synapse_storage_avg_waiting_time"></div>
|
||||
<h3>Database scheduling latency</h3>
|
||||
<div id="synapse_storage_schedule_time"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_avg_waiting_time"),
|
||||
expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
node: document.querySelector("#synapse_storage_schedule_time"),
|
||||
expr: "rate(synapse_storage_schedule_time:total[2m]) / 1000",
|
||||
name: "Total latency",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s",
|
||||
yTitle: "Time"
|
||||
yUnits: "s/s",
|
||||
yTitle: "Usage"
|
||||
})
|
||||
</script>
|
||||
|
||||
<h3>Cache request rate</h3>
|
||||
<div id="synapse_cache_request_rate"></div>
|
||||
<h3>Cache hit ratio</h3>
|
||||
<div id="synapse_cache_ratio"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_cache_request_rate"),
|
||||
expr: "rate(synapse_util_caches_cache:total[2m])",
|
||||
name: "[[job]]-[[index]] [[name]]",
|
||||
node: document.querySelector("#synapse_cache_ratio"),
|
||||
expr: "rate(synapse_util_caches_cache:total[2m]) * 100",
|
||||
name: "[[name]]",
|
||||
min: 0,
|
||||
max: 100,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yUnits: "rps",
|
||||
yTitle: "Cache request rate"
|
||||
yUnits: "%",
|
||||
yTitle: "Percentage"
|
||||
})
|
||||
</script>
|
||||
|
||||
@@ -190,7 +191,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_cache_size"),
|
||||
expr: "synapse_util_caches_cache:size",
|
||||
name: "[[job]]-[[index]] [[name]]",
|
||||
name: "[[name]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yUnits: "",
|
||||
@@ -205,8 +206,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
||||
expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
|
||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
||||
expr: "rate(synapse_http_server_request_count:servlet[2m])",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -218,8 +219,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
||||
expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
||||
expr: "rate(synapse_http_server_request_count:servlet{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -232,8 +233,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
||||
expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
expr: "rate(synapse_http_server_response_time_seconds[2m]) / rate(synapse_http_server_response_count[2m]) / 1000",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/req",
|
||||
@@ -276,7 +277,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
||||
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/s",
|
||||
@@ -291,7 +292,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
||||
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/s",
|
||||
@@ -305,8 +306,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
||||
expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
expr: "rate(synapse_http_server_response_time_second{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m]) / 1000",
|
||||
name: "[[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/req",
|
||||
@@ -322,7 +323,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_federation_client_sent"),
|
||||
expr: "rate(synapse_federation_client_sent[2m])",
|
||||
name: "[[job]]-[[index]] [[type]]",
|
||||
name: "[[type]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -336,7 +337,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_federation_server_received"),
|
||||
expr: "rate(synapse_federation_server_received[2m])",
|
||||
name: "[[job]]-[[index]] [[type]]",
|
||||
name: "[[type]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -366,7 +367,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_notifier_listeners"),
|
||||
expr: "synapse_notifier_listeners",
|
||||
name: "[[job]]-[[index]]",
|
||||
name: "listeners",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -381,7 +382,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_notifier_notified_events"),
|
||||
expr: "rate(synapse_notifier_notified_events[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
name: "events",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "events/s",
|
||||
|
||||
@@ -58,21 +58,3 @@ groups:
|
||||
labels:
|
||||
type: "PDU"
|
||||
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
||||
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
|
||||
labels:
|
||||
type: remote
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: local
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: bridges
|
||||
- record: synapse_storage_events_persisted_by_event_type
|
||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
|
||||
- record: synapse_storage_events_persisted_by_origin
|
||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
# this script will use the api:
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
DOMAIN=yourserver.tld
|
||||
# add this user as admin in your home server:
|
||||
|
||||
10
debian/build_virtualenv
vendored
10
debian/build_virtualenv
vendored
@@ -33,13 +33,11 @@ esac
|
||||
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
||||
# than the 2/3 compatible `virtualenv`.
|
||||
|
||||
# Pin pip to 20.3.4 to fix breakage in 21.0 on py3.5 (xenial)
|
||||
|
||||
dh_virtualenv \
|
||||
--install-suffix "matrix-synapse" \
|
||||
--builtin-venv \
|
||||
--python "$SNAKE" \
|
||||
--upgrade-pip-to="20.3.4" \
|
||||
--upgrade-pip \
|
||||
--preinstall="lxml" \
|
||||
--preinstall="mock" \
|
||||
--extra-pip-arg="--no-cache-dir" \
|
||||
@@ -58,10 +56,10 @@ trap "rm -r $tmpdir" EXIT
|
||||
cp -r tests "$tmpdir"
|
||||
|
||||
PYTHONPATH="$tmpdir" \
|
||||
"${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
|
||||
"${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
|
||||
|
||||
# build the config file
|
||||
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
|
||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
|
||||
--config-dir="/etc/matrix-synapse" \
|
||||
--data-dir="/var/lib/matrix-synapse" |
|
||||
perl -pe '
|
||||
@@ -87,7 +85,7 @@ PYTHONPATH="$tmpdir" \
|
||||
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
||||
|
||||
# build the log config file
|
||||
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
||||
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
||||
|
||||
# add a dependency on the right version of python to substvars.
|
||||
|
||||
77
debian/changelog
vendored
77
debian/changelog
vendored
@@ -1,80 +1,3 @@
|
||||
matrix-synapse-py3 (1.30.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.30.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 26 Mar 2021 12:01:28 +0000
|
||||
|
||||
matrix-synapse-py3 (1.30.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.30.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 22 Mar 2021 13:15:34 +0000
|
||||
|
||||
matrix-synapse-py3 (1.29.0) stable; urgency=medium
|
||||
|
||||
[ Jonathan de Jong ]
|
||||
* Remove the python -B flag (don't generate bytecode) in scripts and documentation.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.29.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 08 Mar 2021 13:51:50 +0000
|
||||
|
||||
matrix-synapse-py3 (1.28.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.28.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Feb 2021 10:21:57 +0000
|
||||
|
||||
matrix-synapse-py3 (1.27.0) stable; urgency=medium
|
||||
|
||||
[ Dan Callahan ]
|
||||
* Fix build on Ubuntu 16.04 LTS (Xenial).
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.27.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Feb 2021 13:11:28 +0000
|
||||
|
||||
matrix-synapse-py3 (1.26.0) stable; urgency=medium
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
* Remove dependency on `python3-distutils`.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.26.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 27 Jan 2021 12:43:35 -0500
|
||||
|
||||
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
||||
|
||||
[ Dan Callahan ]
|
||||
* Update dependencies to account for the removal of the transitional
|
||||
dh-systemd package from Debian Bullseye.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.25.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Jan 2021 10:14:55 +0000
|
||||
|
||||
matrix-synapse-py3 (1.24.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.24.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:14:30 +0000
|
||||
|
||||
matrix-synapse-py3 (1.23.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.23.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:40:39 +0000
|
||||
|
||||
matrix-synapse-py3 (1.23.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.23.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Nov 2020 11:41:28 +0000
|
||||
|
||||
matrix-synapse-py3 (1.22.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.22.1.
|
||||
|
||||
7
debian/control
vendored
7
debian/control
vendored
@@ -3,11 +3,9 @@ Section: contrib/python
|
||||
Priority: extra
|
||||
Maintainer: Synapse Packaging team <packages@matrix.org>
|
||||
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
|
||||
# TODO: Remove the dependency on dh-systemd after dropping support for Ubuntu xenial
|
||||
# On all other supported releases, it's merely a transitional package which
|
||||
# does nothing but depends on debhelper (> 9.20160709)
|
||||
Build-Depends:
|
||||
debhelper (>= 9.20160709) | dh-systemd,
|
||||
debhelper (>= 9),
|
||||
dh-systemd,
|
||||
dh-virtualenv (>= 1.1),
|
||||
libsystemd-dev,
|
||||
libpq-dev,
|
||||
@@ -31,6 +29,7 @@ Pre-Depends: dpkg (>= 1.16.1)
|
||||
Depends:
|
||||
adduser,
|
||||
debconf,
|
||||
python3-distutils|libpython3-stdlib (<< 3.6),
|
||||
${misc:Depends},
|
||||
${shlibs:Depends},
|
||||
${synapse:pydepends},
|
||||
|
||||
2
debian/synctl.1
vendored
2
debian/synctl.1
vendored
@@ -44,7 +44,7 @@ Configuration file may be generated as follows:
|
||||
.
|
||||
.nf
|
||||
|
||||
$ python \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
|
||||
$ python \-B \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
|
||||
.
|
||||
.fi
|
||||
.
|
||||
|
||||
2
debian/synctl.ronn
vendored
2
debian/synctl.ronn
vendored
@@ -41,7 +41,7 @@ process.
|
||||
|
||||
Configuration file may be generated as follows:
|
||||
|
||||
$ python -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
|
||||
$ python -B -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
|
||||
|
||||
## ENVIRONMENT
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
DIR="$( cd "$( dirname "$0" )" && pwd )"
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
DIR="$( cd "$( dirname "$0" )" && pwd )"
|
||||
|
||||
|
||||
59
demo/webserver.py
Normal file
59
demo/webserver.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import argparse
|
||||
import BaseHTTPServer
|
||||
import os
|
||||
import SimpleHTTPServer
|
||||
import cgi, logging
|
||||
|
||||
from daemonize import Daemonize
|
||||
|
||||
|
||||
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||
UPLOAD_PATH = "upload"
|
||||
|
||||
"""
|
||||
Accept all post request as file upload
|
||||
"""
|
||||
|
||||
def do_POST(self):
|
||||
|
||||
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
||||
length = self.headers["content-length"]
|
||||
data = self.rfile.read(int(length))
|
||||
|
||||
with open(path, "wb") as fh:
|
||||
fh.write(data)
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "application/json")
|
||||
self.end_headers()
|
||||
|
||||
# Return the absolute path of the uploaded file
|
||||
self.wfile.write('{"url":"/%s"}' % path)
|
||||
|
||||
|
||||
def setup():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("directory")
|
||||
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
||||
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Get absolute path to directory to serve, as daemonize changes to '/'
|
||||
os.chdir(args.directory)
|
||||
dr = os.getcwd()
|
||||
|
||||
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)
|
||||
|
||||
def run():
|
||||
os.chdir(dr)
|
||||
httpd.serve_forever()
|
||||
|
||||
daemon = Daemonize(
|
||||
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
|
||||
)
|
||||
|
||||
daemon.start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
setup()
|
||||
@@ -11,18 +11,13 @@
|
||||
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
||||
#
|
||||
|
||||
ARG PYTHON_VERSION=3.8
|
||||
ARG PYTHON_VERSION=3.7
|
||||
|
||||
###
|
||||
### Stage 0: builder
|
||||
###
|
||||
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
|
||||
|
||||
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
|
||||
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
|
||||
LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
|
||||
LABEL org.opencontainers.image.licenses='Apache-2.0'
|
||||
|
||||
# install the OS build deps
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
@@ -33,32 +28,30 @@ RUN apt-get update && apt-get install -y \
|
||||
libwebp-dev \
|
||||
libxml++2.6-dev \
|
||||
libxslt1-dev \
|
||||
openssl \
|
||||
rustc \
|
||||
zlib1g-dev \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Copy just what we need to pip install
|
||||
# Build dependencies that are not available as wheels, to speed up rebuilds
|
||||
RUN pip install --prefix="/install" --no-warn-script-location \
|
||||
frozendict \
|
||||
jaeger-client \
|
||||
opentracing \
|
||||
prometheus-client \
|
||||
psycopg2 \
|
||||
pycparser \
|
||||
pyrsistent \
|
||||
pyyaml \
|
||||
simplejson \
|
||||
threadloop \
|
||||
thrift
|
||||
|
||||
# now install synapse and all of the python deps to /install.
|
||||
COPY synapse /synapse/synapse/
|
||||
COPY scripts /synapse/scripts/
|
||||
COPY MANIFEST.in README.rst setup.py synctl /synapse/
|
||||
COPY synapse/__init__.py /synapse/synapse/__init__.py
|
||||
COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
|
||||
|
||||
# To speed up rebuilds, install all of the dependencies before we copy over
|
||||
# the whole synapse project so that we this layer in the Docker cache can be
|
||||
# used while you develop on the source
|
||||
#
|
||||
# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
|
||||
RUN pip install --prefix="/install" --no-warn-script-location \
|
||||
/synapse[all]
|
||||
|
||||
# Copy over the rest of the project
|
||||
COPY synapse /synapse/synapse/
|
||||
|
||||
# Install the synapse package itself and all of its children packages.
|
||||
#
|
||||
# This is aiming at installing only the `packages=find_packages(...)` from `setup.py
|
||||
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
|
||||
/synapse[all]
|
||||
|
||||
###
|
||||
### Stage 1: runtime
|
||||
@@ -73,10 +66,7 @@ RUN apt-get update && apt-get install -y \
|
||||
libpq5 \
|
||||
libwebp6 \
|
||||
xmlsec1 \
|
||||
libjemalloc2 \
|
||||
libssl-dev \
|
||||
openssl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=builder /install /usr/local
|
||||
COPY ./docker/start.py /start.py
|
||||
@@ -89,4 +79,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp
|
||||
ENTRYPOINT ["/start.py"]
|
||||
|
||||
HEALTHCHECK --interval=1m --timeout=5s \
|
||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
||||
|
||||
@@ -27,7 +27,6 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
wget
|
||||
|
||||
# fetch and unpack the package
|
||||
# TODO: Upgrade to 1.2.2 once xenial is dropped
|
||||
RUN mkdir /dh-virtualenv
|
||||
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
|
||||
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
|
||||
@@ -51,22 +50,17 @@ FROM ${distro}
|
||||
ARG distro=""
|
||||
ENV distro ${distro}
|
||||
|
||||
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
|
||||
# http://bugs.python.org/issue19846
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
# Install the build dependencies
|
||||
#
|
||||
# NB: keep this list in sync with the list of build-deps in debian/control
|
||||
# TODO: it would be nice to do that automatically.
|
||||
# TODO: Remove the dh-systemd stanza after dropping support for Ubuntu xenial
|
||||
# it's a transitional package on all other, more recent releases
|
||||
RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
||||
build-essential \
|
||||
debhelper \
|
||||
devscripts \
|
||||
dh-systemd \
|
||||
libsystemd-dev \
|
||||
lsb-release \
|
||||
pkg-config \
|
||||
@@ -75,11 +69,7 @@ RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
python3-setuptools \
|
||||
python3-venv \
|
||||
sqlite3 \
|
||||
libpq-dev \
|
||||
xmlsec1 \
|
||||
&& ( env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
||||
dh-systemd || true )
|
||||
libpq-dev
|
||||
|
||||
COPY --from=builder /dh-virtualenv_1.2~dev-1_all.deb /
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ The image also does *not* provide a TURN server.

By default, the image expects a single volume, located at ``/data``, that will hold:

* configuration files;
* temporary files during uploads;
* uploaded media and thumbnails;
* the SQLite database if you do not configure postgres;
* the appservices configuration.
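
For example, a minimal Docker Compose service mounting such a volume might look
like the sketch below (the image tag, host path and published port are
illustrative assumptions; adjust them for your deployment):

```yaml
version: "3"

services:
  synapse:
    image: matrixdotorg/synapse:latest
    restart: unless-stopped
    volumes:
      # single volume holding config, media, uploads and (optionally) the SQLite DB
      - ./synapse-data:/data
    ports:
      - "8008:8008"
```
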
@@ -204,8 +205,3 @@ healthcheck:
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
```
|
||||
|
||||
## Using jemalloc
|
||||
|
||||
Jemalloc is embedded in the image and will be used instead of the default allocator.
|
||||
You can read about jemalloc by reading the Synapse [README](../README.md)
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
# The script to build the Debian package, as ran inside the Docker image.
|
||||
|
||||
|
||||
@@ -89,6 +89,7 @@ federation_rc_concurrent: 3
|
||||
## Files ##
|
||||
|
||||
media_store_path: "/data/media"
|
||||
uploads_path: "/data/uploads"
|
||||
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
|
||||
max_image_pixels: "32M"
|
||||
dynamic_thumbnails: false
|
||||
@@ -173,10 +174,18 @@ report_stats: False
|
||||
|
||||
## API Configuration ##
|
||||
|
||||
room_invite_state_types:
|
||||
- "m.room.join_rules"
|
||||
- "m.room.canonical_alias"
|
||||
- "m.room.avatar"
|
||||
- "m.room.name"
|
||||
|
||||
{% if SYNAPSE_APPSERVICES %}
|
||||
app_service_config_files:
|
||||
{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
|
||||
{% endfor %}
|
||||
{% else %}
|
||||
app_service_config_files: []
|
||||
{% endif %}
|
||||
|
||||
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"
|
||||
@@ -189,10 +198,12 @@ old_signing_keys: {}
|
||||
key_refresh_interval: "1d" # 1 Day.
|
||||
|
||||
# The trusted servers to download signing keys from.
|
||||
trusted_key_servers:
|
||||
- server_name: matrix.org
|
||||
verify_keys:
|
||||
"ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||
perspectives:
|
||||
servers:
|
||||
"matrix.org":
|
||||
verify_keys:
|
||||
"ed25519:auto":
|
||||
key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||
|
||||
password_config:
|
||||
enabled: true
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
#!/bin/bash
|
||||
|
||||
# This script runs the PostgreSQL tests inside a Docker container. It expects
|
||||
# the relevant source files to be mounted into /src (done automatically by the
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import codecs
|
||||
import glob
|
||||
import os
|
||||
import platform
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
@@ -214,13 +213,6 @@ def main(args, environ):
|
||||
if "-m" not in args:
|
||||
args = ["-m", synapse_worker] + args
|
||||
|
||||
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
|
||||
|
||||
if os.path.isfile(jemallocpath):
|
||||
environ["LD_PRELOAD"] = jemallocpath
|
||||
else:
|
||||
log("Could not find %s, will not use" % (jemallocpath,))
|
||||
|
||||
# if there are no config files passed to synapse, try adding the default file
|
||||
if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
@@ -256,9 +248,9 @@ running with 'migrate_config'. See the README for more details.
|
||||
args = ["python"] + args
|
||||
if ownership is not None:
|
||||
args = ["gosu", ownership] + args
|
||||
os.execve("/usr/sbin/gosu", args, environ)
|
||||
os.execv("/usr/sbin/gosu", args)
|
||||
else:
|
||||
os.execve("/usr/local/bin/python", args, environ)
|
||||
os.execv("/usr/local/bin/python", args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,172 +0,0 @@
|
||||
# Show reported events
|
||||
|
||||
This API returns information about reported events.
|
||||
|
||||
The api is:
|
||||
```
|
||||
GET /_synapse/admin/v1/event_reports?from=0&limit=10
|
||||
```
|
||||
To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"event_reports": [
|
||||
{
|
||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
||||
"id": 2,
|
||||
"reason": "foo",
|
||||
"score": -100,
|
||||
"received_ts": 1570897107409,
|
||||
"canonical_alias": "#alias1:matrix.org",
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@foo:matrix.org"
|
||||
},
|
||||
{
|
||||
"event_id": "$3IcdZsDaN_En-S1DF4EMCy3v4gNRKeOJs8W5qTOKj4I",
|
||||
"id": 3,
|
||||
"reason": "bar",
|
||||
"score": -100,
|
||||
"received_ts": 1598889612059,
|
||||
"canonical_alias": "#alias2:matrix.org",
|
||||
"room_id": "!eGvUQuTCkHGVwNMOjv:matrix.org",
|
||||
"name": "Your room name here",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@bar:matrix.org"
|
||||
}
|
||||
],
|
||||
"next_token": 2,
|
||||
"total": 4
|
||||
}
|
||||
```
|
||||
|
||||
To paginate, check for `next_token` and if present, call the endpoint again with `from`
|
||||
set to the value of `next_token`. This will return a new page.
|
||||
|
||||
If the endpoint does not return a `next_token` then there are no more reports to
|
||||
paginate through.
|
||||
|
||||
**URL parameters:**
|
||||
|
||||
* `limit`: integer - Is optional but is used for pagination, denoting the maximum number
|
||||
of items to return in this call. Defaults to `100`.
|
||||
* `from`: integer - Is optional but used for pagination, denoting the offset in the
|
||||
returned results. This should be treated as an opaque value and not explicitly set to
|
||||
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
|
||||
* `dir`: string - Direction of event report order. Whether to fetch the most recent
|
||||
first (`b`) or the oldest first (`f`). Defaults to `b`.
|
||||
* `user_id`: string - Is optional and filters to only return users with user IDs that
|
||||
contain this value. This is the user who reported the event and wrote the reason.
|
||||
* `room_id`: string - Is optional and filters to only return rooms with room IDs that
|
||||
contain this value.
|
||||
|
||||
**Response**
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `id`: integer - ID of event report.
|
||||
* `received_ts`: integer - The timestamp (in milliseconds since the unix epoch) when this
|
||||
report was sent.
|
||||
* `room_id`: string - The ID of the room in which the event being reported is located.
|
||||
* `name`: string - The name of the room.
|
||||
* `event_id`: string - The ID of the reported event.
|
||||
* `user_id`: string - This is the user who reported the event and wrote the reason.
|
||||
* `reason`: string - Comment made by the `user_id` in this report. May be blank.
|
||||
* `score`: integer - Content is reported based upon a negative score, where -100 is
|
||||
"most offensive" and 0 is "inoffensive".
|
||||
* `sender`: string - This is the ID of the user who sent the original message/event that
|
||||
was reported.
|
||||
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
|
||||
have a canonical alias set.
|
||||
* `next_token`: integer - Indication for pagination. See above.
|
||||
* `total`: integer - Total number of event reports related to the query
|
||||
(`user_id` and `room_id`).
|
||||
|
||||
# Show details of a specific event report
|
||||
|
||||
This API returns information about a specific event report.
|
||||
|
||||
The api is:
|
||||
```
|
||||
GET /_synapse/admin/v1/event_reports/<report_id>
|
||||
```
|
||||
To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
||||
"event_json": {
|
||||
"auth_events": [
|
||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M",
|
||||
"$oggsNXxzPFRE3y53SUNd7nsj69-QzKv03a1RucHu-ws"
|
||||
],
|
||||
"content": {
|
||||
"body": "matrix.org: This Week in Matrix",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": "<strong>matrix.org</strong>:<br><a href=\"https://matrix.org/blog/\"><strong>This Week in Matrix</strong></a>",
|
||||
"msgtype": "m.notice"
|
||||
},
|
||||
"depth": 546,
|
||||
"hashes": {
|
||||
"sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
|
||||
},
|
||||
"origin": "matrix.org",
|
||||
"origin_server_ts": 1592291711430,
|
||||
"prev_events": [
|
||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
|
||||
],
|
||||
"prev_state": [],
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"signatures": {
|
||||
"matrix.org": {
|
||||
"ed25519:a_JaEG": "cs+OUKW/iHx5pEidbWxh0UiNNHwe46Ai9LwNz+Ah16aWDNszVIe2gaAcVZfvNsBhakQTew51tlKmL2kspXk/Dg"
|
||||
}
|
||||
},
|
||||
"type": "m.room.message",
|
||||
"unsigned": {
|
||||
"age_ts": 1592291711430,
|
||||
}
|
||||
},
|
||||
"id": <report_id>,
|
||||
"reason": "foo",
|
||||
"score": -100,
|
||||
"received_ts": 1570897107409,
|
||||
"canonical_alias": "#alias1:matrix.org",
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@foo:matrix.org"
|
||||
}
|
||||
```
|
||||
|
||||
**URL parameters:**
|
||||
|
||||
* `report_id`: string - The ID of the event report.
|
||||
|
||||
**Response**
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `id`: integer - ID of event report.
|
||||
* `received_ts`: integer - The timestamp (in milliseconds since the unix epoch) when this
|
||||
report was sent.
|
||||
* `room_id`: string - The ID of the room in which the event being reported is located.
|
||||
* `name`: string - The name of the room.
|
||||
* `event_id`: string - The ID of the reported event.
|
||||
* `user_id`: string - This is the user who reported the event and wrote the reason.
|
||||
* `reason`: string - Comment made by the `user_id` in this report. May be blank.
|
||||
* `score`: integer - Content is reported based upon a negative score, where -100 is
|
||||
"most offensive" and 0 is "inoffensive".
|
||||
* `sender`: string - This is the ID of the user who sent the original message/event that
|
||||
was reported.
|
||||
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
|
||||
have a canonical alias set.
|
||||
* `event_json`: object - Details of the original event that was reported.
|
||||
docs/admin_api/event_reports.rst (Normal file, 165 lines)
@@ -0,0 +1,165 @@
|
||||
Show reported events
|
||||
====================
|
||||
|
||||
This API returns information about reported events.
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_synapse/admin/v1/event_reports?from=0&limit=10
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
server admin: see `README.rst <README.rst>`_.
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
.. code:: jsonc
|
||||
|
||||
{
|
||||
"event_reports": [
|
||||
{
|
||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
||||
"id": 2,
|
||||
"reason": "foo",
|
||||
"score": -100,
|
||||
"received_ts": 1570897107409,
|
||||
"canonical_alias": "#alias1:matrix.org",
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@foo:matrix.org"
|
||||
},
|
||||
{
|
||||
"event_id": "$3IcdZsDaN_En-S1DF4EMCy3v4gNRKeOJs8W5qTOKj4I",
|
||||
"id": 3,
|
||||
"reason": "bar",
|
||||
"score": -100,
|
||||
"received_ts": 1598889612059,
|
||||
"canonical_alias": "#alias2:matrix.org",
|
||||
"room_id": "!eGvUQuTCkHGVwNMOjv:matrix.org",
|
||||
"name": "Your room name here",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@bar:matrix.org"
|
||||
}
|
||||
],
|
||||
"next_token": 2,
|
||||
"total": 4
|
||||
}
|
||||
|
||||
To paginate, check for ``next_token`` and if present, call the endpoint again
|
||||
with ``from`` set to the value of ``next_token``. This will return a new page.
|
||||
|
||||
If the endpoint does not return a ``next_token`` then there are no more
|
||||
reports to paginate through.
|
||||
|
||||
**URL parameters:**
|
||||
|
||||
- ``limit``: integer - Is optional but is used for pagination,
|
||||
denoting the maximum number of items to return in this call. Defaults to ``100``.
|
||||
- ``from``: integer - Is optional but used for pagination,
|
||||
denoting the offset in the returned results. This should be treated as an opaque value and
|
||||
not explicitly set to anything other than the return value of ``next_token`` from a previous call.
|
||||
Defaults to ``0``.
|
||||
- ``dir``: string - Direction of event report order. Whether to fetch the most recent first (``b``) or the
|
||||
oldest first (``f``). Defaults to ``b``.
|
||||
- ``user_id``: string - Is optional and filters to only return users with user IDs that contain this value.
|
||||
This is the user who reported the event and wrote the reason.
|
||||
- ``room_id``: string - Is optional and filters to only return rooms with room IDs that contain this value.
|
||||
|
||||
**Response**
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
- ``id``: integer - ID of event report.
|
||||
- ``received_ts``: integer - The timestamp (in milliseconds since the unix epoch) when this report was sent.
|
||||
- ``room_id``: string - The ID of the room in which the event being reported is located.
|
||||
- ``name``: string - The name of the room.
|
||||
- ``event_id``: string - The ID of the reported event.
|
||||
- ``user_id``: string - This is the user who reported the event and wrote the reason.
|
||||
- ``reason``: string - Comment made by the ``user_id`` in this report. May be blank.
|
||||
- ``score``: integer - Content is reported based upon a negative score, where -100 is "most offensive" and 0 is "inoffensive".
|
||||
- ``sender``: string - This is the ID of the user who sent the original message/event that was reported.
|
||||
- ``canonical_alias``: string - The canonical alias of the room. ``null`` if the room does not have a canonical alias set.
|
||||
- ``next_token``: integer - Indication for pagination. See above.
|
||||
- ``total``: integer - Total number of event reports related to the query (``user_id`` and ``room_id``).
|
||||
|
||||
Show details of a specific event report
|
||||
=======================================
|
||||
|
||||
This API returns information about a specific event report.
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_synapse/admin/v1/event_reports/<report_id>
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
server admin: see `README.rst <README.rst>`_.
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
.. code:: jsonc
|
||||
|
||||
{
|
||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
||||
"event_json": {
|
||||
"auth_events": [
|
||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M",
|
||||
"$oggsNXxzPFRE3y53SUNd7nsj69-QzKv03a1RucHu-ws"
|
||||
],
|
||||
"content": {
|
||||
"body": "matrix.org: This Week in Matrix",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": "<strong>matrix.org</strong>:<br><a href=\"https://matrix.org/blog/\"><strong>This Week in Matrix</strong></a>",
|
||||
"msgtype": "m.notice"
|
||||
},
|
||||
"depth": 546,
|
||||
"hashes": {
|
||||
"sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
|
||||
},
|
||||
"origin": "matrix.org",
|
||||
"origin_server_ts": 1592291711430,
|
||||
"prev_events": [
|
||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
|
||||
],
|
||||
"prev_state": [],
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"signatures": {
|
||||
"matrix.org": {
|
||||
"ed25519:a_JaEG": "cs+OUKW/iHx5pEidbWxh0UiNNHwe46Ai9LwNz+Ah16aWDNszVIe2gaAcVZfvNsBhakQTew51tlKmL2kspXk/Dg"
|
||||
}
|
||||
},
|
||||
"type": "m.room.message",
|
||||
"unsigned": {
|
||||
"age_ts": 1592291711430,
|
||||
}
|
||||
},
|
||||
"id": <report_id>,
|
||||
"reason": "foo",
|
||||
"score": -100,
|
||||
"received_ts": 1570897107409,
|
||||
"canonical_alias": "#alias1:matrix.org",
|
||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"sender": "@foobar:matrix.org",
|
||||
"user_id": "@foo:matrix.org"
|
||||
}
|
||||
|
||||
**URL parameters:**
|
||||
|
||||
- ``report_id``: string - The ID of the event report.
|
||||
|
||||
**Response**
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
- ``id``: integer - ID of event report.
|
||||
- ``received_ts``: integer - The timestamp (in milliseconds since the unix epoch) when this report was sent.
|
||||
- ``room_id``: string - The ID of the room in which the event being reported is located.
|
||||
- ``name``: string - The name of the room.
|
||||
- ``event_id``: string - The ID of the reported event.
|
||||
- ``user_id``: string - This is the user who reported the event and wrote the reason.
|
||||
- ``reason``: string - Comment made by the ``user_id`` in this report. May be blank.
|
||||
- ``score``: integer - Content is reported based upon a negative score, where -100 is "most offensive" and 0 is "inoffensive".
|
||||
- ``sender``: string - This is the ID of the user who sent the original message/event that was reported.
|
||||
- ``canonical_alias``: string - The canonical alias of the room. ``null`` if the room does not have a canonical alias set.
|
||||
- ``event_json``: object - Details of the original event that was reported.
|
||||
@@ -1,25 +1,6 @@
|
||||
# Contents
|
||||
- [Querying media](#querying-media)
|
||||
* [List all media in a room](#list-all-media-in-a-room)
|
||||
* [List all media uploaded by a user](#list-all-media-uploaded-by-a-user)
|
||||
- [Quarantine media](#quarantine-media)
|
||||
* [Quarantining media by ID](#quarantining-media-by-id)
|
||||
* [Quarantining media in a room](#quarantining-media-in-a-room)
|
||||
* [Quarantining all media of a user](#quarantining-all-media-of-a-user)
|
||||
* [Protecting media from being quarantined](#protecting-media-from-being-quarantined)
|
||||
- [Delete local media](#delete-local-media)
|
||||
* [Delete a specific local media](#delete-a-specific-local-media)
|
||||
* [Delete local media by date or size](#delete-local-media-by-date-or-size)
|
||||
- [Purge Remote Media API](#purge-remote-media-api)
|
||||
|
||||
# Querying media
|
||||
|
||||
These APIs allow extracting media information from the homeserver.
|
||||
|
||||
## List all media in a room
|
||||
# List all media in a room
|
||||
|
||||
This API gets a list of known media in a room.
|
||||
However, it only shows media from unencrypted events or rooms.
|
||||
|
||||
The API is:
|
||||
```
|
||||
@@ -29,25 +10,19 @@ To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
The API returns a JSON body like the following:
|
||||
```json
|
||||
```
|
||||
{
|
||||
"local": [
|
||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"remote": [
|
||||
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
||||
]
|
||||
"local": [
|
||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"remote": [
|
||||
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## List all media uploaded by a user
|
||||
|
||||
Listing all media that has been uploaded by a local user can be achieved through
|
||||
the use of the [List media of a user](user_admin_api.rst#list-media-of-a-user)
|
||||
Admin API.
|
||||
|
||||
# Quarantine media
|
||||
|
||||
Quarantining media means that it is marked as inaccessible by users. It applies
|
||||
@@ -72,7 +47,7 @@ form of `abcdefg12345...`.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{}
|
||||
```
|
||||
|
||||
@@ -92,18 +67,14 @@ Where `room_id` is in the form of `!roomid12345:example.org`.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"num_quarantined": 10
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `num_quarantined`: integer - The number of media items successfully quarantined
|
||||
|
||||
Note that there is a legacy endpoint, `POST
|
||||
/_synapse/admin/v1/quarantine_media/<room_id>`, that operates the same.
|
||||
/_synapse/admin/v1/quarantine_media/<room_id >`, that operates the same.
|
||||
However, it is deprecated and may be removed in a future release.
|
||||
|
||||
## Quarantining all media of a user
|
||||
@@ -120,52 +91,23 @@ POST /_synapse/admin/v1/user/<user_id>/media/quarantine
|
||||
{}
|
||||
```
|
||||
|
||||
URL Parameters
|
||||
|
||||
* `user_id`: string - User ID in the form of `@bob:example.org`
|
||||
Where `user_id` is in the form of `@bob:example.org`.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"num_quarantined": 10
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `num_quarantined`: integer - The number of media items successfully quarantined
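As an illustration, a hedged Python sketch of this call (the homeserver URL and token are placeholders, and the `requests` library is assumed to be available):

```python
import requests

BASE = "https://homeserver.example"  # placeholder homeserver URL
TOKEN = "YOUR_ADMIN_TOKEN"           # placeholder admin access token


def quarantine_user_media(user_id):
    """Quarantine all media uploaded by a local user, e.g. '@bob:example.org'."""
    resp = requests.post(
        f"{BASE}/_synapse/admin/v1/user/{user_id}/media/quarantine",
        headers={"Authorization": f"Bearer {TOKEN}"},
        json={},  # the API takes an empty JSON body
    )
    resp.raise_for_status()
    return resp.json()["num_quarantined"]
```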
|
||||
|
||||
## Protecting media from being quarantined
|
||||
|
||||
This API protects a single piece of local media from being quarantined using the
|
||||
above APIs. This is useful for sticker packs and other shared media which you do
|
||||
not want to get quarantined, especially when
|
||||
[quarantining media in a room](#quarantining-media-in-a-room).
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/media/protect/<media_id>
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Where `media_id` is in the form of `abcdefg12345...`.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
# Delete local media
|
||||
This API deletes the *local* media from the disk of your own server.
|
||||
This includes any local thumbnails and copies of media downloaded from
|
||||
remote homeservers.
|
||||
This API will not affect media that has been uploaded to external
|
||||
media repositories (e.g. https://github.com/turt2live/matrix-media-repo/).
|
||||
See also [Purge Remote Media API](#purge-remote-media-api).
|
||||
See also [purge_remote_media.rst](purge_remote_media.rst).
|
||||
|
||||
## Delete a specific local media
|
||||
Delete a specific `media_id`.
|
||||
@@ -186,12 +128,12 @@ URL Parameters
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
@@ -224,51 +166,16 @@ If `false` these files will be deleted. Defaults to `true`.
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx",
|
||||
"abcdefghijklmnopqrstuvwz"
|
||||
],
|
||||
"total": 2
|
||||
}
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx",
|
||||
"abcdefghijklmnopqrstuvwz"
|
||||
],
|
||||
"total": 2
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `deleted_media`: an array of strings - List of deleted `media_id`
|
||||
* `total`: integer - Total number of deleted `media_id`
|
||||
|
||||
# Purge Remote Media API
|
||||
|
||||
The purge remote media API allows server admins to purge old cached remote media.
|
||||
|
||||
The API is:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
URL Parameters
|
||||
|
||||
* `unix_timestamp_in_ms`: string representing a positive integer - Unix timestamp in ms.
|
||||
All cached media that was last accessed before this timestamp will be removed.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 10
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `deleted`: integer - The number of media items successfully deleted
|
||||
|
||||
To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
If the user re-requests purged remote media, synapse will re-request the media
|
||||
from the originating server.
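To make the `before_ts` handling concrete, here is a small sketch (placeholders for the homeserver and token; the 30-day cut-off is only an example):

```python
import time

import requests

BASE = "https://homeserver.example"  # placeholder homeserver URL
TOKEN = "YOUR_ADMIN_TOKEN"           # placeholder admin access token

# Purge cached remote media last accessed more than 30 days ago (example cut-off).
before_ts = int((time.time() - 30 * 24 * 60 * 60) * 1000)  # milliseconds

resp = requests.post(
    f"{BASE}/_synapse/admin/v1/purge_media_cache",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={"before_ts": before_ts},
    json={},
)
resp.raise_for_status()
print("deleted:", resp.json()["deleted"])
```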
|
||||
|
||||
docs/admin_api/purge_remote_media.rst (Normal file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
Purge Remote Media API
|
||||
======================
|
||||
|
||||
The purge remote media API allows server admins to purge old cached remote
|
||||
media.
|
||||
|
||||
The API is::
|
||||
|
||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>
|
||||
|
||||
{}
|
||||
|
||||
\... which will remove all cached media that was last accessed before
|
||||
``<unix_timestamp_in_ms>``.
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
server admin: see `README.rst <README.rst>`_.
|
||||
|
||||
If the user re-requests purged remote media, synapse will re-request the media
|
||||
from the originating server.
|
||||
@@ -1,13 +1,12 @@
|
||||
Deprecated: Purge room API
|
||||
==========================
|
||||
|
||||
**The old Purge room API is deprecated and will be removed in a future release.
|
||||
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**
|
||||
Purge room API
|
||||
==============
|
||||
|
||||
This API will remove all trace of a room from your database.
|
||||
|
||||
All local users must have left the room before it can be removed.
|
||||
|
||||
See also: [Delete Room API](rooms.md#delete-room-api)
|
||||
|
||||
The API is:
|
||||
|
||||
```
|
||||
|
||||
@@ -18,8 +18,7 @@ To fetch the nonce, you need to request one from the API::
|
||||
|
||||
Once you have the nonce, you can make a ``POST`` to the same URL with a JSON
|
||||
body containing the nonce, username, password, whether they are an admin
|
||||
(optional, False by default), and a HMAC digest of the content. Also you can
|
||||
set the displayname (optional, ``username`` by default).
|
||||
(optional, False by default), and a HMAC digest of the content.
|
||||
|
||||
As an example::
|
||||
|
||||
@@ -27,7 +26,6 @@ As an example::
|
||||
> {
|
||||
"nonce": "thisisanonce",
|
||||
"username": "pepper_roni",
|
||||
"displayname": "Pepper Roni",
|
||||
"password": "pizza",
|
||||
"admin": true,
|
||||
"mac": "mac_digest_here"
|
||||
|
||||
@@ -1,17 +1,3 @@
|
||||
# Contents
|
||||
- [List Room API](#list-room-api)
|
||||
* [Parameters](#parameters)
|
||||
* [Usage](#usage)
|
||||
- [Room Details API](#room-details-api)
|
||||
- [Room Members API](#room-members-api)
|
||||
- [Delete Room API](#delete-room-api)
|
||||
* [Parameters](#parameters-1)
|
||||
* [Response](#response)
|
||||
* [Undoing room shutdowns](#undoing-room-shutdowns)
|
||||
- [Make Room Admin API](#make-room-admin-api)
|
||||
- [Forward Extremities Admin API](#forward-extremities-admin-api)
|
||||
- [Event Context API](#event-context-api)
|
||||
|
||||
# List Room API
|
||||
|
||||
The List Room admin API allows server admins to get a list of rooms on their
|
||||
@@ -90,7 +76,7 @@ GET /_synapse/admin/v1/rooms
|
||||
|
||||
Response:
|
||||
|
||||
```jsonc
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -142,7 +128,7 @@ GET /_synapse/admin/v1/rooms?search_term=TWIM
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -177,7 +163,7 @@ GET /_synapse/admin/v1/rooms?order_by=size
|
||||
|
||||
Response:
|
||||
|
||||
```jsonc
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -233,14 +219,14 @@ GET /_synapse/admin/v1/rooms?order_by=size&from=100
|
||||
|
||||
Response:
|
||||
|
||||
```jsonc
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127,
|
||||
"joined_members": 127
|
||||
"joined_local_members": 2,
|
||||
"version": "1",
|
||||
"creator": "@foo:matrix.org",
|
||||
@@ -257,7 +243,7 @@ Response:
|
||||
"room_id": "!twcBhHVdZlQWuuxBhN:termina.org.uk",
|
||||
"name": "weechat-matrix",
|
||||
"canonical_alias": "#weechat-matrix:termina.org.uk",
|
||||
"joined_members": 137,
|
||||
"joined_members": 137
|
||||
"joined_local_members": 20,
|
||||
"version": "4",
|
||||
"creator": "@foo:termina.org.uk",
|
||||
@@ -279,10 +265,12 @@ Response:
|
||||
Once the `next_token` parameter is no longer present, we know we've reached the
|
||||
end of the list.
|
||||
|
||||
# Room Details API
|
||||
# DRAFT: Room Details API
|
||||
|
||||
The Room Details admin API allows server admins to get all details of a room.
|
||||
|
||||
This API is still a draft and details might change!
|
||||
|
||||
The following fields are possible in the JSON response body:
|
||||
|
||||
* `room_id` - The ID of the room.
|
||||
@@ -292,7 +280,6 @@ The following fields are possible in the JSON response body:
|
||||
* `canonical_alias` - The canonical (main) alias address of the room.
|
||||
* `joined_members` - How many users are currently in the room.
|
||||
* `joined_local_members` - How many local users are currently in the room.
|
||||
* `joined_local_devices` - How many local devices are currently in the room.
|
||||
* `version` - The version of the room as a string.
|
||||
* `creator` - The `user_id` of the room creator.
|
||||
* `encryption` - Algorithm of end-to-end encryption of messages. Is `null` if encryption is not active.
|
||||
@@ -315,16 +302,15 @@ GET /_synapse/admin/v1/rooms/<room_id>
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"avatar": "mxc://matrix.org/AQDaVFlbkQoErdOgqWRgiGSV",
|
||||
"topic": "Theory, Composition, Notation, Analysis",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127,
|
||||
"joined_members": 127
|
||||
"joined_local_members": 2,
|
||||
"joined_local_devices": 2,
|
||||
"version": "1",
|
||||
"creator": "@foo:matrix.org",
|
||||
"encryption": null,
|
||||
@@ -358,51 +344,23 @@ GET /_synapse/admin/v1/rooms/<room_id>/members
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
```
|
||||
{
|
||||
"members": [
|
||||
"@foo:matrix.org",
|
||||
"@bar:matrix.org",
|
||||
"@foobar:matrix.org"
|
||||
],
|
||||
"@foobar:matrix.org
|
||||
],
|
||||
"total": 3
|
||||
}
|
||||
```
|
||||
|
||||
# Room State API
|
||||
|
||||
The Room State admin API allows server admins to get a list of all state events in a room.
|
||||
|
||||
The response includes the following fields:
|
||||
|
||||
* `state` - The current state of the room at the time of request.
|
||||
|
||||
## Usage
|
||||
|
||||
A standard request:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms/<room_id>/state
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"state": [
|
||||
{"type": "m.room.create", "state_key": "", "etc": true},
|
||||
{"type": "m.room.power_levels", "state_key": "", "etc": true},
|
||||
{"type": "m.room.name", "state_key": "", "etc": true}
|
||||
]
|
||||
}
|
||||
```
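As a usage illustration, a short sketch that fetches the state and summarises it by event type (homeserver URL, token and room ID are placeholders):

```python
from collections import Counter

import requests

BASE = "https://homeserver.example"  # placeholder homeserver URL
TOKEN = "YOUR_ADMIN_TOKEN"           # placeholder admin access token
ROOM_ID = "!example:example.org"     # placeholder room ID

resp = requests.get(
    f"{BASE}/_synapse/admin/v1/rooms/{ROOM_ID}/state",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
# Count how many state events of each type the room currently has.
print(Counter(event["type"] for event in resp.json()["state"]))
```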
|
||||
|
||||
# Delete Room API
|
||||
|
||||
The Delete Room admin API allows server admins to remove rooms from server
|
||||
and block these rooms.
|
||||
It is a combination and improvement of "[Shutdown room](shutdown_room.md)"
|
||||
and "[Purge room](purge_room.md)" API.
|
||||
|
||||
Shuts down a room. Moves all local users and room aliases automatically to a
|
||||
new room if `new_room_user_id` is set. Otherwise local users only
|
||||
@@ -426,7 +384,7 @@ the new room. Users on other servers will be unaffected.
|
||||
|
||||
The API is:
|
||||
|
||||
```
|
||||
```json
|
||||
POST /_synapse/admin/v1/rooms/<room_id>/delete
|
||||
```
|
||||
|
||||
@@ -483,10 +441,6 @@ The following JSON body parameters are available:
|
||||
future attempts to join the room. Defaults to `false`.
|
||||
* `purge` - Optional. If set to `true`, it will remove all traces of the room from your database.
|
||||
Defaults to `true`.
|
||||
* `force_purge` - Optional, and ignored unless `purge` is `true`. If set to `true`, it
|
||||
will force a purge to go ahead even if there are local users still in the room. Do not
|
||||
use this unless a regular `purge` operation fails, as it could leave those users'
|
||||
clients in a confused state.
|
||||
|
||||
The JSON body must not be empty. The body must be at least `{}`.
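A hedged sketch of a delete-room call follows (placeholders throughout; the body is `{}` plus whichever of the optional parameters described above you actually want):

```python
import requests

BASE = "https://homeserver.example"  # placeholder homeserver URL
TOKEN = "YOUR_ADMIN_TOKEN"           # placeholder admin access token


def delete_room(room_id, new_room_user_id=None, block=False, purge=True):
    """Shut down a room, optionally moving local users to a new room."""
    body = {"block": block, "purge": purge}
    if new_room_user_id is not None:
        body["new_room_user_id"] = new_room_user_id
    resp = requests.post(
        f"{BASE}/_synapse/admin/v1/rooms/{room_id}/delete",
        headers={"Authorization": f"Bearer {TOKEN}"},
        json=body,  # must never be empty; at minimum {}
    )
    resp.raise_for_status()
    return resp.json()
```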
|
||||
|
||||
@@ -499,217 +453,3 @@ The following fields are returned in the JSON response body:
|
||||
* `local_aliases` - An array of strings representing the local aliases that were migrated from
|
||||
the old room to the new.
|
||||
* `new_room_id` - A string representing the room ID of the new room.
|
||||
|
||||
|
||||
## Undoing room shutdowns
|
||||
|
||||
*Note*: This guide may be outdated by the time you read it. By nature of room shutdowns being performed at the database level,
|
||||
the structure can and does change without notice.
|
||||
|
||||
First, it's important to understand that a room shutdown is very destructive. Undoing a shutdown is not as simple as pretending it
|
||||
never happened - work has to be done to move forward instead of resetting the past. In fact, in some cases it might not be possible
|
||||
to recover at all:
|
||||
|
||||
* If the room was invite-only, your users will need to be re-invited.
|
||||
* If the room no longer has any members at all, it'll be impossible to rejoin.
|
||||
* The first user to rejoin will have to do so via an alias on a different server.
|
||||
|
||||
With all that being said, if you still want to try and recover the room:
|
||||
|
||||
1. For safety reasons, shut down Synapse.
|
||||
2. In the database, run `DELETE FROM blocked_rooms WHERE room_id = '!example:example.org';`
|
||||
* For caution: it's recommended to run this in a transaction: `BEGIN; DELETE ...;`, verify you got 1 result, then `COMMIT;`.
|
||||
* The room ID is the same one supplied to the shutdown room API, not the Content Violation room.
|
||||
3. Restart Synapse.
|
||||
|
||||
You will have to manually handle, if you so choose, the following:
|
||||
|
||||
* Aliases that would have been redirected to the Content Violation room.
|
||||
* Users that would have been booted from the room (and will have been force-joined to the Content Violation room).
|
||||
* Removal of the Content Violation room if desired.
|
||||
|
||||
|
||||
# Make Room Admin API
|
||||
|
||||
Grants another user the highest power available to a local user who is in the room.
|
||||
If the user is not in the room, and it is not publicly joinable, then invite the user.
|
||||
|
||||
By default the server admin (the caller) is granted power, but another user can
|
||||
optionally be specified, e.g.:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/rooms/<room_id_or_alias>/make_room_admin
|
||||
{
|
||||
"user_id": "@foo:example.com"
|
||||
}
|
||||
```
|
||||
|
||||
# Forward Extremities Admin API
|
||||
|
||||
Enables querying and deleting forward extremities from rooms. When a lot of forward
|
||||
extremities accumulate in a room, performance can become degraded. For details, see
|
||||
[#1760](https://github.com/matrix-org/synapse/issues/1760).
|
||||
|
||||
## Check for forward extremities
|
||||
|
||||
To check the status of forward extremities for a room:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
|
||||
```
|
||||
|
||||
A response as follows will be returned:
|
||||
|
||||
```json
|
||||
{
|
||||
"count": 1,
|
||||
"results": [
|
||||
{
|
||||
"event_id": "$M5SP266vsnxctfwFgFLNceaCo3ujhRtg_NiiHabcdefgh",
|
||||
"state_group": 439,
|
||||
"depth": 123,
|
||||
"received_ts": 1611263016761
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Deleting forward extremities
|
||||
|
||||
**WARNING**: Please ensure you know what you're doing and have read
|
||||
the related issue [#1760](https://github.com/matrix-org/synapse/issues/1760).
|
||||
Under no situations should this API be executed as an automated maintenance task!
|
||||
|
||||
If a room has lots of forward extremities, the extra can be
|
||||
deleted as follows:
|
||||
|
||||
```
|
||||
DELETE /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
|
||||
```
|
||||
|
||||
A response as follows will be returned, indicating the amount of forward extremities
|
||||
that were deleted.
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 1
|
||||
}
|
||||
```
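Given the warning above, any tooling should at most inspect the extremities and leave the `DELETE` as a deliberate, manual step. A minimal sketch (placeholders for the homeserver, token and room):

```python
import requests

BASE = "https://homeserver.example"   # placeholder homeserver URL
TOKEN = "YOUR_ADMIN_TOKEN"            # placeholder admin access token
ROOM = "!example:example.org"         # placeholder room ID or alias
HEADERS = {"Authorization": f"Bearer {TOKEN}"}

# Inspect only: report how many forward extremities the room has.
resp = requests.get(
    f"{BASE}/_synapse/admin/v1/rooms/{ROOM}/forward_extremities",
    headers=HEADERS,
)
resp.raise_for_status()
print("forward extremities:", resp.json()["count"])

# The corresponding DELETE call is intentionally left commented out; run it
# by hand only after reading issue #1760 and reviewing the GET output above.
# requests.delete(
#     f"{BASE}/_synapse/admin/v1/rooms/{ROOM}/forward_extremities",
#     headers=HEADERS,
# )
```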
|
||||
|
||||
# Event Context API
|
||||
|
||||
This API lets a client find the context of an event. This is designed primarily to investigate abuse reports.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms/<room_id>/context/<event_id>
|
||||
```
|
||||
|
||||
This API mimics [GET /_matrix/client/r0/rooms/{roomId}/context/{eventId}](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-rooms-roomid-context-eventid). Please refer to the link for all details on parameters and response.
|
||||
|
||||
Example response:
|
||||
|
||||
```json
|
||||
{
|
||||
"end": "t29-57_2_0_2",
|
||||
"events_after": [
|
||||
{
|
||||
"content": {
|
||||
"body": "This is an example text message",
|
||||
"msgtype": "m.text",
|
||||
"format": "org.matrix.custom.html",
|
||||
"formatted_body": "<b>This is an example text message</b>"
|
||||
},
|
||||
"type": "m.room.message",
|
||||
"event_id": "$143273582443PhrSn:example.org",
|
||||
"room_id": "!636q39766251:example.com",
|
||||
"sender": "@example:example.org",
|
||||
"origin_server_ts": 1432735824653,
|
||||
"unsigned": {
|
||||
"age": 1234
|
||||
}
|
||||
}
|
||||
],
|
||||
"event": {
|
||||
"content": {
|
||||
"body": "filename.jpg",
|
||||
"info": {
|
||||
"h": 398,
|
||||
"w": 394,
|
||||
"mimetype": "image/jpeg",
|
||||
"size": 31037
|
||||
},
|
||||
"url": "mxc://example.org/JWEIFJgwEIhweiWJE",
|
||||
"msgtype": "m.image"
|
||||
},
|
||||
"type": "m.room.message",
|
||||
"event_id": "$f3h4d129462ha:example.com",
|
||||
"room_id": "!636q39766251:example.com",
|
||||
"sender": "@example:example.org",
|
||||
"origin_server_ts": 1432735824653,
|
||||
"unsigned": {
|
||||
"age": 1234
|
||||
}
|
||||
},
|
||||
"events_before": [
|
||||
{
|
||||
"content": {
|
||||
"body": "something-important.doc",
|
||||
"filename": "something-important.doc",
|
||||
"info": {
|
||||
"mimetype": "application/msword",
|
||||
"size": 46144
|
||||
},
|
||||
"msgtype": "m.file",
|
||||
"url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe"
|
||||
},
|
||||
"type": "m.room.message",
|
||||
"event_id": "$143273582443PhrSn:example.org",
|
||||
"room_id": "!636q39766251:example.com",
|
||||
"sender": "@example:example.org",
|
||||
"origin_server_ts": 1432735824653,
|
||||
"unsigned": {
|
||||
"age": 1234
|
||||
}
|
||||
}
|
||||
],
|
||||
"start": "t27-54_2_0_2",
|
||||
"state": [
|
||||
{
|
||||
"content": {
|
||||
"creator": "@example:example.org",
|
||||
"room_version": "1",
|
||||
"m.federate": true,
|
||||
"predecessor": {
|
||||
"event_id": "$something:example.org",
|
||||
"room_id": "!oldroom:example.org"
|
||||
}
|
||||
},
|
||||
"type": "m.room.create",
|
||||
"event_id": "$143273582443PhrSn:example.org",
|
||||
"room_id": "!636q39766251:example.com",
|
||||
"sender": "@example:example.org",
|
||||
"origin_server_ts": 1432735824653,
|
||||
"unsigned": {
|
||||
"age": 1234
|
||||
},
|
||||
"state_key": ""
|
||||
},
|
||||
{
|
||||
"content": {
|
||||
"membership": "join",
|
||||
"avatar_url": "mxc://example.org/SEsfnsuifSDFSSEF",
|
||||
"displayname": "Alice Margatroid"
|
||||
},
|
||||
"type": "m.room.member",
|
||||
"event_id": "$143273582443PhrSn:example.org",
|
||||
"room_id": "!636q39766251:example.com",
|
||||
"sender": "@example:example.org",
|
||||
"origin_server_ts": 1432735824653,
|
||||
"unsigned": {
|
||||
"age": 1234
|
||||
},
|
||||
"state_key": "@alice:example.org"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
# Deprecated: Shutdown room API
|
||||
|
||||
**The old Shutdown room API is deprecated and will be removed in a future release.
|
||||
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**
|
||||
# Shutdown room API
|
||||
|
||||
Shuts down a room, preventing new joins and moves local users and room aliases automatically
|
||||
to a new room. The new room will be created with the user specified by the
|
||||
@@ -13,6 +10,8 @@ disallow any further invites or joins.
|
||||
The local server will only have the power to move local user and room aliases to
|
||||
the new room. Users on other servers will be unaffected.
|
||||
|
||||
See also: [Delete Room API](rooms.md#delete-room-api)
|
||||
|
||||
## API
|
||||
|
||||
You will need to authenticate with an access token for an admin user.
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.