Compare commits
145 Commits
rei/rss_in ... travis/sam
| SHA1 |
|---|
| ed12c4ce66 |
| ec3b927d28 |
| a633a97c7a |
| b617864cd9 |
| 3d882a7ba5 |
| 0388beafe4 |
| 59975f9a63 |
| 6db22e4702 |
| 642fad8bd4 |
| dd2e5b0038 |
| a8251da10f |
| f1b40694ea |
| 6d847d8ce6 |
| 9fc71dc5ee |
| cbcbfe64a2 |
| 7902bf1e1d |
| 66ace43546 |
| 9c555f37e3 |
| 6604b64fae |
| 57dd41a45b |
| 3505ffcda7 |
| caa9d6fed7 |
| c64c3bb4c5 |
| 8df88b5ff3 |
| 2434c0084b |
| 54ce81c86d |
| cd17a2085e |
| 5e9b05d7da |
| b5833a2abf |
| 60d3c57bd0 |
| 63f9317b8e |
| 470dc621ae |
| aeb9b2179e |
| aaed6b39e1 |
| 580f3df9b2 |
| ea6956c55c |
| e89fea4f04 |
| 8c03cd0e5f |
| 8b9ade8c78 |
| e7184a4370 |
| 916c697228 |
| fffe17b77d |
| 80e14a8546 |
| 62fac9d969 |
| be618e0551 |
| a852e93408 |
| 05bae6b4fc |
| 55d5b3af88 |
| 78801e7f9e |
| a2a695b7ec |
| 85275c89d7 |
| 142c9325c2 |
| 30b67e0f63 |
| 5624d0f2ec |
| cf5a420c8a |
| 5d833f0923 |
| ca74b140f2 |
| 6ddda8152e |
| 5a7e9fdd84 |
| e059c5e648 |
| 1ab1479a92 |
| 146af7b47f |
| 0c0b82b6d1 |
| e5baf80237 |
| 4bc6b7130d |
| d8517da85b |
| f7c873a643 |
| bc604e7f94 |
| 591d82f06b |
| ad9b64b496 |
| 3ff0422d2d |
| 1a6ae33309 |
| ef20aa52eb |
| 7093790fbc |
| 5ade977d08 |
| 909827b422 |
| 93bc9d73bf |
| 1d65292e94 |
| a0d294c306 |
| b9cfd3c375 |
| 90d17a3d28 |
| b736c6cd3a |
| b09d443632 |
| 6e834e94fc |
| ea128a3e8e |
| 2f416fc997 |
| 6b6086b8bf |
| a98b8583c6 |
| 894c1a5759 |
| 0eac7077c9 |
| 8401bcd206 |
| 2a44782666 |
| a90d16dabc |
| 36f34e6f3d |
| ce7803b8b0 |
| cee00a3584 |
| 2a012e8a04 |
| 4548d1f87e |
| 4fca313389 |
| 4765f0cfd9 |
| d19505a8c1 |
| 3057095a5d |
| 549f974897 |
| a4bf72c30c |
| 5625abe503 |
| e7011280c7 |
| 92c1550f4a |
| c8fa620d7a |
| deca277d09 |
| 5798a134c0 |
| 71fc04069a |
| 6d97843793 |
| 7dc398586c |
| 49ef8ec399 |
| a3f0635686 |
| 1196ee32b3 |
| 7ccc251415 |
| dfd10f5133 |
| 91caa5b430 |
| 1b959b6977 |
| c88a119259 |
| fbb758a7ce |
| e70f0081da |
| c998f25006 |
| 4a2d2c2b6f |
| 9ba32f6573 |
| ffa5b757c7 |
| 971c980c6e |
| d9b8cf81be |
| 0fb5189072 |
| 80793e813c |
| ae38e0569f |
| 886eceba3e |
| 1e4b4d85e7 |
| 29763f01c6 |
| 74f016d343 |
| 1f9df1cc7b |
| 7777d353bf |
| 502728777c |
| bb29bc2937 |
| c03e3e8301 |
| f299c5414c |
| a3df04a899 |
| 2253b083d9 |
| 6fadb560fc |
@@ -6,6 +6,7 @@ services:
image: postgres:9.5
environment:
POSTGRES_PASSWORD: postgres
command: -c fsync=off

testenv:
image: python:3.5
@@ -16,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src

@@ -6,6 +6,7 @@ services:
image: postgres:11
environment:
POSTGRES_PASSWORD: postgres
command: -c fsync=off

testenv:
image: python:3.7
@@ -16,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src

@@ -6,6 +6,7 @@ services:
image: postgres:9.5
environment:
POSTGRES_PASSWORD: postgres
command: -c fsync=off

testenv:
image: python:3.7
@@ -16,6 +17,6 @@ services:
SYNAPSE_POSTGRES_HOST: postgres
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
working_dir: /app
working_dir: /src
volumes:
- ..:/app
- ..:/src
@@ -1,3 +1,18 @@
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic
@@ -27,7 +27,7 @@ git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit origin/$GITBASE
git merge --no-edit --no-commit origin/$GITBASE

# Show what we are after.
git --no-pager show -s
@@ -1,240 +0,0 @@
env:
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"

steps:

- command:
- "python -m pip install tox"
- "tox -e check_codestyle"
label: "\U0001F9F9 Check Style"
plugins:
- docker#v3.0.1:
image: "python:3.6"

- command:
- "python -m pip install tox"
- "tox -e packaging"
label: "\U0001F9F9 packaging"
plugins:
- docker#v3.0.1:
image: "python:3.6"

- command:
- "python -m pip install tox"
- "tox -e check_isort"
label: "\U0001F9F9 isort"
plugins:
- docker#v3.0.1:
image: "python:3.6"

- command:
- "python -m pip install tox"
- "scripts-dev/check-newsfragment"
label: ":newspaper: Newsfile"
branches: "!master !develop !release-*"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true

- command:
- "python -m pip install tox"
- "tox -e check-sampleconfig"
label: "\U0001F9F9 check-sample-config"
plugins:
- docker#v3.0.1:
image: "python:3.6"

- wait


- command:
- "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
- "python3.5 -m pip install tox"
- "tox -e py35-old,codecov"
label: ":python: 3.5 / SQLite / Old Deps"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "ubuntu:xenial" # We use xenail to get an old sqlite and python
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
- "tox -e py35,codecov"
label: ":python: 3.5 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.5"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
- "tox -e py36,codecov"
label: ":python: 3.6 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.6"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- command:
- "python -m pip install tox"
- "tox -e py37,codecov"
label: ":python: 3.7 / SQLite"
env:
TRIAL_FLAGS: "-j 2"
plugins:
- docker#v3.0.1:
image: "python:3.7"
propagate-environment: true
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.5 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py35.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.7 / :postgres: 9.5"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg95.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: ":python: 3.7 / :postgres: 11"
agents:
queue: "medium"
env:
TRIAL_FLAGS: "-j 8"
command:
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
plugins:
- docker-compose#v2.1.0:
run: testenv
config:
- .buildkite/docker-compose.py37.pg11.yaml
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2


- label: "SyTest - :python: 3.5 / SQLite / Monolith"
agents:
queue: "medium"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
agents:
queue: "medium"
env:
POSTGRES: "1"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2

- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
agents:
queue: "medium"
env:
POSTGRES: "1"
WORKERS: "1"
BLACKLIST: "synapse-blacklist-with-workers"
command:
- "bash .buildkite/merge_base_branch.sh"
- "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
- "bash /synapse_sytest.sh"
plugins:
- docker#v3.0.1:
image: "matrixdotorg/sytest-synapse:py35"
propagate-environment: true
always-pull: true
workdir: "/src"
retry:
automatic:
- exit_status: -1
limit: 2
- exit_status: 2
limit: 2
@@ -1,7 +1,8 @@
[run]
branch = True
parallel = True
include = synapse/*
include=$TOP/synapse/*
data_file = $TOP/.coverage

[report]
precision = 2
.gitignore (5 changes, vendored)
@@ -20,6 +20,7 @@ _trial_temp*/
/*.signing.key
/env/
/homeserver*.yaml
/logs
/media_store/
/uploads

@@ -29,8 +30,9 @@ _trial_temp*/
/.vscode/

# build products
/.coverage*
!/.coveragerc
/.coverage*
/.mypy_cache/
/.tox
/build/
/coverage.*
@@ -38,4 +40,3 @@ _trial_temp*/
/docs/build/
/htmlcov
/pip-wheel-metadata/
@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5, 3.6, 3.7, or 2.7
- Python 3.5, 3.6, or 3.7
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

Synapse is written in Python but some of the libraries it uses are written in
@@ -421,7 +421,7 @@ If Synapse is not configured with an SMTP server, password reset via email will

The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).

Alternatively you can do so from the command line if you have installed via pip.
Alternatively you can do so from the command line if you have installed via pip.

This can be done as follows:
MANIFEST.in (12 changes)
@@ -38,14 +38,16 @@ exclude sytest-blacklist
include pyproject.toml
recursive-include changelog.d *

prune .buildkite
prune .circleci
prune .codecov.yml
prune .coveragerc
prune .github
prune debian
prune demo/etc
prune docker
prune .circleci
prune .coveragerc
prune debian
prune .codecov.yml
prune .buildkite
prune mypy.ini
prune stubs

exclude jenkins*
recursive-exclude jenkins *.sh
UPGRADE.rst (63 changes)
@@ -49,6 +49,56 @@ returned by the Client-Server API:
# configured on port 443.
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"

Upgrading to v1.4.0
===================

Config options
--------------

**Note: Registration by email address or phone number will not work in this release unless
some config options are changed from their defaults.**

This is due to Synapse v1.4.0 now defaulting to sending registration and password reset tokens
itself. This is for security reasons as well as putting less reliance on identity servers.
However, currently Synapse only supports sending emails, and does not have support for
phone-based password reset or account registration. If Synapse is configured to handle these on
its own, phone-based password resets and registration will be disabled. For Synapse to send
emails, the ``email`` block of the config must be filled out. If not, then password resets and
registration via email will be disabled entirely.

This release also deprecates the ``email.trust_identity_server_for_password_resets`` option and
replaces it with the ``account_threepid_delegates`` dictionary. This option defines whether the
homeserver should delegate an external server (typically an `identity server
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending password reset or
registration messages via email and SMS.

If ``email.trust_identity_server_for_password_resets`` is set to ``true``, and
``account_threepid_delegates.email`` is not set, then the first entry in
``trusted_third_party_id_servers`` will be used as the account threepid delegate for email.
This is to ensure compatibility with existing Synapse installs that set up external server
handling for these tasks before v1.4.0. If ``email.trust_identity_server_for_password_resets``
is ``true`` and no trusted identity server domains are configured, Synapse will throw an error.

If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent and a threepid
type in ``account_threepid_delegates`` is not set to a domain, then Synapse will attempt to
send password reset and registration messages for that type.
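For illustration, a minimal ``account_threepid_delegates`` block matching the behaviour described above could look like the following sketch (the URLs are the placeholder values used in the sample config, not recommendations):

```yaml
# Sketch only: delegate email-based flows to an identity server, and leave
# msisdn unset so that phone-based registration and password reset stay
# disabled (Synapse cannot send SMS itself).
account_threepid_delegates:
  email: https://example.com       # identity server that will send email tokens
  #msisdn: http://localhost:8090   # uncomment to delegate SMS handling
```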
Email templates
---------------

If you have configured a custom template directory with the ``email.template_dir`` option, be
aware that there are new templates regarding registration. ``registration.html`` and
``registration.txt`` have been added and contain the content that is sent to a client upon
registering via an email address.

``registration_success.html`` and ``registration_failure.html`` are also new HTML templates
that will be shown to the user when they click the link in their registration email, either
showing them a success or failure page (assuming a redirect URL is not configured).

Synapse will expect these files to exist inside the configured template directory. To view the
default templates, see `synapse/res/templates
<https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.
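As a sketch, assuming the usual layout of the ``email`` block, a homeserver with a custom template directory would now need the new registration templates present alongside the existing ones (the path below is a placeholder):

```yaml
# Sketch only: this directory must now also contain registration.html,
# registration.txt, registration_success.html and registration_failure.html.
email:
  template_dir: /etc/matrix-synapse/templates
```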
Upgrading to v1.2.0
===================

@@ -132,6 +182,19 @@ server for password resets, set ``trust_identity_server_for_password_resets`` to
See the `sample configuration file <docs/sample_config.yaml>`_
for more details on these settings.

New email templates
-------------------
Some new templates have been added to the default template directory for the purpose of the
homeserver sending its own password reset emails. If you have configured a custom
``template_dir`` in your Synapse config, these files will need to be added.

``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates
respectively that contain the contents of what will be emailed to the user upon attempting to
reset their password via email. ``password_reset_success.html`` and
``password_reset_failure.html`` are HTML files whose content (assuming no redirect
URL is set) will be shown to the user after they attempt to click the link in the email sent
to them.

Upgrading to v0.99.0
====================
changelog.d/5680.misc (new file): Lay the groundwork for structured logging output.
changelog.d/5835.feature (new file): Add the ability to send registration emails from the homeserver rather than delegating to an identity server.
changelog.d/5853.feature (new file): Opentracing for device list updates.
changelog.d/5859.feature (new file): Add unstable support for MSC2197 (filtered search requests over federation), in order to allow upcoming room directory query performance improvements.
changelog.d/5864.feature (new file): Correctly retry all hosts returned from SRV when we fail to connect.
changelog.d/5868.feature (new file): Add `m.require_identity_server` key to `/versions`'s `unstable_features` section.
changelog.d/5875.misc (new file): Deprecate the `trusted_third_party_id_servers` option.
changelog.d/5876.feature (new file): Replace `trust_identity_server_for_password_resets` config option with `account_threepid_delegates`.
(removed changelog entry): Rework room and user statistics to separate current & historical rows, as well as track stats correctly.
changelog.d/5892.misc (new file): Compatibility with v2 Identity Service APIs other than /lookup.
changelog.d/5897.feature (new file): Switch to using the v2 Identity Service `/lookup` API where available, with fallback to v1. (Implements [MSC2134](https://github.com/matrix-org/matrix-doc/pull/2134) plus id_access_token authentication for v2 Identity Service APIs from [MSC2140](https://github.com/matrix-org/matrix-doc/pull/2140)).
changelog.d/5900.feature (new file): Add support for config templating.
changelog.d/5902.feature (new file): Users with the type of "support" or "bot" are no longer required to consent.
changelog.d/5904.feature (new file): Let synctl accept a directory of config files.
changelog.d/5914.feature (new file): Add admin API endpoint for getting whether or not a user is a server administrator.
changelog.d/5915.bugfix (new file): Fix 404 for thumbnail download when `dynamic_thumbnails` is `false` and the thumbnail was dynamically generated. Fix reported by rkfg.
changelog.d/5920.bugfix (new file): Fix a cache-invalidation bug for worker-based deployments.
changelog.d/5922.misc (new file): Update Buildkite pipeline to use plugins instead of buildkite-agent commands.
changelog.d/5926.misc (new file): Add link in sample config to the logging config schema.
changelog.d/5931.misc (new file): Remove unnecessary parentheses in return statements.
changelog.d/5934.feature (new file): Redact events in the database that have been redacted for a month.
changelog.d/5938.misc (new file): Remove unused jenkins/prepare_sytest.sh file.
changelog.d/5940.feature (new file): Add the ability to send registration emails from the homeserver rather than delegating to an identity server.
changelog.d/5943.misc (new file): Move Buildkite pipeline config to the pipelines repo.
changelog.d/5953.misc (new file): Update INSTALL.md to say that Python 2 is no longer supported.
changelog.d/5962.misc (new file): Remove unnecessary return statements in the codebase which were the result of a regex run.
changelog.d/5963.misc (new file): Remove left-over methods from C/S registration API.
changelog.d/5964.feature (new file): Remove `bind_email` and `bind_msisdn` parameters from /register ala MSC2140.
changelog.d/5966.bugfix (new file): Fix admin API for listing media in a room not being available with an external media repo.
changelog.d/5967.bugfix (new file): Fix list media admin API always returning an error.
changelog.d/5969.feature (new file): Replace `trust_identity_server_for_password_resets` config option with `account_threepid_delegates`.
changelog.d/5970.docker (new file): Avoid changing UID/GID if they are already correct.
changelog.d/5971.bugfix (new file): Fix room and user stats tracking.
changelog.d/5975.misc (new file): Cleanup event auth type initialisation.
changelog.d/5980.feature (new file): Add POST /_matrix/client/r0/account/3pid/unbind endpoint from MSC2140 for unbinding a 3PID from an identity server without removing it from the homeserver user account.
changelog.d/5981.feature (new file): Setting metrics_flags.known_servers to True in the configuration will publish the synapse_federation_known_servers metric over Prometheus. This represents the total number of servers your server knows about (i.e. is in rooms with), including itself.
changelog.d/5982.bugfix (new file): Include missing opentracing contexts in outbound replication requests.
changelog.d/5983.feature (new file): Add minimum opentracing for client servlets.
changelog.d/5984.bugfix (new file): Fix sending of EDUs when opentracing is enabled with an empty whitelist.
changelog.d/5985.feature (new file): Check at setup that opentracing is installed if it's enabled in the config.
changelog.d/5986.feature (new file): Trace replication send times.
changelog.d/5988.bugfix (new file): Fix invalid references to None while opentracing if the log context slips.
changelog.d/5989.misc (new file): Clean up dependency checking at setup.
changelog.d/5991.bugfix (new file): Fix invalid references to None while opentracing if the log context slips.
changelog.d/5993.feature (new file): Add the ability to send registration emails from the homeserver rather than delegating to an identity server.
changelog.d/5994.feature (new file): Add the ability to send registration emails from the homeserver rather than delegating to an identity server.
changelog.d/5995.bugfix (new file): Return a M_MISSING_PARAM if `sid` is not provided to `/account/3pid`.
changelog.d/5998.bugfix (new file): Fix room and user stats tracking.
changelog.d/6003.misc (new file): Add opentracing span over HTTP push processing.
changelog.d/6004.bugfix (new file): Only count real users when checking for auto-creation of auto-join room.
changelog.d/6005.feature (new file): The new Prometheus metric `synapse_build_info` exposes the Python version, OS version, and Synapse version of the running server.
changelog.d/6009.misc (new file): Small refactor of function arguments and docstrings in RoomMemberHandler.
changelog.d/6010.misc (new file): Remove unused `origin` argument on FederationHandler.add_display_name_to_third_party_invite.
changelog.d/6011.feature (new file): Use account_threepid_delegate.email and account_threepid_delegate.msisdn for validating threepid sessions.
changelog.d/6012.feature (new file): Add report_stats_endpoint option to configure where stats are reported to, if enabled. Contributed by @Sorunome.
changelog.d/6013.misc (new file): Compatibility with v2 Identity Service APIs other than /lookup.
changelog.d/6015.feature (new file): Add config option to increase ratelimits for room admins redacting messages.
changelog.d/6017.misc (new file): Clean up some code in the retry logic.
changelog.d/6020.bugfix (new file): Ensure support users can be registered even if MAU limit is reached.
changelog.d/6023.misc (new file): Fix the structured logging tests stomping on the global log configuration for subsequent tests.
changelog.d/6024.bugfix (new file): Fix bug where login error was shown incorrectly on SSO fallback login.
changelog.d/6025.bugfix (new file): Fix bug in calculating the federation retry backoff period.
changelog.d/6026.feature (new file): Stop sending federation transactions to servers which have been down for a long time.
changelog.d/6032.misc (new file): Add developer documentation for using SAML2.
@@ -37,6 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException

CONFIG_JSON = "cmdclient_config.json"

# TODO: The concept of trusted identity servers has been deprecated. This option and checks
# should be removed
TRUSTED_ID_SERVERS = ["localhost:8001"]


@@ -268,6 +270,7 @@ class SynapseCmd(cmd.Cmd):

@defer.inlineCallbacks
def _do_emailrequest(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/requestToken"
@@ -302,6 +305,7 @@ class SynapseCmd(cmd.Cmd):

@defer.inlineCallbacks
def _do_emailvalidate(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/submitToken"
@@ -330,6 +334,7 @@ class SynapseCmd(cmd.Cmd):

@defer.inlineCallbacks
def _do_3pidbind(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"

json_res = yield self.http_client.do_request(
@@ -398,6 +403,7 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_invite(self, roomid, userstring):
if not userstring.startswith("@") and self._is_on("complete_usernames"):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"

json_res = yield self.http_client.do_request(
@@ -407,6 +413,7 @@ class SynapseCmd(cmd.Cmd):
mxid = None

if "mxid" in json_res and "signatures" in json_res:
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/pubkey/ed25519"
@@ -17,7 +17,7 @@ By default, the image expects a single volume, located at ``/data``, that will h
* the appservices configuration.

You are free to use separate volumes depending on storage endpoints at your
disposal. For instance, ``/data/media`` coud be stored on a large but low
disposal. For instance, ``/data/media`` could be stored on a large but low
performance hdd storage while other files could be stored on high performance
endpoints.

@@ -27,8 +27,8 @@ configuration file there. Multiple application services are supported.

## Generating a configuration file

The first step is to genearte a valid config file. To do this, you can run the
image with the `generate` commandline option.
The first step is to generate a valid config file. To do this, you can run the
image with the `generate` command line option.

You will need to specify values for the `SYNAPSE_SERVER_NAME` and
`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store
@@ -59,7 +59,7 @@ The following environment variables are supported in `generate` mode:
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
such as the datatase and media store. Defaults to `/data`.
such as the database and media store. Defaults to `/data`.
* `UID`, `GID`: the user id and group id to use for creating the data
directories. Defaults to `991`, `991`.
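To illustrate the variables listed above, a hypothetical docker-compose service for `generate` mode might look like the sketch below (the image tag and host path are assumptions, not taken from this README):

```yaml
# Sketch only: one-shot service that writes a generated config into the
# volume mounted at /data, then exits.
services:
  synapse-generate:
    image: matrixdotorg/synapse:latest   # assumed image name and tag
    command: generate
    environment:
      SYNAPSE_SERVER_NAME: example.com
      SYNAPSE_REPORT_STATS: "yes"
      UID: "991"
      GID: "991"
    volumes:
      - ./data:/data
```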
@@ -115,7 +115,7 @@ not given).

To migrate from a dynamic configuration file to a static one, run the docker
container once with the environment variables set, and `migrate_config`
commandline option. For example:
command line option. For example:

```
docker run -it --rm \
@@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
config_dir (str): where to put generated config files
config_path (str): where to put the main config file
environ (dict): environment dictionary
ownership (str): "<user>:<group>" string which will be used to set
ownership of the generated configs
ownership (str|None): "<user>:<group>" string which will be used to set
ownership of the generated configs. If None, ownership will not change.
"""
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
if v not in environ:
@@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
log("Generating log config file " + log_config_file)
convert("/conf/log.config", log_config_file, environ)

subprocess.check_output(["chown", "-R", ownership, "/data"])

# Hopefully we already have a signing key, but generate one if not.
subprocess.check_output(
[
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]
)
args = [
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]

if ownership is not None:
subprocess.check_output(["chown", "-R", ownership, "/data"])
args = ["su-exec", ownership] + args

subprocess.check_output(args)


def run_generate_config(environ, ownership):
@@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):

Args:
environ (dict): env var dict
ownership (str): "userid:groupid" arg for chmod
ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.

Never returns.
"""
@@ -149,9 +149,6 @@ def run_generate_config(environ, ownership):
log("Creating log config %s" % (log_config_file,))
convert("/conf/log.config", log_config_file, environ)

# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])

args = [
"python",
"-m",
@@ -170,12 +167,33 @@ def run_generate_config(environ, ownership):
"--open-private-ports",
]
# log("running %s" % (args, ))
os.execv("/usr/local/bin/python", args)

if ownership is not None:
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)

# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])
else:
os.execv("/usr/local/bin/python", args)


def main(args, environ):
mode = args[1] if len(args) > 1 else None
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
desired_uid = int(environ.get("UID", "991"))
desired_gid = int(environ.get("GID", "991"))
if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
ownership = None
else:
ownership = "{}:{}".format(desired_uid, desired_gid)

log(
"Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
% (os.getuid(), os.getgid(), desired_uid, desired_gid)
)

if ownership is None:
log("Will not perform chmod/su-exec as UserID already matches request")

# In generate mode, generate a configuration and missing keys, then exit
if mode == "generate":
@@ -227,16 +245,12 @@ def main(args, environ):

log("Starting synapse with config file " + config_path)

args = [
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
]
os.execv("/sbin/su-exec", args)
args = ["python", "-m", "synapse.app.homeserver", "--config-path", config_path]
if ownership is not None:
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)
else:
os.execv("/usr/local/bin/python", args)


if __name__ == "__main__":
@@ -86,6 +86,25 @@ with a body of:
including an ``access_token`` of a server admin.


Get whether a user is a server administrator or not
===================================================


The api is::

    GET /_synapse/admin/v1/users/<user_id>/admin

including an ``access_token`` of a server admin.

A response body like the following is returned:

.. code:: json

    {
        "admin": true
    }


Change whether a user is a server administrator or not
======================================================
docs/dev/saml.md (37 changes, new file)
@@ -0,0 +1,37 @@
# How to test SAML as a developer without a server

https://capriza.github.io/samling/samling.html (https://github.com/capriza/samling) is a great
resource for being able to tinker with the SAML options within Synapse without needing to
deploy and configure a complicated software stack.

To make Synapse (and therefore Riot) use it:

1. Use the samling.html URL above or deploy your own and visit the IdP Metadata tab.
2. Copy the XML to your clipboard.
3. On your Synapse server, create a new file `samling.xml` next to your `homeserver.yaml` with
   the XML from step 2 as the contents.
4. Edit your `homeserver.yaml` to include:
   ```yaml
   saml2_config:
     sp_config:
       allow_unknown_attributes: true # Works around a bug with AVA Hashes: https://github.com/IdentityPython/pysaml2/issues/388
       metadata:
         local: ["samling.xml"]
   ```
5. Run `apt-get install xmlsec1` and `pip install --upgrade --force 'pysaml2>=4.5.0'` to ensure
   the dependencies are installed and ready to go.
6. Restart Synapse.

Then in Riot:

1. Visit the login page with a Riot pointing at your homeserver.
2. Click the Single Sign-On button.
3. On the samling page, enter a Name Identifier and add a SAML Attribute for `uid=your_localpart`.
   The response must also be signed.
4. Click "Next".
5. Click "Post Response" (change nothing).
6. You should be logged in.

If you try and repeat this process, you may be automatically logged in using the information you
gave previously. To fix this, open your developer console (`F12` or `Ctrl+Shift+I`) while on the
samling page and clear the site data. In Chrome, this will be a button on the Application tab.
docs/room_and_user_statistics.md (62 changes, new file)
@@ -0,0 +1,62 @@
Room and User Statistics
========================

Synapse maintains room and user statistics (as well as a cache of room state),
in various tables. These can be used for administrative purposes but are also
used when generating the public room directory.


# Synapse Developer Documentation

## High-Level Concepts

### Definitions

* **subject**: Something we are tracking stats about – currently a room or user.
* **current row**: An entry for a subject in the appropriate current statistics
  table. Each subject can have only one.
* **historical row**: An entry for a subject in the appropriate historical
  statistics table. Each subject can have any number of these.

### Overview

Stats are maintained as time series. There are two kinds of column:

* absolute columns – where the value is correct for the time given by `end_ts`
  in the stats row. (Imagine a line graph for these values)
  * They can also be thought of as 'gauges' in Prometheus, if you are familiar.
* per-slice columns – where the value corresponds to how many of the occurrences
  occurred within the time slice given by `(end_ts − bucket_size)…end_ts`
  or `start_ts…end_ts`. (Imagine a histogram for these values)

Stats are maintained in two tables (for each type): current and historical.

Current stats correspond to the present values. Each subject can only have one
entry.

Historical stats correspond to values in the past. Subjects may have multiple
entries.

## Concepts around the management of stats

### Current rows

Current rows contain the most up-to-date statistics for a room.
They only contain absolute columns

### Historical rows

Historical rows can always be considered to be valid for the time slice and
end time specified.

* historical rows will not exist for every time slice – they will be omitted
  if there were no changes. In this case, the following assumptions can be
  made to interpolate/recreate missing rows:
    - absolute fields have the same values as in the preceding row
    - per-slice fields are zero (`0`)
* historical rows will not be retained forever – rows older than a configurable
  time will be purged.
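A hypothetical illustration of that interpolation rule, with made-up column names (`joined_members` standing in for an absolute column, `events_in_slice` for a per-slice one):

```yaml
# Two stored historical rows plus the row a reader would reconstruct for the
# omitted slice in between: absolute values repeat, per-slice values are zero.
- { end_ts: 1000, joined_members: 10, events_in_slice: 4 }   # stored
- { end_ts: 1100, joined_members: 10, events_in_slice: 0 }   # interpolated, no stored row
- { end_ts: 1200, joined_members: 12, events_in_slice: 7 }   # stored
```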
#### Purge

The purging of historical rows is not yet implemented.
@@ -205,9 +205,9 @@ listeners:
#
- port: 8008
tls: false
bind_addresses: ['::1', '127.0.0.1']
type: http
x_forwarded: true
bind_addresses: ['::1', '127.0.0.1']

resources:
- names: [client, federation]
@@ -306,6 +306,13 @@ listeners:
#
#allow_per_room_profiles: false

# How long to keep redacted events in unredacted form in the database. After
# this period redacted events get replaced with their redacted form in the DB.
#
# Defaults to `7d`. Set to `null` to disable.
#
redaction_retention_period: 7d


## TLS ##

@@ -392,10 +399,10 @@ listeners:
# permission to listen on port 80.
#
acme:
# ACME support is disabled by default. Uncomment the following line
# (and tls_certificate_path and tls_private_key_path above) to enable it.
# ACME support is disabled by default. Set this to `true` and uncomment
# tls_certificate_path and tls_private_key_path above to enable it.
#
#enabled: true
enabled: False

# Endpoint to use to request certificates. If you only want to test,
# use Let's Encrypt's staging url:
@@ -406,17 +413,17 @@ acme:
# Port number to listen on for the HTTP-01 challenge. Change this if
# you are forwarding connections through Apache/Nginx/etc.
#
#port: 80
port: 80

# Local addresses to listen on for incoming connections.
# Again, you may want to change this if you are forwarding connections
# through Apache/Nginx/etc.
#
#bind_addresses: ['::', '0.0.0.0']
bind_addresses: ['::', '0.0.0.0']

# How many days remaining on a certificate before it is renewed.
#
#reprovision_threshold: 30
reprovision_threshold: 30

# The domain that the certificate should be for. Normally this
# should be the same as your Matrix domain (i.e., 'server_name'), but,
@@ -430,7 +437,7 @@ acme:
#
# If not set, defaults to your 'server_name'.
#
#domain: matrix.example.com
domain: matrix.example.com

# file to use for the account key. This will be generated if it doesn't
# exist.
@@ -485,7 +492,8 @@ database:

## Logging ##

# A yaml python logging config file
# A yaml python logging config file as described by
# https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
#
log_config: "CONFDIR/SERVERNAME.log.config"

@@ -510,6 +518,9 @@ log_config: "CONFDIR/SERVERNAME.log.config"
# - one for login that ratelimits login requests based on the account the
# client is attempting to log into, based on the amount of failed login
# attempts for this account.
# - one for ratelimiting redactions by room admins. If this is not explicitly
# set then it uses the same ratelimiting as per rc_message. This is useful
# to allow room admins to deal with abuse quickly.
#
# The defaults are as shown below.
#
@@ -531,6 +542,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
# failed_attempts:
# per_second: 0.17
# burst_count: 3
#
#rc_admin_redaction:
# per_second: 1
# burst_count: 50


# Ratelimiting settings for incoming federation
@@ -890,10 +905,42 @@ uploads_path: "DATADIR/uploads"
# Also defines the ID server which will be called when an account is
# deactivated (one will be picked arbitrarily).
#
# Note: This option is deprecated. Since v0.99.4, Synapse has tracked which identity
# server a 3PID has been bound to. For 3PIDs bound before then, Synapse runs a
# background migration script, informing itself that the identity server all of its
# 3PIDs have been bound to is likely one of the below.
#
# As of Synapse v1.4.0, all other functionality of this option has been deprecated, and
# it is now solely used for the purposes of the background migration script, and can be
# removed once it has run.
#trusted_third_party_id_servers:
# - matrix.org
# - vector.im

# Handle threepid (email/phone etc) registration and password resets through a set of
# *trusted* identity servers. Note that this allows the configured identity server to
# reset passwords for accounts!
#
# Be aware that if `email` is not set, and SMTP options have not been
# configured in the email config block, registration and user password resets via
# email will be globally disabled.
#
# Additionally, if `msisdn` is not set, registration and password resets via msisdn
# will be disabled regardless. This is due to Synapse currently not supporting any
# method of sending SMS messages on its own.
#
# To enable using an identity server for operations regarding a particular third-party
# identifier type, set the value to the URL of that identity server as shown in the
# examples below.
#
# Servers handling these requests must answer the `/requestToken` endpoints defined
# by the Matrix Identity Service API specification:
# https://matrix.org/docs/spec/identity_service/latest
#
account_threepid_delegates:
#email: https://example.com # Delegate email sending to matrix.org
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process

# Users who register on this homeserver will automatically be joined
# to these rooms
#
@@ -925,9 +972,24 @@ uploads_path: "DATADIR/uploads"
#sentry:
# dsn: "..."

# Flags to enable Prometheus metrics which are not suitable to be
# enabled by default, either for performance reasons or limited use.
#
metrics_flags:
# Publish synapse_federation_known_servers, a gauge of the number of
# servers this homeserver knows about, including itself. May cause
# performance problems on large homeservers.
#
#known_servers: true

# Whether or not to report anonymized homeserver usage statistics.
# report_stats: true|false

# The endpoint to report the anonymized homeserver usage statistics to.
# Defaults to https://matrix.org/report-usage-stats/push
#
#report_stats_endpoint: https://example.com/report-usage-stats/push


## API Configuration ##

@@ -1163,19 +1225,6 @@ password_config:
# #
# riot_base_url: "http://localhost/riot"
#
# # Enable sending password reset emails via the configured, trusted
# # identity servers
# #
# # IMPORTANT! This will give a malicious or overtaken identity server
# # the ability to reset passwords for your users! Make absolutely sure
# # that you want to do this! It is strongly recommended that password
# # reset emails be sent by the homeserver instead
# #
# # If this option is set to false and SMTP options have not been
# # configured, resetting user passwords via email will be disabled
# #
# #trust_identity_server_for_password_resets: false
#
# # Configure the time that a validation email or text message code
# # will expire after sending
# #
@@ -1207,11 +1256,22 @@ password_config:
# #password_reset_template_html: password_reset.html
# #password_reset_template_text: password_reset.txt
#
# # Templates for registration emails sent by the homeserver
# #
# #registration_template_html: registration.html
# #registration_template_text: registration.txt
#
# # Templates for password reset success and failure pages that a user
# # will see after attempting to reset their password
# #
# #password_reset_template_success_html: password_reset_success.html
# #password_reset_template_failure_html: password_reset_failure.html
#
# # Templates for registration success and failure pages that a user
# # will see after attempting to register using an email or phone
# #
# #registration_template_success_html: registration_success.html
# #registration_template_failure_html: registration_failure.html


#password_providers:
docs/structured_logging.md (83 changes, new file)
@@ -0,0 +1,83 @@
# Structured Logging

A structured logging system can be useful when your logs are destined for a machine to parse and process. By maintaining its machine-readable characteristics, it enables more efficient searching and aggregations when consumed by software such as the "ELK stack".

Synapse's structured logging system is configured via the file that Synapse's `log_config` config option points to. The file must be YAML and contain `structured: true`. It must contain a list of "drains" (places where logs go to).

A structured logging configuration looks similar to the following:

```yaml
structured: true

loggers:
    synapse:
        level: INFO
    synapse.storage.SQL:
        level: WARNING

drains:
    console:
        type: console
        location: stdout
    file:
        type: file_json
        location: homeserver.log
```

The above logging config will set Synapse as 'INFO' logging level by default, with the SQL layer at 'WARNING', and will have two logging drains (to the console and to a file, stored as JSON).

## Drain Types

Drain types can be specified by the `type` key.

### `console`

Outputs human-readable logs to the console.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `console_json`

Outputs machine-readable JSON logs to the console.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `console_json_terse`

Outputs machine-readable JSON logs to the console, separated by newlines. This
format is not designed to be read and re-formatted into human-readable text, but
is optimal for a logging aggregation system.

Arguments:

- `location`: Either `stdout` or `stderr`.

### `file`

Outputs human-readable logs to a file.

Arguments:

- `location`: An absolute path to the file to log to.

### `file_json`

Outputs machine-readable logs to a file.

Arguments:

- `location`: An absolute path to the file to log to.

### `network_json_terse`

Delivers machine-readable JSON logs to a log aggregator over TCP. This is
compatible with LogStash's TCP input with the codec set to `json_lines`.

Arguments:

- `host`: Hostname or IP address of the log aggregator.
- `port`: Numerical port to contact on the host.
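For example, a drain that ships terse JSON to a LogStash TCP input alongside a human-readable console drain could be configured like this sketch (the drain name, host and port are placeholders):

```yaml
structured: true

drains:
    console:
        type: console
        location: stdout
    logstash:
        type: network_json_terse
        host: 10.1.2.3
        port: 5959
```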
@@ -1,16 +0,0 @@
#! /bin/bash

set -eux

cd "`dirname $0`/.."

TOX_DIR=$WORKSPACE/.tox

mkdir -p $TOX_DIR

if ! [ $TOX_DIR -ef .tox ]; then
ln -s "$TOX_DIR" .tox
fi

# set up the virtualenv
tox -e py27 --notest -v
mypy.ini (54 changes, new file)
@@ -0,0 +1,54 @@
[mypy]
namespace_packages=True
plugins=mypy_zope:plugin
follow_imports=skip
mypy_path=stubs

[mypy-synapse.config.homeserver]
# this is a mess because of the metaclass shenanigans
ignore_errors = True

[mypy-zope]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-OpenSSL]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-unpaddedbase64]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-jaeger_client]
ignore_missing_imports = True

[mypy-jsonschema]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True
@@ -25,7 +25,7 @@ from twisted.internet import defer
import synapse.logging.opentracing as opentracing
import synapse.types
from synapse import event_auth
from synapse.api.constants import EventTypes, JoinRules, Membership
from synapse.api.constants import EventTypes, JoinRules, Membership, UserTypes
from synapse.api.errors import (
AuthError,
Codes,
@@ -276,25 +276,25 @@ class Auth(object):
self.get_access_token_from_request(request)
)
if app_service is None:
return (None, None)
return None, None

if app_service.ip_range_whitelist:
ip_address = IPAddress(self.hs.get_ip_from_request(request))
if ip_address not in app_service.ip_range_whitelist:
return (None, None)
return None, None

if b"user_id" not in request.args:
return (app_service.sender, app_service)
return app_service.sender, app_service

user_id = request.args[b"user_id"][0].decode("utf8")
if app_service.sender == user_id:
return (app_service.sender, app_service)
return app_service.sender, app_service

if not app_service.is_interested_in_user(user_id):
raise AuthError(403, "Application service cannot masquerade as this user.")
if not (yield self.store.get_user_by_id(user_id)):
raise AuthError(403, "Application service has not registered this user")
return (user_id, app_service)
return user_id, app_service

@defer.inlineCallbacks
def get_user_by_access_token(self, token, rights="access"):
@@ -694,7 +694,7 @@ class Auth(object):
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.check_user_was_in_room(room_id, user_id)
return (member_event.membership, member_event.event_id)
return member_event.membership, member_event.event_id
except AuthError:
visibility = yield self.state.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
@@ -703,14 +703,13 @@ class Auth(object):
visibility
and visibility.content["history_visibility"] == "world_readable"
):
return (Membership.JOIN, None)
return
return Membership.JOIN, None
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)

@defer.inlineCallbacks
def check_auth_blocking(self, user_id=None, threepid=None):
def check_auth_blocking(self, user_id=None, threepid=None, user_type=None):
"""Checks if the user should be rejected for some external reason,
such as monthly active user limiting or global disable flag

@@ -723,6 +722,9 @@ class Auth(object):
with a MAU blocked server, normally they would be rejected but their
threepid is on the reserved list. user_id and
threepid should never be set at the same time.

user_type(str|None): If present, is used to decide whether to check against
certain blocking reasons like MAU.
"""

# Never fail an auth check for the server notices users or support user
@@ -760,6 +762,10 @@ class Auth(object):
self.hs.config.mau_limits_reserved_threepids, threepid
):
return
elif user_type == UserTypes.SUPPORT:
# If the user does not exist yet and is of type "support",
# allow registration. Support users are excluded from MAU checks.
return
# Else if there is no room in the MAU bucket, bail
current_mau = yield self.store.get_monthly_active_count()
if current_mau >= self.hs.config.max_mau_value:

@@ -122,7 +122,8 @@ class UserTypes(object):
"""

SUPPORT = "support"
ALL_USER_TYPES = (SUPPORT,)
BOT = "bot"
ALL_USER_TYPES = (SUPPORT, BOT)


class RelationTypes(object):
@@ -36,18 +36,20 @@ from synapse.util.versionstring import get_version_string

logger = logging.getLogger(__name__)

# list of tuples of function, args list, kwargs dict
_sighup_callbacks = []


def register_sighup(func):
def register_sighup(func, *args, **kwargs):
"""
Register a function to be called when a SIGHUP occurs.

Args:
func (function): Function to be called when sent a SIGHUP signal.
Will be called with a single argument, the homeserver.
Will be called with a single default argument, the homeserver.
*args, **kwargs: args and kwargs to be passed to the target function.
"""
_sighup_callbacks.append(func)
_sighup_callbacks.append((func, args, kwargs))


def start_worker_reactor(appname, config, run_command=reactor.run):
@@ -248,8 +250,8 @@ def start(hs, listeners=None):
# we're not using systemd.
sdnotify(b"RELOADING=1")

for i in _sighup_callbacks:
i(hs)
for i, args, kwargs in _sighup_callbacks:
i(hs, *args, **kwargs)

sdnotify(b"READY=1")
@@ -227,8 +227,6 @@ def start(config_options):
config.start_pushers = False
config.send_federation = False

setup_logging(config, use_worker_options=True)

synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -241,6 +239,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()

# We use task.react as the basic run command as it correctly handles tearing

@@ -141,8 +141,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.appservice"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -167,6 +165,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ps, config, use_worker_options=True)

ps.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ps, config.worker_listeners

@@ -119,7 +119,7 @@ class ClientReaderServer(HomeServer):
KeyChangesServlet(self).register(resource)
VoipRestServlet(self).register(resource)
PushRuleRestServlet(self).register(resource)
VersionsRestServlet().register(resource)
VersionsRestServlet(self).register(resource)

resources.update({"/_matrix/client": resource})

@@ -179,8 +179,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.client_reader"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -193,6 +191,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners
@@ -175,8 +175,6 @@ def start(config_options):

assert config.worker_replication_http_port is not None

setup_logging(config, use_worker_options=True)

# This should only be done on the user directory worker or the master
config.update_user_directory = False

@@ -192,6 +190,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners

@@ -160,8 +160,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.federation_reader"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -174,6 +172,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners

@@ -171,8 +171,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.federation_sender"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -197,6 +195,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners
@@ -70,12 +70,12 @@ class PresenceStatusStubServlet(RestServlet):
except HttpResponseException as e:
raise e.to_synapse_error()

return (200, result)
return 200, result

@defer.inlineCallbacks
def on_PUT(self, request, user_id):
yield self.auth.get_user_by_req(request)
return (200, {})
return 200, {}


class KeyUploadServlet(RestServlet):
@@ -126,11 +126,11 @@ class KeyUploadServlet(RestServlet):
self.main_uri + request.uri.decode("ascii"), body, headers=headers
)

return (200, result)
return 200, result
else:
# Just interested in counts.
result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
return (200, {"one_time_key_counts": result})
return 200, {"one_time_key_counts": result}


class FrontendProxySlavedStore(
@@ -232,8 +232,6 @@ def start(config_options):

assert config.worker_main_http_uri is not None

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -246,6 +244,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners
@@ -341,8 +341,6 @@ def setup(config_options):
# generating config files and shouldn't try to continue.
sys.exit(0)

synapse.config.logger.setup_logging(config, use_worker_options=False)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -356,6 +354,8 @@ def setup(config_options):
database_engine=database_engine,
)

synapse.config.logger.setup_logging(hs, config, use_worker_options=False)

logger.info("Preparing database: %s...", config.database_config["name"])

try:
@@ -561,10 +561,12 @@ def run(hs):

stats["database_engine"] = hs.get_datastore().database_engine_name
stats["database_server_version"] = hs.get_datastore().get_server_version()
logger.info("Reporting stats to matrix.org: %s" % (stats,))
logger.info(
"Reporting stats to %s: %s" % (hs.config.report_stats_endpoint, stats)
)
try:
yield hs.get_simple_http_client().put_json(
"https://matrix.org/report-usage-stats/push", stats
hs.config.report_stats_endpoint, stats
)
except Exception as e:
logger.warn("Error reporting stats: %s", e)
@@ -155,8 +155,6 @@ def start(config_options):
"Please add ``enable_media_repo: false`` to the main config\n"
)

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -169,6 +167,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners

@@ -184,8 +184,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.pusher"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

if config.start_pushers:
@@ -210,6 +208,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ps, config, use_worker_options=True)

ps.setup()

def start():

@@ -435,8 +435,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.synchrotron"

setup_logging(config, use_worker_options=True)

synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -450,6 +448,8 @@ def start(config_options):
application_service_handler=SynchrotronApplicationService(),
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners

@@ -197,8 +197,6 @@ def start(config_options):

assert config.worker_app == "synapse.app.user_dir"

setup_logging(config, use_worker_options=True)

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)
@@ -223,6 +221,8 @@ def start(config_options):
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

ss.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ss, config.worker_listeners
Some files were not shown because too many files have changed in this diff.