Compare commits
101 Commits
anoa/remov ... travis/sam

| Author | SHA1 | Date |
|---|---|---|
| | ed12c4ce66 | |
| | ec3b927d28 | |
| | a633a97c7a | |
| | b617864cd9 | |
| | 3d882a7ba5 | |
| | 0388beafe4 | |
| | 59975f9a63 | |
| | 6db22e4702 | |
| | 642fad8bd4 | |
| | dd2e5b0038 | |
| | a8251da10f | |
| | f1b40694ea | |
| | 6d847d8ce6 | |
| | 9fc71dc5ee | |
| | cbcbfe64a2 | |
| | 7902bf1e1d | |
| | 66ace43546 | |
| | 9c555f37e3 | |
| | 6604b64fae | |
| | 57dd41a45b | |
| | 3505ffcda7 | |
| | caa9d6fed7 | |
| | c64c3bb4c5 | |
| | 8df88b5ff3 | |
| | 2434c0084b | |
| | 54ce81c86d | |
| | cd17a2085e | |
| | 5e9b05d7da | |
| | b5833a2abf | |
| | 60d3c57bd0 | |
| | 63f9317b8e | |
| | 470dc621ae | |
| | aeb9b2179e | |
| | aaed6b39e1 | |
| | 580f3df9b2 | |
| | ea6956c55c | |
| | e89fea4f04 | |
| | 8c03cd0e5f | |
| | 8b9ade8c78 | |
| | e7184a4370 | |
| | 916c697228 | |
| | fffe17b77d | |
| | 80e14a8546 | |
| | 62fac9d969 | |
| | be618e0551 | |
| | a852e93408 | |
| | 05bae6b4fc | |
| | 55d5b3af88 | |
| | 78801e7f9e | |
| | a2a695b7ec | |
| | 85275c89d7 | |
| | 142c9325c2 | |
| | 30b67e0f63 | |
| | 5624d0f2ec | |
| | cf5a420c8a | |
| | 5d833f0923 | |
| | ca74b140f2 | |
| | 6ddda8152e | |
| | 5a7e9fdd84 | |
| | e059c5e648 | |
| | 1ab1479a92 | |
| | 146af7b47f | |
| | 0c0b82b6d1 | |
| | e5baf80237 | |
| | 4bc6b7130d | |
| | d8517da85b | |
| | f7c873a643 | |
| | bc604e7f94 | |
| | 591d82f06b | |
| | ad9b64b496 | |
| | 3ff0422d2d | |
| | 1a6ae33309 | |
| | ef20aa52eb | |
| | 7093790fbc | |
| | 5ade977d08 | |
| | 909827b422 | |
| | 93bc9d73bf | |
| | 1d65292e94 | |
| | a0d294c306 | |
| | b9cfd3c375 | |
| | 90d17a3d28 | |
| | b736c6cd3a | |
| | b09d443632 | |
| | 6e834e94fc | |
| | ea128a3e8e | |
| | 2f416fc997 | |
| | 6b6086b8bf | |
| | a98b8583c6 | |
| | 894c1a5759 | |
| | 0eac7077c9 | |
| | 8401bcd206 | |
| | 2a44782666 | |
| | a90d16dabc | |
| | 36f34e6f3d | |
| | ce7803b8b0 | |
| | cee00a3584 | |
| | 2a012e8a04 | |
| | 4548d1f87e | |
| | 4fca313389 | |
| | 549f974897 | |
| | a4bf72c30c | |
@@ -1,3 +1,18 @@
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic

@@ -1,310 +0,0 @@
env:
  COVERALLS_REPO_TOKEN: wsJWOby6j0uCYFiCes3r0XauxO27mx8lD

steps:
  - command:
      - "python -m pip install tox"
      - "tox -e check_codestyle"
    label: "\U0001F9F9 Check Style"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          mount-buildkite-agent: false

  - command:
      - "python -m pip install tox"
      - "tox -e packaging"
    label: "\U0001F9F9 packaging"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          mount-buildkite-agent: false

  - command:
      - "python -m pip install tox"
      - "tox -e check_isort"
    label: "\U0001F9F9 isort"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          mount-buildkite-agent: false

  - command:
      - "python -m pip install tox"
      - "scripts-dev/check-newsfragment"
    label: ":newspaper: Newsfile"
    branches: "!master !develop !release-*"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          propagate-environment: true
          mount-buildkite-agent: false

  - command:
      - "python -m pip install tox"
      - "tox -e check-sampleconfig"
    label: "\U0001F9F9 check-sample-config"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          mount-buildkite-agent: false

  - command:
      - "python -m pip install tox"
      - "tox -e mypy"
    label: ":mypy: mypy"
    plugins:
      - docker#v3.0.1:
          image: "python:3.5"
          mount-buildkite-agent: false

  - wait

  - command:
      - "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
      - "python3.5 -m pip install tox"
      - "tox -e py35-old,combine"
    label: ":python: 3.5 / SQLite / Old Deps"
    env:
      TRIAL_FLAGS: "-j 2"
      LANG: "C.UTF-8"
    plugins:
      - docker#v3.0.1:
          image: "ubuntu:xenial"  # We use xenial to get an old sqlite and python
          workdir: "/src"
          mount-buildkite-agent: false
          propagate-environment: true
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py35,combine"
    label: ":python: 3.5 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.5"
          workdir: "/src"
          mount-buildkite-agent: false
          propagate-environment: true
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py36,combine"
    label: ":python: 3.6 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          workdir: "/src"
          mount-buildkite-agent: false
          propagate-environment: true
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py37,combine"
    label: ":python: 3.7 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.7"
          workdir: "/src"
          mount-buildkite-agent: false
          propagate-environment: true
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.5 / :postgres: 9.5"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,combine'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py35.pg95.yaml
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.7 / :postgres: 9.5"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,combine'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py37.pg95.yaml
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.7 / :postgres: 11"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,combine'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py37.pg11.yaml
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / SQLite / Monolith"
    agents:
      queue: "medium"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
          entrypoint: ["/bin/sh", "-e", "-c"]
          mount-buildkite-agent: false
          volumes: ["./logs:/logs"]
      - artifacts#v1.2.0:
          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
      - matrix-org/annotate:
          path: "logs/annotate.md"
          style: "error"
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
    agents:
      queue: "medium"
    env:
      POSTGRES: "1"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
          entrypoint: ["/bin/sh", "-e", "-c"]
          mount-buildkite-agent: false
          volumes: ["./logs:/logs"]
      - artifacts#v1.2.0:
          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
      - matrix-org/annotate:
          path: "logs/annotate.md"
          style: "error"
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
    agents:
      queue: "medium"
    env:
      POSTGRES: "1"
      WORKERS: "1"
      BLACKLIST: "synapse-blacklist-with-workers"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
          entrypoint: ["/bin/sh", "-e", "-c"]
          mount-buildkite-agent: false
          volumes: ["./logs:/logs"]
      - artifacts#v1.2.0:
          upload: [ "logs/**/*.log", "logs/**/*.log.*", "logs/coverage.xml" ]
      - matrix-org/annotate:
          path: "logs/annotate.md"
          style: "error"
      - matrix-org/coveralls#v1.0:
          parallel: "true"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - wait: ~
    continue_on_failure: true

  - label: Trigger webhook
    command: "curl -k https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN -d \"payload[build_num]=$BUILDKITE_BUILD_NUMBER&payload[status]=done\""

@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5, 3.6, 3.7, or 2.7
- Python 3.5, 3.6, or 3.7
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

Synapse is written in Python but some of the libraries it uses are written in

@@ -421,7 +421,7 @@ If Synapse is not configured with an SMTP server, password reset via email will

The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).

Alternatively you can do so from the command line if you have installed via pip.
Alternatively you can do so from the command line if you have installed via pip.

This can be done as follows:

MANIFEST.in (12)
@@ -38,14 +38,16 @@ exclude sytest-blacklist
include pyproject.toml
recursive-include changelog.d *

prune .buildkite
prune .circleci
prune .codecov.yml
prune .coveragerc
prune .github
prune debian
prune demo/etc
prune docker
prune .circleci
prune .coveragerc
prune debian
prune .codecov.yml
prune .buildkite
prune mypy.ini
prune stubs

exclude jenkins*
recursive-exclude jenkins *.sh

UPGRADE.rst (63)
@@ -49,6 +49,56 @@ returned by the Client-Server API:
   # configured on port 443.
   curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"

Upgrading to v1.4.0
===================

Config options
--------------

**Note: Registration by email address or phone number will not work in this release unless
some config options are changed from their defaults.**

This is due to Synapse v1.4.0 now defaulting to sending registration and password reset tokens
itself. This is for security reasons as well as putting less reliance on identity servers.
However, currently Synapse only supports sending emails, and does not have support for
phone-based password reset or account registration. If Synapse is configured to handle these on
its own, phone-based password resets and registration will be disabled. For Synapse to send
emails, the ``email`` block of the config must be filled out. If not, then password resets and
registration via email will be disabled entirely.

This release also deprecates the ``email.trust_identity_server_for_password_resets`` option and
replaces it with the ``account_threepid_delegates`` dictionary. This option defines whether the
homeserver should delegate sending password reset or registration messages via email and SMS to
an external server (typically an `identity server
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_).

If ``email.trust_identity_server_for_password_resets`` is set to ``true``, and
``account_threepid_delegates.email`` is not set, then the first entry in
``trusted_third_party_id_servers`` will be used as the account threepid delegate for email.
This is to ensure compatibility with existing Synapse installs that set up external server
handling for these tasks before v1.4.0. If ``email.trust_identity_server_for_password_resets``
is ``true`` and no trusted identity server domains are configured, Synapse will throw an error.

If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent and a threepid
type in ``account_threepid_delegates`` is not set to a domain, then Synapse will attempt to
send password reset and registration messages for that type.
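For clarity, here is a minimal sketch of how the new options fit together in ``homeserver.yaml``. The option names come from the sample config elsewhere in this changeset; the hostnames and SMTP values are placeholders, not recommendations:

.. code:: yaml

    # Delegate threepid handling to an identity server, per threepid type.
    account_threepid_delegates:
      #email: https://example.com   # leave unset to have Synapse send emails itself
      msisdn: https://example.com   # SMS must be delegated; Synapse cannot send SMS itself

    # Required if Synapse is to send registration/password-reset emails itself.
    email:
      smtp_host: smtp.example.com
      smtp_port: 587
      notif_from: "Your Matrix server <noreply@example.com>"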
Email templates
---------------

If you have configured a custom template directory with the ``email.template_dir`` option, be
aware that there are new templates regarding registration. ``registration.html`` and
``registration.txt`` have been added and contain the content that is sent to a client upon
registering via an email address.

``registration_success.html`` and ``registration_failure.html`` are also new HTML templates
that will be shown to the user when they click the link in their registration email, either
showing them a success or failure page (assuming a redirect URL is not configured).

Synapse will expect these files to exist inside the configured template directory. To view the
default templates, see `synapse/res/templates
<https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.

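If you point ``email.template_dir`` at your own directory, the per-template options from the sample config in this changeset can also be overridden. A sketch, with illustrative paths and filenames only:

.. code:: yaml

    email:
      template_dir: /path/to/custom/templates
      registration_template_html: registration.html
      registration_template_text: registration.txt
      registration_template_success_html: registration_success.html
      registration_template_failure_html: registration_failure.html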
Upgrading to v1.2.0
===================

@@ -132,6 +182,19 @@ server for password resets, set ``trust_identity_server_for_password_resets`` to
See the `sample configuration file <docs/sample_config.yaml>`_
for more details on these settings.

New email templates
-------------------
Some new templates have been added to the default template directory for the purpose of the
homeserver sending its own password reset emails. If you have configured a custom
``template_dir`` in your Synapse config, these files will need to be added.

``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates
respectively that contain the contents of what will be emailed to the user upon attempting to
reset their password via email. ``password_reset_success.html`` and
``password_reset_failure.html`` are HTML files whose content (assuming no redirect URL is set)
will be shown to the user after they click the link in the email sent to them.

Upgrading to v0.99.0
====================

changelog.d/5835.feature (1, new file)
@@ -0,0 +1 @@
Add the ability to send registration emails from the homeserver rather than delegating to an identity server.

changelog.d/5853.feature (1, new file)
@@ -0,0 +1 @@
Opentracing for device list updates.

changelog.d/5868.feature (1, new file)
@@ -0,0 +1 @@
Add `m.require_identity_server` key to `/versions`'s `unstable_features` section.

changelog.d/5875.misc (1, new file)
@@ -0,0 +1 @@
Deprecate the `trusted_third_party_id_servers` option.

changelog.d/5876.feature (1, new file)
@@ -0,0 +1 @@
Replace `trust_identity_server_for_password_resets` config option with `account_threepid_delegates`.

changelog.d/5892.misc (1, new file)
@@ -0,0 +1 @@
Compatibility with v2 Identity Service APIs other than /lookup.

changelog.d/5897.feature (1, new file)
@@ -0,0 +1 @@
Switch to using the v2 Identity Service `/lookup` API where available, with fallback to v1. (Implements [MSC2134](https://github.com/matrix-org/matrix-doc/pull/2134) plus id_access_token authentication for v2 Identity Service APIs from [MSC2140](https://github.com/matrix-org/matrix-doc/pull/2140)).

changelog.d/5915.bugfix (1, new file)
@@ -0,0 +1 @@
Fix 404 for thumbnail download when `dynamic_thumbnails` is `false` and the thumbnail was dynamically generated. Fix reported by rkfg.

(deleted changelog file)
@@ -1 +0,0 @@
Add temporary flag to /versions in unstable_features to indicate this Synapse supports receiving id_access_token parameters on calls to identity server-proxying endpoints.

changelog.d/5931.misc (1, new file)
@@ -0,0 +1 @@
Remove unnecessary parentheses in return statements.

changelog.d/5934.feature (1, new file)
@@ -0,0 +1 @@
Redact events in the database that have been redacted for a month.

changelog.d/5940.feature (1, new file)
@@ -0,0 +1 @@
Add the ability to send registration emails from the homeserver rather than delegating to an identity server.

changelog.d/5943.misc (1, new file)
@@ -0,0 +1 @@
Move Buildkite pipeline config to the pipelines repo.

changelog.d/5953.misc (1, new file)
@@ -0,0 +1 @@
Update INSTALL.md to say that Python 2 is no longer supported.

changelog.d/5962.misc (1, new file)
@@ -0,0 +1 @@
Remove unnecessary return statements in the codebase which were the result of a regex run.

changelog.d/5963.misc (1, new file)
@@ -0,0 +1 @@
Remove left-over methods from C/S registration API.

changelog.d/5964.feature (1, new file)
@@ -0,0 +1 @@
Remove `bind_email` and `bind_msisdn` parameters from /register per MSC2140.

changelog.d/5966.bugfix (1, new file)
@@ -0,0 +1 @@
Fix admin API for listing media in a room not being available with an external media repo.

changelog.d/5967.bugfix (1, new file)
@@ -0,0 +1 @@
Fix list media admin API always returning an error.

changelog.d/5969.feature (1, new file)
@@ -0,0 +1 @@
Replace `trust_identity_server_for_password_resets` config option with `account_threepid_delegates`.

changelog.d/5970.docker (1, new file)
@@ -0,0 +1 @@
Avoid changing UID/GID if they are already correct.

changelog.d/5971.bugfix (1, new file)
@@ -0,0 +1 @@
Fix room and user stats tracking.

changelog.d/5975.misc (1, new file)
@@ -0,0 +1 @@
Cleanup event auth type initialisation.

changelog.d/5980.feature (1, new file)
@@ -0,0 +1 @@
Add POST /_matrix/client/r0/account/3pid/unbind endpoint from MSC2140 for unbinding a 3PID from an identity server without removing it from the homeserver user account.

changelog.d/5981.feature (1, new file)
@@ -0,0 +1 @@
Setting metrics_flags.known_servers to True in the configuration will publish the synapse_federation_known_servers metric over Prometheus. This represents the total number of servers your server knows about (i.e. is in rooms with), including itself.

changelog.d/5982.bugfix (1, new file)
@@ -0,0 +1 @@
Include missing opentracing contexts in outbound replication requests.

changelog.d/5983.feature (1, new file)
@@ -0,0 +1 @@
Add minimum opentracing for client servlets.

changelog.d/5984.bugfix (1, new file)
@@ -0,0 +1 @@
Fix sending of EDUs when opentracing is enabled with an empty whitelist.

changelog.d/5985.feature (1, new file)
@@ -0,0 +1 @@
Check at setup that opentracing is installed if it's enabled in the config.

changelog.d/5986.feature (1, new file)
@@ -0,0 +1 @@
Trace replication send times.

changelog.d/5988.bugfix (1, new file)
@@ -0,0 +1 @@
Fix invalid references to None while opentracing if the log context slips.

changelog.d/5989.misc (1, new file)
@@ -0,0 +1 @@
Clean up dependency checking at setup.

changelog.d/5991.bugfix (1, new file)
@@ -0,0 +1 @@
Fix invalid references to None while opentracing if the log context slips.

changelog.d/5993.feature (1, new file)
@@ -0,0 +1 @@
Add the ability to send registration emails from the homeserver rather than delegating to an identity server.

changelog.d/5994.feature (1, new file)
@@ -0,0 +1 @@
Add the ability to send registration emails from the homeserver rather than delegating to an identity server.

changelog.d/5995.bugfix (1, new file)
@@ -0,0 +1 @@
Return a M_MISSING_PARAM if `sid` is not provided to `/account/3pid`.

changelog.d/5998.bugfix (1, new file)
@@ -0,0 +1 @@
Fix room and user stats tracking.

changelog.d/6003.misc (1, new file)
@@ -0,0 +1 @@
Add opentracing span over HTTP push processing.

changelog.d/6004.bugfix (1, new file)
@@ -0,0 +1 @@
Only count real users when checking for auto-creation of auto-join room.

changelog.d/6005.feature (1, new file)
@@ -0,0 +1 @@
The new Prometheus metric `synapse_build_info` exposes the Python version, OS version, and Synapse version of the running server.

changelog.d/6009.misc (1, new file)
@@ -0,0 +1 @@
Small refactor of function arguments and docstrings in RoomMemberHandler.

changelog.d/6010.misc (1, new file)
@@ -0,0 +1 @@
Remove unused `origin` argument on FederationHandler.add_display_name_to_third_party_invite.

changelog.d/6011.feature (1, new file)
@@ -0,0 +1 @@
Use account_threepid_delegate.email and account_threepid_delegate.msisdn for validating threepid sessions.

changelog.d/6012.feature (1, new file)
@@ -0,0 +1 @@
Add report_stats_endpoint option to configure where stats are reported to, if enabled. Contributed by @Sorunome.

changelog.d/6013.misc (1, new file)
@@ -0,0 +1 @@
Compatibility with v2 Identity Service APIs other than /lookup.

changelog.d/6015.feature (1, new file)
@@ -0,0 +1 @@
Add config option to increase ratelimits for room admins redacting messages.

changelog.d/6017.misc (1, new file)
@@ -0,0 +1 @@
Clean up some code in the retry logic.

changelog.d/6020.bugfix (1, new file)
@@ -0,0 +1 @@
Ensure support users can be registered even if MAU limit is reached.

changelog.d/6023.misc (1, new file)
@@ -0,0 +1 @@
Fix the structured logging tests stomping on the global log configuration for subsequent tests.

changelog.d/6024.bugfix (1, new file)
@@ -0,0 +1 @@
Fix bug where login error was shown incorrectly on SSO fallback login.

changelog.d/6025.bugfix (1, new file)
@@ -0,0 +1 @@
Fix bug in calculating the federation retry backoff period.

changelog.d/6026.feature (1, new file)
@@ -0,0 +1 @@
Stop sending federation transactions to servers which have been down for a long time.

changelog.d/6032.misc (1, new file)
@@ -0,0 +1 @@
Add developer documentation for using SAML2.

@@ -37,6 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException

CONFIG_JSON = "cmdclient_config.json"

# TODO: The concept of trusted identity servers has been deprecated. This option and checks
# should be removed
TRUSTED_ID_SERVERS = ["localhost:8001"]


@@ -268,6 +270,7 @@ class SynapseCmd(cmd.Cmd):

    @defer.inlineCallbacks
    def _do_emailrequest(self, args):
        # TODO: Update to use v2 Identity Service API endpoint
        url = (
            self._identityServerUrl()
            + "/_matrix/identity/api/v1/validate/email/requestToken"

@@ -302,6 +305,7 @@ class SynapseCmd(cmd.Cmd):

    @defer.inlineCallbacks
    def _do_emailvalidate(self, args):
        # TODO: Update to use v2 Identity Service API endpoint
        url = (
            self._identityServerUrl()
            + "/_matrix/identity/api/v1/validate/email/submitToken"

@@ -330,6 +334,7 @@ class SynapseCmd(cmd.Cmd):

    @defer.inlineCallbacks
    def _do_3pidbind(self, args):
        # TODO: Update to use v2 Identity Service API endpoint
        url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"

        json_res = yield self.http_client.do_request(

@@ -398,6 +403,7 @@ class SynapseCmd(cmd.Cmd):
    @defer.inlineCallbacks
    def _do_invite(self, roomid, userstring):
        if not userstring.startswith("@") and self._is_on("complete_usernames"):
            # TODO: Update to use v2 Identity Service API endpoint
            url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"

            json_res = yield self.http_client.do_request(

@@ -407,6 +413,7 @@ class SynapseCmd(cmd.Cmd):
        mxid = None

        if "mxid" in json_res and "signatures" in json_res:
            # TODO: Update to use v2 Identity Service API endpoint
            url = (
                self._identityServerUrl()
                + "/_matrix/identity/api/v1/pubkey/ed25519"

@@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
        config_dir (str): where to put generated config files
        config_path (str): where to put the main config file
        environ (dict): environment dictionary
        ownership (str): "<user>:<group>" string which will be used to set
            ownership of the generated configs
        ownership (str|None): "<user>:<group>" string which will be used to set
            ownership of the generated configs. If None, ownership will not change.
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in environ:

@@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
        log("Generating log config file " + log_config_file)
        convert("/conf/log.config", log_config_file, environ)

    subprocess.check_output(["chown", "-R", ownership, "/data"])

    # Hopefully we already have a signing key, but generate one if not.
    subprocess.check_output(
        [
            "su-exec",
            ownership,
            "python",
            "-m",
            "synapse.app.homeserver",
            "--config-path",
            config_path,
            # tell synapse to put generated keys in /data rather than /compiled
            "--keys-directory",
            config_dir,
            "--generate-keys",
        ]
    )
    args = [
        "python",
        "-m",
        "synapse.app.homeserver",
        "--config-path",
        config_path,
        # tell synapse to put generated keys in /data rather than /compiled
        "--keys-directory",
        config_dir,
        "--generate-keys",
    ]

    if ownership is not None:
        subprocess.check_output(["chown", "-R", ownership, "/data"])
        args = ["su-exec", ownership] + args

    subprocess.check_output(args)


def run_generate_config(environ, ownership):

@@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):

    Args:
        environ (dict): env var dict
        ownership (str): "userid:groupid" arg for chmod
        ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.

    Never returns.
    """

@@ -149,9 +149,6 @@ def run_generate_config(environ, ownership):
    log("Creating log config %s" % (log_config_file,))
    convert("/conf/log.config", log_config_file, environ)

    # make sure that synapse has perms to write to the data dir.
    subprocess.check_output(["chown", ownership, data_dir])

    args = [
        "python",
        "-m",

@@ -170,12 +167,33 @@ def run_generate_config(environ, ownership):
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
    os.execv("/usr/local/bin/python", args)

    if ownership is not None:
        args = ["su-exec", ownership] + args
        os.execv("/sbin/su-exec", args)

        # make sure that synapse has perms to write to the data dir.
        subprocess.check_output(["chown", ownership, data_dir])
    else:
        os.execv("/usr/local/bin/python", args)


def main(args, environ):
    mode = args[1] if len(args) > 1 else None
    ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
    desired_uid = int(environ.get("UID", "991"))
    desired_gid = int(environ.get("GID", "991"))
    if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
        ownership = None
    else:
        ownership = "{}:{}".format(desired_uid, desired_gid)

    log(
        "Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
        % (os.getuid(), os.getgid(), desired_uid, desired_gid)
    )

    if ownership is None:
        log("Will not perform chmod/su-exec as UserID already matches request")

    # In generate mode, generate a configuration and missing keys, then exit
    if mode == "generate":

@@ -227,16 +245,12 @@ def main(args, environ):

    log("Starting synapse with config file " + config_path)

    args = [
        "su-exec",
        ownership,
        "python",
        "-m",
        "synapse.app.homeserver",
        "--config-path",
        config_path,
    ]
    os.execv("/sbin/su-exec", args)
    args = ["python", "-m", "synapse.app.homeserver", "--config-path", config_path]
    if ownership is not None:
        args = ["su-exec", ownership] + args
        os.execv("/sbin/su-exec", args)
    else:
        os.execv("/usr/local/bin/python", args)


if __name__ == "__main__":

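The practical effect of the `docker/start.py` changes above is that `chown`/`su-exec` are skipped when the container is already running as the requested UID/GID. A docker-compose sketch of how the relevant environment variables might be supplied; the image tag and host paths are illustrative and not taken from this changeset:

```yaml
version: "3"
services:
  synapse:
    image: matrixdotorg/synapse:latest   # assumed image name, not part of this diff
    environment:
      SYNAPSE_SERVER_NAME: example.com
      SYNAPSE_REPORT_STATS: "yes"
      UID: "991"   # start.py compares these against the UID/GID the process runs as
      GID: "991"   # and only chowns /data and re-execs via su-exec when they differ
    volumes:
      - ./data:/data
```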
docs/dev/saml.md (37, new file)
@@ -0,0 +1,37 @@
# How to test SAML as a developer without a server

https://capriza.github.io/samling/samling.html (https://github.com/capriza/samling) is a great
resource for being able to tinker with the SAML options within Synapse without needing to
deploy and configure a complicated software stack.

To make Synapse (and therefore Riot) use it:

1. Use the samling.html URL above or deploy your own and visit the IdP Metadata tab.
2. Copy the XML to your clipboard.
3. On your Synapse server, create a new file `samling.xml` next to your `homeserver.yaml` with
   the XML from step 2 as the contents.
4. Edit your `homeserver.yaml` to include:
   ```yaml
   saml2_config:
     sp_config:
       allow_unknown_attributes: true  # Works around a bug with AVA Hashes: https://github.com/IdentityPython/pysaml2/issues/388
       metadata:
         local: ["samling.xml"]
   ```
5. Run `apt-get install xmlsec1` and `pip install --upgrade --force 'pysaml2>=4.5.0'` to ensure
   the dependencies are installed and ready to go.
6. Restart Synapse.

Then in Riot:

1. Visit the login page with a Riot pointing at your homeserver.
2. Click the Single Sign-On button.
3. On the samling page, enter a Name Identifier and add a SAML Attribute for `uid=your_localpart`.
   The response must also be signed.
4. Click "Next".
5. Click "Post Response" (change nothing).
6. You should be logged in.

If you try and repeat this process, you may be automatically logged in using the information you
gave previously. To fix this, open your developer console (`F12` or `Ctrl+Shift+I`) while on the
samling page and clear the site data. In Chrome, this will be a button on the Application tab.

docs/room_and_user_statistics.md (62, new file)
@@ -0,0 +1,62 @@
Room and User Statistics
========================

Synapse maintains room and user statistics (as well as a cache of room state),
in various tables. These can be used for administrative purposes but are also
used when generating the public room directory.


# Synapse Developer Documentation

## High-Level Concepts

### Definitions

* **subject**: Something we are tracking stats about – currently a room or user.
* **current row**: An entry for a subject in the appropriate current statistics
  table. Each subject can have only one.
* **historical row**: An entry for a subject in the appropriate historical
  statistics table. Each subject can have any number of these.

### Overview

Stats are maintained as time series. There are two kinds of column:

* absolute columns – where the value is correct for the time given by `end_ts`
  in the stats row. (Imagine a line graph for these values)
  * They can also be thought of as 'gauges' in Prometheus, if you are familiar.
* per-slice columns – where the value corresponds to how many of the occurrences
  occurred within the time slice given by `(end_ts − bucket_size)…end_ts`
  or `start_ts…end_ts`. (Imagine a histogram for these values)

Stats are maintained in two tables (for each type): current and historical.

Current stats correspond to the present values. Each subject can only have one
entry.

Historical stats correspond to values in the past. Subjects may have multiple
entries.
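As an illustration of the two kinds of column, a hypothetical historical row for a room might look like this; only `end_ts` and `bucket_size` are names taken from this document, the other field names are invented for the example:

```yaml
end_ts: 1569888000000   # the row is valid as of this timestamp (ms)
bucket_size: 3600000    # the slice covered is (end_ts - bucket_size)…end_ts, i.e. one hour
member_count: 42        # absolute column: the value as of end_ts (a Prometheus-style gauge)
events_sent: 17         # per-slice column: how many events occurred within this one slice
```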
## Concepts around the management of stats

### Current rows

Current rows contain the most up-to-date statistics for a room.
They only contain absolute columns.

### Historical rows

Historical rows can always be considered to be valid for the time slice and
end time specified.

* historical rows will not exist for every time slice – they will be omitted
  if there were no changes. In this case, the following assumptions can be
  made to interpolate/recreate missing rows:
    - absolute fields have the same values as in the preceding row
    - per-slice fields are zero (`0`)
* historical rows will not be retained forever – rows older than a configurable
  time will be purged.

#### Purge

The purging of historical rows is not yet implemented.

@@ -306,6 +306,13 @@ listeners:
#
#allow_per_room_profiles: false

# How long to keep redacted events in unredacted form in the database. After
# this period redacted events get replaced with their redacted form in the DB.
#
# Defaults to `7d`. Set to `null` to disable.
#
redaction_retention_period: 7d


## TLS ##

@@ -511,6 +518,9 @@ log_config: "CONFDIR/SERVERNAME.log.config"
#   - one for login that ratelimits login requests based on the account the
#     client is attempting to log into, based on the amount of failed login
#     attempts for this account.
#   - one for ratelimiting redactions by room admins. If this is not explicitly
#     set then it uses the same ratelimiting as per rc_message. This is useful
#     to allow room admins to deal with abuse quickly.
#
# The defaults are as shown below.
#

@@ -532,6 +542,10 @@ log_config: "CONFDIR/SERVERNAME.log.config"
#  failed_attempts:
#    per_second: 0.17
#    burst_count: 3
#
#rc_admin_redaction:
#  per_second: 1
#  burst_count: 50


# Ratelimiting settings for incoming federation

@@ -891,10 +905,42 @@ uploads_path: "DATADIR/uploads"
# Also defines the ID server which will be called when an account is
# deactivated (one will be picked arbitrarily).
#
# Note: This option is deprecated. Since v0.99.4, Synapse has tracked which identity
# server a 3PID has been bound to. For 3PIDs bound before then, Synapse runs a
# background migration script, informing itself that the identity server all of its
# 3PIDs have been bound to is likely one of the below.
#
# As of Synapse v1.4.0, all other functionality of this option has been deprecated, and
# it is now solely used for the purposes of the background migration script, and can be
# removed once it has run.
#trusted_third_party_id_servers:
#  - matrix.org
#  - vector.im

# Handle threepid (email/phone etc) registration and password resets through a set of
# *trusted* identity servers. Note that this allows the configured identity server to
# reset passwords for accounts!
#
# Be aware that if `email` is not set, and SMTP options have not been
# configured in the email config block, registration and user password resets via
# email will be globally disabled.
#
# Additionally, if `msisdn` is not set, registration and password resets via msisdn
# will be disabled regardless. This is due to Synapse currently not supporting any
# method of sending SMS messages on its own.
#
# To enable using an identity server for operations regarding a particular third-party
# identifier type, set the value to the URL of that identity server as shown in the
# examples below.
#
# Servers handling these requests must answer the `/requestToken` endpoints defined
# by the Matrix Identity Service API specification:
# https://matrix.org/docs/spec/identity_service/latest
#
account_threepid_delegates:
    #email: https://example.com     # Delegate email sending to matrix.org
    #msisdn: http://localhost:8090  # Delegate SMS sending to this local process

# Users who register on this homeserver will automatically be joined
# to these rooms
#

@@ -926,9 +972,24 @@ uploads_path: "DATADIR/uploads"
#sentry:
#  dsn: "..."

# Flags to enable Prometheus metrics which are not suitable to be
# enabled by default, either for performance reasons or limited use.
#
metrics_flags:
    # Publish synapse_federation_known_servers, a gauge of the number of
    # servers this homeserver knows about, including itself. May cause
    # performance problems on large homeservers.
    #
    #known_servers: true

# Whether or not to report anonymized homeserver usage statistics.
# report_stats: true|false

# The endpoint to report the anonymized homeserver usage statistics to.
# Defaults to https://matrix.org/report-usage-stats/push
#
#report_stats_endpoint: https://example.com/report-usage-stats/push


## API Configuration ##

@@ -1164,19 +1225,6 @@ password_config:
#   #
#   riot_base_url: "http://localhost/riot"
#
#   # Enable sending password reset emails via the configured, trusted
#   # identity servers
#   #
#   # IMPORTANT! This will give a malicious or overtaken identity server
#   # the ability to reset passwords for your users! Make absolutely sure
#   # that you want to do this! It is strongly recommended that password
#   # reset emails be sent by the homeserver instead
#   #
#   # If this option is set to false and SMTP options have not been
#   # configured, resetting user passwords via email will be disabled
#   #
#   #trust_identity_server_for_password_resets: false
#
#   # Configure the time that a validation email or text message code
#   # will expire after sending
#   #

@@ -1208,11 +1256,22 @@ password_config:
#   #password_reset_template_html: password_reset.html
#   #password_reset_template_text: password_reset.txt
#
#   # Templates for registration emails sent by the homeserver
#   #
#   #registration_template_html: registration.html
#   #registration_template_text: registration.txt
#
#   # Templates for password reset success and failure pages that a user
#   # will see after attempting to reset their password
#   #
#   #password_reset_template_success_html: password_reset_success.html
#   #password_reset_template_failure_html: password_reset_failure.html
#
#   # Templates for registration success and failure pages that a user
#   # will see after attempting to register using an email or phone
#   #
#   #registration_template_success_html: registration_success.html
#   #registration_template_failure_html: registration_failure.html


#password_providers:

mypy.ini (54, new file)
@@ -0,0 +1,54 @@
[mypy]
namespace_packages=True
plugins=mypy_zope:plugin
follow_imports=skip
mypy_path=stubs

[mypy-synapse.config.homeserver]
# this is a mess because of the metaclass shenanigans
ignore_errors = True

[mypy-zope]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-OpenSSL]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-unpaddedbase64]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-jaeger_client]
ignore_missing_imports = True

[mypy-jsonschema]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

@@ -25,7 +25,7 @@ from twisted.internet import defer
import synapse.logging.opentracing as opentracing
import synapse.types
from synapse import event_auth
from synapse.api.constants import EventTypes, JoinRules, Membership
from synapse.api.constants import EventTypes, JoinRules, Membership, UserTypes
from synapse.api.errors import (
    AuthError,
    Codes,

@@ -276,25 +276,25 @@ class Auth(object):
            self.get_access_token_from_request(request)
        )
        if app_service is None:
            return (None, None)
            return None, None

        if app_service.ip_range_whitelist:
            ip_address = IPAddress(self.hs.get_ip_from_request(request))
            if ip_address not in app_service.ip_range_whitelist:
                return (None, None)
                return None, None

        if b"user_id" not in request.args:
            return (app_service.sender, app_service)
            return app_service.sender, app_service

        user_id = request.args[b"user_id"][0].decode("utf8")
        if app_service.sender == user_id:
            return (app_service.sender, app_service)
            return app_service.sender, app_service

        if not app_service.is_interested_in_user(user_id):
            raise AuthError(403, "Application service cannot masquerade as this user.")
        if not (yield self.store.get_user_by_id(user_id)):
            raise AuthError(403, "Application service has not registered this user")
        return (user_id, app_service)
        return user_id, app_service

    @defer.inlineCallbacks
    def get_user_by_access_token(self, token, rights="access"):

@@ -694,7 +694,7 @@ class Auth(object):
            #  * The user is a guest user, and has joined the room
            # else it will throw.
            member_event = yield self.check_user_was_in_room(room_id, user_id)
            return (member_event.membership, member_event.event_id)
            return member_event.membership, member_event.event_id
        except AuthError:
            visibility = yield self.state.get_current_state(
                room_id, EventTypes.RoomHistoryVisibility, ""

@@ -703,14 +703,13 @@ class Auth(object):
                visibility
                and visibility.content["history_visibility"] == "world_readable"
            ):
                return (Membership.JOIN, None)
                return
                return Membership.JOIN, None
            raise AuthError(
                403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
            )

    @defer.inlineCallbacks
    def check_auth_blocking(self, user_id=None, threepid=None):
    def check_auth_blocking(self, user_id=None, threepid=None, user_type=None):
        """Checks if the user should be rejected for some external reason,
        such as monthly active user limiting or global disable flag

@@ -723,6 +722,9 @@ class Auth(object):
                with a MAU blocked server, normally they would be rejected but their
                threepid is on the reserved list. user_id and
                threepid should never be set at the same time.

            user_type(str|None): If present, is used to decide whether to check against
                certain blocking reasons like MAU.
        """

        # Never fail an auth check for the server notices users or support user

@@ -760,6 +762,10 @@ class Auth(object):
                self.hs.config.mau_limits_reserved_threepids, threepid
            ):
                return
            elif user_type == UserTypes.SUPPORT:
                # If the user does not exist yet and is of type "support",
                # allow registration. Support users are excluded from MAU checks.
                return
        # Else if there is no room in the MAU bucket, bail
        current_mau = yield self.store.get_monthly_active_count()
        if current_mau >= self.hs.config.max_mau_value:

@@ -119,7 +119,7 @@ class ClientReaderServer(HomeServer):
            KeyChangesServlet(self).register(resource)
            VoipRestServlet(self).register(resource)
            PushRuleRestServlet(self).register(resource)
            VersionsRestServlet().register(resource)
            VersionsRestServlet(self).register(resource)

            resources.update({"/_matrix/client": resource})

@@ -70,12 +70,12 @@ class PresenceStatusStubServlet(RestServlet):
        except HttpResponseException as e:
            raise e.to_synapse_error()

        return (200, result)
        return 200, result

    @defer.inlineCallbacks
    def on_PUT(self, request, user_id):
        yield self.auth.get_user_by_req(request)
        return (200, {})
        return 200, {}


class KeyUploadServlet(RestServlet):

@@ -126,11 +126,11 @@ class KeyUploadServlet(RestServlet):
                self.main_uri + request.uri.decode("ascii"), body, headers=headers
            )

            return (200, result)
            return 200, result
        else:
            # Just interested in counts.
            result = yield self.store.count_e2e_one_time_keys(user_id, device_id)
            return (200, {"one_time_key_counts": result})
            return 200, {"one_time_key_counts": result}


class FrontendProxySlavedStore(

@@ -561,10 +561,12 @@ def run(hs):

        stats["database_engine"] = hs.get_datastore().database_engine_name
        stats["database_server_version"] = hs.get_datastore().get_server_version()
        logger.info("Reporting stats to matrix.org: %s" % (stats,))
        logger.info(
            "Reporting stats to %s: %s" % (hs.config.report_stats_endpoint, stats)
        )
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push", stats
                hs.config.report_stats_endpoint, stats
            )
        except Exception as e:
            logger.warn("Error reporting stats: %s", e)

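The stats-reporting change above is driven by two `homeserver.yaml` options that appear in the sample config earlier in this changeset; a minimal sketch (the custom endpoint URL is a placeholder):

```yaml
# Whether or not to report anonymized homeserver usage statistics.
report_stats: true

# Where to send them; defaults to https://matrix.org/report-usage-stats/push.
#report_stats_endpoint: https://example.com/report-usage-stats/push
```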
@@ -107,7 +107,6 @@ class ApplicationServiceApi(SimpleHttpClient):
        except CodeMessageException as e:
            if e.code == 404:
                return False
                return
            logger.warning("query_user to %s received %s", uri, e.code)
        except Exception as ex:
            logger.warning("query_user to %s threw exception %s", uri, ex)

@@ -127,7 +126,6 @@ class ApplicationServiceApi(SimpleHttpClient):
            logger.warning("query_alias to %s received %s", uri, e.code)
            if e.code == 404:
                return False
                return
        except Exception as ex:
            logger.warning("query_alias to %s threw exception %s", uri, ex)
            return False

@@ -230,7 +228,6 @@ class ApplicationServiceApi(SimpleHttpClient):
            sent_transactions_counter.labels(service.id).inc()
            sent_events_counter.labels(service.id).inc(len(events))
            return True
            return
        except CodeMessageException as e:
            logger.warning("push_bulk to %s received %s", uri, e.code)
        except Exception as ex:

@@ -20,6 +20,7 @@ from __future__ import print_function
# This file can't be called email.py because if it is, we cannot:
import email.utils
import os
from enum import Enum

import pkg_resources

@@ -74,19 +75,48 @@ class EmailConfig(Config):
            "renew_at"
        )

        email_trust_identity_server_for_password_resets = email_config.get(
            "trust_identity_server_for_password_resets", False
        self.threepid_behaviour_email = (
            # Have Synapse handle the email sending if account_threepid_delegates.email
            # is not defined
            # msisdn is currently always remote while Synapse does not support any method of
            # sending SMS messages
            ThreepidBehaviour.REMOTE
            if self.account_threepid_delegate_email
            else ThreepidBehaviour.LOCAL
        )
        self.email_password_reset_behaviour = (
            "remote" if email_trust_identity_server_for_password_resets else "local"
        )
        self.password_resets_were_disabled_due_to_email_config = False
        if self.email_password_reset_behaviour == "local" and email_config == {}:
        # Prior to Synapse v1.4.0, there was another option that defined whether Synapse would
        # use an identity server to send password reset tokens on its behalf. We now warn the
        # user if they have this set and tell them to use the updated option, while using a
        # default identity server in the process.
        self.using_identity_server_from_trusted_list = False
        if (
            not self.account_threepid_delegate_email
            and config.get("trust_identity_server_for_password_resets", False) is True
        ):
            # Use the first entry in self.trusted_third_party_id_servers instead
            if self.trusted_third_party_id_servers:
                # XXX: It's a little confusing that account_threepid_delegate_email is modified
                # both in RegistrationConfig and here. We should factor this bit out
                self.account_threepid_delegate_email = self.trusted_third_party_id_servers[
                    0
                ]
                self.using_identity_server_from_trusted_list = True
            else:
                raise ConfigError(
                    "Attempted to use an identity server from"
                    '"trusted_third_party_id_servers" but it is empty.'
                )

        self.local_threepid_handling_disabled_due_to_email_config = False
        if (
            self.threepid_behaviour_email == ThreepidBehaviour.LOCAL
            and email_config == {}
        ):
            # We cannot warn the user this has happened here
            # Instead do so when a user attempts to reset their password
            self.password_resets_were_disabled_due_to_email_config = True
            self.local_threepid_handling_disabled_due_to_email_config = True

            self.email_password_reset_behaviour = "off"
            self.threepid_behaviour_email = ThreepidBehaviour.OFF

        # Get lifetime of a validation token in milliseconds
        self.email_validation_token_lifetime = self.parse_duration(

@@ -96,7 +126,7 @@ class EmailConfig(Config):
        if (
            self.email_enable_notifs
            or account_validity_renewal_enabled
            or self.email_password_reset_behaviour == "local"
            or self.threepid_behaviour_email == ThreepidBehaviour.LOCAL
        ):
            # make sure we can import the required deps
            import jinja2

@@ -106,7 +136,7 @@ class EmailConfig(Config):
            jinja2
            bleach

            if self.email_password_reset_behaviour == "local":
            if self.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
                required = ["smtp_host", "smtp_port", "notif_from"]

                missing = []

@@ -125,28 +155,45 @@ class EmailConfig(Config):
                    % (", ".join(missing),)
                )

            # Templates for password reset emails
            # These email templates have placeholders in them, and thus must be
            # parsed using a templating engine during a request
            self.email_password_reset_template_html = email_config.get(
                "password_reset_template_html", "password_reset.html"
            )
            self.email_password_reset_template_text = email_config.get(
                "password_reset_template_text", "password_reset.txt"
            )
            self.email_registration_template_html = email_config.get(
                "registration_template_html", "registration.html"
            )
            self.email_registration_template_text = email_config.get(
                "registration_template_text", "registration.txt"
            )
            self.email_password_reset_template_failure_html = email_config.get(
                "password_reset_template_failure_html", "password_reset_failure.html"
            )
            # This template does not support any replaceable variables, so we will
            # read it from the disk once during setup
            self.email_registration_template_failure_html = email_config.get(
                "registration_template_failure_html", "registration_failure.html"
            )

            # These templates do not support any placeholder variables, so we
            # will read them from disk once during setup
            email_password_reset_template_success_html = email_config.get(
                "password_reset_template_success_html", "password_reset_success.html"
            )
            email_registration_template_success_html = email_config.get(
                "registration_template_success_html", "registration_success.html"
            )

            # Check templates exist
            for f in [
                self.email_password_reset_template_html,
                self.email_password_reset_template_text,
                self.email_registration_template_html,
                self.email_registration_template_text,
                self.email_password_reset_template_failure_html,
                email_password_reset_template_success_html,
                email_registration_template_success_html,
            ]:
                p = os.path.join(self.email_template_dir, f)
                if not os.path.isfile(p):

@@ -156,9 +203,15 @@ class EmailConfig(Config):
            filepath = os.path.join(
                self.email_template_dir, email_password_reset_template_success_html
            )
            self.email_password_reset_template_success_html_content = self.read_file(
            self.email_password_reset_template_success_html = self.read_file(
                filepath, "email.password_reset_template_success_html"
            )
            filepath = os.path.join(
                self.email_template_dir, email_registration_template_success_html
            )
            self.email_registration_template_success_html_content = self.read_file(
                filepath, "email.registration_template_success_html"
            )

        if self.email_enable_notifs:
            required = [

@@ -239,19 +292,6 @@ class EmailConfig(Config):
        #   #
        #   riot_base_url: "http://localhost/riot"
        #
        #   # Enable sending password reset emails via the configured, trusted
        #   # identity servers
        #   #
        #   # IMPORTANT! This will give a malicious or overtaken identity server
        #   # the ability to reset passwords for your users! Make absolutely sure
        #   # that you want to do this! It is strongly recommended that password
        #   # reset emails be sent by the homeserver instead
        #   #
        #   # If this option is set to false and SMTP options have not been
|
||||
# # configured, resetting user passwords via email will be disabled
|
||||
# #
|
||||
# #trust_identity_server_for_password_resets: false
|
||||
#
|
||||
# # Configure the time that a validation email or text message code
|
||||
# # will expire after sending
|
||||
# #
|
||||
@@ -283,9 +323,35 @@ class EmailConfig(Config):
|
||||
# #password_reset_template_html: password_reset.html
|
||||
# #password_reset_template_text: password_reset.txt
|
||||
#
|
||||
# # Templates for registration emails sent by the homeserver
|
||||
# #
|
||||
# #registration_template_html: registration.html
|
||||
# #registration_template_text: registration.txt
|
||||
#
|
||||
# # Templates for password reset success and failure pages that a user
|
||||
# # will see after attempting to reset their password
|
||||
# #
|
||||
# #password_reset_template_success_html: password_reset_success.html
|
||||
# #password_reset_template_failure_html: password_reset_failure.html
|
||||
#
|
||||
# # Templates for registration success and failure pages that a user
|
||||
# # will see after attempting to register using an email or phone
|
||||
# #
|
||||
# #registration_template_success_html: registration_success.html
|
||||
# #registration_template_failure_html: registration_failure.html
|
||||
"""
|
||||
|
||||
|
||||
class ThreepidBehaviour(Enum):
    """
    Enum to define the behaviour of Synapse with regards to when it contacts an identity
    server for 3pid registration and password resets

    REMOTE = use an external server to send tokens
    LOCAL = send tokens ourselves
    OFF = disable registration via 3pid and password resets
    """

    REMOTE = "remote"
    LOCAL = "local"
    OFF = "off"
|
||||
|
||||
@@ -21,7 +21,12 @@ from string import Template
|
||||
|
||||
import yaml
|
||||
|
||||
from twisted.logger import STDLibLogObserver, globalLogBeginner
|
||||
from twisted.logger import (
|
||||
ILogObserver,
|
||||
LogBeginner,
|
||||
STDLibLogObserver,
|
||||
globalLogBeginner,
|
||||
)
|
||||
|
||||
import synapse
|
||||
from synapse.app import _base as appbase
|
||||
@@ -124,7 +129,7 @@ class LoggingConfig(Config):
|
||||
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))
|
||||
|
||||
|
||||
def _setup_stdlib_logging(config, log_config):
|
||||
def _setup_stdlib_logging(config, log_config, logBeginner: LogBeginner):
|
||||
"""
|
||||
Set up Python stdlib logging.
|
||||
"""
|
||||
@@ -165,12 +170,12 @@ def _setup_stdlib_logging(config, log_config):
|
||||
|
||||
return observer(event)
|
||||
|
||||
globalLogBeginner.beginLoggingTo(
|
||||
[_log], redirectStandardIO=not config.no_redirect_stdio
|
||||
)
|
||||
logBeginner.beginLoggingTo([_log], redirectStandardIO=not config.no_redirect_stdio)
|
||||
if not config.no_redirect_stdio:
|
||||
print("Redirected stdout/stderr to logs")
|
||||
|
||||
return observer
|
||||
|
||||
|
||||
def _reload_stdlib_logging(*args, log_config=None):
|
||||
logger = logging.getLogger("")
|
||||
@@ -181,7 +186,9 @@ def _reload_stdlib_logging(*args, log_config=None):
|
||||
logging.config.dictConfig(log_config)
|
||||
|
||||
|
||||
def setup_logging(hs, config, use_worker_options=False):
|
||||
def setup_logging(
|
||||
hs, config, use_worker_options=False, logBeginner: LogBeginner = globalLogBeginner
|
||||
) -> ILogObserver:
|
||||
"""
|
||||
Set up the logging subsystem.
|
||||
|
||||
@@ -191,6 +198,12 @@ def setup_logging(hs, config, use_worker_options=False):
|
||||
|
||||
use_worker_options (bool): True to use the 'worker_log_config' option
|
||||
instead of 'log_config'.
|
||||
|
||||
logBeginner: The Twisted logBeginner to use.
|
||||
|
||||
Returns:
|
||||
The "root" Twisted Logger observer, suitable for sending logs to from a
|
||||
Logger instance.
|
||||
"""
|
||||
log_config = config.worker_log_config if use_worker_options else config.log_config
|
||||
|
||||
@@ -210,10 +223,12 @@ def setup_logging(hs, config, use_worker_options=False):
|
||||
log_config_body = read_config()
|
||||
|
||||
if log_config_body and log_config_body.get("structured") is True:
|
||||
setup_structured_logging(hs, config, log_config_body)
|
||||
logger = setup_structured_logging(
|
||||
hs, config, log_config_body, logBeginner=logBeginner
|
||||
)
|
||||
appbase.register_sighup(read_config, callback=reload_structured_logging)
|
||||
else:
|
||||
_setup_stdlib_logging(config, log_config_body)
|
||||
logger = _setup_stdlib_logging(config, log_config_body, logBeginner=logBeginner)
|
||||
appbase.register_sighup(read_config, callback=_reload_stdlib_logging)
|
||||
|
||||
# make sure that the first thing we log is a thing we can grep backwards
|
||||
@@ -221,3 +236,5 @@ def setup_logging(hs, config, use_worker_options=False):
|
||||
logging.warn("***** STARTING SERVER *****")
|
||||
logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
|
||||
logging.info("Server hostname: %s", config.server_name)
|
||||
|
||||
return logger
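The logBeginner parameter threaded through _setup_stdlib_logging and setup_logging above exists so callers (notably tests) can supply their own log beginner instead of Twisted's global one. A Synapse-independent sketch of that injection pattern follows; FakeLogBeginner and setup_logging_sketch are hypothetical stand-ins, not real Synapse or Twisted APIs.

class FakeLogBeginner:
    """Hypothetical test double: records observers instead of wiring global logging."""

    def __init__(self):
        self.observers = []

    def beginLoggingTo(self, observers, redirectStandardIO=False):
        # Same method shape as the object injected into _setup_stdlib_logging above.
        self.observers.extend(observers)

def setup_logging_sketch(log_beginner):
    events = []

    def observer(event):
        events.append(event)

    log_beginner.beginLoggingTo([observer], redirectStandardIO=False)
    return observer, events

beginner = FakeLogBeginner()
observer, events = setup_logging_sketch(beginner)
observer({"log_text": "***** STARTING SERVER *****"})
assert beginner.observers == [observer] and len(events) == 1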
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015, 2016 OpenMarket Ltd
|
||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -13,26 +14,47 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import attr
|
||||
|
||||
from synapse.python_dependencies import DependencyException, check_requirements
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentry
|
||||
integration.
|
||||
"""
|
||||
|
||||
@attr.s
class MetricsFlags(object):
    known_servers = attr.ib(default=False, validator=attr.validators.instance_of(bool))

    @classmethod
    def all_off(cls):
        """
        Instantiate the flags with all options set to off.
        """
        return cls(**{x.name: False for x in attr.fields(cls)})
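A short usage sketch for the class above, assuming the attrs package is available (it is imported as attr in this file); the class body is repeated under an illustrative name so the snippet runs on its own.

import attr

@attr.s
class MetricsFlagsSketch(object):
    # Repeated from the diff above so the snippet is self-contained.
    known_servers = attr.ib(default=False, validator=attr.validators.instance_of(bool))

    @classmethod
    def all_off(cls):
        # attr.fields(cls) lists every attrs attribute, so any flag added later
        # is switched off here automatically.
        return cls(**{x.name: False for x in attr.fields(cls)})

# enable_metrics on: flags come straight from the "metrics_flags" config dict.
assert MetricsFlagsSketch(**{"known_servers": True}).known_servers is True
# enable_metrics off: everything is forced off.
assert MetricsFlagsSketch.all_off().known_servers is False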
|
||||
|
||||
|
||||
class MetricsConfig(Config):
|
||||
def read_config(self, config, **kwargs):
|
||||
self.enable_metrics = config.get("enable_metrics", False)
|
||||
self.report_stats = config.get("report_stats", None)
|
||||
self.report_stats_endpoint = config.get(
|
||||
"report_stats_endpoint", "https://matrix.org/report-usage-stats/push"
|
||||
)
|
||||
self.metrics_port = config.get("metrics_port")
|
||||
self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1")
|
||||
|
||||
if self.enable_metrics:
|
||||
_metrics_config = config.get("metrics_flags") or {}
|
||||
self.metrics_flags = MetricsFlags(**_metrics_config)
|
||||
else:
|
||||
self.metrics_flags = MetricsFlags.all_off()
|
||||
|
||||
self.sentry_enabled = "sentry" in config
|
||||
if self.sentry_enabled:
|
||||
try:
|
||||
import sentry_sdk # noqa F401
|
||||
except ImportError:
|
||||
raise ConfigError(MISSING_SENTRY)
|
||||
check_requirements("sentry")
|
||||
except DependencyException as e:
|
||||
raise ConfigError(e.message)
|
||||
|
||||
self.sentry_dsn = config["sentry"].get("dsn")
|
||||
if not self.sentry_dsn:
|
||||
@@ -58,6 +80,16 @@ class MetricsConfig(Config):
|
||||
#sentry:
|
||||
# dsn: "..."
|
||||
|
||||
# Flags to enable Prometheus metrics which are not suitable to be
|
||||
# enabled by default, either for performance reasons or limited use.
|
||||
#
|
||||
metrics_flags:
|
||||
# Publish synapse_federation_known_servers, a gauge of the number of
|
||||
# servers this homeserver knows about, including itself. May cause
|
||||
# performance problems on large homeservers.
|
||||
#
|
||||
#known_servers: true
|
||||
|
||||
# Whether or not to report anonymized homeserver usage statistics.
|
||||
"""
|
||||
|
||||
@@ -66,4 +98,10 @@ class MetricsConfig(Config):
|
||||
else:
|
||||
res += "report_stats: %s\n" % ("true" if report_stats else "false")
|
||||
|
||||
res += """
|
||||
# The endpoint to report the anonymized homeserver usage statistics to.
|
||||
# Defaults to https://matrix.org/report-usage-stats/push
|
||||
#
|
||||
#report_stats_endpoint: https://example.com/report-usage-stats/push
|
||||
"""
|
||||
return res
|
||||
|
||||
@@ -80,6 +80,12 @@ class RatelimitConfig(Config):
|
||||
"federation_rr_transactions_per_room_per_second", 50
|
||||
)
|
||||
|
||||
rc_admin_redaction = config.get("rc_admin_redaction")
|
||||
if rc_admin_redaction:
|
||||
self.rc_admin_redaction = RateLimitConfig(rc_admin_redaction)
|
||||
else:
|
||||
self.rc_admin_redaction = None
|
||||
|
||||
def generate_config_section(self, **kwargs):
|
||||
return """\
|
||||
## Ratelimiting ##
|
||||
@@ -102,6 +108,9 @@ class RatelimitConfig(Config):
|
||||
# - one for login that ratelimits login requests based on the account the
|
||||
# client is attempting to log into, based on the amount of failed login
|
||||
# attempts for this account.
|
||||
# - one for ratelimiting redactions by room admins. If this is not explicitly
|
||||
# set then it uses the same ratelimiting as per rc_message. This is useful
|
||||
# to allow room admins to deal with abuse quickly.
|
||||
#
|
||||
# The defaults are as shown below.
|
||||
#
|
||||
@@ -123,6 +132,10 @@ class RatelimitConfig(Config):
|
||||
# failed_attempts:
|
||||
# per_second: 0.17
|
||||
# burst_count: 3
|
||||
#
|
||||
#rc_admin_redaction:
|
||||
# per_second: 1
|
||||
# burst_count: 50
|
||||
|
||||
|
||||
# Ratelimiting settings for incoming federation
|
||||
|
||||
@@ -99,6 +99,10 @@ class RegistrationConfig(Config):
|
||||
self.trusted_third_party_id_servers = config.get(
|
||||
"trusted_third_party_id_servers", ["matrix.org", "vector.im"]
|
||||
)
|
||||
account_threepid_delegates = config.get("account_threepid_delegates") or {}
|
||||
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
|
||||
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
|
||||
|
||||
self.default_identity_server = config.get("default_identity_server")
|
||||
self.allow_guest_access = config.get("allow_guest_access", False)
|
||||
|
||||
@@ -257,10 +261,42 @@ class RegistrationConfig(Config):
|
||||
# Also defines the ID server which will be called when an account is
|
||||
# deactivated (one will be picked arbitrarily).
|
||||
#
|
||||
# Note: This option is deprecated. Since v0.99.4, Synapse has tracked which identity
|
||||
# server a 3PID has been bound to. For 3PIDs bound before then, Synapse runs a
|
||||
# background migration script, informing itself that the identity server all of its
|
||||
# 3PIDs have been bound to is likely one of the below.
|
||||
#
|
||||
# As of Synapse v1.4.0, all other functionality of this option has been deprecated, and
|
||||
# it is now solely used for the purposes of the background migration script, and can be
|
||||
# removed once it has run.
|
||||
#trusted_third_party_id_servers:
|
||||
# - matrix.org
|
||||
# - vector.im
|
||||
|
||||
# Handle threepid (email/phone etc) registration and password resets through a set of
|
||||
# *trusted* identity servers. Note that this allows the configured identity server to
|
||||
# reset passwords for accounts!
|
||||
#
|
||||
# Be aware that if `email` is not set, and SMTP options have not been
|
||||
# configured in the email config block, registration and user password resets via
|
||||
# email will be globally disabled.
|
||||
#
|
||||
# Additionally, if `msisdn` is not set, registration and password resets via msisdn
|
||||
# will be disabled regardless. This is due to Synapse currently not supporting any
|
||||
# method of sending SMS messages on its own.
|
||||
#
|
||||
# To enable using an identity server for operations regarding a particular third-party
|
||||
# identifier type, set the value to the URL of that identity server as shown in the
|
||||
# examples below.
|
||||
#
|
||||
# Servers handling these requests must answer the `/requestToken` endpoints defined
|
||||
# by the Matrix Identity Service API specification:
|
||||
# https://matrix.org/docs/spec/identity_service/latest
|
||||
#
|
||||
account_threepid_delegates:
|
||||
#email: https://example.com # Delegate email sending to matrix.org
|
||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||
|
||||
# Users who register on this homeserver will automatically be joined
|
||||
# to these rooms
|
||||
#
|
||||
|
||||
@@ -16,6 +16,7 @@
|
||||
import os
|
||||
from collections import namedtuple
|
||||
|
||||
from synapse.python_dependencies import DependencyException, check_requirements
|
||||
from synapse.util.module_loader import load_module
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
@@ -34,17 +35,6 @@ THUMBNAIL_SIZE_YAML = """\
|
||||
# method: %(method)s
|
||||
"""
|
||||
|
||||
MISSING_NETADDR = "Missing netaddr library. This is required for URL preview API."
|
||||
|
||||
MISSING_LXML = """Missing lxml library. This is required for URL preview API.
|
||||
|
||||
Install by running:
|
||||
pip install lxml
|
||||
|
||||
Requires libxslt1-dev system package.
|
||||
"""
|
||||
|
||||
|
||||
ThumbnailRequirement = namedtuple(
|
||||
"ThumbnailRequirement", ["width", "height", "method", "media_type"]
|
||||
)
|
||||
@@ -171,16 +161,10 @@ class ContentRepositoryConfig(Config):
|
||||
self.url_preview_enabled = config.get("url_preview_enabled", False)
|
||||
if self.url_preview_enabled:
|
||||
try:
|
||||
import lxml
|
||||
check_requirements("url_preview")
|
||||
|
||||
lxml # To stop unused lint.
|
||||
except ImportError:
|
||||
raise ConfigError(MISSING_LXML)
|
||||
|
||||
try:
|
||||
from netaddr import IPSet
|
||||
except ImportError:
|
||||
raise ConfigError(MISSING_NETADDR)
|
||||
except DependencyException as e:
|
||||
raise ConfigError(e.message)
|
||||
|
||||
if "url_preview_ip_range_blacklist" not in config:
|
||||
raise ConfigError(
|
||||
@@ -189,6 +173,9 @@ class ContentRepositoryConfig(Config):
|
||||
"to work"
|
||||
)
|
||||
|
||||
# netaddr is a dependency for url_preview
|
||||
from netaddr import IPSet
|
||||
|
||||
self.url_preview_ip_range_blacklist = IPSet(
|
||||
config["url_preview_ip_range_blacklist"]
|
||||
)
|
||||
|
||||
@@ -162,6 +162,16 @@ class ServerConfig(Config):
|
||||
|
||||
self.mau_trial_days = config.get("mau_trial_days", 0)
|
||||
|
||||
# How long to keep redacted events in the database in unredacted form
|
||||
# before redacting them.
|
||||
redaction_retention_period = config.get("redaction_retention_period", "7d")
|
||||
if redaction_retention_period is not None:
|
||||
self.redaction_retention_period = self.parse_duration(
|
||||
redaction_retention_period
|
||||
)
|
||||
else:
|
||||
self.redaction_retention_period = None
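redaction_retention_period accepts a duration string such as "7d", or null to keep redacted events in unredacted form indefinitely. The stand-in parser below only illustrates the millisecond convention used here; it is not Synapse's actual parse_duration, which handles more unit suffixes.

# Simplified stand-in returning milliseconds.
_UNITS_MS = {"s": 1000, "m": 60 * 1000, "h": 60 * 60 * 1000, "d": 24 * 60 * 60 * 1000}

def parse_duration_sketch(value):
    if value is None:
        return None  # null in the config -> keep redacted events forever
    value = str(value)
    if value[-1] in _UNITS_MS:
        return int(value[:-1]) * _UNITS_MS[value[-1]]
    return int(value)  # bare numbers are treated as milliseconds

assert parse_duration_sketch("7d") == 7 * 24 * 60 * 60 * 1000
assert parse_duration_sketch(None) is None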
|
||||
|
||||
# Options to disable HS
|
||||
self.hs_disabled = config.get("hs_disabled", False)
|
||||
self.hs_disabled_message = config.get("hs_disabled_message", "")
|
||||
@@ -718,6 +728,13 @@ class ServerConfig(Config):
|
||||
# Defaults to 'true'.
|
||||
#
|
||||
#allow_per_room_profiles: false
|
||||
|
||||
# How long to keep redacted events in unredacted form in the database. After
|
||||
# this period redacted events get replaced with their redacted form in the DB.
|
||||
#
|
||||
# Defaults to `7d`. Set to `null` to disable.
|
||||
#
|
||||
redaction_retention_period: 7d
|
||||
"""
|
||||
% locals()
|
||||
)
|
||||
|
||||
@@ -27,19 +27,16 @@ class StatsConfig(Config):
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
self.stats_enabled = True
|
||||
self.stats_bucket_size = 86400
|
||||
self.stats_bucket_size = 86400 * 1000
|
||||
self.stats_retention = sys.maxsize
|
||||
stats_config = config.get("stats", None)
|
||||
if stats_config:
|
||||
self.stats_enabled = stats_config.get("enabled", self.stats_enabled)
|
||||
self.stats_bucket_size = (
|
||||
self.parse_duration(stats_config.get("bucket_size", "1d")) / 1000
|
||||
self.stats_bucket_size = self.parse_duration(
|
||||
stats_config.get("bucket_size", "1d")
|
||||
)
|
||||
self.stats_retention = (
|
||||
self.parse_duration(
|
||||
stats_config.get("retention", "%ds" % (sys.maxsize,))
|
||||
)
|
||||
/ 1000
|
||||
self.stats_retention = self.parse_duration(
|
||||
stats_config.get("retention", "%ds" % (sys.maxsize,))
|
||||
)
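The effect of the stats change above is that bucket_size and retention now stay in the milliseconds returned by parse_duration rather than being divided down to seconds, matching the new 86400 * 1000 default. A tiny illustration of the unit shift:

DAY_MS = 24 * 60 * 60 * 1000

# Old behaviour: store seconds (parse_duration result divided by 1000).
old_bucket_size = DAY_MS // 1000   # 86400
# New behaviour: keep the millisecond value as-is.
new_bucket_size = DAY_MS           # 86400000, i.e. the new 86400 * 1000 default
assert new_bucket_size == old_bucket_size * 1000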
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.python_dependencies import DependencyException, check_requirements
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
|
||||
@@ -32,6 +34,11 @@ class TracerConfig(Config):
|
||||
if not self.opentracer_enabled:
|
||||
return
|
||||
|
||||
try:
|
||||
check_requirements("opentracing")
|
||||
except DependencyException as e:
|
||||
raise ConfigError(e.message)
|
||||
|
||||
# The tracer is enabled so sanitize the config
|
||||
|
||||
self.opentracer_whitelist = opentracing_config.get("homeserver_whitelist", [])
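Several config classes in this changeset replace ad-hoc import checks with check_requirements and convert the resulting DependencyException into a ConfigError. The sketch below mirrors that pattern with stand-in classes; the real helpers live in synapse.python_dependencies and synapse.config._base.

class DependencyExceptionSketch(Exception):
    @property
    def message(self):
        return str(self)

class ConfigErrorSketch(Exception):
    pass

def check_requirements_sketch(extra, installed=()):
    # Stand-in for synapse.python_dependencies.check_requirements.
    if extra not in installed:
        raise DependencyExceptionSketch("Missing packages for the '%s' extra" % (extra,))

def read_tracer_config_sketch(enabled):
    if not enabled:
        return
    try:
        check_requirements_sketch("opentracing", installed=())
    except DependencyExceptionSketch as e:
        # Surface the missing-dependency message as a configuration error.
        raise ConfigErrorSketch(e.message)

try:
    read_tracer_config_sketch(True)
    raised = False
except ConfigErrorSketch as e:
    raised = "opentracing" in str(e)
assert raised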
|
||||
|
||||
@@ -83,7 +83,7 @@ def compute_content_hash(event_dict, hash_algorithm):
|
||||
event_json_bytes = encode_canonical_json(event_dict)
|
||||
|
||||
hashed = hash_algorithm(event_json_bytes)
|
||||
return (hashed.name, hashed.digest())
|
||||
return hashed.name, hashed.digest()
|
||||
|
||||
|
||||
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
|
||||
@@ -106,7 +106,7 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
|
||||
event_dict.pop("unsigned", None)
|
||||
event_json_bytes = encode_canonical_json(event_dict)
|
||||
hashed = hash_algorithm(event_json_bytes)
|
||||
return (hashed.name, hashed.digest())
|
||||
return hashed.name, hashed.digest()
|
||||
|
||||
|
||||
def compute_event_signature(event_dict, signature_name, signing_key):
|
||||
|
||||
@@ -637,11 +637,11 @@ def auth_types_for_event(event):
|
||||
if event.type == EventTypes.Create:
|
||||
return []
|
||||
|
||||
auth_types = []
|
||||
|
||||
auth_types.append((EventTypes.PowerLevels, ""))
|
||||
auth_types.append((EventTypes.Member, event.sender))
|
||||
auth_types.append((EventTypes.Create, ""))
|
||||
auth_types = [
|
||||
(EventTypes.PowerLevels, ""),
|
||||
(EventTypes.Member, event.sender),
|
||||
(EventTypes.Create, ""),
|
||||
]
|
||||
|
||||
if event.type == EventTypes.Member:
|
||||
membership = event.content["membership"]
|
||||
|
||||
@@ -355,7 +355,7 @@ class FederationClient(FederationBase):
|
||||
|
||||
auth_chain.sort(key=lambda e: e.depth)
|
||||
|
||||
return (pdus, auth_chain)
|
||||
return pdus, auth_chain
|
||||
except HttpResponseException as e:
|
||||
if e.code == 400 or e.code == 404:
|
||||
logger.info("Failed to use get_room_state_ids API, falling back")
|
||||
@@ -404,7 +404,7 @@ class FederationClient(FederationBase):
|
||||
|
||||
signed_auth.sort(key=lambda e: e.depth)
|
||||
|
||||
return (signed_pdus, signed_auth)
|
||||
return signed_pdus, signed_auth
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_events_from_store_or_dest(self, destination, room_id, event_ids):
|
||||
@@ -429,7 +429,7 @@ class FederationClient(FederationBase):
|
||||
missing_events.discard(k)
|
||||
|
||||
if not missing_events:
|
||||
return (signed_events, failed_to_fetch)
|
||||
return signed_events, failed_to_fetch
|
||||
|
||||
logger.debug(
|
||||
"Fetching unknown state/auth events %s for room %s",
|
||||
@@ -465,7 +465,7 @@ class FederationClient(FederationBase):
|
||||
# We removed all events we successfully fetched from `batch`
|
||||
failed_to_fetch.update(batch)
|
||||
|
||||
return (signed_events, failed_to_fetch)
|
||||
return signed_events, failed_to_fetch
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@log_function
|
||||
|
||||
@@ -100,7 +100,7 @@ class FederationServer(FederationBase):
|
||||
|
||||
res = self._transaction_from_pdus(pdus).get_dict()
|
||||
|
||||
return (200, res)
|
||||
return 200, res
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@log_function
|
||||
@@ -163,7 +163,7 @@ class FederationServer(FederationBase):
|
||||
yield self.transaction_actions.set_response(
|
||||
origin, transaction, 400, response
|
||||
)
|
||||
return (400, response)
|
||||
return 400, response
|
||||
|
||||
received_pdus_counter.inc(len(transaction.pdus))
|
||||
|
||||
@@ -265,7 +265,7 @@ class FederationServer(FederationBase):
|
||||
logger.debug("Returning: %s", str(response))
|
||||
|
||||
yield self.transaction_actions.set_response(origin, transaction, 200, response)
|
||||
return (200, response)
|
||||
return 200, response
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def received_edu(self, origin, edu_type, content):
|
||||
@@ -298,7 +298,7 @@ class FederationServer(FederationBase):
|
||||
event_id,
|
||||
)
|
||||
|
||||
return (200, resp)
|
||||
return 200, resp
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_state_ids_request(self, origin, room_id, event_id):
|
||||
@@ -315,7 +315,7 @@ class FederationServer(FederationBase):
|
||||
state_ids = yield self.handler.get_state_ids_for_pdu(room_id, event_id)
|
||||
auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids)
|
||||
|
||||
return (200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids})
|
||||
return 200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids}
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _on_context_state_request_compute(self, room_id, event_id):
|
||||
@@ -345,15 +345,15 @@ class FederationServer(FederationBase):
|
||||
pdu = yield self.handler.get_persisted_pdu(origin, event_id)
|
||||
|
||||
if pdu:
|
||||
return (200, self._transaction_from_pdus([pdu]).get_dict())
|
||||
return 200, self._transaction_from_pdus([pdu]).get_dict()
|
||||
else:
|
||||
return (404, "")
|
||||
return 404, ""
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_query_request(self, query_type, args):
|
||||
received_queries_counter.labels(query_type).inc()
|
||||
resp = yield self.registry.on_query(query_type, args)
|
||||
return (200, resp)
|
||||
return 200, resp
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_make_join_request(self, origin, room_id, user_id, supported_versions):
|
||||
@@ -435,7 +435,7 @@ class FederationServer(FederationBase):
|
||||
|
||||
logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
|
||||
yield self.handler.on_send_leave_request(origin, pdu)
|
||||
return (200, {})
|
||||
return 200, {}
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_event_auth(self, origin, room_id, event_id):
|
||||
@@ -446,7 +446,7 @@ class FederationServer(FederationBase):
|
||||
time_now = self._clock.time_msec()
|
||||
auth_pdus = yield self.handler.on_event_auth(event_id)
|
||||
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
|
||||
return (200, res)
|
||||
return 200, res
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_query_auth_request(self, origin, content, room_id, event_id):
|
||||
@@ -499,7 +499,7 @@ class FederationServer(FederationBase):
|
||||
"missing": ret.get("missing", []),
|
||||
}
|
||||
|
||||
return (200, send_content)
|
||||
return 200, send_content
|
||||
|
||||
@log_function
|
||||
def on_query_client_keys(self, origin, content):
|
||||
@@ -669,9 +669,9 @@ class FederationServer(FederationBase):
|
||||
return ret
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
|
||||
def on_exchange_third_party_invite_request(self, room_id, event_dict):
|
||||
ret = yield self.handler.on_exchange_third_party_invite_request(
|
||||
origin, room_id, event_dict
|
||||
room_id, event_dict
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ from synapse.logging.opentracing import (
|
||||
set_tag,
|
||||
start_active_span_follows_from,
|
||||
tags,
|
||||
whitelisted_homeserver,
|
||||
)
|
||||
from synapse.util.metrics import measure_func
|
||||
|
||||
@@ -59,9 +60,15 @@ class TransactionManager(object):
|
||||
# The span_contexts is a generator so that it won't be evaluated if
|
||||
# opentracing is disabled. (Yay speed!)
|
||||
|
||||
span_contexts = (
|
||||
extract_text_map(json.loads(edu.get_context())) for edu in pending_edus
|
||||
)
|
||||
span_contexts = []
|
||||
keep_destination = whitelisted_homeserver(destination)
|
||||
|
||||
for edu in pending_edus:
|
||||
context = edu.get_context()
|
||||
if context:
|
||||
span_contexts.append(extract_text_map(json.loads(context)))
|
||||
if keep_destination:
|
||||
edu.strip_context()
|
||||
|
||||
with start_active_span_follows_from("send_transaction", span_contexts):
|
||||
|
||||
|
||||
@@ -342,7 +342,11 @@ class BaseFederationServlet(object):
|
||||
continue
|
||||
|
||||
server.register_paths(
|
||||
method, (pattern,), self._wrap(code), self.__class__.__name__
|
||||
method,
|
||||
(pattern,),
|
||||
self._wrap(code),
|
||||
self.__class__.__name__,
|
||||
trace=False,
|
||||
)
|
||||
|
||||
|
||||
@@ -571,7 +575,7 @@ class FederationThirdPartyInviteExchangeServlet(BaseFederationServlet):
|
||||
|
||||
async def on_PUT(self, origin, content, query, room_id):
|
||||
content = await self.handler.on_exchange_third_party_invite_request(
|
||||
origin, room_id, content
|
||||
room_id, content
|
||||
)
|
||||
return 200, content
|
||||
|
||||
|
||||
@@ -41,6 +41,9 @@ class Edu(JsonEncodedObject):
|
||||
    def get_context(self):
        return getattr(self, "content", {}).get("org.matrix.opentracing_context", "{}")

    def strip_context(self):
        getattr(self, "content", {})["org.matrix.opentracing_context"] = "{}"
|
||||
|
||||
|
||||
class Transaction(JsonEncodedObject):
|
||||
""" A transaction is a list of Pdus and Edus to be sent to a remote home
|
||||
|
||||
@@ -45,6 +45,7 @@ class BaseHandler(object):
|
||||
self.state_handler = hs.get_state_handler()
|
||||
self.distributor = hs.get_distributor()
|
||||
self.ratelimiter = hs.get_ratelimiter()
|
||||
self.admin_redaction_ratelimiter = hs.get_admin_redaction_ratelimiter()
|
||||
self.clock = hs.get_clock()
|
||||
self.hs = hs
|
||||
|
||||
@@ -53,7 +54,7 @@ class BaseHandler(object):
|
||||
self.event_builder_factory = hs.get_event_builder_factory()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def ratelimit(self, requester, update=True):
|
||||
def ratelimit(self, requester, update=True, is_admin_redaction=False):
|
||||
"""Ratelimits requests.
|
||||
|
||||
Args:
|
||||
@@ -62,6 +63,9 @@ class BaseHandler(object):
|
||||
Set to False when doing multiple checks for one request (e.g.
|
||||
to check up front if we would reject the request), and set to
|
||||
True for the last call for a given request.
|
||||
is_admin_redaction (bool): Whether this is a room admin/moderator
|
||||
redacting an event. If so then we may apply different
|
||||
ratelimits depending on config.
|
||||
|
||||
Raises:
|
||||
LimitExceededError if the request should be ratelimited
|
||||
@@ -90,16 +94,33 @@ class BaseHandler(object):
|
||||
messages_per_second = override.messages_per_second
|
||||
burst_count = override.burst_count
|
||||
else:
|
||||
messages_per_second = self.hs.config.rc_message.per_second
|
||||
burst_count = self.hs.config.rc_message.burst_count
|
||||
# We default to different values if this is an admin redaction and
|
||||
# the config is set
|
||||
if is_admin_redaction and self.hs.config.rc_admin_redaction:
|
||||
messages_per_second = self.hs.config.rc_admin_redaction.per_second
|
||||
burst_count = self.hs.config.rc_admin_redaction.burst_count
|
||||
else:
|
||||
messages_per_second = self.hs.config.rc_message.per_second
|
||||
burst_count = self.hs.config.rc_message.burst_count
|
||||
|
||||
allowed, time_allowed = self.ratelimiter.can_do_action(
|
||||
user_id,
|
||||
time_now,
|
||||
rate_hz=messages_per_second,
|
||||
burst_count=burst_count,
|
||||
update=update,
|
||||
)
|
||||
if is_admin_redaction and self.hs.config.rc_admin_redaction:
|
||||
# If we have separate config for admin redactions we use a separate
|
||||
# ratelimiter
|
||||
allowed, time_allowed = self.admin_redaction_ratelimiter.can_do_action(
|
||||
user_id,
|
||||
time_now,
|
||||
rate_hz=messages_per_second,
|
||||
burst_count=burst_count,
|
||||
update=update,
|
||||
)
|
||||
else:
|
||||
allowed, time_allowed = self.ratelimiter.can_do_action(
|
||||
user_id,
|
||||
time_now,
|
||||
rate_hz=messages_per_second,
|
||||
burst_count=burst_count,
|
||||
update=update,
|
||||
)
|
||||
if not allowed:
|
||||
raise LimitExceededError(
|
||||
retry_after_ms=int(1000 * (time_allowed - time_now))
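The branch above picks both the rate limits and the limiter instance depending on whether the event is an admin redaction and whether rc_admin_redaction is configured. A standalone sketch of that selection, with tuples standing in for the RateLimitConfig objects and strings for the two limiters:

def choose_ratelimit(is_admin_redaction, rc_message, rc_admin_redaction):
    # Admin redactions only get their own limits (and their own limiter)
    # when rc_admin_redaction is actually configured; otherwise fall back
    # to the normal per-message settings.
    if is_admin_redaction and rc_admin_redaction is not None:
        return "admin_redaction_ratelimiter", rc_admin_redaction
    return "request_ratelimiter", rc_message

# (per_second, burst_count) tuples stand in for the RateLimitConfig objects.
assert choose_ratelimit(True, (0.2, 10), (1, 50)) == ("admin_redaction_ratelimiter", (1, 50))
assert choose_ratelimit(True, (0.2, 10), None) == ("request_ratelimiter", (0.2, 10))
assert choose_ratelimit(False, (0.2, 10), (1, 50)) == ("request_ratelimiter", (0.2, 10))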
|
||||
|
||||
@@ -51,8 +51,8 @@ class AccountDataEventSource(object):
|
||||
{"type": account_data_type, "content": content, "room_id": room_id}
|
||||
)
|
||||
|
||||
return (results, current_stream_id)
|
||||
return results, current_stream_id
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_pagination_rows(self, user, config, key):
|
||||
return ([], config.to_id)
|
||||
return [], config.to_id
|
||||
|
||||
@@ -38,6 +38,7 @@ logger = logging.getLogger(__name__)
|
||||
class AccountValidityHandler(object):
|
||||
def __init__(self, hs):
|
||||
self.hs = hs
|
||||
self.config = hs.config
|
||||
self.store = self.hs.get_datastore()
|
||||
self.sendmail = self.hs.get_sendmail()
|
||||
self.clock = self.hs.get_clock()
|
||||
@@ -62,9 +63,14 @@ class AccountValidityHandler(object):
|
||||
self._raw_from = email.utils.parseaddr(self._from_string)[1]
|
||||
|
||||
self._template_html, self._template_text = load_jinja2_templates(
|
||||
config=self.hs.config,
|
||||
template_html_name=self.hs.config.email_expiry_template_html,
|
||||
template_text_name=self.hs.config.email_expiry_template_text,
|
||||
self.config.email_template_dir,
|
||||
[
|
||||
self.config.email_expiry_template_html,
|
||||
self.config.email_expiry_template_text,
|
||||
],
|
||||
apply_format_ts_filter=True,
|
||||
apply_mxc_to_http_filter=True,
|
||||
public_baseurl=self.config.public_baseurl,
|
||||
)
|
||||
|
||||
# Check the renewal emails to send and send them every 30min.
|
||||
|
||||
@@ -294,12 +294,10 @@ class ApplicationServicesHandler(object):
|
||||
# we don't know if they are unknown or not since it isn't one of our
|
||||
# users. We can't poke ASes.
|
||||
return False
|
||||
return
|
||||
|
||||
user_info = yield self.store.get_user_by_id(user_id)
|
||||
if user_info:
|
||||
return False
|
||||
return
|
||||
|
||||
# user not found; could be the AS though, so check.
|
||||
services = self.store.get_app_services()
|
||||
|
||||
@@ -38,6 +38,7 @@ from synapse.api.errors import (
|
||||
UserDeactivatedError,
|
||||
)
|
||||
from synapse.api.ratelimiting import Ratelimiter
|
||||
from synapse.config.emailconfig import ThreepidBehaviour
|
||||
from synapse.logging.context import defer_to_thread
|
||||
from synapse.module_api import ModuleApi
|
||||
from synapse.types import UserID
|
||||
@@ -158,7 +159,7 @@ class AuthHandler(BaseHandler):
|
||||
return params
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_auth(self, flows, clientdict, clientip, password_servlet=False):
|
||||
def check_auth(self, flows, clientdict, clientip):
|
||||
"""
|
||||
Takes a dictionary sent by the client in the login / registration
|
||||
protocol and handles the User-Interactive Auth flow.
|
||||
@@ -182,16 +183,6 @@ class AuthHandler(BaseHandler):
|
||||
|
||||
clientip (str): The IP address of the client.
|
||||
|
||||
password_servlet (bool): Whether the request originated from
|
||||
PasswordRestServlet.
|
||||
XXX: This is a temporary hack to distinguish between checking
|
||||
for threepid validations locally (in the case of password
|
||||
resets) and using the identity server (in the case of binding
|
||||
a 3PID during registration). Once we start using the
|
||||
homeserver for both tasks, this distinction will no longer be
|
||||
necessary.
|
||||
|
||||
|
||||
Returns:
|
||||
defer.Deferred[dict, dict, str]: a deferred tuple of
|
||||
(creds, params, session_id).
|
||||
@@ -247,9 +238,7 @@ class AuthHandler(BaseHandler):
|
||||
if "type" in authdict:
|
||||
login_type = authdict["type"]
|
||||
try:
|
||||
result = yield self._check_auth_dict(
|
||||
authdict, clientip, password_servlet=password_servlet
|
||||
)
|
||||
result = yield self._check_auth_dict(authdict, clientip)
|
||||
if result:
|
||||
creds[login_type] = result
|
||||
self._save_session(session)
|
||||
@@ -280,7 +269,7 @@ class AuthHandler(BaseHandler):
|
||||
creds,
|
||||
list(clientdict),
|
||||
)
|
||||
return (creds, clientdict, session["id"])
|
||||
return creds, clientdict, session["id"]
|
||||
|
||||
ret = self._auth_dict_for_flows(flows, session)
|
||||
ret["completed"] = list(creds)
|
||||
@@ -356,7 +345,7 @@ class AuthHandler(BaseHandler):
|
||||
return sess.setdefault("serverdict", {}).get(key, default)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _check_auth_dict(self, authdict, clientip, password_servlet=False):
|
||||
def _check_auth_dict(self, authdict, clientip):
|
||||
"""Attempt to validate the auth dict provided by a client
|
||||
|
||||
Args:
|
||||
@@ -374,11 +363,7 @@ class AuthHandler(BaseHandler):
|
||||
login_type = authdict["type"]
|
||||
checker = self.checkers.get(login_type)
|
||||
if checker is not None:
|
||||
# XXX: Temporary workaround for having Synapse handle password resets
|
||||
# See AuthHandler.check_auth for further details
|
||||
res = yield checker(
|
||||
authdict, clientip=clientip, password_servlet=password_servlet
|
||||
)
|
||||
res = yield checker(authdict, clientip=clientip)
|
||||
return res
|
||||
|
||||
# build a v1-login-style dict out of the authdict and fall back to the
|
||||
@@ -449,7 +434,7 @@ class AuthHandler(BaseHandler):
|
||||
return defer.succeed(True)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _check_threepid(self, medium, authdict, password_servlet=False, **kwargs):
|
||||
def _check_threepid(self, medium, authdict, **kwargs):
|
||||
if "threepid_creds" not in authdict:
|
||||
raise LoginError(400, "Missing threepid_creds", Codes.MISSING_PARAM)
|
||||
|
||||
@@ -458,12 +443,18 @@ class AuthHandler(BaseHandler):
|
||||
identity_handler = self.hs.get_handlers().identity_handler
|
||||
|
||||
logger.info("Getting validated threepid. threepidcreds: %r", (threepid_creds,))
|
||||
if (
|
||||
not password_servlet
|
||||
or self.hs.config.email_password_reset_behaviour == "remote"
|
||||
):
|
||||
threepid = yield identity_handler.threepid_from_creds(threepid_creds)
|
||||
elif self.hs.config.email_password_reset_behaviour == "local":
|
||||
if self.hs.config.threepid_behaviour_email == ThreepidBehaviour.REMOTE:
|
||||
if medium == "email":
|
||||
threepid = yield identity_handler.threepid_from_creds(
|
||||
self.hs.config.account_threepid_delegate_email, threepid_creds
|
||||
)
|
||||
elif medium == "msisdn":
|
||||
threepid = yield identity_handler.threepid_from_creds(
|
||||
self.hs.config.account_threepid_delegate_msisdn, threepid_creds
|
||||
)
|
||||
else:
|
||||
raise SynapseError(400, "Unrecognized threepid medium: %s" % (medium,))
|
||||
elif self.hs.config.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
|
||||
row = yield self.store.get_threepid_validation_session(
|
||||
medium,
|
||||
threepid_creds["client_secret"],
|
||||
@@ -722,7 +713,7 @@ class AuthHandler(BaseHandler):
|
||||
known_login_type = True
|
||||
is_valid = yield provider.check_password(qualified_user_id, password)
|
||||
if is_valid:
|
||||
return (qualified_user_id, None)
|
||||
return qualified_user_id, None
|
||||
|
||||
if not hasattr(provider, "get_supported_login_types") or not hasattr(
|
||||
provider, "check_auth"
|
||||
@@ -766,7 +757,7 @@ class AuthHandler(BaseHandler):
|
||||
)
|
||||
|
||||
if canonical_user_id:
|
||||
return (canonical_user_id, None)
|
||||
return canonical_user_id, None
|
||||
|
||||
if not known_login_type:
|
||||
raise SynapseError(400, "Unknown login type %s" % login_type)
|
||||
@@ -816,7 +807,7 @@ class AuthHandler(BaseHandler):
|
||||
result = (result, None)
|
||||
return result
|
||||
|
||||
return (None, None)
|
||||
return None, None
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _check_local_password(self, user_id, password):
|
||||
|
||||
@@ -25,6 +25,7 @@ from synapse.api.errors import (
|
||||
HttpResponseException,
|
||||
RequestSendFailed,
|
||||
)
|
||||
from synapse.logging.opentracing import log_kv, set_tag, trace
|
||||
from synapse.types import RoomStreamToken, get_domain_from_id
|
||||
from synapse.util import stringutils
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
@@ -45,6 +46,7 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
self.state = hs.get_state_handler()
|
||||
self._auth_handler = hs.get_auth_handler()
|
||||
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def get_devices_by_user(self, user_id):
|
||||
"""
|
||||
@@ -56,6 +58,7 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
defer.Deferred: list[dict[str, X]]: info on each device
|
||||
"""
|
||||
|
||||
set_tag("user_id", user_id)
|
||||
device_map = yield self.store.get_devices_by_user(user_id)
|
||||
|
||||
ips = yield self.store.get_last_client_ip_by_device(user_id, device_id=None)
|
||||
@@ -64,8 +67,10 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
for device in devices:
|
||||
_update_device_from_client_ips(device, ips)
|
||||
|
||||
log_kv(device_map)
|
||||
return devices
|
||||
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def get_device(self, user_id, device_id):
|
||||
""" Retrieve the given device
|
||||
@@ -85,9 +90,14 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
raise errors.NotFoundError
|
||||
ips = yield self.store.get_last_client_ip_by_device(user_id, device_id)
|
||||
_update_device_from_client_ips(device, ips)
|
||||
|
||||
set_tag("device", device)
|
||||
set_tag("ips", ips)
|
||||
|
||||
return device
|
||||
|
||||
@measure_func("device.get_user_ids_changed")
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def get_user_ids_changed(self, user_id, from_token):
|
||||
"""Get list of users that have had the devices updated, or have newly
|
||||
@@ -97,6 +107,9 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
user_id (str)
|
||||
from_token (StreamToken)
|
||||
"""
|
||||
|
||||
set_tag("user_id", user_id)
|
||||
set_tag("from_token", from_token)
|
||||
now_room_key = yield self.store.get_room_events_max_id()
|
||||
|
||||
room_ids = yield self.store.get_rooms_for_user(user_id)
|
||||
@@ -148,6 +161,9 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
# special-case for an empty prev state: include all members
|
||||
# in the changed list
|
||||
if not event_ids:
|
||||
log_kv(
|
||||
{"event": "encountered empty previous state", "room_id": room_id}
|
||||
)
|
||||
for key, event_id in iteritems(current_state_ids):
|
||||
etype, state_key = key
|
||||
if etype != EventTypes.Member:
|
||||
@@ -200,7 +216,11 @@ class DeviceWorkerHandler(BaseHandler):
|
||||
possibly_joined = []
|
||||
possibly_left = []
|
||||
|
||||
return {"changed": list(possibly_joined), "left": list(possibly_left)}
|
||||
result = {"changed": list(possibly_joined), "left": list(possibly_left)}
|
||||
|
||||
log_kv(result)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class DeviceHandler(DeviceWorkerHandler):
|
||||
@@ -267,6 +287,7 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
|
||||
raise errors.StoreError(500, "Couldn't generate a device ID.")
|
||||
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def delete_device(self, user_id, device_id):
|
||||
""" Delete the given device
|
||||
@@ -284,6 +305,10 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
except errors.StoreError as e:
|
||||
if e.code == 404:
|
||||
# no match
|
||||
set_tag("error", True)
|
||||
log_kv(
|
||||
{"reason": "User doesn't have device id.", "device_id": device_id}
|
||||
)
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
@@ -296,6 +321,7 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
|
||||
yield self.notify_device_update(user_id, [device_id])
|
||||
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def delete_all_devices_for_user(self, user_id, except_device_id=None):
|
||||
"""Delete all of the user's devices
|
||||
@@ -331,6 +357,8 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
except errors.StoreError as e:
|
||||
if e.code == 404:
|
||||
# no match
|
||||
set_tag("error", True)
|
||||
set_tag("reason", "User doesn't have that device id.")
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
@@ -371,6 +399,7 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
else:
|
||||
raise
|
||||
|
||||
@trace
|
||||
@measure_func("notify_device_update")
|
||||
@defer.inlineCallbacks
|
||||
def notify_device_update(self, user_id, device_ids):
|
||||
@@ -386,6 +415,8 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
hosts.update(get_domain_from_id(u) for u in users_who_share_room)
|
||||
hosts.discard(self.server_name)
|
||||
|
||||
set_tag("target_hosts", hosts)
|
||||
|
||||
position = yield self.store.add_device_change_to_streams(
|
||||
user_id, device_ids, list(hosts)
|
||||
)
|
||||
@@ -405,6 +436,7 @@ class DeviceHandler(DeviceWorkerHandler):
|
||||
)
|
||||
for host in hosts:
|
||||
self.federation_sender.send_device_messages(host)
|
||||
log_kv({"message": "sent device update to host", "host": host})
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_federation_query_user_devices(self, user_id):
|
||||
@@ -451,12 +483,15 @@ class DeviceListUpdater(object):
|
||||
iterable=True,
|
||||
)
|
||||
|
||||
@trace
|
||||
@defer.inlineCallbacks
|
||||
def incoming_device_list_update(self, origin, edu_content):
|
||||
"""Called on incoming device list update from federation. Responsible
|
||||
for parsing the EDU and adding to pending updates list.
|
||||
"""
|
||||
|
||||
set_tag("origin", origin)
|
||||
set_tag("edu_content", edu_content)
|
||||
user_id = edu_content.pop("user_id")
|
||||
device_id = edu_content.pop("device_id")
|
||||
stream_id = str(edu_content.pop("stream_id")) # They may come as ints
|
||||
@@ -471,12 +506,30 @@ class DeviceListUpdater(object):
|
||||
device_id,
|
||||
origin,
|
||||
)
|
||||
|
||||
set_tag("error", True)
|
||||
log_kv(
|
||||
{
|
||||
"message": "Got a device list update edu from a user and "
|
||||
"device which does not match the origin of the request.",
|
||||
"user_id": user_id,
|
||||
"device_id": device_id,
|
||||
}
|
||||
)
|
||||
return
|
||||
|
||||
room_ids = yield self.store.get_rooms_for_user(user_id)
|
||||
if not room_ids:
|
||||
# We don't share any rooms with this user. Ignore update, as we
|
||||
# probably won't get any further updates.
|
||||
set_tag("error", True)
|
||||
log_kv(
|
||||
{
|
||||
"message": "Got an update from a user for which "
|
||||
"we don't share any rooms",
|
||||
"other user_id": user_id,
|
||||
}
|
||||
)
|
||||
logger.warning(
|
||||
"Got device list update edu for %r/%r, but don't share a room",
|
||||
user_id,
|
||||
@@ -578,6 +631,7 @@ class DeviceListUpdater(object):
|
||||
request:
|
||||
https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
|
||||
"""
|
||||
log_kv({"message": "Doing resync to update device list."})
|
||||
# Fetch all devices for the user.
|
||||
origin = get_domain_from_id(user_id)
|
||||
try:
|
||||
@@ -594,13 +648,20 @@ class DeviceListUpdater(object):
|
||||
# eventually become consistent.
|
||||
return
|
||||
except FederationDeniedError as e:
|
||||
set_tag("error", True)
|
||||
log_kv({"reason": "FederationDeniedError"})
|
||||
logger.info(e)
|
||||
return
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
# TODO: Remember that we are now out of sync and try again
|
||||
# later
|
||||
set_tag("error", True)
|
||||
log_kv(
|
||||
{"message": "Exception raised by federation request", "exception": e}
|
||||
)
|
||||
logger.exception("Failed to handle device list update for %s", user_id)
|
||||
return
|
||||
log_kv({"result": result})
|
||||
stream_id = result["stream_id"]
|
||||
devices = result["devices"]
|
||||
|
||||
|
||||
@@ -22,9 +22,9 @@ from twisted.internet import defer
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.logging.opentracing import (
|
||||
get_active_span_text_map,
|
||||
log_kv,
|
||||
set_tag,
|
||||
start_active_span,
|
||||
whitelisted_homeserver,
|
||||
)
|
||||
from synapse.types import UserID, get_domain_from_id
|
||||
from synapse.util.stringutils import random_string
|
||||
@@ -86,7 +86,8 @@ class DeviceMessageHandler(object):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send_device_message(self, sender_user_id, message_type, messages):
|
||||
|
||||
set_tag("number_of_messages", len(messages))
|
||||
set_tag("sender", sender_user_id)
|
||||
local_messages = {}
|
||||
remote_messages = {}
|
||||
for user_id, by_device in messages.items():
|
||||
@@ -119,11 +120,10 @@ class DeviceMessageHandler(object):
|
||||
"sender": sender_user_id,
|
||||
"type": message_type,
|
||||
"message_id": message_id,
|
||||
"org.matrix.opentracing_context": json.dumps(context)
|
||||
if whitelisted_homeserver(destination)
|
||||
else None,
|
||||
"org.matrix.opentracing_context": json.dumps(context),
|
||||
}
|
||||
|
||||
log_kv({"local_messages": local_messages})
|
||||
stream_id = yield self.store.add_messages_to_device_inbox(
|
||||
local_messages, remote_edu_contents
|
||||
)
|
||||
@@ -132,6 +132,7 @@ class DeviceMessageHandler(object):
|
||||
"to_device_key", stream_id, users=local_messages.keys()
|
||||
)
|
||||
|
||||
log_kv({"remote_messages": remote_messages})
|
||||
for destination in remote_messages.keys():
|
||||
# Enqueue a new federation transaction to send the new
|
||||
# device messages to each remote destination.
|
||||
|
||||
@@ -167,7 +167,6 @@ class EventHandler(BaseHandler):
|
||||
|
||||
if not event:
|
||||
return None
|
||||
return
|
||||
|
||||
users = yield self.store.get_users_in_room(event.room_id)
|
||||
is_peeking = user.to_string() not in users
|
||||
|
||||
@@ -1428,7 +1428,7 @@ class FederationHandler(BaseHandler):
|
||||
assert event.user_id == user_id
|
||||
assert event.state_key == user_id
|
||||
assert event.room_id == room_id
|
||||
return (origin, event, format_ver)
|
||||
return origin, event, format_ver
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@log_function
|
||||
@@ -2530,12 +2530,17 @@ class FederationHandler(BaseHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@log_function
|
||||
def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
|
||||
def on_exchange_third_party_invite_request(self, room_id, event_dict):
|
||||
"""Handle an exchange_third_party_invite request from a remote server
|
||||
|
||||
The remote server will call this when it wants to turn a 3pid invite
|
||||
into a normal m.room.member invite.
|
||||
|
||||
Args:
|
||||
room_id (str): The ID of the room.
|
||||
|
||||
event_dict (dict[str, Any]): Dictionary containing the event body.
|
||||
|
||||
Returns:
|
||||
Deferred: resolves (to None)
|
||||
"""
|
||||
|
||||
@@ -29,6 +29,7 @@ from synapse.api.errors import (
|
||||
HttpResponseException,
|
||||
SynapseError,
|
||||
)
|
||||
from synapse.util.stringutils import random_string
|
||||
|
||||
from ._base import BaseHandler
|
||||
|
||||
@@ -41,86 +42,130 @@ class IdentityHandler(BaseHandler):
|
||||
|
||||
self.http_client = hs.get_simple_http_client()
|
||||
self.federation_http_client = hs.get_http_client()
|
||||
self.hs = hs
|
||||
|
||||
self.trusted_id_servers = set(hs.config.trusted_third_party_id_servers)
|
||||
self.trust_any_id_server_just_for_testing_do_not_use = (
|
||||
hs.config.use_insecure_ssl_client_just_for_testing_do_not_use
|
||||
def _extract_items_from_creds_dict(self, creds):
|
||||
"""
|
||||
Retrieve entries from a "credentials" dictionary
|
||||
|
||||
Args:
|
||||
creds (dict[str, str]): Dictionary of credentials that contain the following keys:
|
||||
* client_secret|clientSecret: A unique secret str provided by the client
|
||||
* id_server|idServer: the domain of the identity server to query
|
||||
* id_access_token: The access token to authenticate to the identity
|
||||
server with.
|
||||
|
||||
Returns:
|
||||
tuple(str, str, str|None): A tuple containing the client_secret, the id_server,
|
||||
and the id_access_token value if available.
|
||||
"""
|
||||
client_secret = creds.get("client_secret") or creds.get("clientSecret")
|
||||
if not client_secret:
|
||||
raise SynapseError(
|
||||
400, "No client_secret in creds", errcode=Codes.MISSING_PARAM
|
||||
)
|
||||
|
||||
id_server = creds.get("id_server") or creds.get("idServer")
|
||||
if not id_server:
|
||||
raise SynapseError(
|
||||
400, "No id_server in creds", errcode=Codes.MISSING_PARAM
|
||||
)
|
||||
|
||||
id_access_token = creds.get("id_access_token")
|
||||
return client_secret, id_server, id_access_token
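A standalone sketch of the extraction above, accepting both the snake_case and legacy camelCase key spellings; ValueError stands in for SynapseError and the function name is illustrative.

def extract_items_from_creds_dict_sketch(creds):
    # Accept both the snake_case and the legacy camelCase spellings.
    client_secret = creds.get("client_secret") or creds.get("clientSecret")
    if not client_secret:
        raise ValueError("No client_secret in creds")

    id_server = creds.get("id_server") or creds.get("idServer")
    if not id_server:
        raise ValueError("No id_server in creds")

    # Only present when the client is using the v2 Identity Service API.
    id_access_token = creds.get("id_access_token")
    return client_secret, id_server, id_access_token

assert extract_items_from_creds_dict_sketch(
    {"clientSecret": "s3cr3t", "id_server": "id.example.com"}
) == ("s3cr3t", "id.example.com", None)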
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def threepid_from_creds(self, id_server, creds):
|
||||
"""
|
||||
Retrieve and validate a threepid identifier from a "credentials" dictionary against a
|
||||
given identity server
|
||||
|
||||
Args:
|
||||
id_server (str|None): The identity server to validate 3PIDs against. If None,
|
||||
we will attempt to extract id_server creds
|
||||
|
||||
creds (dict[str, str]): Dictionary containing the following keys:
|
||||
* id_server|idServer: An optional domain name of an identity server
|
||||
* client_secret|clientSecret: A unique secret str provided by the client
|
||||
* sid: The ID of the validation session
|
||||
|
||||
Returns:
|
||||
Deferred[dict[str,str|int]|None]: A dictionary consisting of response params to
|
||||
the /getValidated3pid endpoint of the Identity Service API, or None if the
|
||||
threepid was not found
|
||||
"""
|
||||
client_secret = creds.get("client_secret") or creds.get("clientSecret")
|
||||
if not client_secret:
|
||||
raise SynapseError(
400, "Missing param client_secret in creds", errcode=Codes.MISSING_PARAM
)
session_id = creds.get("sid")
if not session_id:
raise SynapseError(
400, "Missing param session_id in creds", errcode=Codes.MISSING_PARAM
)
if not id_server:
# Attempt to get the id_server from the creds dict
id_server = creds.get("id_server") or creds.get("idServer")
if not id_server:
raise SynapseError(
400, "Missing param id_server in creds", errcode=Codes.MISSING_PARAM
)

query_params = {"sid": session_id, "client_secret": client_secret}

url = "https://%s%s" % (
id_server,
"/_matrix/identity/api/v1/3pid/getValidated3pid",
)

def _should_trust_id_server(self, id_server):
if id_server not in self.trusted_id_servers:
if self.trust_any_id_server_just_for_testing_do_not_use:
logger.warn(
"Trusting untrustworthy ID server %r even though it isn't"
" in the trusted id list for testing because"
" 'use_insecure_ssl_client_just_for_testing_do_not_use'"
" is set in the config",
id_server,
)
else:
return False
return True
data = yield self.http_client.get_json(url, query_params)
return data if "medium" in data else None

@defer.inlineCallbacks
def threepid_from_creds(self, creds):
if "id_server" in creds:
id_server = creds["id_server"]
elif "idServer" in creds:
id_server = creds["idServer"]
else:
raise SynapseError(400, "No id_server in creds")
def bind_threepid(self, creds, mxid, use_v2=True):
"""Bind a 3PID to an identity server

if "client_secret" in creds:
client_secret = creds["client_secret"]
elif "clientSecret" in creds:
client_secret = creds["clientSecret"]
else:
raise SynapseError(400, "No client_secret in creds")
Args:
creds (dict[str, str]): Dictionary of credentials that contain the following keys:
* client_secret|clientSecret: A unique secret str provided by the client
* id_server|idServer: the domain of the identity server to query
* id_access_token: The access token to authenticate to the identity
server with. Required if use_v2 is true
mxid (str): The MXID to bind the 3PID to
use_v2 (bool): Whether to use v2 Identity Service API endpoints

if not self._should_trust_id_server(id_server):
logger.warn(
"%s is not a trusted ID server: rejecting 3pid " + "credentials",
id_server,
)
return None

try:
data = yield self.http_client.get_json(
"https://%s%s"
% (id_server, "/_matrix/identity/api/v1/3pid/getValidated3pid"),
{"sid": creds["sid"], "client_secret": client_secret},
)
except HttpResponseException as e:
logger.info("getValidated3pid failed with Matrix error: %r", e)
raise e.to_synapse_error()

if "medium" in data:
return data
return None

@defer.inlineCallbacks
def bind_threepid(self, creds, mxid):
Returns:
Deferred[dict]: The response from the identity server
"""
logger.debug("binding threepid %r to %s", creds, mxid)
data = None

if "id_server" in creds:
id_server = creds["id_server"]
elif "idServer" in creds:
id_server = creds["idServer"]
else:
raise SynapseError(400, "No id_server in creds")
client_secret, id_server, id_access_token = self._extract_items_from_creds_dict(
creds
)

if "client_secret" in creds:
client_secret = creds["client_secret"]
elif "clientSecret" in creds:
client_secret = creds["clientSecret"]
sid = creds.get("sid")
if not sid:
raise SynapseError(
400, "No sid in three_pid_creds", errcode=Codes.MISSING_PARAM
)

# If an id_access_token is not supplied, force usage of v1
if id_access_token is None:
use_v2 = False

# Decide which API endpoint URLs to use
headers = {}
bind_data = {"sid": sid, "client_secret": client_secret, "mxid": mxid}
if use_v2:
bind_url = "https://%s/_matrix/identity/v2/3pid/bind" % (id_server,)
headers["Authorization"] = create_id_access_token_header(id_access_token)
else:
raise SynapseError(400, "No client_secret in creds")
bind_url = "https://%s/_matrix/identity/api/v1/3pid/bind" % (id_server,)

try:
data = yield self.http_client.post_json_get_json(
"https://%s%s" % (id_server, "/_matrix/identity/api/v1/3pid/bind"),
{"sid": creds["sid"], "client_secret": client_secret, "mxid": mxid},
bind_url, bind_data, headers=headers
)
logger.debug("bound threepid %r to %s", creds, mxid)

@@ -131,13 +176,23 @@ class IdentityHandler(BaseHandler):
address=data["address"],
id_server=id_server,
)

return data
except HttpResponseException as e:
if e.code != 404 or not use_v2:
logger.error("3PID bind failed with Matrix error: %r", e)
raise e.to_synapse_error()
except CodeMessageException as e:
data = json.loads(e.msg)  # XXX WAT?
return data
return data

logger.info("Got 404 when POSTing JSON %s, falling back to v1 URL", bind_url)
return (yield self.bind_threepid(creds, mxid, use_v2=False))
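
To make the new v2 bind path easier to follow, here is a minimal standalone sketch (not part of the diff) of the request the code above builds when use_v2 is true; the identity server name and credential values are placeholders.

import json
import urllib.request

id_server = "id.example.com"        # placeholder identity server
id_access_token = "abc123"          # placeholder token
bind_data = {"sid": "sid123", "client_secret": "s3cret", "mxid": "@alice:example.org"}

bind_url = "https://%s/_matrix/identity/v2/3pid/bind" % (id_server,)
headers = {
    "Authorization": "Bearer %s" % id_access_token,
    "Content-Type": "application/json",
}
request = urllib.request.Request(
    bind_url, data=json.dumps(bind_data).encode("utf-8"), headers=headers, method="POST"
)
# urllib.request.urlopen(request) would perform the POST; a 404 response from
# the identity server is what triggers the fall back to the v1 endpoint above.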

@defer.inlineCallbacks
def try_unbind_threepid(self, mxid, threepid):
"""Removes a binding from an identity server
"""Attempt to remove a 3PID from an identity server, or if one is not provided, all
identity servers we're aware the binding is present on

Args:
mxid (str): Matrix user ID of binding to be removed
@@ -188,6 +243,8 @@ class IdentityHandler(BaseHandler):
server doesn't support unbinding
"""
url = "https://%s/_matrix/identity/api/v1/3pid/unbind" % (id_server,)
url_bytes = "/_matrix/identity/api/v1/3pid/unbind".encode("ascii")

content = {
"mxid": mxid,
"threepid": {"medium": threepid["medium"], "address": threepid["address"]},
@@ -199,7 +256,7 @@ class IdentityHandler(BaseHandler):
auth_headers = self.federation_http_client.build_auth_headers(
destination=None,
method="POST",
url_bytes="/_matrix/identity/api/v1/3pid/unbind".encode("ascii"),
url_bytes=url_bytes,
content=content,
destination_is=id_server,
)
@@ -226,28 +283,122 @@ class IdentityHandler(BaseHandler):

return changed

@defer.inlineCallbacks
def send_threepid_validation(
self,
email_address,
client_secret,
send_attempt,
send_email_func,
next_link=None,
):
"""Send a threepid validation email for password reset or
registration purposes

Args:
email_address (str): The user's email address
client_secret (str): The provided client secret
send_attempt (int): Which send attempt this is
send_email_func (func): A function that takes an email address, token,
client_secret and session_id, sends an email
and returns a Deferred.
next_link (str|None): The URL to redirect the user to after validation

Returns:
The new session_id upon success

Raises:
SynapseError is an error occurred when sending the email
"""
# Check that this email/client_secret/send_attempt combo is new or
# greater than what we've seen previously
session = yield self.store.get_threepid_validation_session(
"email", client_secret, address=email_address, validated=False
)

# Check to see if a session already exists and that it is not yet
# marked as validated
if session and session.get("validated_at") is None:
session_id = session["session_id"]
last_send_attempt = session["last_send_attempt"]

# Check that the send_attempt is higher than previous attempts
if send_attempt <= last_send_attempt:
# If not, just return a success without sending an email
return session_id
else:
# An non-validated session does not exist yet.
# Generate a session id
session_id = random_string(16)

# Generate a new validation token
token = random_string(32)

# Send the mail with the link containing the token, client_secret
# and session_id
try:
yield send_email_func(email_address, token, client_secret, session_id)
except Exception:
logger.exception(
"Error sending threepid validation email to %s", email_address
)
raise SynapseError(500, "An error was encountered when sending the email")

token_expires = (
self.hs.clock.time_msec() + self.hs.config.email_validation_token_lifetime
)

yield self.store.start_or_continue_validation_session(
"email",
email_address,
session_id,
client_secret,
send_attempt,
next_link,
token,
token_expires,
)

return session_id

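The send_email_func argument described above only needs to accept (email_address, token, client_secret, session_id) and return a Deferred; a minimal stand-in, assuming Twisted as in the surrounding code, could look like this:

from twisted.internet import defer

@defer.inlineCallbacks
def send_email_func(email_address, token, client_secret, session_id):
    # Stand-in mailer: log what would be sent instead of actually emailing it.
    print(
        "Would email %s a validation link with token=%s client_secret=%s sid=%s"
        % (email_address, token, client_secret, session_id)
    )
    yield defer.succeed(None)
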
@defer.inlineCallbacks
def requestEmailToken(
self, id_server, email, client_secret, send_attempt, next_link=None
):
if not self._should_trust_id_server(id_server):
raise SynapseError(
400, "Untrusted ID server '%s'" % id_server, Codes.SERVER_NOT_TRUSTED
)
"""
Request an external server send an email on our behalf for the purposes of threepid
validation.

Args:
id_server (str): The identity server to proxy to
email (str): The email to send the message to
client_secret (str): The unique client_secret sends by the user
send_attempt (int): Which attempt this is
next_link: A link to redirect the user to once they submit the token

Returns:
The json response body from the server
"""
params = {
"email": email,
"client_secret": client_secret,
"send_attempt": send_attempt,
}

if next_link:
params.update({"next_link": next_link})
params["next_link"] = next_link

if self.hs.config.using_identity_server_from_trusted_list:
# Warn that a deprecated config option is in use
logger.warn(
'The config option "trust_identity_server_for_password_resets" '
'has been replaced by "account_threepid_delegate". '
"Please consult the sample config at docs/sample_config.yaml for "
"details and update your config file."
)

try:
data = yield self.http_client.post_json_get_json(
"https://%s%s"
% (id_server, "/_matrix/identity/api/v1/validate/email/requestToken"),
id_server + "/_matrix/identity/api/v1/validate/email/requestToken",
params,
)
return data
@@ -257,28 +408,85 @@ class IdentityHandler(BaseHandler):

@defer.inlineCallbacks
def requestMsisdnToken(
self, id_server, country, phone_number, client_secret, send_attempt, **kwargs
self,
id_server,
country,
phone_number,
client_secret,
send_attempt,
next_link=None,
):
if not self._should_trust_id_server(id_server):
raise SynapseError(
400, "Untrusted ID server '%s'" % id_server, Codes.SERVER_NOT_TRUSTED
)
"""
Request an external server send an SMS message on our behalf for the purposes of
threepid validation.
Args:
id_server (str): The identity server to proxy to
country (str): The country code of the phone number
phone_number (str): The number to send the message to
client_secret (str): The unique client_secret sends by the user
send_attempt (int): Which attempt this is
next_link: A link to redirect the user to once they submit the token

Returns:
The json response body from the server
"""
params = {
"country": country,
"phone_number": phone_number,
"client_secret": client_secret,
"send_attempt": send_attempt,
}
params.update(kwargs)
if next_link:
params["next_link"] = next_link

if self.hs.config.using_identity_server_from_trusted_list:
# Warn that a deprecated config option is in use
logger.warn(
'The config option "trust_identity_server_for_password_resets" '
'has been replaced by "account_threepid_delegate". '
"Please consult the sample config at docs/sample_config.yaml for "
"details and update your config file."
)

try:
data = yield self.http_client.post_json_get_json(
"https://%s%s"
% (id_server, "/_matrix/identity/api/v1/validate/msisdn/requestToken"),
id_server + "/_matrix/identity/api/v1/validate/msisdn/requestToken",
params,
)
return data
except HttpResponseException as e:
logger.info("Proxied requestToken failed: %r", e)
raise e.to_synapse_error()


def create_id_access_token_header(id_access_token):
"""Create an Authorization header for passing to SimpleHttpClient as the header value
of an HTTP request.

Args:
id_access_token (str): An identity server access token.

Returns:
list[str]: The ascii-encoded bearer token encased in a list.
"""
# Prefix with Bearer
bearer_token = "Bearer %s" % id_access_token

# Encode headers to standard ascii
bearer_token.encode("ascii")

# Return as a list as that's how SimpleHttpClient takes header values
return [bearer_token]

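As a quick illustration (not taken from the diff), the helper above wraps a made-up token in a Bearer string and returns it as a single-element list, the form the comment says SimpleHttpClient expects:

>>> create_id_access_token_header("abc123")
['Bearer abc123']
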
class LookupAlgorithm:
"""
Supported hashing algorithms when performing a 3PID lookup.

SHA256 - Hashing an (address, medium, pepper) combo with sha256, then url-safe base64
encoding
NONE - Not performing any hashing. Simply sending an (address, medium) combo in plaintext
"""

SHA256 = "sha256"
NONE = "none"

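For the SHA256 variant documented above, a hashed lookup value can be computed roughly as follows; this is an illustrative sketch, and the space-separated "address medium pepper" input order is an assumption rather than something shown in this diff.

import hashlib
from base64 import urlsafe_b64encode

def hash_3pid(address, medium, pepper):
    # sha256 over the (address, medium, pepper) combo...
    digest = hashlib.sha256(" ".join((address, medium, pepper)).encode("utf-8")).digest()
    # ...then URL-safe base64 encoding, with padding stripped
    return urlsafe_b64encode(digest).decode("ascii").rstrip("=")

print(hash_3pid("alice@example.com", "email", "matrixrocks"))
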
@@ -449,8 +449,7 @@ class InitialSyncHandler(BaseHandler):
# * The user is a guest user, and has joined the room
# else it will throw.
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
return (member_event.membership, member_event.event_id)
return
return member_event.membership, member_event.event_id
except AuthError:
visibility = yield self.state_handler.get_current_state(
room_id, EventTypes.RoomHistoryVisibility, ""
@@ -459,8 +458,7 @@ class InitialSyncHandler(BaseHandler):
visibility
and visibility.content["history_visibility"] == "world_readable"
):
return (Membership.JOIN, None)
return
return Membership.JOIN, None
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
)

@@ -729,7 +729,27 @@ class EventCreationHandler(object):
assert not self.config.worker_app

if ratelimit:
yield self.base_handler.ratelimit(requester)
# We check if this is a room admin redacting an event so that we
# can apply different ratelimiting. We do this by simply checking
# it's not a self-redaction (to avoid having to look up whether the
# user is actually admin or not).
is_admin_redaction = False
if event.type == EventTypes.Redaction:
original_event = yield self.store.get_event(
event.redacts,
check_redacted=False,
get_prev_content=False,
allow_rejected=False,
allow_none=True,
)

is_admin_redaction = (
original_event and event.sender != original_event.sender
)

yield self.base_handler.ratelimit(
requester, is_admin_redaction=is_admin_redaction
)

yield self.base_handler.maybe_kick_guest_users(event, context)

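The new ratelimiting branch above classifies a redaction as an "admin redaction" purely by comparing senders; in isolation the check is just (made-up user IDs):

def is_admin_redaction(redaction_sender, original_sender):
    # A redaction counts as an admin redaction only when the redacting user
    # is not the sender of the original event (i.e. not a self-redaction).
    return original_sender is not None and redaction_sender != original_sender

print(is_admin_redaction("@mod:example.org", "@alice:example.org"))    # True
print(is_admin_redaction("@alice:example.org", "@alice:example.org"))  # False
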
@@ -255,7 +255,7 @@ class PresenceHandler(object):
self.unpersisted_users_changes = set()

if unpersisted:
logger.info("Persisting %d upersisted presence updates", len(unpersisted))
logger.info("Persisting %d unpersisted presence updates", len(unpersisted))
yield self.store.update_presence(
[self.user_to_current_state[user_id] for user_id in unpersisted]
)
@@ -1032,7 +1032,7 @@ class PresenceEventSource(object):
#
# Hence this guard where we just return nothing so that the sync
# doesn't return. C.f. #5503.
return ([], max_token)
return [], max_token

presence = self.get_presence_handler()
stream_change_cache = self.store.presence_stream_cache
@@ -1279,7 +1279,7 @@ def get_interested_parties(store, states):
# Always notify self
users_to_states.setdefault(state.user_id, []).append(state)

return (room_ids_to_states, users_to_states)
return room_ids_to_states, users_to_states


@defer.inlineCallbacks

@@ -148,7 +148,7 @@ class ReceiptEventSource(object):
to_key = yield self.get_current_key()

if from_key == to_key:
return ([], to_key)
return [], to_key

events = yield self.store.get_linearized_receipts_for_rooms(
room_ids, from_key=from_key, to_key=to_key

@@ -24,13 +24,11 @@ from synapse.api.errors import (
AuthError,
Codes,
ConsentNotGivenError,
InvalidCaptchaError,
LimitExceededError,
RegistrationError,
SynapseError,
)
from synapse.config.server import is_threepid_reserved
from synapse.http.client import CaptchaServerHttpClient
from synapse.http.servlet import assert_params_in_dict
from synapse.replication.http.login import RegisterDeviceReplicationServlet
from synapse.replication.http.register import (
@@ -39,7 +37,6 @@ from synapse.replication.http.register import (
)
from synapse.types import RoomAlias, RoomID, UserID, create_requester
from synapse.util.async_helpers import Linearizer
from synapse.util.threepids import check_3pid_allowed

from ._base import BaseHandler

@@ -59,7 +56,6 @@ class RegistrationHandler(BaseHandler):
self._auth_handler = hs.get_auth_handler()
self.profile_handler = hs.get_profile_handler()
self.user_directory_handler = hs.get_user_directory_handler()
self.captcha_client = CaptchaServerHttpClient(hs)
self.identity_handler = self.hs.get_handlers().identity_handler
self.ratelimiter = hs.get_registration_ratelimiter()

@@ -279,16 +275,12 @@ class RegistrationHandler(BaseHandler):
fake_requester = create_requester(user_id)

# try to create the room if we're the first real user on the server. Note
# that an auto-generated support user is not a real user and will never be
# that an auto-generated support or bot user is not a real user and will never be
# the user to create the room
should_auto_create_rooms = False
is_support = yield self.store.is_support_user(user_id)
# There is an edge case where the first user is the support user, then
# the room is never created, though this seems unlikely and
# recoverable from given the support user being involved in the first
# place.
if self.hs.config.autocreate_auto_join_rooms and not is_support:
count = yield self.store.count_all_users()
is_real_user = yield self.store.is_real_user(user_id)
if self.hs.config.autocreate_auto_join_rooms and is_real_user:
count = yield self.store.count_real_users()
should_auto_create_rooms = count == 1
for r in self.hs.config.auto_join_rooms:
logger.info("Auto-joining %s to %s", user_id, r)
@@ -362,70 +354,6 @@ class RegistrationHandler(BaseHandler):
)
return user_id

@defer.inlineCallbacks
def check_recaptcha(self, ip, private_key, challenge, response):
"""
Checks a recaptcha is correct.

Used only by c/s api v1
"""

captcha_response = yield self._validate_captcha(
ip, private_key, challenge, response
)
if not captcha_response["valid"]:
logger.info(
"Invalid captcha entered from %s. Error: %s",
ip,
captcha_response["error_url"],
)
raise InvalidCaptchaError(error_url=captcha_response["error_url"])
else:
logger.info("Valid captcha entered from %s", ip)

@defer.inlineCallbacks
def register_email(self, threepidCreds):
"""
Registers emails with an identity server.

Used only by c/s api v1
"""

for c in threepidCreds:
logger.info(
"validating threepidcred sid %s on id server %s",
c["sid"],
c["idServer"],
)
try:
threepid = yield self.identity_handler.threepid_from_creds(c)
except Exception:
logger.exception("Couldn't validate 3pid")
raise RegistrationError(400, "Couldn't validate 3pid")

if not threepid:
raise RegistrationError(400, "Couldn't validate 3pid")
logger.info(
"got threepid with medium '%s' and address '%s'",
threepid["medium"],
threepid["address"],
)

if not check_3pid_allowed(self.hs, threepid["medium"], threepid["address"]):
raise RegistrationError(403, "Third party identifier is not allowed")

@defer.inlineCallbacks
def bind_emails(self, user_id, threepidCreds):
"""Links emails with a user ID and informs an identity server.

Used only by c/s api v1
"""

# Now we have a matrix ID, bind it to the threepids we were given
for c in threepidCreds:
# XXX: This should be a deferred list, shouldn't it?
yield self.identity_handler.bind_threepid(c, user_id)

def check_user_id_not_appservice_exclusive(self, user_id, allowed_appservice=None):
# don't allow people to register the server notices mxid
if self._server_notices_mxid is not None:
@@ -463,45 +391,8 @@ class RegistrationHandler(BaseHandler):
self._next_generated_user_id += 1
return str(id)

@defer.inlineCallbacks
def _validate_captcha(self, ip_addr, private_key, challenge, response):
"""Validates the captcha provided.

Used only by c/s api v1

Returns:
dict: Containing 'valid'(bool) and 'error_url'(str) if invalid.

"""
response = yield self._submit_captcha(ip_addr, private_key, challenge, response)
# parse Google's response. Lovely format..
lines = response.split("\n")
json = {
"valid": lines[0] == "true",
"error_url": "http://www.recaptcha.net/recaptcha/api/challenge?"
+ "error=%s" % lines[1],
}
return json

@defer.inlineCallbacks
def _submit_captcha(self, ip_addr, private_key, challenge, response):
"""
Used only by c/s api v1
"""
data = yield self.captcha_client.post_urlencoded_get_raw(
"http://www.recaptcha.net:80/recaptcha/api/verify",
args={
"privatekey": private_key,
"remoteip": ip_addr,
"challenge": challenge,
"response": response,
},
)
return data

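The legacy reCAPTCHA verify endpoint used by _validate_captcha above returns a plain-text body that the code splits on newlines; an illustrative response and the resulting dict:

response = "false\nincorrect-captcha-sol"
lines = response.split("\n")
result = {
    "valid": lines[0] == "true",
    "error_url": "http://www.recaptcha.net/recaptcha/api/challenge?" + "error=%s" % lines[1],
}
print(result)  # {'valid': False, 'error_url': '...challenge?error=incorrect-captcha-sol'}
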
@defer.inlineCallbacks
def _join_user_to_room(self, requester, room_identifier):
room_id = None
room_member_handler = self.hs.get_room_member_handler()
if RoomID.is_valid(room_identifier):
room_id = room_identifier
@@ -622,7 +513,7 @@ class RegistrationHandler(BaseHandler):
initial_display_name=initial_display_name,
is_guest=is_guest,
)
return (r["device_id"], r["access_token"])
return r["device_id"], r["access_token"]

valid_until_ms = None
if self.session_lifetime is not None:
@@ -648,9 +539,7 @@ class RegistrationHandler(BaseHandler):
return (device_id, access_token)

@defer.inlineCallbacks
def post_registration_actions(
self, user_id, auth_result, access_token, bind_email, bind_msisdn
):
def post_registration_actions(self, user_id, auth_result, access_token):
"""A user has completed registration

Args:
@@ -659,18 +548,10 @@ class RegistrationHandler(BaseHandler):
registered user.
access_token (str|None): The access token of the newly logged in
device, or None if `inhibit_login` enabled.
bind_email (bool): Whether to bind the email with the identity
server.
bind_msisdn (bool): Whether to bind the msisdn with the identity
server.
"""
if self.hs.config.worker_app:
yield self._post_registration_client(
user_id=user_id,
auth_result=auth_result,
access_token=access_token,
bind_email=bind_email,
bind_msisdn=bind_msisdn,
user_id=user_id, auth_result=auth_result, access_token=access_token
)
return

@@ -683,13 +564,11 @@ class RegistrationHandler(BaseHandler):
):
yield self.store.upsert_monthly_active_user(user_id)

yield self._register_email_threepid(
user_id, threepid, access_token, bind_email
)
yield self._register_email_threepid(user_id, threepid, access_token)

if auth_result and LoginType.MSISDN in auth_result:
threepid = auth_result[LoginType.MSISDN]
yield self._register_msisdn_threepid(user_id, threepid, bind_msisdn)
yield self._register_msisdn_threepid(user_id, threepid)

if auth_result and LoginType.TERMS in auth_result:
yield self._on_user_consented(user_id, self.hs.config.user_consent_version)
@@ -708,14 +587,12 @@ class RegistrationHandler(BaseHandler):
yield self.post_consent_actions(user_id)

@defer.inlineCallbacks
def _register_email_threepid(self, user_id, threepid, token, bind_email):
def _register_email_threepid(self, user_id, threepid, token):
"""Add an email address as a 3pid identifier

Also adds an email pusher for the email address, if configured in the
HS config

Also optionally binds emails to the given user_id on the identity server

Must be called on master.

Args:
@@ -723,8 +600,6 @@ class RegistrationHandler(BaseHandler):
threepid (object): m.login.email.identity auth response
token (str|None): access_token for the user, or None if not logged
in.
bind_email (bool): true if the client requested the email to be
bound at the identity server
Returns:
defer.Deferred:
"""
@@ -766,29 +641,15 @@ class RegistrationHandler(BaseHandler):
data={},
)

if bind_email:
logger.info("bind_email specified: binding")
logger.debug("Binding emails %s to %s" % (threepid, user_id))
yield self.identity_handler.bind_threepid(
threepid["threepid_creds"], user_id
)
else:
logger.info("bind_email not specified: not binding email")

@defer.inlineCallbacks
def _register_msisdn_threepid(self, user_id, threepid, bind_msisdn):
def _register_msisdn_threepid(self, user_id, threepid):
"""Add a phone number as a 3pid identifier

Also optionally binds msisdn to the given user_id on the identity server

Must be called on master.

Args:
user_id (str): id of user
threepid (object): m.login.msisdn auth response
token (str): access_token for the user
bind_email (bool): true if the client requested the email to be
bound at the identity server
Returns:
defer.Deferred:
"""
@@ -804,12 +665,3 @@ class RegistrationHandler(BaseHandler):
yield self._auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], threepid["validated_at"]
)

if bind_msisdn:
logger.info("bind_msisdn specified: binding")
logger.debug("Binding msisdn %s to %s", threepid, user_id)
yield self.identity_handler.bind_threepid(
threepid["threepid_creds"], user_id
)
else:
logger.info("bind_msisdn not specified: not binding msisdn")