Compare commits
7 Commits
anoa/user_ ... anoa/backf
| Author | SHA1 | Date |
|---|---|---|
|  | 3faa0974be |  |
|  | 858ef5e144 |  |
|  | 5ffd68dca1 |  |
|  | f1c9ded738 |  |
|  | 97659b7489 |  |
|  | c570f24acc |  |
|  | eadfda3ebc |  |
23 CHANGES.md
@@ -1,15 +1,18 @@
For the next release
====================
Synapse 1.19.2 (2020-09-16)
===========================

Removal warning
---------------
Due to the issue below server admins are encouraged to upgrade as soon as possible.

Some older clients used a
[disallowed character](https://matrix.org/docs/spec/client_server/r0.6.1#post-matrix-client-r0-register-email-requesttoken)
(`:`) in the `client_secret` parameter of various endpoints. The incorrect
behaviour was allowed for backwards compatibility, but is now being removed
from Synapse as most users have updated their client. Further context can be
found at [\#6766](https://github.com/matrix-org/synapse/issues/6766).
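
The character set involved is narrow; a minimal sketch of the kind of check now enforced, with the regex assumed from the client-server spec rather than taken from this changeset:

```
import re

# Assumed from the Matrix client-server spec: client_secret may contain
# only these characters; ':' (used by some older clients) is not one of them.
CLIENT_SECRET_RE = re.compile(r"^[0-9a-zA-Z.=_-]+$")

def is_valid_client_secret(client_secret: str) -> bool:
    """Return True if client_secret uses only spec-allowed characters."""
    return CLIENT_SECRET_RE.match(client_secret) is not None

assert is_valid_client_secret("this.is_a-valid=secret")
assert not is_valid_client_secret("no:colons:here")  # ':' was the offender
```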

Bugfixes
--------

- Fix joining rooms over federation that include malformed events. ([\#8324](https://github.com/matrix-org/synapse/issues/8324))


Synapse 1.19.1 (2020-08-27)
===========================

No significant changes.


Synapse 1.19.1rc1 (2020-08-25)

@@ -1 +0,0 @@
Add filter `name` to the `/users` admin API, which filters by user ID or displayname. Contributed by Awesome Technologies Innovationslabor GmbH.
@@ -1 +0,0 @@
Support `identifier` dictionary fields in User-Interactive Authentication flows. Relax requirement of the `user` parameter.
@@ -1 +0,0 @@
Fix a memory leak by limiting the length of time that messages will be queued for a remote server that has been unreachable.
@@ -1 +0,0 @@
Don't fail `/submit_token` requests on incorrect session ID if `request_token_inhibit_3pid_errors` is turned on.
@@ -1 +0,0 @@
Iteratively encode JSON to avoid blocking the reactor.
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Use the default template file when its equivalent is not found in a custom template directory.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Fix `Re-starting finished log context PUT-nnnn` warning when event persistence failed.
@@ -1 +0,0 @@
Remove some unused database functions.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Add type hints to `synapse.handlers.room`.
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Return the previous stream token if a non-member event is a duplicate.
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Synapse now correctly enforces the valid characters in the `client_secret` parameter used in various endpoints.
@@ -1 +0,0 @@
Fix a bug introduced in v1.7.2 impacting message retention policies that would allow federated homeservers to dictate a retention period that's lower than the configured minimum allowed duration in the configuration file.
@@ -1 +0,0 @@
Fix a long-standing bug where invalid JSON would be accepted by Synapse.
@@ -1 +0,0 @@
Use the default template file when its equivalent is not found in a custom template directory.
@@ -1 +0,0 @@
Fix a bug introduced in Synapse 1.12.0 which could cause `/sync` requests to fail with a 404 if you had a very old outstanding room invite.
@@ -1 +0,0 @@
Link to matrix-synapse-rest-password-provider in the password provider documentation.
@@ -1 +0,0 @@
Return the previous stream token if a non-member event is a duplicate.
@@ -1 +0,0 @@
Separate `get_current_token` into two since there are two different use cases for it.
@@ -1 +0,0 @@
Iteratively encode JSON to avoid blocking the reactor.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Updated documentation to note that Synapse does not follow `HTTP 308` redirects due to an upstream library not supporting them. Contributed by Ryan Cole.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Remove `ChainedIdGenerator`.
@@ -1 +0,0 @@
Reduce the amount of whitespace in JSON stored and sent in responses.
@@ -1 +0,0 @@
Add type hints to `synapse.storage.database`.
@@ -1 +0,0 @@
Return a proper error code when the rooms of an invalid group are requested.
@@ -1 +0,0 @@
Fix a bug which could cause a leaked postgres connection if synapse was set to daemonize.
@@ -1 +0,0 @@
Micro-optimisations to get_auth_chain_ids.
@@ -1 +0,0 @@
Convert various parts of the codebase to async/await.
@@ -1 +0,0 @@
Clarify the error code if a user tries to register with a numeric ID. This bug was introduced in v1.15.0.
@@ -1 +0,0 @@
Fixes a bug where appservices with ratelimiting disabled would still be ratelimited when joining rooms. This bug was introduced in v1.19.0.
@@ -1 +0,0 @@
Add type hints to `synapse.state`.
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Added curl for healthcheck support and readme updates for the change. Contributed by @maquis196.
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Add support for shadow-banning users (ignoring any message send requests).
@@ -1 +0,0 @@
Refactor `StreamIdGenerator` and `MultiWriterIdGenerator` to have the same interface.
@@ -1 +0,0 @@
Add filter `name` to the `/users` admin API, which filters by user ID or displayname. Contributed by Awesome Technologies Innovationslabor GmbH.
@@ -1 +0,0 @@
Add functions to `MultiWriterIdGen` used by events stream.

12 debian/changelog vendored
@@ -1,3 +1,15 @@
matrix-synapse-py3 (1.19.2) stable; urgency=medium

  * New synapse release 1.19.2.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 16 Sep 2020 12:50:30 +0100

matrix-synapse-py3 (1.19.1) stable; urgency=medium

  * New synapse release 1.19.1.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 27 Aug 2020 10:50:19 +0100

matrix-synapse-py3 (1.19.0) stable; urgency=medium

  [ Synapse Packaging team ]

@@ -55,7 +55,6 @@ RUN pip install --prefix="/install" --no-warn-script-location \
FROM docker.io/python:${PYTHON_VERSION}-slim

RUN apt-get update && apt-get install -y \
    curl \
    libpq5 \
    xmlsec1 \
    gosu \
@@ -70,6 +69,3 @@ VOLUME ["/data"]
EXPOSE 8008/tcp 8009/tcp 8448/tcp

ENTRYPOINT ["/start.py"]

HEALTHCHECK --interval=1m --timeout=5s \
    CMD curl -fSs http://localhost:8008/health || exit 1

@@ -162,32 +162,3 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .

You can choose to build a different docker image by changing the value of the `-f` flag to
point to another Dockerfile.

## Disabling the healthcheck

If you are using a non-standard port or TLS inside docker you can disable the healthcheck
whilst running the above `docker run` commands.

```
   --no-healthcheck
```

## Setting a custom healthcheck on docker run

If you wish to point the healthcheck at a different port with the docker command, add the following

```
 --health-cmd 'curl -fSs http://localhost:1234/health'
```

## Setting the healthcheck in a docker-compose file

You can add the following to set a custom healthcheck in a docker compose file.
You will need version >2.1 for this to work.

```
healthcheck:
  test: ["CMD", "curl", "-fSs", "http://localhost:8008/health"]
  interval: 1m
  timeout: 10s
  retries: 3
```

@@ -108,7 +108,7 @@ The api is::

    GET /_synapse/admin/v2/users?from=0&limit=10&guests=false

To use it, you will need to authenticate by providing an ``access_token`` for a
To use it, you will need to authenticate by providing an `access_token` for a
server admin: see `README.rst <README.rst>`_.

The parameter ``from`` is optional but used for pagination, denoting the
@@ -119,11 +119,8 @@ from a previous call.
The parameter ``limit`` is optional but is used for pagination, denoting the
maximum number of items to return in this call. Defaults to ``100``.

The parameter ``user_id`` is optional and filters to only return users with user IDs
that contain this value. This parameter is ignored when using the ``name`` parameter.

The parameter ``name`` is optional and filters to only return users with user ID localparts
**or** displaynames that contain this value.
The parameter ``user_id`` is optional and filters to only users with user IDs
that contain this value.

The parameter ``guests`` is optional and if ``false`` will **exclude** guest users.
Defaults to ``true`` to include guest users.
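
For illustration, a minimal sketch of calling this endpoint with the new ``name`` filter; the homeserver URL and token are placeholders:

```
import requests  # assumes the `requests` package is available

BASE = "https://homeserver.example.com"  # placeholder homeserver
TOKEN = "<server admin access_token>"    # placeholder token

# List up to 10 non-guest users whose user ID localpart or displayname
# contains "alice", mirroring the parameters documented above.
resp = requests.get(
    f"{BASE}/_synapse/admin/v2/users",
    params={"from": 0, "limit": 10, "guests": "false", "name": "alice"},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
for user in resp.json().get("users", []):
    print(user["name"], user.get("displayname"))
```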

@@ -47,18 +47,6 @@ you invite them to. This can be caused by an incorrectly-configured reverse
proxy: see [reverse_proxy.md](<reverse_proxy.md>) for instructions on how to correctly
configure a reverse proxy.

### Known issues

**HTTP `308 Permanent Redirect` redirects are not followed**: Due to missing features
in the HTTP library used by Synapse, 308 redirects are currently not followed by
federating servers, which can cause `M_UNKNOWN` or `401 Unauthorized` errors. This
may affect users who are redirecting apex-to-www (e.g. `example.com` -> `www.example.com`),
and especially users of the Kubernetes *Nginx Ingress* module, which uses 308 redirect
codes by default. For those Kubernetes users, [this Stackoverflow post](https://stackoverflow.com/a/52617528/5096871)
might be helpful. For other users, switching to a `301 Moved Permanently` code may be
an option. 308 redirect codes will be supported properly in a future
release of Synapse.

## Running a demo federation of Synapses

If you want to get up and running quickly with a trio of homeservers in a

@@ -14,7 +14,6 @@ password auth provider module implementations:

* [matrix-synapse-ldap3](https://github.com/matrix-org/matrix-synapse-ldap3/)
* [matrix-synapse-shared-secret-auth](https://github.com/devture/matrix-synapse-shared-secret-auth)
* [matrix-synapse-rest-password-provider](https://github.com/ma1uta/matrix-synapse-rest-password-provider)

## Required methods

@@ -378,10 +378,11 @@ retention:
  #  min_lifetime: 1d
  #  max_lifetime: 1y

  # Retention policy limits. If set, and the state of a room contains a
  # 'm.room.retention' event in its state which contains a 'min_lifetime' or a
  # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy
  # to these limits when running purge jobs.
  # Retention policy limits. If set, a user won't be able to send a
  # 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime'
  # that's not within this range. This is especially useful in closed federations,
  # in which server admins can make sure every federating server applies the same
  # rules.
  #
  #allowed_lifetime_min: 1d
  #allowed_lifetime_max: 1y
@@ -407,19 +408,12 @@ retention:
  # (e.g. every 12h), but not want that purge to be performed by a job that's
  # iterating over every room it knows, which could be heavy on the server.
  #
  # If any purge job is configured, it is strongly recommended to have at least
  # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime'
  # set, or one job without 'shortest_max_lifetime' and one job without
  # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if
  # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a
  # room's policy to these values is done after the policies are retrieved from
  # Synapse's database (which is done using the range specified in a purge job's
  # configuration).
  #
  #purge_jobs:
  #  - longest_max_lifetime: 3d
  #  - shortest_max_lifetime: 1d
  #    longest_max_lifetime: 3d
  #    interval: 12h
  #  - shortest_max_lifetime: 3d
  #    longest_max_lifetime: 1y
  #    interval: 1d

  # Inhibits the /requestToken endpoints from returning an error that might leak
@@ -2008,7 +2002,9 @@ email:
  # Directory in which Synapse will try to find the template files below.
  # If not set, default templates from within the Synapse package will be used.
  #
  # Do not uncomment this setting unless you want to customise the templates.
  # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
  # If you *do* uncomment it, you will need to make sure that all the templates
  # below are in the directory.
  #
  # Synapse will look for the following templates in this directory:
  #

@@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Stub for frozendict.

from typing import (
    Any,
    Hashable,
    Iterable,
    Iterator,
    Mapping,
    overload,
    Tuple,
    TypeVar,
)

_KT = TypeVar("_KT", bound=Hashable)  # Key type.
_VT = TypeVar("_VT")  # Value type.

class frozendict(Mapping[_KT, _VT]):
    @overload
    def __init__(self, **kwargs: _VT) -> None: ...
    @overload
    def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
    @overload
    def __init__(
        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    def __getitem__(self, key: _KT) -> _VT: ...
    def __contains__(self, key: Any) -> bool: ...
    def copy(self, **add_or_replace: Any) -> frozendict: ...
    def __iter__(self) -> Iterator[_KT]: ...
    def __len__(self) -> int: ...
    def __repr__(self) -> str: ...
    def __hash__(self) -> int: ...

@@ -48,7 +48,7 @@ try:
except ImportError:
    pass

__version__ = "1.19.1rc1"
__version__ = "1.19.2"

if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
    # We import here so that we don't have to install a bunch of deps when

@@ -213,7 +213,6 @@ class Auth(object):
            user = user_info["user"]
            token_id = user_info["token_id"]
            is_guest = user_info["is_guest"]
            shadow_banned = user_info["shadow_banned"]

            # Deny the request if the user account has expired.
            if self._account_validity.enabled and not allow_expired:
@@ -253,12 +252,7 @@ class Auth(object):
                opentracing.set_tag("device_id", device_id)

            return synapse.types.create_requester(
                user,
                token_id,
                is_guest,
                shadow_banned,
                device_id,
                app_service=app_service,
                user, token_id, is_guest, device_id, app_service=app_service
            )
        except KeyError:
            raise MissingClientTokenError()
@@ -303,7 +297,6 @@ class Auth(object):
            dict that includes:
                `user` (UserID)
                `is_guest` (bool)
                `shadow_banned` (bool)
                `token_id` (int|None): access token id. May be None if guest
                `device_id` (str|None): device corresponding to access token
        Raises:
@@ -363,7 +356,6 @@ class Auth(object):
            ret = {
                "user": user,
                "is_guest": True,
                "shadow_banned": False,
                "token_id": None,
                # all guests get the same device id
                "device_id": GUEST_DEVICE_ID,
@@ -373,7 +365,6 @@ class Auth(object):
            ret = {
                "user": user,
                "is_guest": False,
                "shadow_banned": False,
                "token_id": None,
                "device_id": None,
            }
@@ -497,7 +488,6 @@ class Auth(object):
            "user": UserID.from_string(ret.get("name")),
            "token_id": ret.get("token_id", None),
            "is_guest": False,
            "shadow_banned": ret.get("shadow_banned"),
            "device_id": ret.get("device_id"),
            "valid_until_ms": ret.get("valid_until_ms"),
        }

@@ -21,9 +21,9 @@ import typing
from http import HTTPStatus
from typing import Dict, List, Optional, Union

from twisted.web import http
from canonicaljson import json

from synapse.util import json_decoder
from twisted.web import http

if typing.TYPE_CHECKING:
    from synapse.types import JsonDict
@@ -593,7 +593,7 @@ class HttpResponseException(CodeMessageException):
        # try to parse the body as json, to get better errcode/msg, but
        # default to M_UNKNOWN with the HTTP status as the error text
        try:
            j = json_decoder.decode(self.response.decode("utf-8"))
            j = json.loads(self.response.decode("utf-8"))
        except ValueError:
            j = {}

@@ -604,11 +604,3 @@ class HttpResponseException(CodeMessageException):
        errmsg = j.pop("error", self.msg)

        return ProxiedRequestError(self.code, errmsg, errcode, j)


class ShadowBanError(Exception):
    """
    Raised when a shadow-banned user attempts to perform an action.

    This should be caught and a proper "fake" success response sent to the user.
    """
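
The docstring above asks callers to convert the exception into a fake success; a minimal sketch of that pattern (the handler call and response shape are illustrative, not code from this changeset):

```
import random

class ShadowBanError(Exception):
    """Raised when a shadow-banned user attempts to perform an action."""

async def send_message_or_pretend(handler, requester, content) -> dict:
    """Call the real message handler, but hand shadow-banned users a
    plausible-looking event ID so the ban is not observable to them."""
    try:
        event = await handler.send_message(requester, content)  # illustrative API
        return {"event_id": event.event_id}
    except ShadowBanError:
        # Mimic success with an event ID that was never persisted.
        return {"event_id": "$%d" % (random.getrandbits(64),)}
```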

@@ -23,7 +23,7 @@ from jsonschema import FormatChecker

from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
from synapse.storage.presence import UserPresenceState
from synapse.types import RoomID, UserID

FILTER_SCHEMA = {

@@ -18,16 +18,12 @@
import argparse
import errno
import os
import time
import urllib.parse
from collections import OrderedDict
from hashlib import sha256
from textwrap import dedent
from typing import Any, Callable, List, MutableMapping, Optional
from typing import Any, List, MutableMapping, Optional

import attr
import jinja2
import pkg_resources
import yaml

@@ -104,11 +100,6 @@ class Config(object):
    def __init__(self, root_config=None):
        self.root = root_config

        # Get the path to the default Synapse template directory
        self.default_template_dir = pkg_resources.resource_filename(
            "synapse", "res/templates"
        )

    def __getattr__(self, item: str) -> Any:
        """
        Try and fetch a configuration option that does not exist on this class.
@@ -193,95 +184,6 @@ class Config(object):
        with open(file_path) as file_stream:
            return file_stream.read()

    def read_templates(
        self, filenames: List[str], custom_template_directory: Optional[str] = None,
    ) -> List[jinja2.Template]:
        """Load a list of template files from disk using the given variables.

        This function will attempt to load the given templates from the default Synapse
        template directory. If `custom_template_directory` is supplied, that directory
        is tried first.

        Files read are treated as Jinja templates. These templates are not rendered yet.

        Args:
            filenames: A list of template filenames to read.

            custom_template_directory: A directory to try to look for the templates
                before using the default Synapse template directory instead.

        Raises:
            ConfigError: if the file's path is incorrect or otherwise cannot be read.

        Returns:
            A list of jinja2 templates.
        """
        templates = []
        search_directories = [self.default_template_dir]

        # The loader will first look in the custom template directory (if specified) for the
        # given filename. If it doesn't find it, it will use the default template dir instead
        if custom_template_directory:
            # Check that the given template directory exists
            if not self.path_exists(custom_template_directory):
                raise ConfigError(
                    "Configured template directory does not exist: %s"
                    % (custom_template_directory,)
                )

            # Search the custom template directory as well
            search_directories.insert(0, custom_template_directory)

        loader = jinja2.FileSystemLoader(search_directories)
        env = jinja2.Environment(loader=loader, autoescape=True)

        # Update the environment with our custom filters
        env.filters.update(
            {
                "format_ts": _format_ts_filter,
                "mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
            }
        )

        for filename in filenames:
            # Load the template
            template = env.get_template(filename)
            templates.append(template)

        return templates


def _format_ts_filter(value: int, format: str):
    return time.strftime(format, time.localtime(value / 1000))


def _create_mxc_to_http_filter(public_baseurl: str) -> Callable:
    """Create and return a jinja2 filter that converts MXC urls to HTTP

    Args:
        public_baseurl: The public, accessible base URL of the homeserver
    """

    def mxc_to_http_filter(value, width, height, resize_method="crop"):
        if value[0:6] != "mxc://":
            return ""

        server_and_media_id = value[6:]
        fragment = None
        if "#" in server_and_media_id:
            server_and_media_id, fragment = server_and_media_id.split("#", 1)
            fragment = "#" + fragment

        params = {"width": width, "height": height, "method": resize_method}
        return "%s_matrix/media/v1/thumbnail/%s?%s%s" % (
            public_baseurl,
            server_and_media_id,
            urllib.parse.urlencode(params),
            fragment or "",
        )

    return mxc_to_http_filter


class RootConfig(object):
    """
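
The fallback order implemented by the removed `read_templates` helper (custom directory first, packaged defaults second) relies on `jinja2.FileSystemLoader` searching its directories in list order; a self-contained sketch of the same idea:

```
import jinja2

def load_templates(filenames, default_dir, custom_dir=None):
    """Sketch of read_templates' search order: the custom directory (if
    any) is consulted first, then the default Synapse template directory."""
    search_directories = [default_dir]
    if custom_dir is not None:
        search_directories.insert(0, custom_dir)

    env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(search_directories),
        autoescape=True,  # matches the environment configured above
    )
    return [env.get_template(name) for name in filenames]
```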

@@ -23,6 +23,7 @@ from enum import Enum
from typing import Optional

import attr
import pkg_resources

from ._base import Config, ConfigError

@@ -97,18 +98,21 @@ class EmailConfig(Config):
        if parsed[1] == "":
            raise RuntimeError("Invalid notif_from address")

        # A user-configurable template directory
        template_dir = email_config.get("template_dir")
        if isinstance(template_dir, str):
            # We need an absolute path, because we change directory after starting (and
            # we don't yet know what auxiliary templates like mail.css we will need).
            template_dir = os.path.abspath(template_dir)
        elif template_dir is not None:
            # If template_dir is something other than a str or None, warn the user
            raise ConfigError("Config option email.template_dir must be type str")
        # we need an absolute path, because we change directory after starting (and
        # we don't yet know what auxiliary templates like mail.css we will need).
        # (Note that loading as package_resources with jinja.PackageLoader doesn't
        # work for the same reason.)
        if not template_dir:
            template_dir = pkg_resources.resource_filename("synapse", "res/templates")

        self.email_template_dir = os.path.abspath(template_dir)

        self.email_enable_notifs = email_config.get("enable_notifs", False)

        account_validity_config = config.get("account_validity") or {}
        account_validity_renewal_enabled = account_validity_config.get("renew_at")

        self.threepid_behaviour_email = (
            # Have Synapse handle the email sending if account_threepid_delegates.email
            # is not defined
@@ -162,6 +166,19 @@ class EmailConfig(Config):
            email_config.get("validation_token_lifetime", "1h")
        )

        if (
            self.email_enable_notifs
            or account_validity_renewal_enabled
            or self.threepid_behaviour_email == ThreepidBehaviour.LOCAL
        ):
            # make sure we can import the required deps
            import bleach
            import jinja2

            # prevent unused warnings
            jinja2
            bleach

        if self.threepid_behaviour_email == ThreepidBehaviour.LOCAL:
            missing = []
            if not self.email_notif_from:
@@ -179,49 +196,49 @@ class EmailConfig(Config):

            # These email templates have placeholders in them, and thus must be
            # parsed using a templating engine during a request
            password_reset_template_html = email_config.get(
            self.email_password_reset_template_html = email_config.get(
                "password_reset_template_html", "password_reset.html"
            )
            password_reset_template_text = email_config.get(
            self.email_password_reset_template_text = email_config.get(
                "password_reset_template_text", "password_reset.txt"
            )
            registration_template_html = email_config.get(
            self.email_registration_template_html = email_config.get(
                "registration_template_html", "registration.html"
            )
            registration_template_text = email_config.get(
            self.email_registration_template_text = email_config.get(
                "registration_template_text", "registration.txt"
            )
            add_threepid_template_html = email_config.get(
            self.email_add_threepid_template_html = email_config.get(
                "add_threepid_template_html", "add_threepid.html"
            )
            add_threepid_template_text = email_config.get(
            self.email_add_threepid_template_text = email_config.get(
                "add_threepid_template_text", "add_threepid.txt"
            )

            password_reset_template_failure_html = email_config.get(
            self.email_password_reset_template_failure_html = email_config.get(
                "password_reset_template_failure_html", "password_reset_failure.html"
            )
            registration_template_failure_html = email_config.get(
            self.email_registration_template_failure_html = email_config.get(
                "registration_template_failure_html", "registration_failure.html"
            )
            add_threepid_template_failure_html = email_config.get(
            self.email_add_threepid_template_failure_html = email_config.get(
                "add_threepid_template_failure_html", "add_threepid_failure.html"
            )

            # These templates do not support any placeholder variables, so we
            # will read them from disk once during setup
            password_reset_template_success_html = email_config.get(
            email_password_reset_template_success_html = email_config.get(
                "password_reset_template_success_html", "password_reset_success.html"
            )
            registration_template_success_html = email_config.get(
            email_registration_template_success_html = email_config.get(
                "registration_template_success_html", "registration_success.html"
            )
            add_threepid_template_success_html = email_config.get(
            email_add_threepid_template_success_html = email_config.get(
                "add_threepid_template_success_html", "add_threepid_success.html"
            )

            # Read all templates from disk
            (
            # Check templates exist
            for f in [
                self.email_password_reset_template_html,
                self.email_password_reset_template_text,
                self.email_registration_template_html,
@@ -231,36 +248,32 @@ class EmailConfig(Config):
                self.email_password_reset_template_failure_html,
                self.email_registration_template_failure_html,
                self.email_add_threepid_template_failure_html,
                password_reset_template_success_html_template,
                registration_template_success_html_template,
                add_threepid_template_success_html_template,
            ) = self.read_templates(
                [
                    password_reset_template_html,
                    password_reset_template_text,
                    registration_template_html,
                    registration_template_text,
                    add_threepid_template_html,
                    add_threepid_template_text,
                    password_reset_template_failure_html,
                    registration_template_failure_html,
                    add_threepid_template_failure_html,
                    password_reset_template_success_html,
                    registration_template_success_html,
                    add_threepid_template_success_html,
                ],
                template_dir,
            )
                email_password_reset_template_success_html,
                email_registration_template_success_html,
                email_add_threepid_template_success_html,
            ]:
                p = os.path.join(self.email_template_dir, f)
                if not os.path.isfile(p):
                    raise ConfigError("Unable to find template file %s" % (p,))

            # Render templates that do not contain any placeholders
            self.email_password_reset_template_success_html_content = (
                password_reset_template_success_html_template.render()
            # Retrieve content of web templates
            filepath = os.path.join(
                self.email_template_dir, email_password_reset_template_success_html
            )
            self.email_registration_template_success_html_content = (
                registration_template_success_html_template.render()
            self.email_password_reset_template_success_html = self.read_file(
                filepath, "email.password_reset_template_success_html"
            )
            self.email_add_threepid_template_success_html_content = (
                add_threepid_template_success_html_template.render()
            filepath = os.path.join(
                self.email_template_dir, email_registration_template_success_html
            )
            self.email_registration_template_success_html_content = self.read_file(
                filepath, "email.registration_template_success_html"
            )
            filepath = os.path.join(
                self.email_template_dir, email_add_threepid_template_success_html
            )
            self.email_add_threepid_template_success_html_content = self.read_file(
                filepath, "email.add_threepid_template_success_html"
            )

        if self.email_enable_notifs:
@@ -277,19 +290,17 @@ class EmailConfig(Config):
                % (", ".join(missing),)
            )

            notif_template_html = email_config.get(
            self.email_notif_template_html = email_config.get(
                "notif_template_html", "notif_mail.html"
            )
            notif_template_text = email_config.get(
            self.email_notif_template_text = email_config.get(
                "notif_template_text", "notif_mail.txt"
            )

            (
                self.email_notif_template_html,
                self.email_notif_template_text,
            ) = self.read_templates(
                [notif_template_html, notif_template_text], template_dir,
            )
            for f in self.email_notif_template_text, self.email_notif_template_html:
                p = os.path.join(self.email_template_dir, f)
                if not os.path.isfile(p):
                    raise ConfigError("Unable to find email template file %s" % (p,))

            self.email_notif_for_new_users = email_config.get(
                "notif_for_new_users", True
@@ -298,20 +309,18 @@ class EmailConfig(Config):
            "client_base_url", email_config.get("riot_base_url", None)
        )

        if self.account_validity.renew_by_email_enabled:
            expiry_template_html = email_config.get(
        if account_validity_renewal_enabled:
            self.email_expiry_template_html = email_config.get(
                "expiry_template_html", "notice_expiry.html"
            )
            expiry_template_text = email_config.get(
            self.email_expiry_template_text = email_config.get(
                "expiry_template_text", "notice_expiry.txt"
            )

            (
                self.account_validity_template_html,
                self.account_validity_template_text,
            ) = self.read_templates(
                [expiry_template_html, expiry_template_text], template_dir,
            )
            for f in self.email_expiry_template_text, self.email_expiry_template_html:
                p = os.path.join(self.email_template_dir, f)
                if not os.path.isfile(p):
                    raise ConfigError("Unable to find email template file %s" % (p,))

        subjects_config = email_config.get("subjects", {})
        subjects = {}
@@ -391,7 +400,9 @@ class EmailConfig(Config):
        # Directory in which Synapse will try to find the template files below.
        # If not set, default templates from within the Synapse package will be used.
        #
        # Do not uncomment this setting unless you want to customise the templates.
        # DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
        # If you *do* uncomment it, you will need to make sure that all the templates
        # below are in the directory.
        #
        # Synapse will look for the following templates in this directory:
        #

@@ -18,6 +18,8 @@ import logging
from typing import Any, List

import attr
import jinja2
import pkg_resources

from synapse.python_dependencies import DependencyException, check_requirements
from synapse.util.module_loader import load_module, load_python_module
@@ -169,9 +171,15 @@ class SAML2Config(Config):
            saml2_config.get("saml_session_lifetime", "15m")
        )

        self.saml2_error_html_template = self.read_templates(
            ["saml_error.html"], saml2_config.get("template_dir")
        )
        template_dir = saml2_config.get("template_dir")
        if not template_dir:
            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)

        loader = jinja2.FileSystemLoader(template_dir)
        # enable auto-escape here, to avoid having to remember to escape manually in the
        # template
        env = jinja2.Environment(loader=loader, autoescape=True)
        self.saml2_error_html_template = env.get_template("saml_error.html")

    def _default_saml_config_dict(
        self, required_attributes: set, optional_attributes: set

@@ -26,6 +26,7 @@ import yaml

from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
from synapse.http.endpoint import parse_and_validate_server_name
from synapse.python_dependencies import DependencyException, check_requirements

from ._base import Config, ConfigError

@@ -507,6 +508,8 @@ class ServerConfig(Config):
            )
        )

        _check_resource_config(self.listeners)

        self.cleanup_extremities_with_dummy_events = config.get(
            "cleanup_extremities_with_dummy_events", True
        )
@@ -961,10 +964,11 @@ class ServerConfig(Config):
        #  min_lifetime: 1d
        #  max_lifetime: 1y

        # Retention policy limits. If set, and the state of a room contains a
        # 'm.room.retention' event in its state which contains a 'min_lifetime' or a
        # 'max_lifetime' that's out of these bounds, Synapse will cap the room's policy
        # to these limits when running purge jobs.
        # Retention policy limits. If set, a user won't be able to send a
        # 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime'
        # that's not within this range. This is especially useful in closed federations,
        # in which server admins can make sure every federating server applies the same
        # rules.
        #
        #allowed_lifetime_min: 1d
        #allowed_lifetime_max: 1y
@@ -990,19 +994,12 @@ class ServerConfig(Config):
        # (e.g. every 12h), but not want that purge to be performed by a job that's
        # iterating over every room it knows, which could be heavy on the server.
        #
        # If any purge job is configured, it is strongly recommended to have at least
        # a single job with neither 'shortest_max_lifetime' nor 'longest_max_lifetime'
        # set, or one job without 'shortest_max_lifetime' and one job without
        # 'longest_max_lifetime' set. Otherwise some rooms might be ignored, even if
        # 'allowed_lifetime_min' and 'allowed_lifetime_max' are set, because capping a
        # room's policy to these values is done after the policies are retrieved from
        # Synapse's database (which is done using the range specified in a purge job's
        # configuration).
        #
        #purge_jobs:
        #  - longest_max_lifetime: 3d
        #  - shortest_max_lifetime: 1d
        #    longest_max_lifetime: 3d
        #    interval: 12h
        #  - shortest_max_lifetime: 3d
        #    longest_max_lifetime: 1y
        #    interval: 1d

        # Inhibits the /requestToken endpoints from returning an error that might leak
@@ -1136,3 +1133,20 @@ def _warn_if_webclient_configured(listeners: Iterable[ListenerConfig]) -> None:
            if name == "webclient":
                logger.warning(NO_MORE_WEB_CLIENT_WARNING)
                return


def _check_resource_config(listeners: Iterable[ListenerConfig]) -> None:
    resource_names = {
        res_name
        for listener in listeners
        if listener.http_options
        for res in listener.http_options.resources
        for res_name in res.names
    }

    for resource in resource_names:
        if resource == "consent":
            try:
                check_requirements("resources.consent")
            except DependencyException as e:
                raise ConfigError(e.message)

@@ -12,8 +12,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from typing import Any, Dict

import pkg_resources

from ._base import Config


@@ -26,32 +29,22 @@ class SSOConfig(Config):
    def read_config(self, config, **kwargs):
        sso_config = config.get("sso") or {}  # type: Dict[str, Any]

        # The sso-specific template_dir
        # Pick a template directory in order of:
        # * The sso-specific template_dir
        # * /path/to/synapse/install/res/templates
        template_dir = sso_config.get("template_dir")
        if not template_dir:
            template_dir = pkg_resources.resource_filename("synapse", "res/templates",)

        # Read templates from disk
        (
            self.sso_redirect_confirm_template,
            self.sso_auth_confirm_template,
            self.sso_error_template,
            sso_account_deactivated_template,
            sso_auth_success_template,
        ) = self.read_templates(
            [
                "sso_redirect_confirm.html",
                "sso_auth_confirm.html",
                "sso_error.html",
                "sso_account_deactivated.html",
                "sso_auth_success.html",
            ],
            template_dir,
        self.sso_template_dir = template_dir
        self.sso_account_deactivated_template = self.read_file(
            os.path.join(self.sso_template_dir, "sso_account_deactivated.html"),
            "sso_account_deactivated_template",
        )

        # These templates have no placeholders, so render them here
        self.sso_account_deactivated_template = (
            sso_account_deactivated_template.render()
        self.sso_auth_success_template = self.read_file(
            os.path.join(self.sso_template_dir, "sso_auth_success.html"),
            "sso_auth_success_template",
        )
        self.sso_auth_success_template = sso_auth_success_template.render()

        self.sso_client_whitelist = sso_config.get("client_whitelist") or []

@@ -757,8 +757,9 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
            except Exception:
                logger.exception("Error getting keys %s from %s", key_ids, server_name)

        await yieldable_gather_results(get_key, keys_to_fetch.items())
        return results
        return await yieldable_gather_results(
            get_key, keys_to_fetch.items()
        ).addCallback(lambda _: results)

    async def get_server_verify_key_v2_direct(self, server_name, key_ids):
        """
@@ -768,7 +769,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
            key_ids (iterable[str]):

        Returns:
            dict[str, FetchKeyResult]: map from key ID to lookup result
            Deferred[dict[str, FetchKeyResult]]: map from key ID to lookup result

        Raises:
            KeyLookupError if there was a problem making the lookup
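
The hunk above trades a Deferred `addCallback` chain for a plain `await` followed by returning the accumulated dict; the same shape in stock asyncio, as a sketch rather than Synapse's actual helper:

```
import asyncio

async def fetch_all(get_key, keys_to_fetch):
    """Run one fetch per server concurrently; each get_key call fills in
    the shared `results` dict, which is returned once all have finished."""
    results = {}

    async def get_one(server_name, key_ids):
        await get_key(server_name, key_ids, results)

    await asyncio.gather(
        *(get_one(server, ids) for server, ids in keys_to_fetch.items())
    )
    return results
```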

@@ -47,7 +47,7 @@ def check(
    Args:
        room_version_obj: the version of the room
        event: the event being checked.
        auth_events: the existing room state.
        auth_events (dict: event-key -> event): the existing room state.

    Raises:
        AuthError if the checks fail

@@ -133,8 +133,6 @@ class _EventInternalMetadata(object):
        rejection. This is needed as those events are marked as outliers, but
        they still need to be processed as if they're new events (e.g. updating
        invite state in the database, relaying to clients, etc).

        (Added in synapse 0.99.0, so may be unreliable for events received before that)
        """
        return self._dict.get("out_of_band_membership", False)

@@ -15,10 +15,9 @@
# limitations under the License.

import inspect
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Dict, List

from synapse.spam_checker_api import RegistrationBehaviour, SpamCheckerApi
from synapse.types import Collection
from synapse.spam_checker_api import SpamCheckerApi

MYPY = False
if MYPY:
@@ -161,33 +160,3 @@ class SpamChecker(object):
            return True

        return False

    def check_registration_for_spam(
        self,
        email_threepid: Optional[dict],
        username: Optional[str],
        request_info: Collection[Tuple[str, str]],
    ) -> RegistrationBehaviour:
        """Checks if we should allow the given registration request.

        Args:
            email_threepid: The email threepid used for registering, if any
            username: The request user name, if any
            request_info: List of tuples of user agent and IP that
                were used during the registration process.

        Returns:
            Enum for how the request should be handled
        """

        for spam_checker in self.spam_checkers:
            # For backwards compatibility, only run if the method exists on the
            # spam checker
            checker = getattr(spam_checker, "check_registration_for_spam", None)
            if checker:
                behaviour = checker(email_threepid, username, request_info)
                assert isinstance(behaviour, RegistrationBehaviour)
                if behaviour != RegistrationBehaviour.ALLOW:
                    return behaviour

        return RegistrationBehaviour.ALLOW
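
Because the loop above probes modules with `getattr`, implementing the new callback stays optional; an illustrative spam checker module (the enum values mirror synapse.spam_checker_api but are restated here as an assumption):

```
from enum import Enum

class RegistrationBehaviour(Enum):
    ALLOW = "allow"
    SHADOW_BAN = "shadow_ban"
    DENY = "deny"

class ExampleSpamChecker:
    """Older modules without this method keep working unchanged."""

    def check_registration_for_spam(self, email_threepid, username, request_info):
        # Deny registrations whose username looks machine-generated.
        if username and username.startswith("spambot"):
            return RegistrationBehaviour.DENY
        return RegistrationBehaviour.ALLOW
```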

@@ -74,14 +74,15 @@ class EventValidator(object):
            )

        if event.type == EventTypes.Retention:
            self._validate_retention(event)
            self._validate_retention(event, config)

    def _validate_retention(self, event):
    def _validate_retention(self, event, config):
        """Checks that an event that defines the retention policy for a room respects the
        format enforced by the spec.
        boundaries imposed by the server's administrator.

        Args:
            event (FrozenEvent): The event to validate.
            config (Config): The homeserver's configuration.
        """
        min_lifetime = event.content.get("min_lifetime")
        max_lifetime = event.content.get("max_lifetime")
@@ -94,6 +95,32 @@ class EventValidator(object):
                    errcode=Codes.BAD_JSON,
                )

            if (
                config.retention_allowed_lifetime_min is not None
                and min_lifetime < config.retention_allowed_lifetime_min
            ):
                raise SynapseError(
                    code=400,
                    msg=(
                        "'min_lifetime' can't be lower than the minimum allowed"
                        " value enforced by the server's administrator"
                    ),
                    errcode=Codes.BAD_JSON,
                )

            if (
                config.retention_allowed_lifetime_max is not None
                and min_lifetime > config.retention_allowed_lifetime_max
            ):
                raise SynapseError(
                    code=400,
                    msg=(
                        "'min_lifetime' can't be greater than the maximum allowed"
                        " value enforced by the server's administrator"
                    ),
                    errcode=Codes.BAD_JSON,
                )

        if max_lifetime is not None:
            if not isinstance(max_lifetime, int):
                raise SynapseError(
@@ -102,6 +129,32 @@ class EventValidator(object):
                    errcode=Codes.BAD_JSON,
                )

            if (
                config.retention_allowed_lifetime_min is not None
                and max_lifetime < config.retention_allowed_lifetime_min
            ):
                raise SynapseError(
                    code=400,
                    msg=(
                        "'max_lifetime' can't be lower than the minimum allowed value"
                        " enforced by the server's administrator"
                    ),
                    errcode=Codes.BAD_JSON,
                )

            if (
                config.retention_allowed_lifetime_max is not None
                and max_lifetime > config.retention_allowed_lifetime_max
            ):
                raise SynapseError(
                    code=400,
                    msg=(
                        "'max_lifetime' can't be greater than the maximum allowed"
                        " value enforced by the server's administrator"
                    ),
                    errcode=Codes.BAD_JSON,
                )

        if (
            min_lifetime is not None
            and max_lifetime is not None
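
Put concretely, with `retention_allowed_lifetime_min: 1d` and `retention_allowed_lifetime_max: 1y` configured, the checks added above treat `m.room.retention` content like this (values illustrative):

```
ONE_HOUR = 60 * 60 * 1000  # lifetimes are expressed in milliseconds
ONE_DAY = 24 * ONE_HOUR
ONE_YEAR = 365 * ONE_DAY

# Accepted: both lifetimes fall inside the allowed range.
ok = {"min_lifetime": ONE_DAY, "max_lifetime": 30 * ONE_DAY}

# Rejected with BAD_JSON: min_lifetime is below the administrator's floor.
too_short = {"min_lifetime": ONE_HOUR, "max_lifetime": 30 * ONE_DAY}

# Rejected with BAD_JSON: max_lifetime exceeds the administrator's ceiling.
too_long = {"min_lifetime": ONE_DAY, "max_lifetime": 2 * ONE_YEAR}
```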

@@ -54,7 +54,7 @@ from synapse.events import EventBase, builder
from synapse.federation.federation_base import FederationBase, event_from_pdu_json
from synapse.logging.context import make_deferred_yieldable, preserve_fn
from synapse.logging.utils import log_function
from synapse.types import JsonDict
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import unwrapFirstError
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
@@ -217,11 +217,8 @@ class FederationClient(FederationBase):
            for p in transaction_data["pdus"]
        ]

        # FIXME: We should handle signature failures more gracefully.
        pdus[:] = await make_deferred_yieldable(
            defer.gatherResults(
                self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True,
            ).addErrback(unwrapFirstError)
        pdus[:] = await self._check_sigs_and_hash_and_fetch(
            dest, pdus, outlier=True, room_version=room_version
        )

        return pdus
@@ -386,10 +383,11 @@ class FederationClient(FederationBase):
            pdu.event_id, allow_rejected=True, allow_none=True
        )

        if not res and pdu.origin != origin:
        pdu_origin = get_domain_from_id(pdu.sender)
        if not res and pdu_origin != origin:
            try:
                res = await self.get_pdu(
                    destinations=[pdu.origin],
                    destinations=[pdu_origin],
                    event_id=pdu.event_id,
                    room_version=room_version,
                    outlier=outlier,
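
The change above derives the retry destination from the event's sender instead of the event's `origin` field, which may be missing or malformed in events received over federation. `get_domain_from_id` just takes everything after the first colon of a Matrix ID; a sketch of its behaviour (the real helper lives in synapse.types):

```
def get_domain_from_id(string: str) -> str:
    """Return the server name of an ID such as '@alice:example.com'."""
    idx = string.find(":")
    if idx == -1:
        raise ValueError("Invalid ID: %r" % (string,))
    return string[idx + 1:]

assert get_domain_from_id("@alice:example.com") == "example.com"
```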

@@ -28,6 +28,7 @@ from typing import (
    Union,
)

from canonicaljson import json
from prometheus_client import Counter, Histogram

from twisted.internet import defer
@@ -62,7 +63,7 @@ from synapse.replication.http.federation import (
    ReplicationGetQueryRestServlet,
)
from synapse.types import JsonDict, get_domain_from_id
from synapse.util import glob_to_regex, json_decoder, unwrapFirstError
from synapse.util import glob_to_regex, unwrapFirstError
from synapse.util.async_helpers import Linearizer, concurrently_execute
from synapse.util.caches.response_cache import ResponseCache

@@ -550,7 +551,7 @@ class FederationServer(FederationBase):
            for device_id, keys in device_keys.items():
                for key_id, json_str in keys.items():
                    json_result.setdefault(user_id, {})[device_id] = {
                        key_id: json_decoder.decode(json_str)
                        key_id: json.loads(json_str)
                    }

        logger.info(

@@ -37,8 +37,8 @@ from sortedcontainers import SortedDict

from twisted.internet import defer

from synapse.api.presence import UserPresenceState
from synapse.metrics import LaterGauge
from synapse.storage.presence import UserPresenceState
from synapse.util.metrics import Measure

from .units import Edu

@@ -22,7 +22,6 @@ from twisted.internet import defer

import synapse
import synapse.metrics
from synapse.api.presence import UserPresenceState
from synapse.events import EventBase
from synapse.federation.sender.per_destination_queue import PerDestinationQueue
from synapse.federation.sender.transaction_manager import TransactionManager
@@ -40,6 +39,7 @@ from synapse.metrics import (
    events_processed_counter,
)
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.presence import UserPresenceState
from synapse.types import ReadReceipt
from synapse.util.metrics import Measure, measure_func

@@ -329,10 +329,10 @@ class FederationSender(object):
        room_id = receipt.room_id

        # Work out which remote servers should be poked and poke them.
        domains_set = await self.state.get_current_hosts_in_room(room_id)
        domains = await self.state.get_current_hosts_in_room(room_id)
        domains = [
            d
            for d in domains_set
            for d in domains
            if d != self.server_name
            and self._federation_shard_config.should_handle(self._instance_name, d)
        ]

@@ -24,12 +24,12 @@ from synapse.api.errors import (
    HttpResponseException,
    RequestSendFailed,
)
from synapse.api.presence import UserPresenceState
from synapse.events import EventBase
from synapse.federation.units import Edu
from synapse.handlers.presence import format_user_presence_state
from synapse.metrics import sent_transactions_counter
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.presence import UserPresenceState
from synapse.types import ReadReceipt
from synapse.util.retryutils import NotRetryingDestination, get_retry_limiter

@@ -337,28 +337,6 @@ class PerDestinationQueue(object):
                    (e.retry_last_ts + e.retry_interval) / 1000.0
                ),
            )

            if e.retry_interval > 60 * 60 * 1000:
                # we won't retry for another hour!
                # (this suggests a significant outage)
                # We drop pending PDUs and EDUs because otherwise they will
                # rack up indefinitely.
                # Note that:
                # - the EDUs that are being dropped here are those that we can
                #   afford to drop (specifically, only typing notifications,
                #   read receipts and presence updates are being dropped here)
                # - Other EDUs such as to_device messages are queued with a
                #   different mechanism
                # - this is all volatile state that would be lost if the
                #   federation sender restarted anyway

                # dropping read receipts is a bit sad but should be solved
                # through another mechanism, because this is all volatile!
                self._pending_pdus = []
                self._pending_edus = []
                self._pending_edus_keyed = {}
                self._pending_presence = {}
                self._pending_rrs = {}
        except FederationDeniedError as e:
            logger.info(e)
        except HttpResponseException as e:

@@ -15,6 +15,8 @@
import logging
from typing import TYPE_CHECKING, List, Tuple

from canonicaljson import json

from synapse.api.errors import HttpResponseException
from synapse.events import EventBase
from synapse.federation.persistence import TransactionActions
@@ -26,7 +28,6 @@ from synapse.logging.opentracing import (
    tags,
    whitelisted_homeserver,
)
from synapse.util import json_decoder
from synapse.util.metrics import measure_func

if TYPE_CHECKING:
@@ -70,7 +71,7 @@ class TransactionManager(object):
            for edu in pending_edus:
                context = edu.get_context()
                if context:
                    span_contexts.append(extract_text_map(json_decoder.decode(context)))
                    span_contexts.append(extract_text_map(json.loads(context)))
                if keep_destination:
                    edu.strip_context()

@@ -26,6 +26,11 @@ from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util import stringutils

try:
    from synapse.push.mailer import load_jinja2_templates
except ImportError:
    load_jinja2_templates = None

logger = logging.getLogger(__name__)


@@ -42,11 +47,9 @@ class AccountValidityHandler(object):
        if (
            self._account_validity.enabled
            and self._account_validity.renew_by_email_enabled
            and load_jinja2_templates
        ):
            # Don't do email-specific configuration if renewal by email is disabled.
            self._template_html = self.config.account_validity_template_html
            self._template_text = self.config.account_validity_template_text

            try:
                app_name = self.hs.config.email_app_name

@@ -62,6 +65,17 @@ class AccountValidityHandler(object):

            self._raw_from = email.utils.parseaddr(self._from_string)[1]

            self._template_html, self._template_text = load_jinja2_templates(
                self.config.email_template_dir,
                [
                    self.config.email_expiry_template_html,
                    self.config.email_expiry_template_text,
                ],
                apply_format_ts_filter=True,
                apply_mxc_to_http_filter=True,
                public_baseurl=self.config.public_baseurl,
            )

            # Check the renewal emails to send and send them every 30min.
            def send_emails():
                # run as a background process to make sure that the database transactions
@@ -38,14 +38,13 @@ from synapse.api.ratelimiting import Ratelimiter
|
||||
from synapse.handlers.ui_auth import INTERACTIVE_AUTH_CHECKERS
|
||||
from synapse.handlers.ui_auth.checkers import UserInteractiveAuthChecker
|
||||
from synapse.http.server import finish_request, respond_with_html
|
||||
from synapse.http.servlet import assert_params_in_dict
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging.context import defer_to_thread
|
||||
from synapse.metrics.background_process_metrics import run_as_background_process
|
||||
from synapse.module_api import ModuleApi
|
||||
from synapse.push.mailer import load_jinja2_templates
|
||||
from synapse.types import Requester, UserID
|
||||
from synapse.util import stringutils as stringutils
|
||||
from synapse.util.msisdn import phone_number_to_msisdn
|
||||
from synapse.util.threepids import canonicalise_email
|
||||
|
||||
from ._base import BaseHandler
|
||||
@@ -53,82 +52,6 @@ from ._base import BaseHandler
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def client_dict_convert_legacy_fields_to_identifier(
|
||||
submission: Dict[str, Union[str, Dict]]
|
||||
):
|
||||
"""
|
||||
Convert a legacy-formatted login submission to an identifier dict.
|
||||
|
||||
Legacy login submissions (used in both login and user-interactive authentication)
|
||||
provide user-identifying information at the top-level instead of in an `indentifier`
|
||||
property. This is now deprecated and replaced with identifiers:
|
||||
https://matrix.org/docs/spec/client_server/r0.6.1#identifier-types
|
||||
|
||||
Args:
|
||||
submission: The client dict to convert. Passed by reference and modified
|
||||
|
||||
Raises:
|
||||
SynapseError: If the format of the client dict is invalid
|
||||
"""
|
||||
if "user" in submission:
|
||||
submission["identifier"] = {"type": "m.id.user", "user": submission.pop("user")}
|
||||
|
||||
if "medium" in submission and "address" in submission:
|
||||
submission["identifier"] = {
|
||||
"type": "m.id.thirdparty",
|
||||
"medium": submission.pop("medium"),
|
||||
"address": submission.pop("address"),
|
||||
}
|
||||
|
||||
# We've converted valid, legacy login submissions to an identifier. If the
|
||||
# dict still doesn't have an identifier, it's invalid
|
||||
assert_params_in_dict(submission, required=["identifier"])
|
||||
|
||||
# Ensure the identifier has a type
|
||||
if "type" not in submission["identifier"]:
|
||||
raise SynapseError(
|
||||
400, "'identifier' dict has no key 'type'", errcode=Codes.MISSING_PARAM,
|
||||
)
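To make the conversion concrete, here is roughly how the helper above rewrites a legacy submission (values invented):

```python
# Hypothetical legacy v1-style body using the deprecated top-level "user" field.
submission = {"type": "m.login.password", "user": "alice", "password": "hunter2"}

client_dict_convert_legacy_fields_to_identifier(submission)

# The dict is modified in place; the legacy field moves under "identifier":
# {
#     "type": "m.login.password",
#     "identifier": {"type": "m.id.user", "user": "alice"},
#     "password": "hunter2",
# }
```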
def login_id_phone_to_thirdparty(identifier: Dict[str, str]) -> Dict[str, str]:
    """Convert a phone login identifier type to a generic threepid identifier.

    Args:
        identifier: Login identifier dict of type 'm.id.phone'

    Returns:
        An equivalent m.id.thirdparty identifier dict.
    """
    if "type" not in identifier:
        raise SynapseError(
            400, "Invalid phone-type identifier", errcode=Codes.MISSING_PARAM
        )

    if "country" not in identifier or (
        # XXX: We used to require `number` instead of `phone`. The spec
        # defines `phone`. So accept both
        "phone" not in identifier
        and "number" not in identifier
    ):
        raise SynapseError(
            400, "Invalid phone-type identifier", errcode=Codes.INVALID_PARAM
        )

    # Accept both "phone" and "number" as valid keys in m.id.phone
    phone_number = identifier.get("phone", identifier.get("number"))

    # Convert user-provided phone number to a consistent representation
    msisdn = phone_number_to_msisdn(identifier["country"], phone_number)

    # Return the new dictionary
    return {
        "type": "m.id.thirdparty",
        "medium": "msisdn",
        "address": msisdn,
    }
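A quick illustration of the phone conversion (number invented; the exact MSISDN string is whatever `phone_number_to_msisdn` returns):

```python
identifier = {"type": "m.id.phone", "country": "GB", "phone": "07700 900123"}

# Normalised to a generic threepid identifier, e.g.:
# {"type": "m.id.thirdparty", "medium": "msisdn", "address": "447700900123"}
converted = login_id_phone_to_thirdparty(identifier)
```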
class AuthHandler(BaseHandler):
    SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000

@@ -209,17 +132,18 @@ class AuthHandler(BaseHandler):
        # after the SSO completes and before redirecting them back to their client.
        # It notifies the user they are about to give access to their matrix account
        # to the client.
        self._sso_redirect_confirm_template = hs.config.sso_redirect_confirm_template

        self._sso_redirect_confirm_template = load_jinja2_templates(
            hs.config.sso_template_dir, ["sso_redirect_confirm.html"],
        )[0]
        # The following template is shown during user interactive authentication
        # in the fallback auth scenario. It notifies the user that they are
        # authenticating for an operation to occur on their account.
        self._sso_auth_confirm_template = hs.config.sso_auth_confirm_template

        self._sso_auth_confirm_template = load_jinja2_templates(
            hs.config.sso_template_dir, ["sso_auth_confirm.html"],
        )[0]
        # The following template is shown after a successful user interactive
        # authentication session. It tells the user they can close the window.
        self._sso_auth_success_template = hs.config.sso_auth_success_template

        # The following template is shown during the SSO authentication process if
        # the account is deactivated.
        self._sso_account_deactivated_template = (
@@ -397,7 +321,7 @@ class AuthHandler(BaseHandler):
        # otherwise use whatever was last provided.
        #
        # This was designed to allow the client to omit the parameters
        # and just supply the session in subsequent calls. So it splits
        # and just supply the session in subsequent calls so it split
        # auth between devices by just sharing the session, (eg. so you
        # could continue registration from your phone having clicked the
        # email auth link on there). It's probably too open to abuse
@@ -442,14 +366,6 @@ class AuthHandler(BaseHandler):
        # authentication flow.
        await self.store.set_ui_auth_clientdict(sid, clientdict)

        user_agent = request.requestHeaders.getRawHeaders(b"User-Agent", default=[b""])[
            0
        ].decode("ascii", "surrogateescape")

        await self.store.add_user_agent_ip_to_ui_auth_session(
            session.session_id, user_agent, clientip
        )

        if not authdict:
            raise InteractiveAuthIncompleteError(
                session.session_id, self._auth_dict_for_flows(flows, session.session_id)
@@ -602,129 +518,16 @@ class AuthHandler(BaseHandler):
            res = await checker.check_auth(authdict, clientip=clientip)
            return res

        # We don't have a checker for the auth type provided by the client.
        # Assume that it is `m.login.password`.
        if login_type != LoginType.PASSWORD:
            raise SynapseError(
                400, "Unknown authentication type", errcode=Codes.INVALID_PARAM,
            )
        # build a v1-login-style dict out of the authdict and fall back to the
        # v1 code
        user_id = authdict.get("user")

        password = authdict.get("password")
        if password is None:
            raise SynapseError(
                400,
                "Missing parameter for m.login.password dict: 'password'",
                errcode=Codes.INVALID_PARAM,
            )

        # Retrieve the user ID using details provided in the authdict

        # Deprecation notice: Clients used to be able to simply provide a
        # `user` field which pointed to a user_id or localpart. This has
        # been deprecated in favour of an `identifier` key, which is a
        # dictionary providing information on how to identify a single
        # user.
        # https://matrix.org/docs/spec/client_server/r0.6.1#identifier-types
        #
        # We convert old-style dicts to new ones here
        client_dict_convert_legacy_fields_to_identifier(authdict)

        # Extract a user ID from the values in the identifier
        username = await self.username_from_identifier(authdict["identifier"], password)

        if username is None:
            raise SynapseError(400, "Valid username not found")

        # Now that we've found the username, validate that the password is correct
        canonical_id, _ = await self.validate_login(username, authdict)
        if user_id is None:
            raise SynapseError(400, "", Codes.MISSING_PARAM)

        (canonical_id, callback) = await self.validate_login(user_id, authdict)
        return canonical_id

    async def username_from_identifier(
        self, identifier: Dict[str, str], password: Optional[str] = None
    ) -> Optional[str]:
        """Given a dictionary containing an identifier from a client, extract the
        possibly unqualified username of the user that it identifies. Does *not*
        guarantee that the user exists.

        If this identifier dict contains a threepid, we attempt to ask password
        auth providers about it or, failing that, look up an associated user in
        the database.

        Args:
            identifier: The identifier dictionary provided by the client
            password: The user-provided password, if one exists. Used for asking
                password auth providers for usernames from 3pid+password combos.

        Returns:
            A username if one was found, or None otherwise

        Raises:
            SynapseError: If the identifier dict is invalid
        """

        # Convert phone type identifiers to generic threepid identifiers, which
        # will be handled in the next step
        if identifier["type"] == "m.id.phone":
            identifier = login_id_phone_to_thirdparty(identifier)

        # Convert a threepid identifier to a user identifier
        if identifier["type"] == "m.id.thirdparty":
            address = identifier.get("address")
            medium = identifier.get("medium")

            if not medium or not address:
                # An error would've already been raised in
                # `login_id_phone_to_thirdparty` if the original submission
                # was a phone identifier
                raise SynapseError(
                    400, "Invalid thirdparty identifier", errcode=Codes.INVALID_PARAM,
                )

            if medium == "email":
                # For emails, transform the address to lowercase.
                # We store all email addresses as lowercase in the DB.
                # (See add_threepid in synapse/handlers/auth.py)
                address = address.lower()

            # Check for auth providers that support 3pid login types
            if password is not None:
                canonical_user_id, _ = await self.check_password_provider_3pid(
                    medium, address, password,
                )
                if canonical_user_id:
                    # Authentication through password provider and 3pid succeeded
                    return canonical_user_id

            # Check local store
            user_id = await self.hs.get_datastore().get_user_id_by_threepid(
                medium, address
            )
            if not user_id:
                # We were unable to find a user_id that belonged to the threepid returned
                # by the password auth provider
                return None

            identifier = {"type": "m.id.user", "user": user_id}

        # By this point, the identifier should be an `m.id.user`: if it's anything
        # else, we haven't understood it.
        if identifier["type"] != "m.id.user":
            raise SynapseError(
                400, "Unknown login identifier type", errcode=Codes.INVALID_PARAM,
            )

        # User identifiers have a "user" key
        user = identifier.get("user")
        if user is None:
            raise SynapseError(
                400,
                "User identifier is missing 'user' key",
                errcode=Codes.INVALID_PARAM,
            )

        return user
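Putting the pieces together, the resolution above proceeds in three steps: phone identifiers are rewritten to thirdparty ones, thirdparty identifiers are resolved via password providers or the local threepid store, and whatever remains must be an `m.id.user` dict whose `user` key is returned. A sketch of a call (the instance name and values are invented):

```python
username = await auth_handler.username_from_identifier(
    {"type": "m.id.thirdparty", "medium": "email", "address": "alice@example.com"},
    password="s3cret",
)
# -> e.g. "@alice:example.com" if the threepid is known, otherwise None
```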
    def _get_params_recaptcha(self) -> dict:
        return {"public_key": self.hs.config.recaptcha_public_key}

@@ -889,8 +692,7 @@ class AuthHandler(BaseHandler):
            m.login.password auth types.

        Args:
            username: a localpart or fully qualified user ID - what is provided by the
                client
            username: username supplied by the user
            login_submission: the whole of the login submission
                (including 'type' and other relevant fields)
        Returns:
@@ -902,10 +704,10 @@ class AuthHandler(BaseHandler):
            LoginError if there was an authentication problem.
        """

        # We need a fully qualified User ID for some method calls here
        qualified_user_id = username
        if not qualified_user_id.startswith("@"):
            qualified_user_id = UserID(qualified_user_id, self.hs.hostname).to_string()
        if username.startswith("@"):
            qualified_user_id = username
        else:
            qualified_user_id = UserID(username, self.hs.hostname).to_string()
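Both variants of the qualification logic behave the same way; a minimal sketch (hostname invented):

```python
from synapse.types import UserID

hostname = "example.com"  # assumed homeserver name

for username in ("alice", "@alice:example.com"):
    if username.startswith("@"):
        qualified_user_id = username
    else:
        qualified_user_id = UserID(username, hostname).to_string()
    # Either way the result is "@alice:example.com".
```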
        login_type = login_submission.get("type")
        known_login_type = False

@@ -35,7 +35,6 @@ class CasHandler:
    """

    def __init__(self, hs):
        self.hs = hs
        self._hostname = hs.hostname
        self._auth_handler = hs.get_auth_handler()
        self._registration_handler = hs.get_registration_handler()
@@ -211,16 +210,8 @@ class CasHandler:

        else:
            if not registered_user_id:
                # Pull out the user-agent and IP from the request.
                user_agent = request.requestHeaders.getRawHeaders(
                    b"User-Agent", default=[b""]
                )[0].decode("ascii", "surrogateescape")
                ip_address = self.hs.get_ip_from_request(request)

                registered_user_id = await self._registration_handler.register_user(
                    localpart=localpart,
                    default_display_name=user_display_name,
                    user_agent_ips=(user_agent, ip_address),
                    localpart=localpart, default_display_name=user_display_name
                )

                await self._auth_handler.complete_sso_login(

@@ -16,6 +16,8 @@
import logging
from typing import Any, Dict

from canonicaljson import json

from synapse.api.errors import SynapseError
from synapse.logging.context import run_in_background
from synapse.logging.opentracing import (
@@ -25,7 +27,6 @@ from synapse.logging.opentracing import (
    start_active_span,
)
from synapse.types import UserID, get_domain_from_id
from synapse.util import json_encoder
from synapse.util.stringutils import random_string

logger = logging.getLogger(__name__)
@@ -173,7 +174,7 @@ class DeviceMessageHandler(object):
                "sender": sender_user_id,
                "type": message_type,
                "message_id": message_id,
                "org.matrix.opentracing_context": json_encoder.encode(context),
                "org.matrix.opentracing_context": json.dumps(context),
            }

        log_kv({"local_messages": local_messages})
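For context, each entry in `local_messages` built above looks roughly like this (all IDs and the message type are invented); the opentracing span context rides along as a JSON string under a namespaced key:

```python
local_message = {
    "content": {"body": "hello"},
    "sender": "@alice:example.com",
    "type": "m.example.message",
    "message_id": "m1234",
    "org.matrix.opentracing_context": '{"trace_id": "abc123"}',
}
```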
@@ -23,7 +23,6 @@ from synapse.api.errors import (
    CodeMessageException,
    Codes,
    NotFoundError,
    ShadowBanError,
    StoreError,
    SynapseError,
)
@@ -200,8 +199,6 @@ class DirectoryHandler(BaseHandler):

        try:
            await self._update_canonical_alias(requester, user_id, room_id, room_alias)
        except ShadowBanError as e:
            logger.info("Failed to update alias events due to shadow-ban: %s", e)
        except AuthError as e:
            logger.info("Failed to update alias events: %s", e)

@@ -295,9 +292,6 @@ class DirectoryHandler(BaseHandler):
        """
        Send an updated canonical alias event if the removed alias was set as
        the canonical alias or listed in the alt_aliases field.

        Raises:
            ShadowBanError if the requester has been shadow-banned.
        """
        alias_event = await self.state.get_current_state(
            room_id, EventTypes.CanonicalAlias, ""

@@ -19,7 +19,7 @@ import logging
from typing import Dict, List, Optional, Tuple

import attr
from canonicaljson import encode_canonical_json
from canonicaljson import encode_canonical_json, json
from signedjson.key import VerifyKey, decode_verify_key_bytes
from signedjson.sign import SignatureVerifyException, verify_signed_json
from unpaddedbase64 import decode_base64
@@ -35,7 +35,7 @@ from synapse.types import (
    get_domain_from_id,
    get_verify_key_from_cross_signing_key,
)
from synapse.util import json_decoder, unwrapFirstError
from synapse.util import unwrapFirstError
from synapse.util.async_helpers import Linearizer
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.retryutils import NotRetryingDestination
@@ -404,7 +404,7 @@ class E2eKeysHandler(object):
        for device_id, keys in device_keys.items():
            for key_id, json_bytes in keys.items():
                json_result.setdefault(user_id, {})[device_id] = {
                    key_id: json_decoder.decode(json_bytes)
                    key_id: json.loads(json_bytes)
                }

    @trace
@@ -1186,7 +1186,7 @@ def _exception_to_failure(e):


def _one_time_keys_match(old_key_json, new_key):
    old_key = json_decoder.decode(old_key_json)
    old_key = json.loads(old_key_json)

    # if either is a string rather than an object, they must match exactly
    if not isinstance(old_key, dict) or not isinstance(new_key, dict):

@@ -937,9 +937,18 @@ class FederationHandler(BaseHandler):

        return events

    async def maybe_backfill(self, room_id, current_depth):
    async def maybe_backfill(self, room_id: str, current_depth: int, limit: int):
        """Checks the database to see if we should backfill before paginating,
        and if so, does so.

        Args:
            room_id
            current_depth: The depth from which we're paginating. This is
                used to decide if we should backfill and what extremities to
                use.
            limit: The number of events that the pagination request will
                return. This is used as part of the heuristic to decide if we
                should back-paginate.
        """
        extremities = await self.store.get_oldest_events_with_depth_in_room(room_id)

@@ -998,16 +1007,47 @@ class FederationHandler(BaseHandler):
        sorted_extremeties_tuple = sorted(extremities.items(), key=lambda e: -int(e[1]))
        max_depth = sorted_extremeties_tuple[0][1]

        # If we're approaching an extremity we trigger a backfill, otherwise we
        # no-op.
        if current_depth - 2 * limit > max_depth:
            logger.debug(
                "Not backfilling as we don't need to. %d < %d - 2 * %d",
                max_depth,
                current_depth,
                limit,
            )
            return

        logger.debug(
            "room_id: %s, backfill: current_depth: %s, max_depth: %s, extrems: %s",
            room_id,
            current_depth,
            max_depth,
            sorted_extremeties_tuple,
        )

        # We ignore extremities that have a greater depth than our current depth
        # as:
        #    1. we don't really care about getting events that have happened
        #       before our current position; and
        #    2. we have likely previously tried and failed to backfill from that
        #       extremity, so to avoid getting "stuck" requesting the same
        #       backfill repeatedly we drop those extremities.
        filtered_sorted_extremeties_tuple = [
            t for t in sorted_extremeties_tuple if int(t[1]) <= current_depth
        ]

        # However, we need to check that the filtered extremities are non-empty.
        # If they are empty then either we can a) bail or b) still attempt to
        # backfill. We opt to try backfilling anyway just in case we do get
        # relevant events.
        if filtered_sorted_extremeties_tuple:
            sorted_extremeties_tuple = filtered_sorted_extremeties_tuple

        # We don't want to specify too many extremities as it causes the backfill
        # request URI to be too long.
        extremities = dict(sorted_extremeties_tuple[:5])

        if current_depth > max_depth:
            logger.debug(
                "Not backfilling as we don't need to. %d < %d", max_depth, current_depth
            )
            return
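A worked example of the backfill heuristic above (numbers invented): with `current_depth = 120`, `limit = 10` and the deepest backwards extremity at `max_depth = 90`, pagination is still comfortably ahead of the extremity, so the call no-ops; by `current_depth = 105` it is close enough that a backfill is triggered.

```python
current_depth, limit, max_depth = 120, 10, 90
# 120 - 2 * 10 = 100 > 90: far from the extremity, so no backfill yet.
assert current_depth - 2 * limit > max_depth

current_depth = 105
# 105 - 2 * 10 = 85 <= 90: close to the extremity, so backfill.
assert not (current_depth - 2 * limit > max_depth)
```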
        # Now we need to decide which hosts to hit first.

        # First we try hosts that are already in the room
@@ -1777,7 +1817,9 @@ class FederationHandler(BaseHandler):
        """Returns the state at the event. i.e. not including said event.
        """

        event = await self.store.get_event(event_id, check_room_id=room_id)
        event = await self.store.get_event(
            event_id, allow_none=False, check_room_id=room_id
        )

        state_groups = await self.state_store.get_state_groups(room_id, [event_id])

@@ -1803,7 +1845,9 @@ class FederationHandler(BaseHandler):
    async def get_state_ids_for_pdu(self, room_id: str, event_id: str) -> List[str]:
        """Returns the state at the event. i.e. not including said event.
        """
        event = await self.store.get_event(event_id, check_room_id=room_id)
        event = await self.store.get_event(
            event_id, allow_none=False, check_room_id=room_id
        )

        state_groups = await self.state_store.get_state_groups_ids(room_id, [event_id])

@@ -2134,10 +2178,10 @@ class FederationHandler(BaseHandler):
            )
            state_sets = list(state_sets.values())
            state_sets.append(state)
            current_states = await self.state_handler.resolve_events(
            current_state_ids = await self.state_handler.resolve_events(
                room_version, state_sets, event
            )
            current_state_ids = {k: e.event_id for k, e in current_states.items()}
            current_state_ids = {k: e.event_id for k, e in current_state_ids.items()}
        else:
            current_state_ids = await self.state_handler.get_current_state_ids(
                event.room_id, latest_event_ids=extrem_ids
@@ -2149,13 +2193,11 @@ class FederationHandler(BaseHandler):

        # Now check if the event passes auth against said current state
        auth_types = auth_types_for_event(event)
        current_state_ids_list = [
            e for k, e in current_state_ids.items() if k in auth_types
        ]
        current_state_ids = [e for k, e in current_state_ids.items() if k in auth_types]

        auth_events_map = await self.store.get_events(current_state_ids_list)
        current_auth_events = await self.store.get_events(current_state_ids)
        current_auth_events = {
            (e.type, e.state_key): e for e in auth_events_map.values()
            (e.type, e.state_key): e for e in current_auth_events.values()
        }

        try:
@@ -2171,7 +2213,9 @@ class FederationHandler(BaseHandler):
        if not in_room:
            raise AuthError(403, "Host not in room.")

        event = await self.store.get_event(event_id, check_room_id=room_id)
        event = await self.store.get_event(
            event_id, allow_none=False, check_room_id=room_id
        )

        # Just go through and process each event in `remote_auth_chain`. We
        # don't want to fall into the trap of `missing` being wrong.

@@ -21,6 +21,8 @@ import logging
import urllib.parse
from typing import Awaitable, Callable, Dict, List, Optional, Tuple

from canonicaljson import json

from twisted.internet.error import TimeoutError

from synapse.api.errors import (
@@ -32,7 +34,6 @@ from synapse.api.errors import (
from synapse.config.emailconfig import ThreepidBehaviour
from synapse.http.client import SimpleHttpClient
from synapse.types import JsonDict, Requester
from synapse.util import json_decoder
from synapse.util.hash import sha256_and_url_safe_base64
from synapse.util.stringutils import assert_valid_client_secret, random_string

@@ -176,7 +177,7 @@ class IdentityHandler(BaseHandler):
        except TimeoutError:
            raise SynapseError(500, "Timed out contacting identity server")
        except CodeMessageException as e:
            data = json_decoder.decode(e.msg)  # XXX WAT?
            data = json.loads(e.msg)  # XXX WAT?
            return data

        logger.info("Got 404 when POSTing JSON %s, falling back to v1 URL", bind_url)

@@ -15,10 +15,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import random
from typing import TYPE_CHECKING, Dict, List, Optional, Tuple

from canonicaljson import encode_canonical_json
from canonicaljson import encode_canonical_json, json

from twisted.internet.interfaces import IDelayedCall

@@ -35,7 +34,6 @@ from synapse.api.errors import (
    Codes,
    ConsentNotGivenError,
    NotFoundError,
    ShadowBanError,
    SynapseError,
)
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
@@ -57,7 +55,6 @@ from synapse.types import (
    UserID,
    create_requester,
)
from synapse.util import json_decoder
from synapse.util.async_helpers import Linearizer
from synapse.util.frozenutils import frozendict_json_encoder
from synapse.util.metrics import measure_func
@@ -647,48 +644,37 @@ class EventCreationHandler(object):
        event: EventBase,
        context: EventContext,
        ratelimit: bool = True,
        ignore_shadow_ban: bool = False,
    ) -> int:
        """
        Persists and notifies local clients and federation of an event.

        Args:
            requester: The requester sending the event.
            event: The event to send.
            context: The context of the event.
            requester
            event the event to send.
            context: the context of the event.
            ratelimit: Whether to rate limit this send.
            ignore_shadow_ban: True if shadow-banned users should be allowed to
                send this event.

        Return:
            The stream_id of the persisted event.

        Raises:
            ShadowBanError if the requester has been shadow-banned.
        """
        if event.type == EventTypes.Member:
            raise SynapseError(
                500, "Tried to send member event through non-member codepath"
            )

        if not ignore_shadow_ban and requester.shadow_banned:
            # We randomly sleep a bit just to annoy the requester.
            await self.clock.sleep(random.randint(1, 10))
            raise ShadowBanError()

        user = UserID.from_string(event.sender)

        assert self.hs.is_mine(user), "User must be our own: %s" % (user,)

        if event.is_state():
            prev_event = await self.deduplicate_state_event(event, context)
            if prev_event is not None:
            prev_state = await self.deduplicate_state_event(event, context)
            if prev_state is not None:
                logger.info(
                    "Not bothering to persist state event %s duplicated by %s",
                    event.event_id,
                    prev_event.event_id,
                    prev_state.event_id,
                )
                return await self.store.get_stream_id_for_event(prev_event.event_id)
                return prev_state

        return await self.handle_new_client_event(
            requester=requester, event=event, context=context, ratelimit=ratelimit
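The shadow-ban guard that appears in this hunk follows one small pattern throughout these files; a standalone sketch (the 1-10 second delay bounds are taken from the diff, everything else is illustrative):

```python
import random

async def guard_shadow_ban(requester, clock, ignore_shadow_ban: bool = False):
    """Delay a shadow-banned sender for 1-10 seconds, then abort the send
    so the event is silently dropped."""
    if not ignore_shadow_ban and requester.shadow_banned:
        # We randomly sleep a bit just to annoy the requester.
        await clock.sleep(random.randint(1, 10))
        raise ShadowBanError()
```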
@@ -696,32 +682,27 @@ class EventCreationHandler(object):

    async def deduplicate_state_event(
        self, event: EventBase, context: EventContext
    ) -> Optional[EventBase]:
    ) -> None:
        """
        Checks whether the event is in the latest resolved state in the context.

        Args:
            event: The event to check for duplication.
            context: The event context.

        Returns:
            The previous version of the event is returned, if it is found in the
            event context. Otherwise, None is returned.
            If so, returns the version of the event in context.
            Otherwise, returns None.
        """
        prev_state_ids = await context.get_prev_state_ids()
        prev_event_id = prev_state_ids.get((event.type, event.state_key))
        if not prev_event_id:
            return None
            return
        prev_event = await self.store.get_event(prev_event_id, allow_none=True)
        if not prev_event:
            return None
            return

        if prev_event and event.user_id == prev_event.user_id:
            prev_content = encode_canonical_json(prev_event.content)
            next_content = encode_canonical_json(event.content)
            if prev_content == next_content:
                return prev_event
        return None
        return
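In effect, a state event is treated as a duplicate only when the previous event for the same `(type, state_key)` has the same sender and canonically identical content; the comparison is insensitive to key ordering because both payloads go through canonical JSON (contents invented):

```python
from canonicaljson import encode_canonical_json

prev_content = {"name": "My Room", "topic": "chat"}
next_content = {"topic": "chat", "name": "My Room"}

# Canonical encoding normalises key order, so these compare equal.
assert encode_canonical_json(prev_content) == encode_canonical_json(next_content)
```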
    async def create_and_send_nonmember_event(
        self,
@@ -729,28 +710,12 @@ class EventCreationHandler(object):
        event_dict: dict,
        ratelimit: bool = True,
        txn_id: Optional[str] = None,
        ignore_shadow_ban: bool = False,
    ) -> Tuple[EventBase, int]:
        """
        Creates an event, then sends it.

        See self.create_event and self.send_nonmember_event.

        Args:
            requester: The requester sending the event.
            event_dict: An entire event.
            ratelimit: Whether to rate limit this send.
            txn_id: The transaction ID.
            ignore_shadow_ban: True if shadow-banned users should be allowed to
                send this event.

        Raises:
            ShadowBanError if the requester has been shadow-banned.
        """
        if not ignore_shadow_ban and requester.shadow_banned:
            # We randomly sleep a bit just to annoy the requester.
            await self.clock.sleep(random.randint(1, 10))
            raise ShadowBanError()

        # We limit the number of concurrent event sends in a room so that we
        # don't fork the DAG too much. If we don't limit then we can end up in
@@ -769,11 +734,7 @@ class EventCreationHandler(object):
            raise SynapseError(403, spam_error, Codes.FORBIDDEN)

        stream_id = await self.send_nonmember_event(
            requester,
            event,
            context,
            ratelimit=ratelimit,
            ignore_shadow_ban=ignore_shadow_ban,
            requester, event, context, ratelimit=ratelimit
        )
        return event, stream_id

@@ -898,7 +859,7 @@ class EventCreationHandler(object):
        # Ensure that we can round-trip before trying to persist in the db
        try:
            dump = frozendict_json_encoder.encode(event.content)
            json_decoder.decode(dump)
            json.loads(dump)
        except Exception:
            logger.exception("Failed to encode content: %r", event.content)
            raise
@@ -930,7 +891,9 @@ class EventCreationHandler(object):
        except Exception:
            # Ensure that we actually remove the entries in the push actions
            # staging area, if we calculated them.
            await self.store.remove_push_actions_from_staging(event.event_id)
            run_in_background(
                self.store.remove_push_actions_from_staging, event.event_id
            )
            raise

    async def _validate_canonical_alias(
@@ -994,7 +957,7 @@ class EventCreationHandler(object):
            allow_none=True,
        )

        is_admin_redaction = bool(
        is_admin_redaction = (
            original_event and event.sender != original_event.sender
        )

@@ -1114,8 +1077,8 @@ class EventCreationHandler(object):
        auth_events_ids = self.auth.compute_auth_events(
            event, prev_state_ids, for_verification=True
        )
        auth_events_map = await self.store.get_events(auth_events_ids)
        auth_events = {(e.type, e.state_key): e for e in auth_events_map.values()}
        auth_events = await self.store.get_events(auth_events_ids)
        auth_events = {(e.type, e.state_key): e for e in auth_events.values()}

        room_version = await self.store.get_room_version_id(event.room_id)
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
@@ -1213,14 +1176,8 @@ class EventCreationHandler(object):

                    event.internal_metadata.proactively_send = False

                    # Since this is a dummy event it is OK if it is sent by a
                    # shadow-banned user.
                    await self.send_nonmember_event(
                        requester,
                        event,
                        context,
                        ratelimit=False,
                        ignore_shadow_ban=True,
                        requester, event, context, ratelimit=False
                    )
                    dummy_event_sent = True
                    break

@@ -12,6 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from typing import TYPE_CHECKING, Dict, Generic, List, Optional, Tuple, TypeVar
from urllib.parse import urlencode
@@ -37,8 +38,8 @@ from synapse.config import ConfigError
from synapse.http.server import respond_with_html
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.push.mailer import load_jinja2_templates
from synapse.types import UserID, map_username_to_mxid_localpart
from synapse.util import json_decoder

if TYPE_CHECKING:
    from synapse.server import HomeServer
@@ -93,7 +94,6 @@ class OidcHandler:
    """

    def __init__(self, hs: "HomeServer"):
        self.hs = hs
        self._callback_url = hs.config.oidc_callback_url  # type: str
        self._scopes = hs.config.oidc_scopes  # type: List[str]
        self._client_auth = ClientAuth(
@@ -123,7 +123,9 @@ class OidcHandler:
        self._hostname = hs.hostname  # type: str
        self._server_name = hs.config.server_name  # type: str
        self._macaroon_secret_key = hs.config.macaroon_secret_key
        self._error_template = hs.config.sso_error_template
        self._error_template = load_jinja2_templates(
            hs.config.sso_template_dir, ["sso_error.html"]
        )[0]

        # identifier for the external_ids table
        self._auth_provider_id = "oidc"
@@ -368,7 +370,7 @@ class OidcHandler:
        # and check for an error field. If not, we respond with a generic
        # error message.
        try:
            resp = json_decoder.decode(resp_body.decode("utf-8"))
            resp = json.loads(resp_body.decode("utf-8"))
            error = resp["error"]
            description = resp.get("error_description", error)
        except (ValueError, KeyError):
@@ -385,7 +387,7 @@ class OidcHandler:

        # Since it is not a 5xx code, the body should be valid JSON. It will
        # raise if not.
        resp = json_decoder.decode(resp_body.decode("utf-8"))
        resp = json.loads(resp_body.decode("utf-8"))

        if "error" in resp:
            error = resp["error"]
@@ -690,17 +692,9 @@ class OidcHandler:
            self._render_error(request, "invalid_token", str(e))
            return

        # Pull out the user-agent and IP from the request.
        user_agent = request.requestHeaders.getRawHeaders(b"User-Agent", default=[b""])[
            0
        ].decode("ascii", "surrogateescape")
        ip_address = self.hs.get_ip_from_request(request)

        # Call the mapper to register/login the user
        try:
            user_id = await self._map_userinfo_to_user(
                userinfo, token, user_agent, ip_address
            )
            user_id = await self._map_userinfo_to_user(userinfo, token)
        except MappingException as e:
            logger.exception("Could not map user")
            self._render_error(request, "mapping_error", str(e))
@@ -837,9 +831,7 @@ class OidcHandler:
        now = self._clock.time_msec()
        return now < expiry

    async def _map_userinfo_to_user(
        self, userinfo: UserInfo, token: Token, user_agent: str, ip_address: str
    ) -> str:
    async def _map_userinfo_to_user(self, userinfo: UserInfo, token: Token) -> str:
        """Maps a UserInfo object to a mxid.

        UserInfo should have a claim that uniquely identifies users. This claim
@@ -854,8 +846,6 @@ class OidcHandler:
        Args:
            userinfo: an object representing the user
            token: a dict with the tokens obtained from the provider
            user_agent: The user agent of the client making the request.
            ip_address: The IP address of the client making the request.

        Raises:
            MappingException: if there was an error while mapping some properties
@@ -912,9 +902,7 @@ class OidcHandler:
            # It's the first time this user is logging in and the mapped mxid was
            # not taken, register the user
            registered_user_id = await self._registration_handler.register_user(
                localpart=localpart,
                default_display_name=attributes["display_name"],
                user_agent_ips=(user_agent, ip_address),
                localpart=localpart, default_display_name=attributes["display_name"],
            )

            await self._datastore.record_user_external_id(

@@ -82,9 +82,6 @@ class PaginationHandler(object):

        self._retention_default_max_lifetime = hs.config.retention_default_max_lifetime

        self._retention_allowed_lifetime_min = hs.config.retention_allowed_lifetime_min
        self._retention_allowed_lifetime_max = hs.config.retention_allowed_lifetime_max

        if hs.config.retention_enabled:
            # Run the purge jobs described in the configuration file.
            for job in hs.config.retention_purge_jobs:
@@ -114,7 +111,7 @@ class PaginationHandler(object):
            the range to handle (inclusive). If None, it means that the range has no
            upper limit.
        """
        # We want the storage layer to include rooms with no retention policy in its
        # We want the storage layer to to include rooms with no retention policy in its
        # return value only if a default retention policy is defined in the server's
        # configuration and that policy's 'max_lifetime' is either lower (or equal) than
        # max_ms or higher than min_ms (or both).
@@ -155,32 +152,13 @@ class PaginationHandler(object):
                )
                continue

            # If max_lifetime is None, it means that the room has no retention policy.
            # Given we only retrieve such rooms when there's a default retention policy
            # defined in the server's configuration, we can safely assume that's the
            # case and use it for this room.
            max_lifetime = (
                retention_policy["max_lifetime"] or self._retention_default_max_lifetime
            )
            max_lifetime = retention_policy["max_lifetime"]

            # Cap the effective max_lifetime to be within the range allowed in the
            # config.
            # We do this in two steps:
            #   1. Make sure it's higher or equal to the minimum allowed value, and if
            #      it's not, replace it with that value. This is because the server
            #      operator can be required to not delete information before a given
            #      time, e.g. to comply with freedom of information laws.
            #   2. Make sure the resulting value is lower or equal to the maximum allowed
            #      value, and if it's not, replace it with that value. This is because the
            #      server operator can be required to delete any data after a specific
            #      amount of time.
            if self._retention_allowed_lifetime_min is not None:
                max_lifetime = max(self._retention_allowed_lifetime_min, max_lifetime)

            if self._retention_allowed_lifetime_max is not None:
                max_lifetime = min(max_lifetime, self._retention_allowed_lifetime_max)
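A worked example of the two-step clamp (all values invented, in milliseconds): with an allowed range of 1 to 90 days, a room policy of 365 days is capped down to 90 days by step 2, while a policy of 1 hour is raised to 1 day by step 1.

```python
DAY_MS = 24 * 60 * 60 * 1000

allowed_min = 1 * DAY_MS   # e.g. a freedom-of-information floor
allowed_max = 90 * DAY_MS  # e.g. a data-retention ceiling

def clamp_lifetime(max_lifetime: int) -> int:
    # Step 1: never purge earlier than the configured minimum.
    max_lifetime = max(allowed_min, max_lifetime)
    # Step 2: never retain longer than the configured maximum.
    return min(max_lifetime, allowed_max)

assert clamp_lifetime(365 * DAY_MS) == 90 * DAY_MS
assert clamp_lifetime(60 * 60 * 1000) == 1 * DAY_MS
```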
            logger.debug("[purge] max_lifetime for room %s: %s", room_id, max_lifetime)
            if max_lifetime is None:
                # If max_lifetime is None, it means that include_null equals True,
                # therefore we can safely assume that there is a default policy defined
                # in the server's configuration.
                max_lifetime = self._retention_default_max_lifetime

            # Figure out what token we should start purging at.
            ts = self.clock.time_msec() - max_lifetime
@@ -357,7 +335,7 @@ class PaginationHandler(object):
        if room_token.topological:
            max_topo = room_token.topological
        else:
            max_topo = await self.store.get_max_topological_token(
            max_topo = await self.store.get_current_topological_token(
                room_id, room_token.stream
            )

@@ -373,7 +351,7 @@ class PaginationHandler(object):
            source_config.from_key = str(leave_token)

        await self.hs.get_handlers().federation_handler.maybe_backfill(
            room_id, max_topo
            room_id, max_topo, limit=pagin_config.limit,
        )

        events, next_key = await self.store.paginate_room_events(

@@ -33,14 +33,14 @@ from typing_extensions import ContextManager
import synapse.metrics
from synapse.api.constants import EventTypes, Membership, PresenceState
from synapse.api.errors import SynapseError
from synapse.api.presence import UserPresenceState
from synapse.logging.context import run_in_background
from synapse.logging.utils import log_function
from synapse.metrics import LaterGauge
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.state import StateHandler
from synapse.storage.databases.main import DataStore
from synapse.types import Collection, JsonDict, UserID, get_domain_from_id
from synapse.storage.presence import UserPresenceState
from synapse.types import JsonDict, UserID, get_domain_from_id
from synapse.util.async_helpers import Linearizer
from synapse.util.caches.descriptors import cached
from synapse.util.metrics import Measure
@@ -1318,7 +1318,7 @@ async def get_interested_parties(


async def get_interested_remotes(
    store: DataStore, states: List[UserPresenceState], state_handler: StateHandler
) -> List[Tuple[Collection[str], List[UserPresenceState]]]:
) -> List[Tuple[List[str], List[UserPresenceState]]]:
    """Given a list of presence states, figure out which remote servers
    should be sent which.

@@ -1334,7 +1334,7 @@ async def get_interested_remotes(
        each tuple the list of UserPresenceState should be sent to each
        destination
    """
    hosts_and_states = []  # type: List[Tuple[Collection[str], List[UserPresenceState]]]
    hosts_and_states = []

    # First we look up the rooms each user is in (as well as any explicit
    # subscriptions), then for each distinct room we look up the remote

@@ -26,7 +26,6 @@ from synapse.replication.http.register import (
    ReplicationPostRegisterActionsServlet,
    ReplicationRegisterServlet,
)
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.storage.state import StateFilter
from synapse.types import RoomAlias, UserID, create_requester

@@ -53,8 +52,6 @@ class RegistrationHandler(BaseHandler):
        self.macaroon_gen = hs.get_macaroon_generator()
        self._server_notices_mxid = hs.config.server_notices_mxid

        self.spam_checker = hs.get_spam_checker()

        if hs.config.worker_app:
            self._register_client = ReplicationRegisterServlet.make_client(hs)
            self._register_device_client = RegisterDeviceReplicationServlet.make_client(
@@ -127,9 +124,7 @@ class RegistrationHandler(BaseHandler):
        try:
            int(localpart)
            raise SynapseError(
                400,
                "Numeric user IDs are reserved for guest users.",
                errcode=Codes.INVALID_USERNAME,
                400, "Numeric user IDs are reserved for guest users."
            )
        except ValueError:
            pass
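The guard above works because `int()` raises `ValueError` for anything that is not purely numeric, so only all-digit localparts fall through to the error:

```python
int("12345")      # succeeds -> "12345" is reserved for guest users
try:
    int("alice")  # raises ValueError -> an acceptable localpart
except ValueError:
    pass
```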
@@ -147,7 +142,6 @@ class RegistrationHandler(BaseHandler):
        address=None,
        bind_emails=[],
        by_admin=False,
        user_agent_ips=None,
    ):
        """Registers a new client on the server.

@@ -165,8 +159,6 @@ class RegistrationHandler(BaseHandler):
            bind_emails (List[str]): list of emails to bind to this account.
            by_admin (bool): True if this registration is being made via the
                admin api, otherwise False.
            user_agent_ips (List[(str, str)]): Tuples of IP addresses and user-agents used
                during the registration process.
        Returns:
            str: user_id
        Raises:
@@ -174,24 +166,6 @@ class RegistrationHandler(BaseHandler):
        """
        self.check_registration_ratelimit(address)

        result = self.spam_checker.check_registration_for_spam(
            threepid, localpart, user_agent_ips or [],
        )

        if result == RegistrationBehaviour.DENY:
            logger.info(
                "Blocked registration of %r", localpart,
            )
            # We return a 429 to make it not obvious that they've been
            # denied.
            raise SynapseError(429, "Rate limited")

        shadow_banned = result == RegistrationBehaviour.SHADOW_BAN
        if shadow_banned:
            logger.info(
                "Shadow banning registration of %r", localpart,
            )

        # do not check_auth_blocking if the call is coming through the Admin API
        if not by_admin:
            await self.auth.check_auth_blocking(threepid=threepid)
@@ -220,7 +194,6 @@ class RegistrationHandler(BaseHandler):
            admin=admin,
            user_type=user_type,
            address=address,
            shadow_banned=shadow_banned,
        )

        if self.hs.config.user_directory_search_all_users:
@@ -251,7 +224,6 @@ class RegistrationHandler(BaseHandler):
            make_guest=make_guest,
            create_profile_with_displayname=default_display_name,
            address=address,
            shadow_banned=shadow_banned,
        )

        # Successfully registered
@@ -557,7 +529,6 @@ class RegistrationHandler(BaseHandler):
        admin=False,
        user_type=None,
        address=None,
        shadow_banned=False,
    ):
        """Register user in the datastore.

@@ -575,7 +546,6 @@ class RegistrationHandler(BaseHandler):
            user_type (str|None): type of user. One of the values from
                api.constants.UserTypes, or None for a normal user.
            address (str|None): the IP address used to perform the registration.
            shadow_banned (bool): Whether to shadow-ban the user

        Returns:
            Awaitable
@@ -591,7 +561,6 @@ class RegistrationHandler(BaseHandler):
                admin=admin,
                user_type=user_type,
                address=address,
                shadow_banned=shadow_banned,
            )
        else:
            return self.store.register_user(
@@ -603,7 +572,6 @@ class RegistrationHandler(BaseHandler):
                create_profile_with_displayname=create_profile_with_displayname,
                admin=admin,
                user_type=user_type,
                shadow_banned=shadow_banned,
            )

    async def register_device(
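One side of this diff threads a spam checker through registration; the verdict handling reduces to a small mapping (a sketch using the names from the diff, not a verbatim excerpt):

```python
result = spam_checker.check_registration_for_spam(threepid, localpart, [])

if result == RegistrationBehaviour.DENY:
    # Surfaced as a generic 429 so callers cannot tell they were blocked.
    raise SynapseError(429, "Rate limited")

# A shadow-banned account registers normally, but its sends are later
# silently dropped by the guards shown earlier in this diff.
shadow_banned = result == RegistrationBehaviour.SHADOW_BAN
```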
@@ -20,10 +20,9 @@
import itertools
import logging
import math
import random
import string
from collections import OrderedDict
from typing import TYPE_CHECKING, Any, Awaitable, Dict, List, Optional, Tuple
from typing import Awaitable, Optional, Tuple

from synapse.api.constants import (
    EventTypes,
@@ -33,14 +32,11 @@ from synapse.api.constants import (
    RoomEncryptionAlgorithms,
)
from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError
from synapse.api.filtering import Filter
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.events import EventBase
from synapse.events.utils import copy_power_levels_contents
from synapse.http.endpoint import parse_and_validate_server_name
from synapse.storage.state import StateFilter
from synapse.types import (
    JsonDict,
    Requester,
    RoomAlias,
    RoomID,
@@ -57,9 +53,6 @@ from synapse.visibility import filter_events_for_client

from ._base import BaseHandler

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

id_server_scheme = "https://"
@@ -68,7 +61,7 @@ FIVE_MINUTES_IN_MS = 5 * 60 * 1000


class RoomCreationHandler(BaseHandler):
    def __init__(self, hs: "HomeServer"):
    def __init__(self, hs):
        super(RoomCreationHandler, self).__init__(hs)

        self.spam_checker = hs.get_spam_checker()
@@ -99,7 +92,7 @@ class RoomCreationHandler(BaseHandler):
            "guest_can_join": False,
            "power_level_content_override": {},
        },
    }  # type: Dict[str, Dict[str, Any]]
    }

    # Modify presets to selectively enable encryption by default per homeserver config
    for preset_name, preset_config in self._presets_dict.items():
@@ -136,9 +129,6 @@ class RoomCreationHandler(BaseHandler):

        Returns:
            the new room id

        Raises:
            ShadowBanError if the requester is shadow-banned.
        """
        await self.ratelimit(requester)

@@ -174,15 +164,6 @@ class RoomCreationHandler(BaseHandler):
    async def _upgrade_room(
        self, requester: Requester, old_room_id: str, new_version: RoomVersion
    ):
        """
        Args:
            requester: the user requesting the upgrade
            old_room_id: the id of the room to be replaced
            new_version: the version to upgrade the room to

        Raises:
            ShadowBanError if the requester is shadow-banned.
        """
        user_id = requester.user.to_string()

        # start by allocating a new room id
@@ -234,9 +215,6 @@ class RoomCreationHandler(BaseHandler):

        old_room_state = await tombstone_context.get_current_state_ids()

        # We know the tombstone event isn't an outlier, so it has current state.
        assert old_room_state is not None

        # update any aliases
        await self._move_aliases_to_new_room(
            requester, old_room_id, new_room_id, old_room_state
@@ -269,9 +247,6 @@ class RoomCreationHandler(BaseHandler):
            old_room_id: the id of the room to be replaced
            new_room_id: the id of the replacement room
            old_room_state: the state map for the old room

        Raises:
            ShadowBanError if the requester is shadow-banned.
        """
        old_room_pl_event_id = old_room_state.get((EventTypes.PowerLevels, ""))

@@ -553,21 +528,17 @@ class RoomCreationHandler(BaseHandler):
            logger.error("Unable to send updated alias events in new room: %s", e)

    async def create_room(
        self,
        requester: Requester,
        config: JsonDict,
        ratelimit: bool = True,
        creator_join_profile: Optional[JsonDict] = None,
        self, requester, config, ratelimit=True, creator_join_profile=None
    ) -> Tuple[dict, int]:
        """ Creates a new room.

        Args:
            requester:
            requester (synapse.types.Requester):
                The user who requested the room creation.
            config: A dict of configuration options.
            ratelimit: set to False to disable the rate limiter
            config (dict): A dict of configuration options.
            ratelimit (bool): set to False to disable the rate limiter

            creator_join_profile:
            creator_join_profile (dict|None):
                Set to override the displayname and avatar for the creating
                user in this room. If unset, displayname and avatar will be
                derived from the user's profile. If set, should contain the
@@ -630,7 +601,6 @@ class RoomCreationHandler(BaseHandler):
                Codes.UNSUPPORTED_ROOM_VERSION,
            )

        room_alias = None
        if "room_alias_name" in config:
            for wchar in string.whitespace:
                if wchar in config["room_alias_name"]:
@@ -641,8 +611,9 @@ class RoomCreationHandler(BaseHandler):

            if mapping:
                raise SynapseError(400, "Room alias already taken", Codes.ROOM_IN_USE)
        else:
            room_alias = None

        invite_3pid_list = config.get("invite_3pid", [])
        invite_list = config.get("invite", [])
        for i in invite_list:
            try:
@@ -651,14 +622,6 @@ class RoomCreationHandler(BaseHandler):
            except Exception:
                raise SynapseError(400, "Invalid user_id: %s" % (i,))

        if (invite_list or invite_3pid_list) and requester.shadow_banned:
            # We randomly sleep a bit just to annoy the requester.
            await self.clock.sleep(random.randint(1, 10))

            # Allow the request to go through, but remove any associated invites.
            invite_3pid_list = []
            invite_list = []

        await self.event_creation_handler.assert_accepted_privacy_policy(requester)

        power_level_content_override = config.get("power_level_content_override")
@@ -673,6 +636,8 @@ class RoomCreationHandler(BaseHandler):
                % (user_id,),
            )

        invite_3pid_list = config.get("invite_3pid", [])

        visibility = config.get("visibility", None)
        is_public = visibility == "public"
|
||||
|
||||
@@ -767,8 +732,6 @@ class RoomCreationHandler(BaseHandler):
|
||||
if is_direct:
|
||||
content["is_direct"] = is_direct
|
||||
|
||||
# Note that update_membership with an action of "invite" can raise a
|
||||
# ShadowBanError, but this was handled above by emptying invite_list.
|
||||
_, last_stream_id = await self.room_member_handler.update_membership(
|
||||
requester,
|
||||
UserID.from_string(invitee),
|
||||
@@ -783,8 +746,6 @@ class RoomCreationHandler(BaseHandler):
|
||||
id_access_token = invite_3pid.get("id_access_token") # optional
|
||||
address = invite_3pid["address"]
|
||||
medium = invite_3pid["medium"]
|
||||
# Note that do_3pid_invite can raise a ShadowBanError, but this was
|
||||
# handled above by emptying invite_3pid_list.
|
||||
last_stream_id = await self.hs.get_room_member_handler().do_3pid_invite(
|
||||
room_id,
|
||||
requester.user,
|
||||
@@ -810,30 +771,23 @@ class RoomCreationHandler(BaseHandler):
|
||||
|
||||
async def _send_events_for_new_room(
|
||||
self,
|
||||
creator: Requester,
|
||||
room_id: str,
|
||||
preset_config: str,
|
||||
invite_list: List[str],
|
||||
initial_state: StateMap,
|
||||
creation_content: JsonDict,
|
||||
room_alias: Optional[RoomAlias] = None,
|
||||
power_level_content_override: Optional[JsonDict] = None,
|
||||
creator_join_profile: Optional[JsonDict] = None,
|
||||
creator, # A Requester object.
|
||||
room_id,
|
||||
preset_config,
|
||||
invite_list,
|
||||
initial_state,
|
||||
creation_content,
|
||||
room_alias=None,
|
||||
power_level_content_override=None, # Doesn't apply when initial state has power level state event content
|
||||
creator_join_profile=None,
|
||||
) -> int:
|
||||
"""Sends the initial events into a new room.
|
||||
|
||||
`power_level_content_override` doesn't apply when initial state has
|
||||
power level state event content.
|
||||
|
||||
Returns:
|
||||
The stream_id of the last event persisted.
|
||||
"""
|
||||
|
||||
creator_id = creator.user.to_string()
|
||||
|
||||
event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}
|
||||
|
||||
def create(etype: str, content: JsonDict, **kwargs) -> JsonDict:
|
||||
def create(etype, content, **kwargs):
|
||||
e = {"type": etype, "content": content}
|
||||
|
||||
e.update(event_keys)
|
||||
@@ -841,21 +795,23 @@ class RoomCreationHandler(BaseHandler):
|
||||
|
||||
return e
|
||||
|
||||
async def send(etype: str, content: JsonDict, **kwargs) -> int:
|
||||
async def send(etype, content, **kwargs) -> int:
|
||||
event = create(etype, content, **kwargs)
|
||||
logger.debug("Sending %s in new room", etype)
|
||||
# Allow these events to be sent even if the user is shadow-banned to
|
||||
# allow the room creation to complete.
|
||||
(
|
||||
_,
|
||||
last_stream_id,
|
||||
) = await self.event_creation_handler.create_and_send_nonmember_event(
|
||||
creator, event, ratelimit=False, ignore_shadow_ban=True,
|
||||
creator, event, ratelimit=False
|
||||
)
|
||||
return last_stream_id
|
||||
|
||||
config = self._presets_dict[preset_config]
|
||||
|
||||
creator_id = creator.user.to_string()
|
||||
|
||||
event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}
|
||||
|
||||
creation_content.update({"creator": creator_id})
|
||||
await send(etype=EventTypes.Create, content=creation_content)
|
||||
|
||||
@@ -896,7 +852,7 @@ class RoomCreationHandler(BaseHandler):
|
||||
"kick": 50,
|
||||
"redact": 50,
|
||||
"invite": 50,
|
||||
} # type: JsonDict
|
||||
}
|
||||
|
||||
if config["original_invitees_have_ops"]:
|
||||
for invitee in invite_list:
|
||||
@@ -950,7 +906,7 @@ class RoomCreationHandler(BaseHandler):
|
||||
return last_sent_stream_id
|
||||
|
||||
async def _generate_room_id(
|
||||
self, creator_id: str, is_public: bool, room_version: RoomVersion,
|
||||
self, creator_id: str, is_public: str, room_version: RoomVersion,
|
||||
):
|
||||
# autogen room IDs and try to create it. We may clash, so just
|
||||
# try a few times till one goes through, giving up eventually.
|
||||
@@ -974,30 +930,23 @@ class RoomCreationHandler(BaseHandler):


class RoomContextHandler(object):
-    def __init__(self, hs: "HomeServer"):
+    def __init__(self, hs):
        self.hs = hs
        self.store = hs.get_datastore()
        self.storage = hs.get_storage()
        self.state_store = self.storage.state

-    async def get_event_context(
-        self,
-        user: UserID,
-        room_id: str,
-        event_id: str,
-        limit: int,
-        event_filter: Optional[Filter],
-    ) -> Optional[JsonDict]:
+    async def get_event_context(self, user, room_id, event_id, limit, event_filter):
        """Retrieves events, pagination tokens and state around a given event
        in a room.

        Args:
-            user
-            room_id
-            event_id
-            limit: The maximum number of events to return in total
+            user (UserID)
+            room_id (str)
+            event_id (str)
+            limit (int): The maximum number of events to return in total
                (excluding state).
-            event_filter: the filter to apply to the events returned
+            event_filter (Filter|None): the filter to apply to the events returned
                (excluding the target event_id)

        Returns:
@@ -1084,18 +1033,12 @@ class RoomContextHandler(object):


class RoomEventSource(object):
-    def __init__(self, hs: "HomeServer"):
+    def __init__(self, hs):
        self.store = hs.get_datastore()

    async def get_new_events(
-        self,
-        user: UserID,
-        from_key: str,
-        limit: int,
-        room_ids: List[str],
-        is_guest: bool,
-        explicit_room_id: Optional[str] = None,
-    ) -> Tuple[List[EventBase], str]:
+        self, user, from_key, limit, room_ids, is_guest, explicit_room_id=None
+    ):
        # We just ignore the key for now.

        to_key = self.get_current_key()
@@ -1153,7 +1096,7 @@ class RoomShutdownHandler(object):
    )
    DEFAULT_ROOM_NAME = "Content Violation Notification"

-    def __init__(self, hs: "HomeServer"):
+    def __init__(self, hs):
        self.hs = hs
        self.room_member_handler = hs.get_room_member_handler()
        self._room_creation_handler = hs.get_room_creation_handler()
@@ -15,21 +15,14 @@

import abc
import logging
-import random
from http import HTTPStatus
-from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple, Union

from unpaddedbase64 import encode_base64

from synapse import types
from synapse.api.constants import MAX_DEPTH, EventTypes, Membership
-from synapse.api.errors import (
-    AuthError,
-    Codes,
-    LimitExceededError,
-    ShadowBanError,
-    SynapseError,
-)
+from synapse.api.errors import AuthError, Codes, LimitExceededError, SynapseError
from synapse.api.ratelimiting import Ratelimiter
from synapse.api.room_versions import EventFormatVersions
from synapse.crypto.event_signing import compute_event_reference_hash
@@ -38,15 +31,7 @@ from synapse.events.builder import create_local_event_from_event_dict
from synapse.events.snapshot import EventContext
from synapse.events.validator import EventValidator
from synapse.storage.roommember import RoomsForUser
-from synapse.types import (
-    Collection,
-    JsonDict,
-    Requester,
-    RoomAlias,
-    RoomID,
-    StateMap,
-    UserID,
-)
+from synapse.types import Collection, JsonDict, Requester, RoomAlias, RoomID, UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.distributor import user_joined_room, user_left_room
@@ -316,31 +301,6 @@ class RoomMemberHandler(object):
        content: Optional[dict] = None,
        require_consent: bool = True,
    ) -> Tuple[str, int]:
-        """Update a user's membership in a room.
-
-        Params:
-            requester: The user who is performing the update.
-            target: The user whose membership is being updated.
-            room_id: The room ID whose membership is being updated.
-            action: The membership change, see synapse.api.constants.Membership.
-            txn_id: The transaction ID, if given.
-            remote_room_hosts: Remote servers to send the update to.
-            third_party_signed: Information from a 3PID invite.
-            ratelimit: Whether to rate limit the request.
-            content: The content of the created event.
-            require_consent: Whether consent is required.
-
-        Returns:
-            A tuple of the new event ID and stream ID.
-
-        Raises:
-            ShadowBanError if a shadow-banned requester attempts to send an invite.
-        """
-        if action == Membership.INVITE and requester.shadow_banned:
-            # We randomly sleep a bit just to annoy the requester.
-            await self.clock.sleep(random.randint(1, 10))
-            raise ShadowBanError()
-
        key = (room_id,)

        with (await self.member_linearizer.queue(key)):
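For context, the shadow-ban gate being removed here follows a simple pattern: silently swallow the action for banned users while keeping the response timing plausible. A minimal sketch of that pattern, using asyncio in place of Synapse's clock abstraction (the `ShadowBanError` name comes from the diff; the wiring is assumed):

```python
import asyncio
import random


class ShadowBanError(Exception):
    """Raised internally when a shadow-banned user attempts an action."""


async def guarded_invite(requester, do_invite) -> None:
    # Shadow-banned users must not learn they are banned: sleep a random
    # few seconds so the timing resembles real work, then abort quietly.
    if getattr(requester, "shadow_banned", False):
        await asyncio.sleep(random.randint(1, 10))
        raise ShadowBanError()
    await do_invite()
```

Callers further up the stack would presumably catch `ShadowBanError` and fabricate a success response, matching the changelog's "ignoring any message send requests" description.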
@@ -750,7 +710,9 @@ class RoomMemberHandler(object):
            if prev_member_event.membership == Membership.JOIN:
                await self._user_left_room(target_user, room_id)

-    async def _can_guest_join(self, current_state_ids: StateMap[str]) -> bool:
+    async def _can_guest_join(
+        self, current_state_ids: Dict[Tuple[str, str], str]
+    ) -> bool:
        """
        Returns whether a guest can join a room based on its current state.
        """
@@ -760,7 +722,7 @@ class RoomMemberHandler(object):

        guest_access = await self.store.get_event(guest_access_id)

-        return bool(
+        return (
            guest_access
            and guest_access.content
            and "guest_access" in guest_access.content
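The `bool(...)` wrapper being dropped here matters for a declared `-> bool` return type: a chained `and` expression evaluates to one of its operands, not to a strict True/False. A quick illustration in plain Python:

```python
missing = None

# Short-circuiting means the `in` test is never evaluated, so no TypeError,
# but the expression's value is the falsy operand itself:
result = missing and "guest_access" in missing
print(result)        # None, not False
print(bool(result))  # False, which is what a `-> bool` signature promises
```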
@@ -817,25 +779,6 @@ class RoomMemberHandler(object):
        txn_id: Optional[str],
        id_access_token: Optional[str] = None,
    ) -> int:
-        """Invite a 3PID to a room.
-
-        Args:
-            room_id: The room to invite the 3PID to.
-            inviter: The user sending the invite.
-            medium: The 3PID's medium.
-            address: The 3PID's address.
-            id_server: The identity server to use.
-            requester: The user making the request.
-            txn_id: The transaction ID this is part of, or None if this is not
-                part of a transaction.
-            id_access_token: The optional identity server access token.
-
-        Returns:
-            The new stream ID.
-
-        Raises:
-            ShadowBanError if the requester has been shadow-banned.
-        """
        if self.config.block_non_admin_invites:
            is_requester_admin = await self.auth.is_server_admin(requester.user)
            if not is_requester_admin:
@@ -843,11 +786,6 @@ class RoomMemberHandler(object):
                403, "Invites have been disabled on this server", Codes.FORBIDDEN
            )

-        if requester.shadow_banned:
-            # We randomly sleep a bit just to annoy the requester.
-            await self.clock.sleep(random.randint(1, 10))
-            raise ShadowBanError()
-
        # We need to rate limit *before* we send out any 3PID invites, so we
        # can't just rely on the standard ratelimiting of events.
        await self.base_handler.ratelimit(requester)
@@ -872,8 +810,6 @@ class RoomMemberHandler(object):
            )

        if invitee:
-            # Note that update_membership with an action of "invite" can raise
-            # a ShadowBanError, but this was done above already.
            _, stream_id = await self.update_membership(
                requester, UserID.from_string(invitee), room_id, "invite", txn_id=txn_id
            )
@@ -979,7 +915,9 @@ class RoomMemberHandler(object):
            )
        return stream_id

-    async def _is_host_in_room(self, current_state_ids: StateMap[str]) -> bool:
+    async def _is_host_in_room(
+        self, current_state_ids: Dict[Tuple[str, str], str]
+    ) -> bool:
        # Have we just created the room, and is this about to be the very
        # first member event?
        create_event_id = current_state_ids.get(("m.room.create", ""))
@@ -1110,7 +1048,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
            return event_id, stream_id

        # The room is too large. Leave.
-        requester = types.create_requester(user, None, False, False, None)
+        requester = types.create_requester(user, None, False, None)
        await self.update_membership(
            requester=requester, target=user, room_id=room_id, action="leave"
        )
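Room state here is keyed by `(event_type, state_key)` pairs, which is why the create event is looked up with `("m.room.create", "")`. A minimal sketch of that shape (the event IDs are illustrative values only):

```python
from typing import Dict, Tuple

# Current room state maps (event_type, state_key) -> event_id.
current_state_ids: Dict[Tuple[str, str], str] = {
    ("m.room.create", ""): "$create-event-id",
    ("m.room.member", "@alice:example.com"): "$alice-join-id",
}

# The create event always has an empty state_key, so this single lookup
# is enough to tell whether the room has only just been created.
create_event_id = current_state_ids.get(("m.room.create", ""))
print(create_event_id)  # "$create-event-id"
```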
@@ -54,7 +54,6 @@ class Saml2SessionData:

class SamlHandler:
    def __init__(self, hs: "synapse.server.HomeServer"):
        self.hs = hs
        self._saml_client = Saml2Client(hs.config.saml2_sp_config)
-        self._auth = hs.get_auth()
        self._auth_handler = hs.get_auth_handler()
@@ -134,14 +133,8 @@ class SamlHandler:
        # the dict.
        self.expire_sessions()

-        # Pull out the user-agent and IP from the request.
-        user_agent = request.requestHeaders.getRawHeaders(b"User-Agent", default=[b""])[
-            0
-        ].decode("ascii", "surrogateescape")
-        ip_address = self.hs.get_ip_from_request(request)
-
        user_id, current_session = await self._map_saml_response_to_user(
-            resp_bytes, relay_state, user_agent, ip_address
+            resp_bytes, relay_state
        )

        # Complete the interactive auth session or the login.
@@ -154,11 +147,7 @@ class SamlHandler:
        await self._auth_handler.complete_sso_login(user_id, request, relay_state)

    async def _map_saml_response_to_user(
-        self,
-        resp_bytes: str,
-        client_redirect_url: str,
-        user_agent: str,
-        ip_address: str,
+        self, resp_bytes: str, client_redirect_url: str
    ) -> Tuple[str, Optional[Saml2SessionData]]:
        """
        Given a SAML response, retrieve the cached session and user for it.
@@ -166,8 +155,6 @@ class SamlHandler:
        Args:
            resp_bytes: The SAML response.
            client_redirect_url: The redirect URL passed in by the client.
-            user_agent: The user agent of the client making the request.
-            ip_address: The IP address of the client making the request.

        Returns:
            Tuple of the user ID and SAML session associated with this response.
@@ -304,7 +291,6 @@ class SamlHandler:
            localpart=localpart,
            default_display_name=displayname,
            bind_emails=emails,
-            user_agent_ips=(user_agent, ip_address),
        )

        await self._datastore.record_user_external_id(
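The removed block above shows the usual way to pull a client's User-Agent out of a Twisted request: header values arrive as a list of byte strings. A standalone sketch of the same extraction (the `getClientIP` call is my substitution for Synapse's `get_ip_from_request` helper and is an assumption here):

```python
from typing import Tuple

from twisted.web.server import Request


def extract_client_info(request: Request) -> Tuple[str, str]:
    # Take the first User-Agent header value and decode permissively,
    # since user agents are not guaranteed to be valid ASCII.
    user_agent = request.requestHeaders.getRawHeaders(b"User-Agent", default=[b""])[
        0
    ].decode("ascii", "surrogateescape")
    # Twisted's Request can report the peer's address directly.
    ip_address = request.getClientIP()
    return user_agent, ip_address
```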
@@ -16,12 +16,13 @@

import logging
from typing import Any

+from canonicaljson import json
+
from twisted.web.client import PartialDownloadError

from synapse.api.constants import LoginType
from synapse.api.errors import Codes, LoginError, SynapseError
from synapse.config.emailconfig import ThreepidBehaviour
-from synapse.util import json_decoder

logger = logging.getLogger(__name__)
@@ -116,7 +117,7 @@ class RecaptchaAuthChecker(UserInteractiveAuthChecker):
        except PartialDownloadError as pde:
            # Twisted is silly
            data = pde.response
-            resp_body = json_decoder.decode(data.decode("utf-8"))
+            resp_body = json.loads(data.decode("utf-8"))

        if "success" in resp_body:
            # Note that we do NOT check the hostname here: we explicitly
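The "Twisted is silly" comment refers to a real quirk: when a server closes the connection without a Content-Length, Twisted's body reader raises `PartialDownloadError` even though the payload it read is usable. A hedged sketch of the recovery pattern used above (`http_client.get_raw` is a stand-in name, not a specific API):

```python
from twisted.web.client import PartialDownloadError


async def fetch_body_tolerantly(http_client, url: str) -> bytes:
    try:
        return await http_client.get_raw(url)  # stand-in for the real request call
    except PartialDownloadError as pde:
        # Twisted flags responses with no Content-Length as "partial",
        # but attaches whatever it did read to the exception.
        return pde.response
```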
@@ -19,7 +19,7 @@ import urllib
from io import BytesIO

import treq
-from canonicaljson import encode_canonical_json
+from canonicaljson import encode_canonical_json, json
from netaddr import IPAddress
from prometheus_client import Counter
from zope.interface import implementer, provider
@@ -47,7 +47,6 @@ from synapse.http import (
from synapse.http.proxyagent import ProxyAgent
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import set_tag, start_active_span, tags
-from synapse.util import json_decoder
from synapse.util.async_helpers import timeout_deferred

logger = logging.getLogger(__name__)
@@ -392,7 +391,7 @@ class SimpleHttpClient(object):
        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
-            return json_decoder.decode(body.decode("utf-8"))
+            return json.loads(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
@@ -434,7 +433,7 @@ class SimpleHttpClient(object):
        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
-            return json_decoder.decode(body.decode("utf-8"))
+            return json.loads(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
@@ -464,7 +463,7 @@ class SimpleHttpClient(object):
        actual_headers.update(headers)

        body = await self.get_raw(uri, args, headers=headers)
-        return json_decoder.decode(body.decode("utf-8"))
+        return json.loads(body.decode("utf-8"))

    async def put_json(self, uri, json_body, args={}, headers=None):
        """ Puts some json to the given URI.
@@ -507,7 +506,7 @@ class SimpleHttpClient(object):
        body = await make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
-            return json_decoder.decode(body.decode("utf-8"))
+            return json.loads(body.decode("utf-8"))
        else:
            raise HttpResponseException(
                response.code, response.phrase.decode("ascii", errors="replace"), body
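This file (and several below) swaps `json_decoder.decode(...)` back to plain `json.loads(...)`. To the best of my knowledge, `synapse.util`'s `json_decoder` is simply a shared, module-level `json.JSONDecoder()`; a sketch of that micro-optimisation (the `synapse.util` definition is an assumption here):

```python
import json

# One decoder instance, shared by every call site. json.loads() also uses a
# cached default decoder internally, but performs per-call argument and type
# checks first; calling .decode() on an instance skips that thin wrapper on
# hot request-parsing paths.
json_decoder = json.JSONDecoder()


def parse_body(body: bytes) -> dict:
    return json_decoder.decode(body.decode("utf-8"))


# Behaviour matches json.loads for ordinary payloads:
assert parse_body(b'{"ok": true}') == json.loads('{"ok": true}')
```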
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

+import json
import logging
import random
import time
@@ -25,7 +26,7 @@ from twisted.web.http import stringToDatetime
from twisted.web.http_headers import Headers

from synapse.logging.context import make_deferred_yieldable
-from synapse.util import Clock, json_decoder
+from synapse.util import Clock
from synapse.util.caches.ttlcache import TTLCache
from synapse.util.metrics import Measure

@@ -180,7 +181,7 @@ class WellKnownResolver(object):
        if response.code != 200:
            raise Exception("Non-200 response %s" % (response.code,))

-        parsed_body = json_decoder.decode(body.decode("utf-8"))
+        parsed_body = json.loads(body.decode("utf-8"))
        logger.info("Response from .well-known: %s", parsed_body)

        result = parsed_body["m.server"].encode("ascii")
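For reference, the body being parsed here is the Matrix server-delegation document served at `https://<domain>/.well-known/matrix/server`. A minimal standalone sketch of the same lookup, using the stdlib instead of Twisted purely for illustration:

```python
import json
from urllib.request import urlopen


def resolve_well_known(domain: str) -> bytes:
    # Fetch the delegation document; a non-200 response means no delegation.
    with urlopen("https://%s/.well-known/matrix/server" % domain) as response:
        if response.status != 200:
            raise Exception("Non-200 response %s" % (response.status,))
        parsed_body = json.loads(response.read().decode("utf-8"))
    # "m.server" names the host (and optional port) that actually serves
    # federation traffic on behalf of this domain.
    return parsed_body["m.server"].encode("ascii")
```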
@@ -22,13 +22,12 @@ import types
import urllib
from http import HTTPStatus
from io import BytesIO
-from typing import Any, Callable, Dict, Iterator, List, Tuple, Union
+from typing import Any, Callable, Dict, Tuple, Union

import jinja2
-from canonicaljson import iterencode_canonical_json, iterencode_pretty_printed_json
-from zope.interface import implementer
+from canonicaljson import encode_canonical_json, encode_pretty_printed_json

-from twisted.internet import defer, interfaces
+from twisted.internet import defer
from twisted.python import failure
from twisted.web import resource
from twisted.web.server import NOT_DONE_YET, Request
@@ -500,90 +499,6 @@ class RootOptionsRedirectResource(OptionsResource, RootRedirect):
    pass

-@implementer(interfaces.IPushProducer)
-class _ByteProducer:
-    """
-    Iteratively write bytes to the request.
-    """
-
-    # The minimum number of bytes for each chunk. Note that the last chunk will
-    # usually be smaller than this.
-    min_chunk_size = 1024
-
-    def __init__(
-        self, request: Request, iterator: Iterator[bytes],
-    ):
-        self._request = request
-        self._iterator = iterator
-        self._paused = False
-
-        # Register the producer and start producing data.
-        self._request.registerProducer(self, True)
-        self.resumeProducing()
-
-    def _send_data(self, data: List[bytes]) -> None:
-        """
-        Send a list of bytes as a chunk of a response.
-        """
-        if not data:
-            return
-        self._request.write(b"".join(data))
-
-    def pauseProducing(self) -> None:
-        self._paused = True
-
-    def resumeProducing(self) -> None:
-        # We've stopped producing in the meantime (note that this might be
-        # re-entrant after calling write).
-        if not self._request:
-            return
-
-        self._paused = False
-
-        # Write until there's backpressure telling us to stop.
-        while not self._paused:
-            # Get the next chunk and write it to the request.
-            #
-            # The output of the JSON encoder is buffered and coalesced until
-            # min_chunk_size is reached. This is because JSON encoders produce
-            # very small output per iteration and the Request object converts
-            # each call to write() to a separate chunk. Without this there would
-            # be an explosion in bytes written (e.g. b"{" becoming "1\r\n{\r\n").
-            #
-            # Note that buffer stores a list of bytes (instead of appending to
-            # bytes) to hopefully avoid many allocations.
-            buffer = []
-            buffered_bytes = 0
-            while buffered_bytes < self.min_chunk_size:
-                try:
-                    data = next(self._iterator)
-                    buffer.append(data)
-                    buffered_bytes += len(data)
-                except StopIteration:
-                    # The entire JSON object has been serialized, write any
-                    # remaining data, finalize the producer and the request, and
-                    # clean-up any references.
-                    self._send_data(buffer)
-                    self._request.unregisterProducer()
-                    self._request.finish()
-                    self.stopProducing()
-                    return
-
-            self._send_data(buffer)
-
-    def stopProducing(self) -> None:
-        # Clear a circular reference.
-        self._request = None
-
-
-def _encode_json_bytes(json_object: Any) -> Iterator[bytes]:
-    """
-    Encode an object into JSON. Returns an iterator of bytes.
-    """
-    for chunk in json_encoder.iterencode(json_object):
-        yield chunk.encode("utf-8")
def respond_with_json(
    request: Request,
    code: int,
@@ -618,22 +533,15 @@ def respond_with_json(
        return None

    if pretty_print:
-        encoder = iterencode_pretty_printed_json
+        json_bytes = encode_pretty_printed_json(json_object) + b"\n"
    else:
        if canonical_json or synapse.events.USE_FROZEN_DICTS:
-            encoder = iterencode_canonical_json
+            # canonicaljson already encodes to bytes
+            json_bytes = encode_canonical_json(json_object)
        else:
-            encoder = _encode_json_bytes
+            json_bytes = json_encoder.encode(json_object).encode("utf-8")

-    request.setResponseCode(code)
-    request.setHeader(b"Content-Type", b"application/json")
-    request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
-
-    if send_cors:
-        set_cors_headers(request)
-
-    _ByteProducer(request, encoder(json_object))
-    return NOT_DONE_YET
+    return respond_with_json_bytes(request, code, json_bytes, send_cors=send_cors)


def respond_with_json_bytes(
@@ -17,8 +17,9 @@

import logging

+from canonicaljson import json
+
from synapse.api.errors import Codes, SynapseError
-from synapse.util import json_decoder

logger = logging.getLogger(__name__)
@@ -214,7 +215,7 @@ def parse_json_value_from_request(request, allow_empty_body=False):
        return None

    try:
-        content = json_decoder.decode(content_bytes.decode("utf-8"))
+        content = json.loads(content_bytes.decode("utf-8"))
    except Exception as e:
        logger.warning("Unable to parse JSON: %s", e)
        raise SynapseError(400, "Content not JSON.", errcode=Codes.NOT_JSON)
@@ -172,11 +172,11 @@ from functools import wraps
from typing import TYPE_CHECKING, Dict, Optional, Type

import attr
+from canonicaljson import json

from twisted.internet import defer

from synapse.config import ConfigError
-from synapse.util import json_decoder, json_encoder

if TYPE_CHECKING:
    from synapse.http.site import SynapseRequest
@@ -499,9 +499,7 @@ def start_active_span_from_edu(
    if opentracing is None:
        return _noop_context_manager()

-    carrier = json_decoder.decode(edu_content.get("context", "{}")).get(
-        "opentracing", {}
-    )
+    carrier = json.loads(edu_content.get("context", "{}")).get("opentracing", {})
    context = opentracing.tracer.extract(opentracing.Format.TEXT_MAP, carrier)
    _references = [
        opentracing.child_of(span_context_from_string(x))
@@ -692,7 +690,7 @@ def active_span_context_as_string():
    opentracing.tracer.inject(
        opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier
    )
-    return json_encoder.encode(carrier)
+    return json.dumps(carrier)


@only_if_tracing
@@ -701,7 +699,7 @@ def span_context_from_string(carrier):
    Returns:
        The active span context decoded from a string.
    """
-    carrier = json_decoder.decode(carrier)
+    carrier = json.loads(carrier)
    return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, carrier)
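The inject/extract pair above is the standard OpenTracing way to carry a span context across a federation hop as JSON text. A hedged sketch of the round trip, assuming the `opentracing` package and an initialised global tracer (e.g. a Jaeger client), mirroring the calls visible in the diff:

```python
import json

import opentracing  # requires a configured global tracer


def span_context_to_string() -> str:
    # Serialise the active span context into a plain dict carrier...
    carrier: dict = {}
    opentracing.tracer.inject(
        opentracing.tracer.active_span, opentracing.Format.TEXT_MAP, carrier
    )
    # ...then to JSON, so it can ride inside an EDU's "context" field.
    return json.dumps(carrier)


def span_context_from_string(serialised: str):
    # The receiving server reverses the steps to parent its own spans.
    carrier = json.loads(serialised)
    return opentracing.tracer.extract(opentracing.Format.TEXT_MAP, carrier)
```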
Some files were not shown because too many files have changed in this diff.