
Compare commits


2 Commits

Author SHA1 Message Date
Andrew Morgan
2ac38a1109 changelog 2023-02-23 23:40:07 +00:00
Andrew Morgan
29cc17fde7 Add a method for Synapse modules to carry out HTTP federation requests
Provides a fairly basic interface for Synapse modules to complete HTTP
federation requests.

Custom error types are used to avoid stabilising any of the internal
MatrixFederationHttpClient code.
2023-02-23 23:40:07 +00:00
129 changed files with 2093 additions and 2992 deletions

View File

@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@5e780fc7bbd0cac69fc73271ed86edf5dcb72d67 # v2.26.0
uses: dawidd6/action-download-artifact@b59d8c6a6c5c6c6437954f470d963c0b20ea7415 # v2.25.0
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

View File

@@ -12,7 +12,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0
@@ -39,7 +39,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@adeb05db28a0c0004681db83893d56c0388ea9ea # v1.2.0

View File

@@ -48,7 +48,7 @@ jobs:
with:
ref: master
- name: Login to registry
uses: docker/login-action@v2
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}

View File

@@ -6,7 +6,7 @@ on:
jobs:
triage:
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v1
with:
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
content_id: ${{ github.event.issue.node_id }}

View File

@@ -1,12 +1,3 @@
Synapse 1.78.0 (2023-02-28)
===========================
Bugfixes
--------
- Fix a bug introduced in Synapse 1.76 where 5s delays would occasionally occur in deployments using workers. ([\#15150](https://github.com/matrix-org/synapse/issues/15150))
Synapse 1.78.0rc1 (2023-02-21)
==============================

View File

@@ -1 +0,0 @@
Document how to use caches in a module.

View File

@@ -1 +0,0 @@
Batch up storing state groups when creating a new room.

View File

@@ -1 +0,0 @@
Add two new Third Party Rules module API callbacks: [`on_add_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_add_user_third_party_identifier) and [`on_remove_user_third_party_identifier`](https://matrix-org.github.io/synapse/v1.79/modules/third_party_rules_callbacks.html#on_remove_user_third_party_identifier).

View File

@@ -1 +0,0 @@
Fix a long-standing bug where Synapse handled an unspecced field on push rules.

View File

@@ -1 +0,0 @@
Fix a long-standing bug where a URL preview would break if the discovered oEmbed failed to download.

View File

@@ -1 +0,0 @@
Add an [admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html) to delete a [specific event report](https://spec.matrix.org/v1.6/client-server-api/#reporting-content).

View File

@@ -1 +0,0 @@
Allow use of the `/filter` Client-Server APIs on workers.

View File

@@ -1 +0,0 @@
Remove the undocumented and unspecced `type` parameter to the `/thumbnail` endpoint.

View File

@@ -1 +0,0 @@
Fix a typo in an experimental config setting.

View File

@@ -1 +0,0 @@
Fix a long-standing bug where the user directory search was not case-insensitive for accented characters.

View File

@@ -1 +0,0 @@
Refactor the media modules.

View File

@@ -1 +0,0 @@
Correct small documentation errors in some `MatrixFederationHttpClient` methods.

View File

@@ -0,0 +1 @@
Add a new `send_federation_http_request` method to the Module API to allow Synapse modules to make matrix federation requests over HTTP.
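As a rough sketch of how a module might use such a method: the snippet below assumes the method is exposed as `ModuleApi.send_federation_http_request` and accepts an HTTP method, a destination server name and a path, returning the parsed JSON response. The exact signature and error types are not spelled out in this changeset, so treat every name here as illustrative.

```python
from typing import Any, Optional

from synapse.module_api import ModuleApi


class FederationVersionProbe:
    """Hypothetical module probing a remote homeserver over federation."""

    def __init__(self, config: Any, api: ModuleApi):
        self.api = api

    async def fetch_remote_version(self, destination: str) -> Optional[dict]:
        try:
            # Assumed signature: (method, destination, path) -> parsed JSON.
            return await self.api.send_federation_http_request(
                "GET", destination, "/_matrix/federation/v1/version"
            )
        except Exception:
            # The commit mentions custom error types wrapping federation
            # failures; a generic handler stands in for them here.
            return None
```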

View File

@@ -1 +0,0 @@
Bump dawidd6/action-download-artifact from 2.25.0 to 2.26.0.

View File

@@ -1 +0,0 @@
Bump docker/login-action from 1 to 2.

View File

@@ -1 +0,0 @@
Bump actions/checkout from 2 to 3.

View File

@@ -1 +0,0 @@
Bump matrix-org/backend-meta from 1 to 2.

View File

@@ -1 +0,0 @@
Bump typing-extensions from 4.4.0 to 4.5.0.

View File

@@ -1 +0,0 @@
Bump types-opentracing from 2.4.10.1 to 2.4.10.3.

View File

@@ -1 +0,0 @@
Bump ruff from 0.0.237 to 0.0.252.

View File

@@ -1 +0,0 @@
Bump types-setuptools from 67.3.0.1 to 67.4.0.3.

View File

@@ -1 +0,0 @@
Fix a long-standing bug where an initial sync would not respond to changes to the list of ignored users if there was an initial sync cached.

View File

@@ -1 +0,0 @@
Improve type hints.

View File

@@ -1 +0,0 @@
Remove dangling reference to being a reference implementation in docstring.

View File

@@ -1 +0,0 @@
Correct the description of the behavior of `registration_shared_secret_path` on startup.

View File

@@ -1 +0,0 @@
Remove support for server-side aggregation of reactions.

View File

@@ -1 +0,0 @@
Refactor the media modules.

debian/changelog vendored
View File

@@ -1,9 +1,3 @@
matrix-synapse-py3 (1.78.0) stable; urgency=medium
* New Synapse release 1.78.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Feb 2023 08:56:03 -0800
matrix-synapse-py3 (1.78.0~rc1) stable; urgency=medium
* Add `matrix-org-archive-keyring` package as recommended.

View File

@@ -142,7 +142,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable/.*)/rooms/.*/aliases",
"^/_matrix/client/v1/rooms/.*/timestamp_to_event$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/search",
"^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)",
],
"shared_extra_conf": {},
"worker_extra_conf": "",

View File

@@ -169,17 +169,3 @@ The following fields are returned in the JSON response body:
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
have a canonical alias set.
* `event_json`: object - Details of the original event that was reported.
# Delete a specific event report
This API deletes a specific event report. If the request is successful, the response body
will be an empty JSON object.
The API is:
```
DELETE /_synapse/admin/v1/event_reports/<report_id>
```
**URL parameters:**
* `report_id`: string - The ID of the event report.
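
As an illustration only (not part of the documented API page), deleting a report from Python might look like the following, assuming `BASE_URL` points at the homeserver and `ADMIN_TOKEN` belongs to a server admin:

```python
import requests

BASE_URL = "https://homeserver.example.com"  # assumption: your homeserver URL
ADMIN_TOKEN = "<admin access token>"         # assumption: a server admin's token
report_id = "42"                             # assumption: an existing report ID

resp = requests.delete(
    f"{BASE_URL}/_synapse/admin/v1/event_reports/{report_id}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
resp.raise_for_status()
print(resp.json())  # a successful response body is an empty JSON object: {}
```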

View File

@@ -307,8 +307,8 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
```python
async def check_media_file_for_spam(
file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
file_info: "synapse.media._base.FileInfo",
file_wrapper: "synapse.rest.media.v1.media_storage.ReadableFileWrapper",
file_info: "synapse.rest.media.v1._base.FileInfo",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
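
A minimal sketch of a module implementing this callback might look like the following; the blocked-server policy is purely illustrative, and the import paths shown reflect the `synapse.media` layout from the newer side of this diff (older releases use `synapse.rest.media.v1`):

```python
from typing import Union

import synapse
from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes


class MediaOriginChecker:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_media_file_for_spam=self.check_media_file_for_spam
        )

    async def check_media_file_for_spam(
        self,
        file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
        file_info: "synapse.media._base.FileInfo",
    ) -> Union["synapse.module_api.NOT_SPAM", Codes, bool]:
        # Illustrative policy: refuse media served from a blocked remote server.
        if file_info.server_name == "spam.example.com":
            return Codes.FORBIDDEN
        return NOT_SPAM
```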

View File

@@ -254,11 +254,6 @@ If multiple modules implement this callback, Synapse runs them all in order.
_First introduced in Synapse v1.56.0_
**<span style="color:red">
This callback is deprecated in favour of the `on_add_user_third_party_identifier` callback, which
features the same functionality. The only difference is in name.
</span>**
```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
```
@@ -273,44 +268,6 @@ server_.
If multiple modules implement this callback, Synapse runs them all in order.
### `on_add_user_third_party_identifier`
_First introduced in Synapse v1.79.0_
```python
async def on_add_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```
Called after successfully creating an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (e.g. an email address or phone number).
Note that this callback is _not_ called if a user attempts to bind their third-party identifier
to an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/bind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidbind)).
If multiple modules implement this callback, Synapse runs them all in order.
### `on_remove_user_third_party_identifier`
_First introduced in Synapse v1.79.0_
```python
async def on_remove_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
```
Called after successfully removing an association between a user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier (e.g. an email address or phone number).
Note that this callback is _not_ called if a user attempts to unbind their third-party
identifier from an identity server (via a call to [`POST
/_matrix/client/v3/account/3pid/unbind`](https://spec.matrix.org/v1.5/client-server-api/#post_matrixclientv3account3pidunbind)).
If multiple modules implement this callback, Synapse runs them all in order.
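For instance, a module could register both callbacks to keep an audit trail of third-party identifier changes (a sketch; the logging behaviour is illustrative):

```python
import logging
from typing import Any

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)


class ThreepidAuditor:
    def __init__(self, config: Any, api: ModuleApi):
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.on_add_threepid,
            on_remove_user_third_party_identifier=self.on_remove_threepid,
        )

    async def on_add_threepid(self, user_id: str, medium: str, address: str) -> None:
        logger.info("%s added %s identifier %s", user_id, medium, address)

    async def on_remove_threepid(self, user_id: str, medium: str, address: str) -> None:
        logger.info("%s removed %s identifier %s", user_id, medium, address)
```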
## Example
The example below is a module that implements the third-party rules callback
@@ -343,4 +300,4 @@ class EventCensorer:
)
event_dict["content"] = new_event_content
return event_dict
```
```

View File

@@ -83,59 +83,3 @@ the callback name as the argument name and the function as its value. A
Callbacks for each category can be found on their respective page of the
[Synapse documentation website](https://matrix-org.github.io/synapse).
## Caching
_Added in Synapse 1.74.0._
Modules can leverage Synapse's caching tools to manage their own cached functions. This
can be helpful for modules that need to repeatedly request the same data from the database
or a remote service.
Functions that need to be wrapped with a cache need to be decorated with a `@cached()`
decorator (which can be imported from `synapse.module_api`) and registered with the
[`ModuleApi.register_cached_function`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L888)
API when initialising the module. If the module needs to invalidate an entry in a cache,
it needs to use the [`ModuleApi.invalidate_cache`](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L904)
API, with the function to invalidate the cache of and the key(s) of the entry to
invalidate.
Below is an example of a simple module using a cached function:
```python
from typing import Any

from synapse.module_api import cached, ModuleApi


class MyModule:
    def __init__(self, config: Any, api: ModuleApi):
        self.api = api

        # Register the cached function so Synapse knows how to correctly invalidate
        # entries for it.
        self.api.register_cached_function(self.get_department_for_user)

    @cached()
    async def get_department_for_user(self, user_id: str) -> str:
        """A function with a cache."""
        # Request a department from an external service.
        return (
            await self.api.http_client.get_json(
                "https://int.example.com/users", {"user_id": user_id}
            )
        )["department"]

    async def do_something_with_users(self) -> None:
        """Calls the cached function and then invalidates an entry in its cache."""
        user_id = "@alice:example.com"

        # Get the user. Since get_department_for_user is wrapped with a cache,
        # the return value for this user_id will be cached.
        department = await self.get_department_for_user(user_id)

        # Do something with `department`...

        # Let's say something has changed with our user, and the entry we have for
        # them in the cache is out of date, so we want to invalidate it.
        await self.api.invalidate_cache(self.get_department_for_user, (user_id,))
```
See the [`cached` docstring](https://github.com/matrix-org/synapse/blob/release-v1.77/synapse/module_api/__init__.py#L190) for more details.

View File

@@ -88,30 +88,6 @@ process, for example:
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```
# Upgrading to v1.79.0
## The `on_threepid_bind` module callback method has been deprecated
Synapse v1.79.0 deprecates the
[`on_threepid_bind`](modules/third_party_rules_callbacks.md#on_threepid_bind)
"third-party rules" Synapse module callback method in favour of a new module method,
[`on_add_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_add_user_third_party_identifier).
`on_threepid_bind` will be removed in a future version of Synapse. You should check whether any Synapse
modules in use in your deployment are making use of `on_threepid_bind`, and update them where possible.
The arguments and functionality of the new method are the same.
The justification behind the name change is that the old method's name, `on_threepid_bind`, was
misleading. A user is considered to "bind" their third-party ID to their Matrix ID only if they
do so via an [identity server](https://spec.matrix.org/latest/identity-service-api/)
(so that users on other homeservers may find them). But this method was not called in that case -
it was only called when a user added a third-party identifier on the local homeserver.
Module developers may also be interested in the related
[`on_remove_user_third_party_identifier`](modules/third_party_rules_callbacks.md#on_remove_user_third_party_identifier)
module callback method that was also added in Synapse v1.79.0. This new method is called when a
user removes a third-party identifier from their account.
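Because the arguments are unchanged, migrating a module is typically just a matter of registering the same handler under the new callback name, roughly as in this sketch:

```python
from typing import Any

from synapse.module_api import ModuleApi


class MyThreepidModule:
    def __init__(self, config: Any, api: ModuleApi):
        api.register_third_party_rules_callbacks(
            # Previously: on_threepid_bind=self.on_new_threepid (deprecated).
            on_add_user_third_party_identifier=self.on_new_threepid,
        )

    async def on_new_threepid(self, user_id: str, medium: str, address: str) -> None:
        ...  # same signature and behaviour as before
```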
# Upgrading to v1.78.0
## Deprecate the `/_synapse/admin/v1/media/<server_name>/delete` admin API

View File

@@ -2227,8 +2227,8 @@ allows the shared secret to be specified in an external file.
The file should be a plain text file, containing only the shared secret.
If this file does not exist, Synapse will create a new shared
secret on startup and store it in this file.
If this file does not exist, Synapse will create a new signing
key on startup and store it in this file.
Example configuration:
```yaml

View File

@@ -232,7 +232,6 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/joined_rooms$
^/_matrix/client/v1/rooms/.*/timestamp_to_event$
^/_matrix/client/(api/v1|r0|v3|unstable)/search$
^/_matrix/client/(r0|v3|unstable)/user/.*/filter(/|$)
# Encryption requests
^/_matrix/client/(r0|v3|unstable)/keys/query$

View File

@@ -36,6 +36,9 @@ exclude = (?x)
[mypy-synapse.federation.transport.client]
disallow_untyped_defs = False
[mypy-synapse.http.client]
disallow_untyped_defs = False
[mypy-synapse.http.matrixfederationclient]
disallow_untyped_defs = False

poetry.lock generated
View File

@@ -1985,29 +1985,28 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
[[package]]
name = "ruff"
version = "0.0.252"
version = "0.0.237"
description = "An extremely fast Python linter, written in Rust."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.0.252-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:349367a227c4db7abbc3a9993efea8a608b5bea4bb4a1e5fc6f0d56819524f92"},
{file = "ruff-0.0.252-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:ce77f9106d96b4faf7865860fb5155b9deaf6f699d9c279118c5ad947739ecaf"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edadb0b050293b4e60dab979ba6a4e734d9c899cbe316a0ee5b65e3cdd39c750"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4efdae98937d1e4d23ab0b7fc7e8e6b6836cc7d2d42238ceeacbc793ef780542"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8546d879f7d3f669379a03e7b103d90e11901976ab508aeda59c03dfd8a359e"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:83fdc7169b6c1fb5fe8d1cdf345697f558c1b433ef97df9ca11defa2a8f3ee9e"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84ed9be1a17e2a556a571a5b959398633dd10910abd8dcf8b098061e746e892d"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f5e77bd9ba4438cf2ee32154e2673afe22f538ef29f5d65ca47e3dc46c42cf8"},
{file = "ruff-0.0.252-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5179b94b45c0f8512eaff3ab304c14714a46df2e9ca72a9d96084adc376b71"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:92efd8a71157595df5bc46aaaa0613d8a2fbc5cddc53ae7b749c16025c324732"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd350fc10832cfd28e681d829a8aa83ea3e653326e0ea9d98637dfb8d46177d2"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f119240c9631216e846166e06023b1d878e25fbac93bf20da50069e91cfbfaee"},
{file = "ruff-0.0.252-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5c5a49f89f5ede93d16eddfeeadd7e5739ec703e8f63ac95eac30236b9e49da3"},
{file = "ruff-0.0.252-py3-none-win32.whl", hash = "sha256:89a897dc743f2fe063483ea666097e72e848f4bbe40493fe0533e61799959f6e"},
{file = "ruff-0.0.252-py3-none-win_amd64.whl", hash = "sha256:cdc89ad6ff88519b1fb1816ac82a9ad910762c90ff5fd64dda7691b72d36aff7"},
{file = "ruff-0.0.252-py3-none-win_arm64.whl", hash = "sha256:4b594a17cf53077165429486650658a0e1b2ac6ab88954f5afd50d2b1b5657a9"},
{file = "ruff-0.0.252.tar.gz", hash = "sha256:6992611ab7bdbe7204e4831c95ddd3febfeece2e6f5e44bbed044454c7db0f63"},
{file = "ruff-0.0.237-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:2ea04d826ffca58a7ae926115a801960c757d53c9027f2ca9acbe84c9f2b2f04"},
{file = "ruff-0.0.237-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:8ed113937fab9f73f8c1a6c0350bb4fe03e951370139c6e0adb81f48a8dcf4c6"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcb71a3efb5fe886eb48d739cfae5df4a15617e7b5a7668aa45ebf74c0d3fa"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:80ce10718abbf502818c0d650ebab99fdcef5e937a1ded3884493ddff804373c"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cc6cb7c1efcc260df5a939435649610a28f9f438b8b313384c8985ac6574f9f"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7eef0c7a1e45a4e30328ae101613575944cbf47a3a11494bf9827722da6c66b3"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d122433a21ce4a21fbba34b73fc3add0ccddd1643b3ff5abb8d2767952f872e"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b76311335adda4de3c1d471e64e89a49abfeebf02647e3db064e7740e7f36ed6"},
{file = "ruff-0.0.237-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c5977b643aaf2b6f84641265f835b6c7f67fcca38dbae08c4f15602e084ca0"},
{file = "ruff-0.0.237-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3d6ed86d0d4d742360a262d52191581f12b669a68e59ae3b52e80d7483b3d7b3"},
{file = "ruff-0.0.237-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fedfb60f986c26cdb1809db02866e68508db99910c587d2c4066a5c07aa85593"},
{file = "ruff-0.0.237-py3-none-musllinux_1_2_i686.whl", hash = "sha256:bb96796be5919871fa9ae7e88968ba9e14306d9a3f217ca6c204f68a5abeccdd"},
{file = "ruff-0.0.237-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea239cfedf67b74ea4952e1074bb99a4281c2145441d70bc7e2f058d5c49f1c9"},
{file = "ruff-0.0.237-py3-none-win32.whl", hash = "sha256:8d6a1d21ae15da2b1dcffeee2606e90de0e6717e72957da7d16ab6ae18dd0058"},
{file = "ruff-0.0.237-py3-none-win_amd64.whl", hash = "sha256:525e5ec81cee29b993f77976026a6bf44528a14aa6edb1ef47bd8079147395ae"},
{file = "ruff-0.0.237.tar.gz", hash = "sha256:630c575f543733adf6c19a11d9a02ca9ecc364bd7140af8a4c854d4728be6b56"},
]
[[package]]
@@ -2601,6 +2600,18 @@ files = [
types-enum34 = "*"
types-ipaddress = "*"
[[package]]
name = "types-docutils"
version = "0.19.1.1"
description = "Typing stubs for docutils"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-docutils-0.19.1.1.tar.gz", hash = "sha256:be0a51ba1c7dd215d9d2df66d6845e63c1009b4bbf4c5beb87a0d9745cdba962"},
{file = "types_docutils-0.19.1.1-py3-none-any.whl", hash = "sha256:a024cada35f0c13cc45eb0b68a102719018a634013690b7fef723bcbfadbd1f1"},
]
[[package]]
name = "types-enum34"
version = "1.1.8"
@@ -2639,14 +2650,14 @@ files = [
[[package]]
name = "types-opentracing"
version = "2.4.10.3"
version = "2.4.10.1"
description = "Typing stubs for opentracing"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-opentracing-2.4.10.3.tar.gz", hash = "sha256:b277f114265b41216714f9c77dffcab57038f1730fd141e2c55c5c9f6f2caa87"},
{file = "types_opentracing-2.4.10.3-py3-none-any.whl", hash = "sha256:60244d718fcd9de7043645ecaf597222d550432507098ab2e6268f7b589a7fa7"},
{file = "types-opentracing-2.4.10.1.tar.gz", hash = "sha256:49e7e52b8b6e221865a9201fc8c2df0bcda8e7098d4ebb35903dbfa4b4d29195"},
{file = "types_opentracing-2.4.10.1-py3-none-any.whl", hash = "sha256:eb63394acd793e7d9e327956242349fee14580a87c025408dc268d4dd883cc24"},
]
[[package]]
@@ -2717,16 +2728,19 @@ types-urllib3 = "<1.27"
[[package]]
name = "types-setuptools"
version = "67.4.0.3"
version = "67.3.0.1"
description = "Typing stubs for setuptools"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "types-setuptools-67.4.0.3.tar.gz", hash = "sha256:19e958dfdbf1c5a628e54c2a7ee84935051afb7278d0c1cdb08ac194757ee3b1"},
{file = "types_setuptools-67.4.0.3-py3-none-any.whl", hash = "sha256:3c83c3a6363dd3ddcdd054796705605f0fa8b8e5a39390e07a05e5f7af054978"},
{file = "types-setuptools-67.3.0.1.tar.gz", hash = "sha256:1a26d373036c720e566823b6edd664a2db4d138b6eeba856721ec1254203474f"},
{file = "types_setuptools-67.3.0.1-py3-none-any.whl", hash = "sha256:a7e0f0816b5b449f5bcdc0efa43da91ff81dbe6941f293a6490d68a450e130a1"},
]
[package.dependencies]
types-docutils = "*"
[[package]]
name = "types-urllib3"
version = "1.26.10"
@@ -2741,14 +2755,14 @@ files = [
[[package]]
name = "typing-extensions"
version = "4.5.0"
version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
[[package]]
@@ -3030,4 +3044,4 @@ user-search = ["pyicu"]
[metadata]
lock-version = "2.0"
python-versions = "^3.7.1"
content-hash = "7bcffef7b6e6d4b1113222e2ca152b3798c997872789c8a1ea01238f199d56fe"
content-hash = "e12077711e5ff83f3c6038ea44c37bd49773799ec8245035b01094b7800c5c92"

View File

@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
[tool.poetry]
name = "matrix-synapse"
version = "1.78.0"
version = "1.78.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -313,7 +313,7 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.252"
ruff = "0.0.237"
# Typechecking
mypy = "*"

View File

@@ -60,7 +60,8 @@ fn bench_match_exact(b: &mut Bencher) {
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "room_id".into(),
pattern: "!room:server".into(),
pattern: Some("!room:server".into()),
pattern_type: None,
},
));
@@ -108,7 +109,8 @@ fn bench_match_word(b: &mut Bencher) {
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "content.body".into(),
pattern: "test".into(),
pattern: Some("test".into()),
pattern_type: None,
},
));
@@ -156,7 +158,8 @@ fn bench_match_word_miss(b: &mut Bencher) {
let condition = Condition::Known(synapse::push::KnownCondition::EventMatch(
EventMatchCondition {
key: "content.body".into(),
pattern: "foobar".into(),
pattern: Some("foobar".into()),
pattern_type: None,
},
));

View File

@@ -21,13 +21,13 @@ use lazy_static::lazy_static;
use serde_json::Value;
use super::KnownCondition;
use crate::push::Condition;
use crate::push::EventMatchCondition;
use crate::push::PushRule;
use crate::push::RelatedEventMatchTypeCondition;
use crate::push::RelatedEventMatchCondition;
use crate::push::SetTweak;
use crate::push::TweakValue;
use crate::push::{Action, ExactEventMatchCondition, SimpleJsonValue};
use crate::push::{Condition, EventMatchTypeCondition};
use crate::push::{EventMatchCondition, EventMatchPatternType};
const HIGHLIGHT_ACTION: Action = Action::SetTweak(SetTweak {
set_tweak: Cow::Borrowed("highlight"),
@@ -72,7 +72,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.m.relates_to.rel_type"),
pattern: Cow::Borrowed("m.replace"),
pattern: Some(Cow::Borrowed("m.replace")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
@@ -85,7 +86,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.msgtype"),
pattern: Cow::Borrowed("m.notice"),
pattern: Some(Cow::Borrowed("m.notice")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::DontNotify]),
@@ -98,15 +100,18 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.member"),
pattern: Some(Cow::Borrowed("m.room.member")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.membership"),
pattern: Cow::Borrowed("invite"),
pattern: Some(Cow::Borrowed("invite")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_id")),
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION, SOUND_ACTION]),
@@ -119,7 +124,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.member"),
pattern: Some(Cow::Borrowed("m.room.member")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::DontNotify]),
@@ -129,10 +135,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
PushRule {
rule_id: Cow::Borrowed("global/override/.im.nheko.msc3664.reply"),
priority_class: 5,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatchType(
RelatedEventMatchTypeCondition {
key: Cow::Borrowed("sender"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserId),
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::RelatedEventMatch(
RelatedEventMatchCondition {
key: Some(Cow::Borrowed("sender")),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_id")),
rel_type: Cow::Borrowed("m.in_reply_to"),
include_fallbacks: None,
},
@@ -182,7 +189,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.body"),
pattern: Cow::Borrowed("@room"),
pattern: Some(Cow::Borrowed("@room")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@@ -195,11 +203,13 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.tombstone"),
pattern: Some(Cow::Borrowed("m.room.tombstone")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed(""),
pattern: Some(Cow::Borrowed("")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION]),
@@ -212,7 +222,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.reaction"),
pattern: Some(Cow::Borrowed("m.reaction")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
@@ -225,11 +236,13 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.server_acl"),
pattern: Some(Cow::Borrowed("m.room.server_acl")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed(""),
pattern: Some(Cow::Borrowed("")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[]),
@@ -242,7 +255,8 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.response"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.response")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[]),
@@ -254,10 +268,11 @@ pub const BASE_APPEND_OVERRIDE_RULES: &[PushRule] = &[
pub const BASE_APPEND_CONTENT_RULES: &[PushRule] = &[PushRule {
rule_id: Cow::Borrowed("global/content/.m.rule.contains_user_name"),
priority_class: 4,
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatchType(
EventMatchTypeCondition {
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("content.body"),
pattern_type: Cow::Borrowed(&EventMatchPatternType::UserLocalpart),
pattern: None,
pattern_type: Some(Cow::Borrowed("user_localpart")),
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_ACTION, SOUND_ACTION]),
@@ -272,7 +287,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.call.invite"),
pattern: Some(Cow::Borrowed("m.call.invite")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, RING_ACTION, HIGHLIGHT_FALSE_ACTION]),
@@ -285,7 +301,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.message"),
pattern: Some(Cow::Borrowed("m.room.message")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -301,7 +318,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.encrypted"),
pattern: Some(Cow::Borrowed("m.room.encrypted")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -320,7 +338,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.encrypted"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.encrypted")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -344,7 +363,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.message"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.message")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -368,7 +388,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.file"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.file")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -392,7 +413,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.image"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.image")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -416,7 +438,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.video"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.video")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -440,7 +463,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("org.matrix.msc1767.audio"),
pattern: Some(Cow::Borrowed("org.matrix.msc1767.audio")),
pattern_type: None,
})),
Condition::Known(KnownCondition::RoomMemberCount {
is: Some(Cow::Borrowed("2")),
@@ -461,7 +485,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.message"),
pattern: Some(Cow::Borrowed("m.room.message")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@@ -474,7 +499,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("m.room.encrypted"),
pattern: Some(Cow::Borrowed("m.room.encrypted")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@@ -488,7 +514,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.encrypted"),
pattern: Some(Cow::Borrowed("m.encrypted")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -507,7 +534,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.message"),
pattern: Some(Cow::Borrowed("m.message")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -526,7 +554,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.file"),
pattern: Some(Cow::Borrowed("m.file")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -545,7 +574,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.image"),
pattern: Some(Cow::Borrowed("m.image")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -564,7 +594,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.video"),
pattern: Some(Cow::Borrowed("m.video")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -583,7 +614,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
// MSC3933: Type changed from template rule - see MSC.
pattern: Cow::Borrowed("m.audio"),
pattern: Some(Cow::Borrowed("m.audio")),
pattern_type: None,
})),
// MSC3933: Add condition on top of template rule - see MSC.
Condition::Known(KnownCondition::RoomVersionSupports {
@@ -601,15 +633,18 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("im.vector.modular.widgets"),
pattern: Some(Cow::Borrowed("im.vector.modular.widgets")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("content.type"),
pattern: Cow::Borrowed("jitsi"),
pattern: Some(Cow::Borrowed("jitsi")),
pattern_type: None,
})),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("state_key"),
pattern: Cow::Borrowed("*"),
pattern: Some(Cow::Borrowed("*")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, HIGHLIGHT_FALSE_ACTION]),
@@ -625,7 +660,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
@@ -638,7 +674,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.start"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.start")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify]),
@@ -654,7 +691,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
}),
Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
pattern_type: None,
})),
]),
actions: Cow::Borrowed(&[Action::Notify, SOUND_ACTION]),
@@ -667,7 +705,8 @@ pub const BASE_APPEND_UNDERRIDE_RULES: &[PushRule] = &[
conditions: Cow::Borrowed(&[Condition::Known(KnownCondition::EventMatch(
EventMatchCondition {
key: Cow::Borrowed("type"),
pattern: Cow::Borrowed("org.matrix.msc3381.poll.end"),
pattern: Some(Cow::Borrowed("org.matrix.msc3381.poll.end")),
pattern_type: None,
},
))]),
actions: Cow::Borrowed(&[Action::Notify]),

View File

@@ -12,10 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet};
use crate::push::{EventMatchPatternType, JsonValue};
use crate::push::JsonValue;
use anyhow::{Context, Error};
use lazy_static::lazy_static;
use log::warn;
@@ -24,8 +23,8 @@ use regex::Regex;
use super::{
utils::{get_glob_matcher, get_localpart_from_id, GlobMatchType},
Action, Condition, ExactEventMatchCondition, FilteredPushRules, KnownCondition,
SimpleJsonValue,
Action, Condition, EventMatchCondition, ExactEventMatchCondition, FilteredPushRules,
KnownCondition, RelatedEventMatchCondition, SimpleJsonValue,
};
lazy_static! {
@@ -257,58 +256,14 @@ impl PushRuleEvaluator {
};
let result = match known_condition {
KnownCondition::EventMatch(event_match) => self.match_event_match(
&self.flattened_keys,
&event_match.key,
&event_match.pattern,
)?,
KnownCondition::EventMatchType(event_match) => {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
let pattern = match &*event_match.pattern_type {
EventMatchPatternType::UserId => user_id,
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
};
self.match_event_match(&self.flattened_keys, &event_match.key, pattern)?
KnownCondition::EventMatch(event_match) => {
self.match_event_match(event_match, user_id)?
}
KnownCondition::ExactEventMatch(exact_event_match) => {
self.match_exact_event_match(exact_event_match)?
}
KnownCondition::RelatedEventMatch(event_match) => self.match_related_event_match(
&event_match.rel_type.clone(),
event_match.include_fallbacks,
event_match.key.clone(),
event_match.pattern.clone(),
)?,
KnownCondition::RelatedEventMatchType(event_match) => {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
let pattern = match &*event_match.pattern_type {
EventMatchPatternType::UserId => user_id,
EventMatchPatternType::UserLocalpart => get_localpart_from_id(user_id)?,
};
self.match_related_event_match(
&event_match.rel_type.clone(),
event_match.include_fallbacks,
Some(event_match.key.clone()),
Some(Cow::Borrowed(pattern)),
)?
KnownCondition::RelatedEventMatch(event_match) => {
self.match_related_event_match(event_match, user_id)?
}
KnownCondition::ExactEventPropertyContains(exact_event_match) => {
self.match_exact_event_property_contains(exact_event_match)?
@@ -370,12 +325,32 @@ impl PushRuleEvaluator {
/// Evaluates a `event_match` condition.
fn match_event_match(
&self,
flattened_event: &BTreeMap<String, JsonValue>,
key: &str,
pattern: &str,
event_match: &EventMatchCondition,
user_id: Option<&str>,
) -> Result<bool, Error> {
let pattern = if let Some(pattern) = &event_match.pattern {
pattern
} else if let Some(pattern_type) = &event_match.pattern_type {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
match &**pattern_type {
"user_id" => user_id,
"user_localpart" => get_localpart_from_id(user_id)?,
_ => return Ok(false),
}
} else {
return Ok(false);
};
let haystack = if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) =
flattened_event.get(key)
self.flattened_keys.get(&*event_match.key)
{
haystack
} else {
@@ -384,7 +359,7 @@ impl PushRuleEvaluator {
// For the content.body we match against "words", but for everything
// else we match against the entire value.
let match_type = if key == "content.body" {
let match_type = if event_match.key == "content.body" {
GlobMatchType::Word
} else {
GlobMatchType::Whole
@@ -420,10 +395,8 @@ impl PushRuleEvaluator {
/// Evaluates a `related_event_match` condition. (MSC3664)
fn match_related_event_match(
&self,
rel_type: &str,
include_fallbacks: Option<bool>,
key: Option<Cow<str>>,
pattern: Option<Cow<str>>,
event_match: &RelatedEventMatchCondition,
user_id: Option<&str>,
) -> Result<bool, Error> {
// First check if related event matching is enabled...
if !self.related_event_match_enabled {
@@ -431,7 +404,7 @@ impl PushRuleEvaluator {
}
// get the related event, fail if there is none.
let event = if let Some(event) = self.related_events_flattened.get(rel_type) {
let event = if let Some(event) = self.related_events_flattened.get(&*event_match.rel_type) {
event
} else {
return Ok(false);
@@ -439,18 +412,58 @@ impl PushRuleEvaluator {
// If we are not matching fallbacks, don't match if our special key indicating this is a
// fallback relation is not present.
if !include_fallbacks.unwrap_or(false) && event.contains_key("im.vector.is_falling_back") {
if !event_match.include_fallbacks.unwrap_or(false)
&& event.contains_key("im.vector.is_falling_back")
{
return Ok(false);
}
match (key, pattern) {
// if we have no key, accept the event as matching.
(None, _) => Ok(true),
// There was a key, so we *must* have a pattern to go with it.
(Some(_), None) => Ok(false),
// If there is a key & pattern, check if they're in the flattened event (given by rel_type).
(Some(key), Some(pattern)) => self.match_event_match(event, &key, &pattern),
}
// if we have no key, accept the event as matching, if it existed without matching any
// fields.
let key = if let Some(key) = &event_match.key {
key
} else {
return Ok(true);
};
let pattern = if let Some(pattern) = &event_match.pattern {
pattern
} else if let Some(pattern_type) = &event_match.pattern_type {
// The `pattern_type` can either be "user_id" or "user_localpart",
// either way if we don't have a `user_id` then the condition can't
// match.
let user_id = if let Some(user_id) = user_id {
user_id
} else {
return Ok(false);
};
match &**pattern_type {
"user_id" => user_id,
"user_localpart" => get_localpart_from_id(user_id)?,
_ => return Ok(false),
}
} else {
return Ok(false);
};
let haystack =
if let Some(JsonValue::Value(SimpleJsonValue::Str(haystack))) = event.get(&**key) {
haystack
} else {
return Ok(false);
};
// For the content.body we match against "words", but for everything
// else we match against the entire value.
let match_type = if key == "content.body" {
GlobMatchType::Word
} else {
GlobMatchType::Whole
};
let mut compiled_pattern = get_glob_matcher(pattern, match_type)?;
compiled_pattern.is_match(haystack)
}
/// Evaluates a `exact_event_property_contains` condition. (MSC3758)

View File

@@ -328,16 +328,10 @@ pub enum Condition {
#[serde(tag = "kind")]
pub enum KnownCondition {
EventMatch(EventMatchCondition),
// Identical to event_match but gives predefined patterns. Cannot be added by users.
#[serde(skip_deserializing, rename = "event_match")]
EventMatchType(EventMatchTypeCondition),
#[serde(rename = "com.beeper.msc3758.exact_event_match")]
ExactEventMatch(ExactEventMatchCondition),
#[serde(rename = "im.nheko.msc3664.related_event_match")]
RelatedEventMatch(RelatedEventMatchCondition),
// Identical to related_event_match but gives predefined patterns. Cannot be added by users.
#[serde(skip_deserializing, rename = "im.nheko.msc3664.related_event_match")]
RelatedEventMatchType(RelatedEventMatchTypeCondition),
#[serde(rename = "org.matrix.msc3966.exact_event_property_contains")]
ExactEventPropertyContains(ExactEventMatchCondition),
#[serde(rename = "org.matrix.msc3952.is_user_mention")]
@@ -368,27 +362,14 @@ impl<'source> FromPyObject<'source> for Condition {
}
}
/// The body of a [`Condition::EventMatch`] with a pattern.
/// The body of a [`Condition::EventMatch`]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct EventMatchCondition {
pub key: Cow<'static, str>,
pub pattern: Cow<'static, str>,
}
#[derive(Serialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
pub enum EventMatchPatternType {
UserId,
UserLocalpart,
}
/// The body of a [`Condition::EventMatch`] that uses user_id or user_localpart as a pattern.
#[derive(Serialize, Debug, Clone)]
pub struct EventMatchTypeCondition {
pub key: Cow<'static, str>,
// During serialization, the pattern_type property gets replaced with a
// pattern property of the correct value in synapse.push.clientformat.format_push_rules_for_user.
pub pattern_type: Cow<'static, EventMatchPatternType>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern_type: Option<Cow<'static, str>>,
}
/// The body of a [`Condition::ExactEventMatch`]
@@ -405,18 +386,8 @@ pub struct RelatedEventMatchCondition {
pub key: Option<Cow<'static, str>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub pattern: Option<Cow<'static, str>>,
pub rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_fallbacks: Option<bool>,
}
/// The body of a [`Condition::RelatedEventMatch`] that uses user_id or user_localpart as a pattern.
#[derive(Serialize, Debug, Clone)]
pub struct RelatedEventMatchTypeCondition {
// This is only used if pattern_type exists (and thus key must exist), so is
// a bit simpler than RelatedEventMatchCondition.
pub key: Cow<'static, str>,
pub pattern_type: Cow<'static, EventMatchPatternType>,
pub pattern_type: Option<Cow<'static, str>>,
pub rel_type: Cow<'static, str>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include_fallbacks: Option<bool>,
@@ -600,7 +571,8 @@ impl FilteredPushRules {
fn test_serialize_condition() {
let condition = Condition::Known(KnownCondition::EventMatch(EventMatchCondition {
key: "content.body".into(),
pattern: "coffee".into(),
pattern: Some("coffee".into()),
pattern_type: None,
}));
let json = serde_json::to_string(&condition).unwrap();
@@ -614,33 +586,7 @@ fn test_serialize_condition() {
fn test_deserialize_condition() {
let json = r#"{"kind":"event_match","key":"content.body","pattern":"coffee"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(
condition,
Condition::Known(KnownCondition::EventMatch(_))
));
}
#[test]
fn test_serialize_event_match_condition_with_pattern_type() {
let condition = Condition::Known(KnownCondition::EventMatchType(EventMatchTypeCondition {
key: "content.body".into(),
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
}));
let json = serde_json::to_string(&condition).unwrap();
assert_eq!(
json,
r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#
)
}
#[test]
fn test_cannot_deserialize_event_match_condition_with_pattern_type() {
let json = r#"{"kind":"event_match","key":"content.body","pattern_type":"user_id"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
assert!(matches!(condition, Condition::Unknown(_)));
let _: Condition = serde_json::from_str(json).unwrap();
}
#[test]
@@ -654,37 +600,6 @@ fn test_deserialize_unstable_msc3664_condition() {
));
}
#[test]
fn test_serialize_unstable_msc3664_condition_with_pattern_type() {
let condition = Condition::Known(KnownCondition::RelatedEventMatchType(
RelatedEventMatchTypeCondition {
key: "content.body".into(),
pattern_type: Cow::Owned(EventMatchPatternType::UserId),
rel_type: "m.in_reply_to".into(),
include_fallbacks: Some(true),
},
));
let json = serde_json::to_string(&condition).unwrap();
assert_eq!(
json,
r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to","include_fallbacks":true}"#
)
}
#[test]
fn test_cannot_deserialize_unstable_msc3664_condition_with_pattern_type() {
let json = r#"{"kind":"im.nheko.msc3664.related_event_match","key":"content.body","pattern_type":"user_id","rel_type":"m.in_reply_to"}"#;
let condition: Condition = serde_json::from_str(json).unwrap();
// Since pattern is optional on RelatedEventMatch it deserializes it to that
// instead of RelatedEventMatchType.
assert!(matches!(
condition,
Condition::Known(KnownCondition::RelatedEventMatch(_))
));
}
#[test]
fn test_deserialize_unstable_msc3931_condition() {
let json =

View File

@@ -1,6 +1,5 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2023 The Matrix.org Foundation C.I.C.
# Copyright 2018-9 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is an implementation of a Matrix homeserver.
""" This is a reference implementation of a Matrix homeserver.
"""
import json

View File

@@ -37,7 +37,7 @@ import os
import shutil
import sys
from synapse.media.filepath import MediaFilePaths
from synapse.rest.media.v1.filepath import MediaFilePaths
logger = logging.getLogger()

View File

@@ -175,8 +175,8 @@ class ExperimentalConfig(Config):
)
# MSC3873: Disambiguate event_match keys.
self.msc3873_escape_event_match_key = experimental.get(
"msc3873_escape_event_match_key", False
self.msc3783_escape_event_match_key = experimental.get(
"msc3783_escape_event_match_key", False
)
# MSC3952: Intentional mentions, this depends on MSC3758.

View File

@@ -178,13 +178,11 @@ class ContentRepositoryConfig(Config):
for i, provider_config in enumerate(storage_providers):
# We special case the module "file_system" so as not to need to
# expose FileStorageProviderBackend
if (
provider_config["module"] == "file_system"
or provider_config["module"] == "synapse.rest.media.v1.storage_provider"
):
provider_config[
"module"
] = "synapse.media.storage_provider.FileStorageProviderBackend"
if provider_config["module"] == "file_system":
provider_config["module"] = (
"synapse.rest.media.v1.storage_provider"
".FileStorageProviderBackend"
)
provider_class, parsed_config = load_module(
provider_config, ("media_storage_providers", "<item %i>" % i)

View File

@@ -168,21 +168,13 @@ async def check_state_independent_auth_rules(
return
# 2. Reject if event has auth_events that: ...
auth_events = await store.get_events(
event.auth_event_ids(),
redact_behaviour=EventRedactBehaviour.as_is,
allow_rejected=True,
)
if batched_auth_events:
auth_event_ids = event.auth_event_ids()
auth_events = dict(batched_auth_events)
if set(auth_event_ids) - batched_auth_events.keys():
auth_events.update(
await store.get_events(
set(auth_event_ids) - batched_auth_events.keys()
)
)
else:
auth_events = await store.get_events(
event.auth_event_ids(),
redact_behaviour=EventRedactBehaviour.as_is,
allow_rejected=True,
)
auth_events.update(batched_auth_events)
room_id = event.room_id
auth_dict: MutableStateMap[str] = {}

View File

@@ -23,7 +23,6 @@ from synapse.types import JsonDict, StateMap
if TYPE_CHECKING:
from synapse.storage.controllers import StorageControllers
from synapse.storage.databases import StateGroupDataStore
from synapse.storage.databases.main import DataStore
from synapse.types.state import StateFilter
@@ -135,8 +134,6 @@ class EventContext(UnpersistedEventContextBase):
delta_ids: Optional[StateMap[str]] = None
app_service: Optional[ApplicationService] = None
_state_map_before_event: Optional[StateMap[str]] = None
partial_state: bool = False
@staticmethod
@@ -295,11 +292,6 @@ class EventContext(UnpersistedEventContextBase):
Maps a (type, state_key) to the event ID of the state event matching
this tuple.
"""
if self._state_map_before_event is not None:
if state_filter is not None:
return state_filter.filter_state(self._state_map_before_event)
return self._state_map_before_event
assert self.state_group_before_event is not None
return await self._storage.state.get_state_ids_for_group(
self.state_group_before_event, state_filter
@@ -356,44 +348,6 @@ class UnpersistedEventContext(UnpersistedEventContextBase):
partial_state: bool
state_map_before_event: Optional[StateMap[str]] = None
@classmethod
async def batch_persist_unpersisted_contexts(
cls,
events_and_context: List[Tuple[EventBase, "UnpersistedEventContextBase"]],
room_id: str,
last_known_state_group: int,
datastore: "StateGroupDataStore",
) -> List[Tuple[EventBase, EventContext]]:
"""
Takes a list of events and their associated unpersisted contexts and persists
the unpersisted contexts, returning a list of events and persisted contexts.
Note that all the events must be in a linear chain (ie a <- b <- c).
Args:
events_and_context: A list of events and their unpersisted contexts
room_id: the room_id for the events
last_known_state_group: the last persisted state group
datastore: a state datastore
"""
amended_events_and_context = await datastore.store_state_deltas_for_batched(
events_and_context, room_id, last_known_state_group
)
events_and_persisted_context = []
for event, unpersisted_context in amended_events_and_context:
context = EventContext(
storage=unpersisted_context._storage,
state_group=unpersisted_context.state_group_after_event,
state_group_before_event=unpersisted_context.state_group_before_event,
state_delta_due_to_event=unpersisted_context.state_delta_due_to_event,
partial_state=unpersisted_context.partial_state,
prev_group=unpersisted_context.prev_group_for_state_group_before_event,
delta_ids=unpersisted_context.delta_ids_to_state_group_before_event,
state_map_before_event=unpersisted_context.state_map_before_event,
)
events_and_persisted_context.append((event, context))
return events_and_persisted_context
async def get_prev_state_ids(
self, state_filter: Optional["StateFilter"] = None
) -> StateMap[str]:

View File

@@ -33,8 +33,8 @@ from typing_extensions import Literal
import synapse
from synapse.api.errors import Codes
from synapse.logging.opentracing import trace
from synapse.media._base import FileInfo
from synapse.media.media_storage import ReadableFileWrapper
from synapse.rest.media.v1._base import FileInfo
from synapse.rest.media.v1.media_storage import ReadableFileWrapper
from synapse.spam_checker_api import RegistrationBehaviour
from synapse.types import JsonDict, RoomAlias, UserProfile
from synapse.util.async_helpers import delay_cancellation, maybe_awaitable

View File

@@ -45,8 +45,6 @@ CHECK_CAN_DEACTIVATE_USER_CALLBACK = Callable[[str, bool], Awaitable[bool]]
ON_PROFILE_UPDATE_CALLBACK = Callable[[str, ProfileInfo, bool, bool], Awaitable]
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK = Callable[[str, bool, bool], Awaitable]
ON_THREEPID_BIND_CALLBACK = Callable[[str, str, str], Awaitable]
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK = Callable[[str, str, str], Awaitable]
def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
@@ -174,12 +172,6 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = []
self._on_threepid_bind_callbacks: List[ON_THREEPID_BIND_CALLBACK] = []
self._on_add_user_third_party_identifier_callbacks: List[
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = []
self._on_remove_user_third_party_identifier_callbacks: List[
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = []
def register_third_party_rules_callbacks(
self,
@@ -199,12 +191,6 @@ class ThirdPartyEventRules:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = None,
on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
on_add_user_third_party_identifier: Optional[
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
on_remove_user_third_party_identifier: Optional[
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
) -> None:
"""Register callbacks from modules for each hook."""
if check_event_allowed is not None:
@@ -242,11 +228,6 @@ class ThirdPartyEventRules:
if on_threepid_bind is not None:
self._on_threepid_bind_callbacks.append(on_threepid_bind)
if on_add_user_third_party_identifier is not None:
self._on_add_user_third_party_identifier_callbacks.append(
on_add_user_third_party_identifier
)
async def check_event_allowed(
self,
event: EventBase,
@@ -530,9 +511,6 @@ class ThirdPartyEventRules:
local homeserver, not when it's created on an identity server (and then kept track
of so that it can be unbound on the same IS later on).
THIS MODULE CALLBACK METHOD HAS BEEN DEPRECATED. Please use the
`on_add_user_third_party_identifier` callback method instead.
Args:
user_id: the user being associated with the threepid.
medium: the threepid's medium.
@@ -545,44 +523,3 @@ class ThirdPartyEventRules:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
async def on_add_user_third_party_identifier(
self, user_id: str, medium: str, address: str
) -> None:
"""Called when an association between a user's Matrix ID and a third-party ID
(email, phone number) has successfully been registered on the homeserver.
Args:
user_id: The User ID included in the association.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
"""
for callback in self._on_add_user_third_party_identifier_callbacks:
try:
await callback(user_id, medium, address)
except Exception as e:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
async def on_remove_user_third_party_identifier(
self, user_id: str, medium: str, address: str
) -> None:
"""Called when an association between a user's Matrix ID and a third-party ID
(email, phone number) has been successfully removed on the homeserver.
This is called *after* any known bindings on identity servers for this
association have been removed.
Args:
user_id: The User ID included in the removed association.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
"""
for callback in self._on_remove_user_third_party_identifier_callbacks:
try:
await callback(user_id, medium, address)
except Exception as e:
logger.exception(
"Failed to run module API callback %s: %s", callback, e
)
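As a rough illustration of how a module would consume the hooks above, a minimal sketch (the module class, its config handling and the logging are illustrative, not taken from the source):

import logging

logger = logging.getLogger(__name__)

class ExampleThreepidAuditModule:
    """Hypothetical module that audits 3PID associations via the callbacks above."""

    def __init__(self, config: dict, api):
        api.register_third_party_rules_callbacks(
            on_add_user_third_party_identifier=self.on_add,
            on_remove_user_third_party_identifier=self.on_remove,
        )

    async def on_add(self, user_id: str, medium: str, address: str) -> None:
        logger.info("3PID %s/%s associated with %s", medium, address, user_id)

    async def on_remove(self, user_id: str, medium: str, address: str) -> None:
        logger.info("3PID %s/%s removed from %s", medium, address, user_id)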

View File

@@ -516,6 +516,11 @@ class EventClientSerializer:
# being serialized.
serialized_aggregations = {}
if event_aggregations.annotations:
serialized_aggregations[
RelationTypes.ANNOTATION
] = event_aggregations.annotations
if event_aggregations.references:
serialized_aggregations[
RelationTypes.REFERENCE

View File

@@ -1542,17 +1542,6 @@ class AuthHandler:
async def add_threepid(
self, user_id: str, medium: str, address: str, validated_at: int
) -> None:
"""
Adds an association between a user's Matrix ID and a third-party ID (email,
phone number).
Args:
user_id: The ID of the user to associate.
medium: The medium of the third-party ID (email, msisdn).
address: The address of the third-party ID (i.e. an email address).
validated_at: The timestamp in ms of when the validation that the user owns
this third-party ID occurred.
"""
# check if medium has a valid value
if medium not in ["email", "msisdn"]:
raise SynapseError(
@@ -1577,44 +1566,42 @@ class AuthHandler:
user_id, medium, address, validated_at, self.hs.get_clock().time_msec()
)
# Inform Synapse modules that a 3PID association has been created.
await self._third_party_rules.on_add_user_third_party_identifier(
user_id, medium, address
)
# Deprecated method for informing Synapse modules that a 3PID association
# has successfully been created.
await self._third_party_rules.on_threepid_bind(user_id, medium, address)
async def delete_local_threepid(
self, user_id: str, medium: str, address: str
) -> None:
"""Deletes an association between a third-party ID and a user ID from the local
database. This method does not unbind the association from any identity servers.
If `medium` is 'email' and a pusher is associated with this third-party ID, the
pusher will also be deleted.
async def delete_threepid(
self, user_id: str, medium: str, address: str, id_server: Optional[str] = None
) -> bool:
"""Attempts to unbind the 3pid on the identity servers and deletes it
from the local database.
Args:
user_id: ID of user to remove the 3pid from.
medium: The medium of the 3pid being removed: "email" or "msisdn".
address: The 3pid address to remove.
id_server: Use the given identity server when unbinding
any threepids. If None then will attempt to unbind using the
identity server specified when binding (if known).
Returns:
Returns True if successfully unbound the 3pid on
the identity server, False if identity server doesn't support the
unbind API.
"""
# 'Canonicalise' email addresses as per above
if medium == "email":
address = canonicalise_email(address)
await self.store.user_delete_threepid(user_id, medium, address)
# Inform Synapse modules that a 3PID association has been deleted.
await self._third_party_rules.on_remove_user_third_party_identifier(
user_id, medium, address
result = await self.hs.get_identity_handler().try_unbind_threepid(
user_id, medium, address, id_server
)
await self.store.user_delete_threepid(user_id, medium, address)
if medium == "email":
await self.store.delete_pusher_by_app_id_pushkey_user_id(
app_id="m.email", pushkey=address, user_id=user_id
)
return result
async def hash(self, password: str) -> str:
"""Computes a secure hash of password.

View File

@@ -100,28 +100,26 @@ class DeactivateAccountHandler:
# unbinding
identity_server_supports_unbinding = True
# Attempt to unbind any known bound threepids to this account from identity
# server(s).
bound_threepids = await self.store.user_get_bound_threepids(user_id)
for threepid in bound_threepids:
# Retrieve the 3PIDs this user has bound to an identity server
threepids = await self.store.user_get_bound_threepids(user_id)
for threepid in threepids:
try:
result = await self._identity_handler.try_unbind_threepid(
user_id, threepid["medium"], threepid["address"], id_server
)
identity_server_supports_unbinding &= result
except Exception:
# Do we want this to be a fatal error or should we carry on?
logger.exception("Failed to remove threepid from ID server")
raise SynapseError(400, "Failed to remove threepid from ID server")
identity_server_supports_unbinding &= result
# Remove any local threepid associations for this account.
local_threepids = await self.store.user_get_threepids(user_id)
for threepid in local_threepids:
await self._auth_handler.delete_local_threepid(
await self.store.user_delete_threepid(
user_id, threepid["medium"], threepid["address"]
)
# Remove all 3PIDs this user has bound to the homeserver
await self.store.user_delete_threepids(user_id)
# delete any devices belonging to the user, which will also
# delete corresponding access tokens.
await self._device_handler.delete_all_devices_for_user(user_id)

View File

@@ -63,19 +63,9 @@ class EventAuthHandler:
self._store, event, batched_auth_events
)
auth_event_ids = event.auth_event_ids()
logger.info("Batched auth events %s", list(batched_auth_events.keys()))
logger.info("auth events %s", auth_event_ids)
auth_events_by_id = await self._store.get_events(auth_event_ids)
if batched_auth_events:
auth_events_by_id = dict(batched_auth_events)
if set(auth_event_ids) - set(batched_auth_events):
logger.info("fetching %s", set(auth_event_ids) - set(batched_auth_events))
auth_events_by_id.update(
await self._store.get_events(
set(auth_event_ids) - set(batched_auth_events)
)
)
else:
auth_events_by_id = await self._store.get_events(auth_event_ids)
auth_events_by_id.update(batched_auth_events)
check_state_dependent_auth_rules(event, auth_events_by_id.values())
def compute_auth_events(

View File

@@ -574,7 +574,7 @@ class EventCreationHandler:
state_map: Optional[StateMap[str]] = None,
for_batch: bool = False,
current_state_group: Optional[int] = None,
) -> Tuple[EventBase, UnpersistedEventContextBase]:
) -> Tuple[EventBase, EventContext]:
"""
Given a dict from a client, create a new event. If bool for_batch is true, will
create an event using the prev_event_ids, and will create an event context for
@@ -721,6 +721,8 @@ class EventCreationHandler:
current_state_group=current_state_group,
)
context = await unpersisted_context.persist(event)
# In an ideal world we wouldn't need the second part of this condition. However,
# this behaviour isn't spec'd yet, meaning we should be able to deactivate this
# behaviour. Another reason is that this code is also evaluated each time a new
@@ -737,7 +739,7 @@ class EventCreationHandler:
assert state_map is not None
prev_event_id = state_map.get((EventTypes.Member, event.sender))
else:
prev_state_ids = await unpersisted_context.get_prev_state_ids(
prev_state_ids = await context.get_prev_state_ids(
StateFilter.from_types([(EventTypes.Member, None)])
)
prev_event_id = prev_state_ids.get((EventTypes.Member, event.sender))
@@ -762,7 +764,8 @@ class EventCreationHandler:
)
self.validator.validate_new(event, self.config)
return event, unpersisted_context
return event, context
async def _is_exempt_from_privacy_policy(
self, builder: EventBuilder, requester: Requester
@@ -1002,7 +1005,7 @@ class EventCreationHandler:
max_retries = 5
for i in range(max_retries):
try:
event, unpersisted_context = await self.create_event(
event, context = await self.create_event(
requester,
event_dict,
txn_id=txn_id,
@@ -1013,7 +1016,6 @@ class EventCreationHandler:
historical=historical,
depth=depth,
)
context = await unpersisted_context.persist(event)
assert self.hs.is_mine_id(event.sender), "User must be our own: %s" % (
event.sender,
@@ -1188,6 +1190,7 @@ class EventCreationHandler:
if for_batch:
assert prev_event_ids is not None
assert state_map is not None
assert current_state_group is not None
auth_ids = self._event_auth_handler.compute_auth_events(builder, state_map)
event = await builder.build(
prev_event_ids=prev_event_ids, auth_event_ids=auth_ids, depth=depth
@@ -2043,7 +2046,7 @@ class EventCreationHandler:
max_retries = 5
for i in range(max_retries):
try:
event, unpersisted_context = await self.create_event(
event, context = await self.create_event(
requester,
{
"type": EventTypes.Dummy,
@@ -2052,7 +2055,6 @@ class EventCreationHandler:
"sender": user_id,
},
)
context = await unpersisted_context.persist(event)
event.internal_metadata.proactively_send = False

View File

@@ -60,12 +60,13 @@ class BundledAggregations:
Some values require additional processing during serialization.
"""
annotations: Optional[JsonDict] = None
references: Optional[JsonDict] = None
replace: Optional[EventBase] = None
thread: Optional[_ThreadAggregation] = None
def __bool__(self) -> bool:
return bool(self.references or self.replace or self.thread)
return bool(self.annotations or self.references or self.replace or self.thread)
class RelationsHandler:
@@ -226,6 +227,67 @@ class RelationsHandler:
e.msg,
)
async def get_annotations_for_events(
self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
) -> Dict[str, List[JsonDict]]:
"""Get a list of annotations to the given events, grouped by event type and
aggregation key, sorted by count.
This is used e.g. to get what reactions have happened on an event and
how many.
Args:
event_ids: Fetch events that relate to these event IDs.
ignored_users: The users ignored by the requesting user.
Returns:
A map of event IDs to a list of groups of annotations that match.
Each entry is a dict with `type`, `key` and `count` fields.
"""
# Get the base results for all users.
full_results = await self._main_store.get_aggregation_groups_for_events(
event_ids
)
# Avoid additional logic if there are no ignored users.
if not ignored_users:
return {
event_id: results
for event_id, results in full_results.items()
if results
}
# Then subtract off the results for any ignored users.
ignored_results = await self._main_store.get_aggregation_groups_for_users(
[event_id for event_id, results in full_results.items() if results],
ignored_users,
)
filtered_results = {}
for event_id, results in full_results.items():
# If no annotations, skip.
if not results:
continue
# If there are no ignored results for this event, copy verbatim.
if event_id not in ignored_results:
filtered_results[event_id] = results
continue
# Otherwise, subtract out the ignored results.
event_ignored_results = ignored_results[event_id]
for result in results:
key = (result["type"], result["key"])
if key in event_ignored_results:
# Ensure to not modify the cache.
result = result.copy()
result["count"] -= event_ignored_results[key]
if result["count"] <= 0:
continue
filtered_results.setdefault(event_id, []).append(result)
return filtered_results
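To make the subtraction above concrete, a toy example (values invented):

# Toy illustration of the count subtraction performed above.
full = [{"type": "m.reaction", "key": "👍", "count": 3}]
ignored = {("m.reaction", "👍"): 2}
# The surviving group keeps count 1; had the ignored users accounted for all
# three reactions, the group would be dropped entirely (count <= 0).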
async def get_references_for_events(
self, event_ids: Collection[str], ignored_users: FrozenSet[str] = frozenset()
) -> Dict[str, List[_RelatedEvent]]:
@@ -469,6 +531,17 @@ class RelationsHandler:
# (as that is what makes it part of the thread).
relations_by_id[latest_thread_event.event_id] = RelationTypes.THREAD
async def _fetch_annotations() -> None:
"""Fetch any annotations (ie, reactions) to bundle with this event."""
annotations_by_event_id = await self.get_annotations_for_events(
events_by_id.keys(), ignored_users=ignored_users
)
for event_id, annotations in annotations_by_event_id.items():
if annotations:
results.setdefault(event_id, BundledAggregations()).annotations = {
"chunk": annotations
}
async def _fetch_references() -> None:
"""Fetch any references to bundle with this event."""
references_by_event_id = await self.get_references_for_events(
@@ -502,6 +575,7 @@ class RelationsHandler:
await make_deferred_yieldable(
gather_results(
(
run_in_background(_fetch_annotations),
run_in_background(_fetch_references),
run_in_background(_fetch_edits),
)

View File

@@ -51,7 +51,6 @@ from synapse.api.filtering import Filter
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.event_auth import validate_event_for_room_version
from synapse.events import EventBase
from synapse.events.snapshot import UnpersistedEventContext
from synapse.events.utils import copy_and_fixup_power_levels_contents
from synapse.handlers.relations import BundledAggregations
from synapse.module_api import NOT_SPAM
@@ -212,7 +211,7 @@ class RoomCreationHandler:
# the required power level to send the tombstone event.
(
tombstone_event,
tombstone_unpersisted_context,
tombstone_context,
) = await self.event_creation_handler.create_event(
requester,
{
@@ -226,9 +225,6 @@ class RoomCreationHandler:
},
},
)
tombstone_context = await tombstone_unpersisted_context.persist(
tombstone_event
)
validate_event_for_room_version(tombstone_event)
await self._event_auth_handler.check_auth_rules_from_context(
tombstone_event
@@ -1096,7 +1092,7 @@ class RoomCreationHandler:
content: JsonDict,
for_batch: bool,
**kwargs: Any,
) -> Tuple[EventBase, synapse.events.snapshot.UnpersistedEventContextBase]:
) -> Tuple[EventBase, synapse.events.snapshot.EventContext]:
"""
Creates an event and associated event context.
Args:
@@ -1115,23 +1111,20 @@ class RoomCreationHandler:
event_dict = create_event_dict(etype, content, **kwargs)
(
new_event,
new_unpersisted_context,
) = await self.event_creation_handler.create_event(
new_event, new_context = await self.event_creation_handler.create_event(
creator,
event_dict,
prev_event_ids=prev_event,
depth=depth,
state_map=dict(state_map),
state_map=state_map,
for_batch=for_batch,
current_state_group=current_state_group,
)
depth += 1
prev_event = [new_event.event_id]
state_map[(new_event.type, new_event.state_key)] = new_event.event_id
return new_event, new_unpersisted_context
return new_event, new_context
try:
config = self._presets_dict[preset_config]
@@ -1141,10 +1134,10 @@ class RoomCreationHandler:
)
creation_content.update({"creator": creator_id})
creation_event, unpersisted_creation_context = await create_event(
creation_event, creation_context = await create_event(
EventTypes.Create, creation_content, False
)
creation_context = await unpersisted_creation_context.persist(creation_event)
logger.debug("Sending %s in new room", EventTypes.Member)
ev = await self.event_creation_handler.handle_new_client_event(
requester=creator,
@@ -1188,6 +1181,7 @@ class RoomCreationHandler:
power_event, power_context = await create_event(
EventTypes.PowerLevels, pl_content, True
)
current_state_group = power_context._state_group
events_to_send.append((power_event, power_context))
else:
power_level_content: JsonDict = {
@@ -1236,12 +1230,14 @@ class RoomCreationHandler:
power_level_content,
True,
)
current_state_group = pl_context._state_group
events_to_send.append((pl_event, pl_context))
if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
room_alias_event, room_alias_context = await create_event(
EventTypes.CanonicalAlias, {"alias": room_alias.to_string()}, True
)
current_state_group = room_alias_context._state_group
events_to_send.append((room_alias_event, room_alias_context))
if (EventTypes.JoinRules, "") not in initial_state:
@@ -1250,6 +1246,7 @@ class RoomCreationHandler:
{"join_rule": config["join_rules"]},
True,
)
current_state_group = join_rules_context._state_group
events_to_send.append((join_rules_event, join_rules_context))
if (EventTypes.RoomHistoryVisibility, "") not in initial_state:
@@ -1258,6 +1255,7 @@ class RoomCreationHandler:
{"history_visibility": config["history_visibility"]},
True,
)
current_state_group = visibility_context._state_group
events_to_send.append((visibility_event, visibility_context))
if config["guest_can_join"]:
@@ -1267,12 +1265,14 @@ class RoomCreationHandler:
{EventContentFields.GUEST_ACCESS: GuestAccess.CAN_JOIN},
True,
)
current_state_group = guest_access_context._state_group
events_to_send.append((guest_access_event, guest_access_context))
for (etype, state_key), content in initial_state.items():
event, context = await create_event(
etype, content, True, state_key=state_key
)
current_state_group = context._state_group
events_to_send.append((event, context))
if config["encrypted"]:
@@ -1284,16 +1284,9 @@ class RoomCreationHandler:
)
events_to_send.append((encryption_event, encryption_context))
datastore = self.hs.get_datastores().state
events_and_context = (
await UnpersistedEventContext.batch_persist_unpersisted_contexts(
events_to_send, room_id, current_state_group, datastore
)
)
last_event = await self.event_creation_handler.handle_new_client_event(
creator,
events_and_context,
events_to_send,
ignore_shadow_ban=True,
ratelimit=False,
)

View File

@@ -327,7 +327,7 @@ class RoomBatchHandler:
# Mark all events as historical
event_dict["content"][EventContentFields.MSC2716_HISTORICAL] = True
event, unpersisted_context = await self.event_creation_handler.create_event(
event, context = await self.event_creation_handler.create_event(
await self.create_requester_for_user_id_from_app_service(
ev["sender"], app_service_requester.app_service
),
@@ -345,7 +345,7 @@ class RoomBatchHandler:
historical=True,
depth=inherited_depth,
)
context = await unpersisted_context.persist(event)
assert context._state_group
# Normally this is done when persisting the event but we have to

View File

@@ -414,10 +414,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
max_retries = 5
for i in range(max_retries):
try:
(
event,
unpersisted_context,
) = await self.event_creation_handler.create_event(
event, context = await self.event_creation_handler.create_event(
requester,
{
"type": EventTypes.Member,
@@ -438,7 +435,7 @@ class RoomMemberHandler(metaclass=abc.ABCMeta):
outlier=outlier,
historical=historical,
)
context = await unpersisted_context.persist(event)
prev_state_ids = await context.get_prev_state_ids(
StateFilter.from_types([(EventTypes.Member, None)])
)
@@ -1947,10 +1944,7 @@ class RoomMemberMasterHandler(RoomMemberHandler):
max_retries = 5
for i in range(max_retries):
try:
(
event,
unpersisted_context,
) = await self.event_creation_handler.create_event(
event, context = await self.event_creation_handler.create_event(
requester,
event_dict,
txn_id=txn_id,
@@ -1958,7 +1952,6 @@ class RoomMemberMasterHandler(RoomMemberHandler):
auth_event_ids=auth_event_ids,
outlier=True,
)
context = await unpersisted_context.persist(event)
event.internal_metadata.out_of_band_membership = True
result_event = (

View File

@@ -44,7 +44,6 @@ from twisted.internet.interfaces import (
IAddress,
IDelayedCall,
IHostResolution,
IOpenSSLContextFactory,
IReactorCore,
IReactorPluggableNameResolver,
IReactorTime,
@@ -959,8 +958,8 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):
self._context = SSL.Context(SSL.SSLv23_METHOD)
self._context.set_verify(VERIFY_NONE, lambda *_: False)
def getContext(self) -> SSL.Context:
def getContext(self, hostname=None, port=None):
return self._context
def creatorForNetloc(self, hostname: bytes, port: int) -> IOpenSSLContextFactory:
def creatorForNetloc(self, hostname: bytes, port: int):
return self

View File

@@ -440,7 +440,7 @@ class MatrixFederationHttpClient:
Args:
request: details of request to be sent
retry_on_dns_fail: true if the request should be retried on DNS failures
timeout: number of milliseconds to wait for the response headers
(including connecting to the server), *for each attempt*.
@@ -475,7 +475,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
@@ -871,7 +871,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
@@ -958,7 +958,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
@@ -1036,8 +1036,6 @@ class MatrixFederationHttpClient:
args: A dictionary used to create query strings, defaults to
None.
retry_on_dns_fail: true if the request should be retried on DNS failures
timeout: number of milliseconds to wait for the response.
self._default_timeout (60s) by default.
@@ -1065,7 +1063,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
@@ -1143,7 +1141,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
@@ -1199,7 +1197,7 @@ class MatrixFederationHttpClient:
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.

View File

@@ -1,479 +0,0 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import urllib
from abc import ABC, abstractmethod
from types import TracebackType
from typing import Awaitable, Dict, Generator, List, Optional, Tuple, Type
import attr
from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender
from twisted.web.server import Request
from synapse.api.errors import Codes, SynapseError, cs_error
from synapse.http.server import finish_request, respond_with_json
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.util.stringutils import is_ascii, parse_and_validate_server_name
logger = logging.getLogger(__name__)
# list all text content types that will have the charset default to UTF-8 when
# none is given
TEXT_CONTENT_TYPES = [
"text/css",
"text/csv",
"text/html",
"text/calendar",
"text/plain",
"text/javascript",
"application/json",
"application/ld+json",
"application/rtf",
"image/svg+xml",
"text/xml",
]
def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
"""Parses the server name, media ID and optional file name from the request URI
Also performs some rough validation on the server name.
Args:
request: The `Request`.
Returns:
A tuple containing the parsed server name, media ID and optional file name.
Raises:
SynapseError(404): if parsing or validation fail for any reason
"""
try:
# The type on postpath seems incorrect in Twisted 21.2.0.
postpath: List[bytes] = request.postpath # type: ignore
assert postpath
# This allows users to append e.g. /test.png to the URL. Useful for
# clients that parse the URL to see content type.
server_name_bytes, media_id_bytes = postpath[:2]
server_name = server_name_bytes.decode("utf-8")
media_id = media_id_bytes.decode("utf8")
# Validate the server name, raising if invalid
parse_and_validate_server_name(server_name)
file_name = None
if len(postpath) > 2:
try:
file_name = urllib.parse.unquote(postpath[-1].decode("utf-8"))
except UnicodeDecodeError:
pass
return server_name, media_id, file_name
except Exception:
raise SynapseError(
404, "Invalid media id token %r" % (request.postpath,), Codes.UNKNOWN
)
def respond_404(request: SynapseRequest) -> None:
respond_with_json(
request,
404,
cs_error("Not found %r" % (request.postpath,), code=Codes.NOT_FOUND),
send_cors=True,
)
async def respond_with_file(
request: SynapseRequest,
media_type: str,
file_path: str,
file_size: Optional[int] = None,
upload_name: Optional[str] = None,
) -> None:
logger.debug("Responding with %r", file_path)
if os.path.isfile(file_path):
if file_size is None:
stat = os.stat(file_path)
file_size = stat.st_size
add_file_headers(request, media_type, file_size, upload_name)
with open(file_path, "rb") as f:
await make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
finish_request(request)
else:
respond_404(request)
def add_file_headers(
request: Request,
media_type: str,
file_size: Optional[int],
upload_name: Optional[str],
) -> None:
"""Adds the correct response headers in preparation for responding with the
media.
Args:
request
media_type: The media/content type.
file_size: Size in bytes of the media, if known.
upload_name: The name of the requested file, if any.
"""
def _quote(x: str) -> str:
return urllib.parse.quote(x.encode("utf-8"))
# Default to a UTF-8 charset for text content types.
# ex, uses UTF-8 for 'text/css' but not 'text/css; charset=UTF-16'
if media_type.lower() in TEXT_CONTENT_TYPES:
content_type = media_type + "; charset=UTF-8"
else:
content_type = media_type
request.setHeader(b"Content-Type", content_type.encode("UTF-8"))
if upload_name:
# RFC6266 section 4.1 [1] defines both `filename` and `filename*`.
#
# `filename` is defined to be a `value`, which is defined by RFC2616
# section 3.6 [2] to be a `token` or a `quoted-string`, where a `token`
# is (essentially) a single US-ASCII word, and a `quoted-string` is a
# US-ASCII string surrounded by double-quotes, using backslash as an
# escape character. Note that %-encoding is *not* permitted.
#
# `filename*` is defined to be an `ext-value`, which is defined in
# RFC5987 section 3.2.1 [3] to be `charset "'" [ language ] "'" value-chars`,
# where `value-chars` is essentially a %-encoded string in the given charset.
#
# [1]: https://tools.ietf.org/html/rfc6266#section-4.1
# [2]: https://tools.ietf.org/html/rfc2616#section-3.6
# [3]: https://tools.ietf.org/html/rfc5987#section-3.2.1
# We avoid the quoted-string version of `filename`, because (a) synapse didn't
# correctly interpret those as of 0.99.2 and (b) they are a bit of a pain and we
# may as well just do the filename* version.
if _can_encode_filename_as_token(upload_name):
disposition = "inline; filename=%s" % (upload_name,)
else:
disposition = "inline; filename*=utf-8''%s" % (_quote(upload_name),)
request.setHeader(b"Content-Disposition", disposition.encode("ascii"))
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
# recommend caching as it's sensitive or private - or at least
# select private. don't bother setting Expires as all our
# clients are smart enough to be happy with Cache-Control
request.setHeader(b"Cache-Control", b"public,max-age=86400,s-maxage=86400")
if file_size is not None:
request.setHeader(b"Content-Length", b"%d" % (file_size,))
# Tell web crawlers to not index, archive, or follow links in media. This
# should help to prevent things in the media repo from showing up in web
# search results.
request.setHeader(b"X-Robots-Tag", "noindex, nofollow, noarchive, noimageindex")
# separators as defined in RFC2616. SP and HT are handled separately.
# see _can_encode_filename_as_token.
_FILENAME_SEPARATOR_CHARS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
}
def _can_encode_filename_as_token(x: str) -> bool:
for c in x:
# from RFC2616:
#
# token = 1*<any CHAR except CTLs or separators>
#
# separators = "(" | ")" | "<" | ">" | "@"
# | "," | ";" | ":" | "\" | <">
# | "/" | "[" | "]" | "?" | "="
# | "{" | "}" | SP | HT
#
# CHAR = <any US-ASCII character (octets 0 - 127)>
#
# CTL = <any US-ASCII control character
# (octets 0 - 31) and DEL (127)>
#
if ord(c) >= 127 or ord(c) <= 32 or c in _FILENAME_SEPARATOR_CHARS:
return False
return True
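For example, under the RFC2616 token rules implemented above:

# Plain ASCII words are valid tokens, so `filename=` can be used directly:
#   _can_encode_filename_as_token("report.pdf")     -> True
# Spaces, separators or non-ASCII force the RFC5987 `filename*=` form instead:
#   _can_encode_filename_as_token("my report.pdf")  -> False
#   _can_encode_filename_as_token('quo"te.txt')     -> False
#   _can_encode_filename_as_token("отчёт.pdf")      -> False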
async def respond_with_responder(
request: SynapseRequest,
responder: "Optional[Responder]",
media_type: str,
file_size: Optional[int],
upload_name: Optional[str] = None,
) -> None:
"""Responds to the request with given responder. If responder is None then
returns 404.
Args:
request
responder
media_type: The media/content type.
file_size: Size in bytes of the media. If not known it should be None
upload_name: The name of the requested file, if any.
"""
if not responder:
respond_404(request)
return
# If we have a responder we *must* use it as a context manager.
with responder:
if request._disconnected:
logger.warning(
"Not sending response to request %s, already disconnected.", request
)
return
logger.debug("Responding to media request with responder %s", responder)
add_file_headers(request, media_type, file_size, upload_name)
try:
await responder.write_to_consumer(request)
except Exception as e:
# The majority of the time this will be due to the client having gone
# away. Unfortunately, Twisted simply throws a generic exception at us
# in that case.
logger.warning("Failed to write to consumer: %s %s", type(e), e)
# Unregister the producer, if it has one, so Twisted doesn't complain
if request.producer:
request.unregisterProducer()
finish_request(request)
class Responder(ABC):
"""Represents a response that can be streamed to the requester.
Responder is a context manager which *must* be used, so that any resources
held can be cleaned up.
"""
@abstractmethod
def write_to_consumer(self, consumer: IConsumer) -> Awaitable:
"""Stream response into consumer
Args:
consumer: The consumer to stream into.
Returns:
Resolves once the response has finished being written
"""
raise NotImplementedError()
def __enter__(self) -> None: # noqa: B027
pass
def __exit__( # noqa: B027
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
@attr.s(slots=True, frozen=True, auto_attribs=True)
class ThumbnailInfo:
"""Details about a generated thumbnail."""
width: int
height: int
method: str
# Content type of thumbnail, e.g. image/png
type: str
# The size of the media file, in bytes.
length: Optional[int] = None
@attr.s(slots=True, frozen=True, auto_attribs=True)
class FileInfo:
"""Details about a requested/uploaded file."""
# The server name where the media originated from, or None if local.
server_name: Optional[str]
# The local ID of the file. For local files this is the same as the media_id
file_id: str
# If the file is for the url preview cache
url_cache: bool = False
# Whether the file is a thumbnail or not.
thumbnail: Optional[ThumbnailInfo] = None
# The below properties exist to maintain compatibility with third-party modules.
@property
def thumbnail_width(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.width
@property
def thumbnail_height(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.height
@property
def thumbnail_method(self) -> Optional[str]:
if not self.thumbnail:
return None
return self.thumbnail.method
@property
def thumbnail_type(self) -> Optional[str]:
if not self.thumbnail:
return None
return self.thumbnail.type
@property
def thumbnail_length(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.length
def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str]:
"""
Get the filename of the downloaded file by inspecting the
Content-Disposition HTTP header.
Args:
headers: The HTTP request headers.
Returns:
The filename, or None.
"""
content_disposition = headers.get(b"Content-Disposition", [b""])
# No header, bail out.
if not content_disposition[0]:
return None
_, params = _parse_header(content_disposition[0])
upload_name = None
# First check if there is a valid UTF-8 filename
upload_name_utf8 = params.get(b"filename*", None)
if upload_name_utf8:
if upload_name_utf8.lower().startswith(b"utf-8''"):
upload_name_utf8 = upload_name_utf8[7:]
# We have a filename*= section. This MUST be ASCII, and any UTF-8
# bytes are %-quoted.
try:
# Once it is decoded, we can then unquote the %-encoded
# parts strictly into a unicode string.
upload_name = urllib.parse.unquote(
upload_name_utf8.decode("ascii"), errors="strict"
)
except UnicodeDecodeError:
# Incorrect UTF-8.
pass
# If there isn't one, check for an ASCII name.
if not upload_name:
upload_name_ascii = params.get(b"filename", None)
if upload_name_ascii and is_ascii(upload_name_ascii):
upload_name = upload_name_ascii.decode("ascii")
# This may be None here, indicating we did not find a matching name.
return upload_name
def _parse_header(line: bytes) -> Tuple[bytes, Dict[bytes, bytes]]:
"""Parse a Content-type like header.
Cargo-culted from `cgi`, but works on bytes rather than strings.
Args:
line: header to be parsed
Returns:
The main content-type, followed by the parameter dictionary
"""
parts = _parseparam(b";" + line)
key = next(parts)
pdict = {}
for p in parts:
i = p.find(b"=")
if i >= 0:
name = p[:i].strip().lower()
value = p[i + 1 :].strip()
# strip double-quotes
if len(value) >= 2 and value[0:1] == value[-1:] == b'"':
value = value[1:-1]
value = value.replace(b"\\\\", b"\\").replace(b'\\"', b'"')
pdict[name] = value
return key, pdict
def _parseparam(s: bytes) -> Generator[bytes, None, None]:
"""Generator which splits the input on ;, respecting double-quoted sequences
Cargo-culted from `cgi`, but works on bytes rather than strings.
Args:
s: header to be parsed
Returns:
The split input
"""
while s[:1] == b";":
s = s[1:]
# look for the next ;
end = s.find(b";")
# if there is an odd number of " marks between here and the next ;, skip to the
# next ; instead
while end > 0 and (s.count(b'"', 0, end) - s.count(b'\\"', 0, end)) % 2:
end = s.find(b";", end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
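A small sketch of how the two parsers above fit together with get_filename_from_headers (the header value is invented):

# Illustrative input/output for the helpers above.
headers = {b"Content-Disposition": [b"inline; filename*=utf-8''caf%C3%A9.png"]}
# _parse_header(b"inline; filename*=utf-8''caf%C3%A9.png")
#   -> (b"inline", {b"filename*": b"utf-8''caf%C3%A9.png"})
# get_filename_from_headers(headers)
#   -> "café.png"   (the utf-8'' prefix is stripped and the %-encoding unquoted)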

View File

@@ -1,374 +0,0 @@
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import logging
import os
import shutil
from types import TracebackType
from typing import (
IO,
TYPE_CHECKING,
Any,
Awaitable,
BinaryIO,
Callable,
Generator,
Optional,
Sequence,
Tuple,
Type,
)
import attr
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender
import synapse
from synapse.api.errors import NotFoundError
from synapse.logging.context import defer_to_thread, make_deferred_yieldable
from synapse.util import Clock
from synapse.util.file_consumer import BackgroundFileConsumer
from ._base import FileInfo, Responder
from .filepath import MediaFilePaths
if TYPE_CHECKING:
from synapse.media.storage_provider import StorageProvider
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class MediaStorage:
"""Responsible for storing/fetching files from local sources.
Args:
hs
local_media_directory: Base path where we store media on disk
filepaths
storage_providers: List of StorageProvider that are used to fetch and store files.
"""
def __init__(
self,
hs: "HomeServer",
local_media_directory: str,
filepaths: MediaFilePaths,
storage_providers: Sequence["StorageProvider"],
):
self.hs = hs
self.reactor = hs.get_reactor()
self.local_media_directory = local_media_directory
self.filepaths = filepaths
self.storage_providers = storage_providers
self.spam_checker = hs.get_spam_checker()
self.clock = hs.get_clock()
async def store_file(self, source: IO, file_info: FileInfo) -> str:
"""Write `source` to the on disk media store, and also any other
configured storage providers
Args:
source: A file like object that should be written
file_info: Info about the file to store
Returns:
the file path written to in the primary media store
"""
with self.store_into_file(file_info) as (f, fname, finish_cb):
# Write to the main repository
await self.write_to_file(source, f)
await finish_cb()
return fname
async def write_to_file(self, source: IO, output: IO) -> None:
"""Asynchronously write the `source` to `output`."""
await defer_to_thread(self.reactor, _write_file_synchronously, source, output)
@contextlib.contextmanager
def store_into_file(
self, file_info: FileInfo
) -> Generator[Tuple[BinaryIO, str, Callable[[], Awaitable[None]]], None, None]:
"""Context manager used to get a file like object to write into, as
described by file_info.
Actually yields a 3-tuple (file, fname, finish_cb), where file is a file
like object that can be written to, fname is the absolute path of file
on disk, and finish_cb is a function that returns an awaitable.
fname can be used to read the contents from after upload, e.g. to
generate thumbnails.
finish_cb must be called and waited on after the file has been
successfully written to. It should not be called if there was an
error.
Args:
file_info: Info about the file to store
Example:
with media_storage.store_into_file(info) as (f, fname, finish_cb):
# .. write into f ...
await finish_cb()
"""
path = self._file_info_to_path(file_info)
fname = os.path.join(self.local_media_directory, path)
dirname = os.path.dirname(fname)
os.makedirs(dirname, exist_ok=True)
finished_called = [False]
try:
with open(fname, "wb") as f:
async def finish() -> None:
# Ensure that all writes have been flushed and close the
# file.
f.flush()
f.close()
spam_check = await self.spam_checker.check_media_file_for_spam(
ReadableFileWrapper(self.clock, fname), file_info
)
if spam_check != synapse.module_api.NOT_SPAM:
logger.info("Blocking media due to spam checker")
# Note that we'll delete the stored media, due to the
# try/except below. The media also won't be stored in
# the DB.
# We currently ignore any additional field returned by
# the spam-check API.
raise SpamMediaException(errcode=spam_check[0])
for provider in self.storage_providers:
await provider.store_file(path, file_info)
finished_called[0] = True
yield f, fname, finish
except Exception as e:
try:
os.remove(fname)
except Exception:
pass
raise e from None
if not finished_called:
raise Exception("Finished callback not called")
async def fetch_media(self, file_info: FileInfo) -> Optional[Responder]:
"""Attempts to fetch media described by file_info from the local cache
and configured storage providers.
Args:
file_info
Returns:
Returns a Responder if the file was found, otherwise None.
"""
paths = [self._file_info_to_path(file_info)]
# fallback for remote thumbnails with no method in the filename
if file_info.thumbnail and file_info.server_name:
paths.append(
self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
)
for path in paths:
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
logger.debug("responding with local file %s", local_path)
return FileResponder(open(local_path, "rb"))
logger.debug("local file %s did not exist", local_path)
for provider in self.storage_providers:
for path in paths:
res: Any = await provider.fetch(path, file_info)
if res:
logger.debug("Streaming %s from %s", path, provider)
return res
logger.debug("%s not found on %s", path, provider)
return None
async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str:
"""Ensures that the given file is in the local cache. Attempts to
download it from storage providers if it isn't.
Args:
file_info
Returns:
Full path to local file
"""
path = self._file_info_to_path(file_info)
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
return local_path
# Fallback for paths without method names
# Should be removed in the future
if file_info.thumbnail and file_info.server_name:
legacy_path = self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
legacy_local_path = os.path.join(self.local_media_directory, legacy_path)
if os.path.exists(legacy_local_path):
return legacy_local_path
dirname = os.path.dirname(local_path)
os.makedirs(dirname, exist_ok=True)
for provider in self.storage_providers:
res: Any = await provider.fetch(path, file_info)
if res:
with res:
consumer = BackgroundFileConsumer(
open(local_path, "wb"), self.reactor
)
await res.write_to_consumer(consumer)
await consumer.wait()
return local_path
raise NotFoundError()
def _file_info_to_path(self, file_info: FileInfo) -> str:
"""Converts file_info into a relative path.
The path is suitable for storing files under a directory, e.g. used to
store files on local FS under the base media repository directory.
"""
if file_info.url_cache:
if file_info.thumbnail:
return self.filepaths.url_cache_thumbnail_rel(
media_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.url_cache_filepath_rel(file_info.file_id)
if file_info.server_name:
if file_info.thumbnail:
return self.filepaths.remote_media_thumbnail_rel(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.remote_media_filepath_rel(
file_info.server_name, file_info.file_id
)
if file_info.thumbnail:
return self.filepaths.local_media_thumbnail_rel(
media_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.local_media_filepath_rel(file_info.file_id)
def _write_file_synchronously(source: IO, dest: IO) -> None:
"""Write `source` to the file like `dest` synchronously. Should be called
from a thread.
Args:
source: A file like object that's to be written
dest: A file like object to be written to
"""
source.seek(0) # Ensure we read from the start of the file
shutil.copyfileobj(source, dest)
class FileResponder(Responder):
"""Wraps an open file that can be sent to a request.
Args:
open_file: A file like object to be streamed to the client; it
is closed when finished streaming.
"""
def __init__(self, open_file: IO):
self.open_file = open_file
def write_to_consumer(self, consumer: IConsumer) -> Deferred:
return make_deferred_yieldable(
FileSender().beginFileTransfer(self.open_file, consumer)
)
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.open_file.close()
class SpamMediaException(NotFoundError):
"""The media was blocked by a spam checker, so we simply 404 the request (in
the same way as if it was quarantined).
"""
@attr.s(slots=True, auto_attribs=True)
class ReadableFileWrapper:
"""Wrapper that allows reading a file in chunks, yielding to the reactor,
and writing to a callback.
This is a simplified `FileSender` that takes an IO object rather than an
`IConsumer`.
"""
CHUNK_SIZE = 2**14
clock: Clock
path: str
async def write_chunks_to(self, callback: Callable[[bytes], object]) -> None:
"""Reads the file in chunks and calls the callback with each chunk."""
with open(self.path, "rb") as file:
while True:
chunk = file.read(self.CHUNK_SIZE)
if not chunk:
break
callback(chunk)
# We yield to the reactor by sleeping for 0 seconds.
await self.clock.sleep(0)
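A rough usage sketch for the wrapper above, e.g. from a spam-checker callback; the hashing and the way the Clock and path are obtained are assumptions, not from the source:

import hashlib

async def hash_media_file(clock, path: str) -> str:
    """Illustrative only: stream a stored media file through write_chunks_to."""
    digest = hashlib.sha256()
    wrapper = ReadableFileWrapper(clock, path)
    # write_chunks_to reads CHUNK_SIZE bytes at a time and yields to the
    # reactor between chunks, so large files don't block the event loop.
    await wrapper.write_chunks_to(digest.update)
    return digest.hexdigest()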

View File

@@ -1,181 +0,0 @@
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import logging
import os
import shutil
from typing import TYPE_CHECKING, Callable, Optional
from synapse.config._base import Config
from synapse.logging.context import defer_to_thread, run_in_background
from synapse.util.async_helpers import maybe_awaitable
from ._base import FileInfo, Responder
from .media_storage import FileResponder
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.server import HomeServer
class StorageProvider(metaclass=abc.ABCMeta):
"""A storage provider is a service that can store uploaded media and
retrieve them.
"""
@abc.abstractmethod
async def store_file(self, path: str, file_info: FileInfo) -> None:
"""Store the file described by file_info. The actual contents can be
retrieved by reading the file in file_info.upload_path.
Args:
path: Relative path of file in local cache
file_info: The metadata of the file.
"""
@abc.abstractmethod
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
"""Attempt to fetch the file described by file_info and stream it
into writer.
Args:
path: Relative path of file in local cache
file_info: The metadata of the file.
Returns:
Returns a Responder if the provider has the file, otherwise returns None.
"""
class StorageProviderWrapper(StorageProvider):
"""Wraps a storage provider and provides various config options
Args:
backend: The storage provider to wrap.
store_local: Whether to store new local files or not.
store_synchronous: Whether to wait for file to be successfully
uploaded, or to do the upload in the background.
store_remote: Whether remote media should be uploaded
"""
def __init__(
self,
backend: StorageProvider,
store_local: bool,
store_synchronous: bool,
store_remote: bool,
):
self.backend = backend
self.store_local = store_local
self.store_synchronous = store_synchronous
self.store_remote = store_remote
def __str__(self) -> str:
return "StorageProviderWrapper[%s]" % (self.backend,)
async def store_file(self, path: str, file_info: FileInfo) -> None:
if not file_info.server_name and not self.store_local:
return None
if file_info.server_name and not self.store_remote:
return None
if file_info.url_cache:
# The URL preview cache is short lived and not worth offloading or
# backing up.
return None
if self.store_synchronous:
# store_file is supposed to return an Awaitable, but guard
# against improper implementations.
await maybe_awaitable(self.backend.store_file(path, file_info)) # type: ignore
else:
# TODO: Handle errors.
async def store() -> None:
try:
return await maybe_awaitable(
self.backend.store_file(path, file_info)
)
except Exception:
logger.exception("Error storing file")
run_in_background(store)
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
if file_info.url_cache:
# Files in the URL preview cache definitely aren't stored here,
# so avoid any potentially slow I/O or network access.
return None
# fetch is supposed to return an Awaitable, but guard
# against improper implementations.
return await maybe_awaitable(self.backend.fetch(path, file_info))
class FileStorageProviderBackend(StorageProvider):
"""A storage provider that stores files in a directory on a filesystem.
Args:
hs
config: The config returned by `parse_config`.
"""
def __init__(self, hs: "HomeServer", config: str):
self.hs = hs
self.cache_directory = hs.config.media.media_store_path
self.base_directory = config
def __str__(self) -> str:
return "FileStorageProviderBackend[%s]" % (self.base_directory,)
async def store_file(self, path: str, file_info: FileInfo) -> None:
"""See StorageProvider.store_file"""
primary_fname = os.path.join(self.cache_directory, path)
backup_fname = os.path.join(self.base_directory, path)
dirname = os.path.dirname(backup_fname)
os.makedirs(dirname, exist_ok=True)
# mypy needs help inferring the type of the second parameter, which is generic
shutil_copyfile: Callable[[str, str], str] = shutil.copyfile
await defer_to_thread(
self.hs.get_reactor(),
shutil_copyfile,
primary_fname,
backup_fname,
)
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
"""See StorageProvider.fetch"""
backup_fname = os.path.join(self.base_directory, path)
if os.path.isfile(backup_fname):
return FileResponder(open(backup_fname, "rb"))
return None
@staticmethod
def parse_config(config: dict) -> str:
"""Called on startup to parse config supplied. This should parse
the config and raise if there is a problem.
The returned value is passed into the constructor.
In this case we only care about a single param, the directory, so let's
just pull that out.
"""
return Config.ensure_directory(config["directory"])
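For comparison, a custom provider implementing the same interface might look roughly like this (the class name and behaviour are purely illustrative):

class ExampleNullStorageProvider(StorageProvider):
    """Hypothetical provider that archives nothing and never serves files."""

    def __init__(self, hs: "HomeServer", config: dict):
        self.config = config

    async def store_file(self, path: str, file_info: FileInfo) -> None:
        # A real provider would copy the file found under the local media
        # directory at `path` into its backing store here.
        return None

    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        # Returning None tells MediaStorage to try the next provider.
        return None

    @staticmethod
    def parse_config(config: dict) -> dict:
        # Validate and normalise this provider's `config:` block from homeserver.yaml.
        return config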

View File

@@ -37,7 +37,12 @@ from twisted.internet import defer
from twisted.web.resource import Resource
from synapse.api import errors
from synapse.api.errors import SynapseError
from synapse.api.errors import (
FederationDeniedError,
HttpResponseException,
RequestSendFailed,
SynapseError,
)
from synapse.events import EventBase
from synapse.events.presence_router import (
GET_INTERESTED_USERS_CALLBACK,
@@ -64,11 +69,9 @@ from synapse.events.third_party_rules import (
CHECK_EVENT_ALLOWED_CALLBACK,
CHECK_THREEPID_CAN_BE_INVITED_CALLBACK,
CHECK_VISIBILITY_CAN_BE_MODIFIED_CALLBACK,
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK,
ON_CREATE_ROOM_CALLBACK,
ON_NEW_EVENT_CALLBACK,
ON_PROFILE_UPDATE_CALLBACK,
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK,
ON_THREEPID_BIND_CALLBACK,
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK,
)
@@ -131,6 +134,14 @@ from synapse.util import Clock
from synapse.util.async_helpers import maybe_awaitable
from synapse.util.caches.descriptors import CachedFunction, cached as _cached
from synapse.util.frozenutils import freeze
from synapse.util.retryutils import NotRetryingDestination
from .errors import (
FederationHttpDeniedException,
FederationHttpNotRetryingDestinationException,
FederationHttpRequestSendFailedException,
FederationHttpResponseException,
)
if TYPE_CHECKING:
from synapse.app.generic_worker import GenericWorkerSlavedStore
@@ -359,12 +370,6 @@ class ModuleApi:
ON_USER_DEACTIVATION_STATUS_CHANGED_CALLBACK
] = None,
on_threepid_bind: Optional[ON_THREEPID_BIND_CALLBACK] = None,
on_add_user_third_party_identifier: Optional[
ON_ADD_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
on_remove_user_third_party_identifier: Optional[
ON_REMOVE_USER_THIRD_PARTY_IDENTIFIER_CALLBACK
] = None,
) -> None:
"""Registers callbacks for third party event rules capabilities.
@@ -381,8 +386,6 @@ class ModuleApi:
on_profile_update=on_profile_update,
on_user_deactivation_status_changed=on_user_deactivation_status_changed,
on_threepid_bind=on_threepid_bind,
on_add_user_third_party_identifier=on_add_user_third_party_identifier,
on_remove_user_third_party_identifier=on_remove_user_third_party_identifier,
)
def register_presence_router_callbacks(
@@ -1622,6 +1625,123 @@ class ModuleApi:
deactivation=deactivation,
)
async def _try_federation_http_request(
self,
method: str,
remote_server_name: str,
path: str,
query_parameters: Optional[Dict[str, Any]],
body: Optional[JsonDict] = None,
timeout: Optional[int] = None,
) -> Union[JsonDict, List]:
"""
Send a federation request to a remote homeserver and return the response.
This method assumes the `method` argument is fully capitalised.
A helper method for self.send_federation_http_request; see that method for
more details.
"""
assert method in ["GET", "PUT", "POST", "DELETE"]
fed_client = self._hs.get_federation_http_client()
if method == "GET":
return await fed_client.get_json(
destination=remote_server_name,
path=path,
args=query_parameters,
timeout=timeout,
)
elif method == "PUT":
return await fed_client.put_json(
destination=remote_server_name,
path=path,
args=query_parameters,
data=body,
timeout=timeout,
)
elif method == "POST":
return await fed_client.post_json(
destination=remote_server_name,
path=path,
args=query_parameters,
data=body,
timeout=timeout,
)
elif method == "DELETE":
return await fed_client.delete_json(
destination=remote_server_name,
path=path,
args=query_parameters,
timeout=timeout,
)
return {}
async def send_federation_http_request(
self,
method: str,
remote_server_name: str,
path: str,
query_parameters: Optional[Dict[str, Any]],
body: Optional[JsonDict] = None,
timeout: Optional[int] = None,
) -> Union[JsonDict, List]:
"""
Send an HTTP federation request to a remote homeserver.
Added in Synapse v1.79.0.
If the request is successful, the parsed response body will be returned. If
unsuccessful, an exception will be raised. Callers are expected to handle the
possible exception cases. See exception class docstrings for a more detailed
explanation of each.
Args:
method: The HTTP method to use. Must be one of: "GET", "PUT", "POST",
"DELETE".
remote_server_name: The remote server to send the request to. This method
will resolve delegated homeserver URLs automatically (well-known etc).
path: The HTTP path for the request.
query_parameters: Any query parameters for the request.
body: The body of the request.
timeout: The timeout in seconds to wait before giving up on a request.
Returns:
The response to the request as a Python object.
Raises:
FederationHttpResponseException: If we get an HTTP response code >= 300
(except 429).
FederationHttpNotRetryingDestinationException: If the homeserver believes the
remote homeserver is down and is not yet ready to attempt to contact it.
FederationHttpDeniedException: If this destination is not on the local
homeserver's configured federation whitelist.
FederationHttpRequestSendFailedException: If there were problems connecting
to the remote, due to e.g. DNS failures, connection timeouts etc.
"""
try:
return await self._try_federation_http_request(
method.upper(), remote_server_name, path, query_parameters, body, timeout
)
except HttpResponseException as e:
raise FederationHttpResponseException(
remote_server_name,
status_code=e.code,
msg=e.msg,
response_body=e.response,
)
except NotRetryingDestination:
raise FederationHttpNotRetryingDestinationException(remote_server_name)
except FederationDeniedError:
raise FederationHttpDeniedException(remote_server_name)
except RequestSendFailed as e:
raise FederationHttpRequestSendFailedException(
remote_server_name,
can_retry=e.can_retry,
)
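A minimal sketch (not part of the diff; the destination, path and logging are illustrative only) of how a module holding a `ModuleApi` reference might call the new method and handle the exposed errors:
import logging
from synapse.module_api.errors import (
    FederationHttpRequestSendFailedException,
    FederationHttpResponseException,
)
logger = logging.getLogger(__name__)
async def fetch_remote_server_version(module_api) -> None:
    try:
        resp = await module_api.send_federation_http_request(
            "GET",
            "remote.example.org",  # hypothetical destination
            "/_matrix/federation/v1/version",
            query_parameters=None,
        )
    except FederationHttpResponseException as e:
        logger.warning("Remote replied with %d: %s", e.status_code, e.msg)
    except FederationHttpRequestSendFailedException as e:
        logger.warning("Could not reach remote (retryable=%s)", e.can_retry)
    else:
        logger.info("Remote reports version info: %s", resp)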
class PublicRoomListManager:
"""Contains methods for adding to, removing from and querying whether a room

View File

@@ -13,6 +13,7 @@
# limitations under the License.
"""Exception types which are exposed as part of the stable module API"""
import attr
from synapse.api.errors import (
Codes,
@@ -24,6 +25,57 @@ from synapse.config._base import ConfigError
from synapse.handlers.push_rules import InvalidRuleException
from synapse.storage.push_rule import RuleNotFoundException
@attr.s(auto_attribs=True)
class FederationHttpResponseException(Exception):
"""
Raised when an HTTP request over federation returns a status code >= 300 (and not 429).
"""
remote_server_name: str
# The HTTP status code of the response.
status_code: int
# A human-readable explanation for the error.
msg: str
# The non-parsed HTTP response body.
response_body: bytes
@attr.s(auto_attribs=True)
class FederationHttpNotRetryingDestinationException(Exception):
"""
Raised when the local homeserver refuses to send traffic to a remote homeserver that
it believes is experiencing an outage.
"""
remote_server_name: str
@attr.s(auto_attribs=True)
class FederationHttpDeniedException(Exception):
"""
Raised when the local homeserver refuses to send federation traffic to a remote
homeserver. This is due to the remote homeserver not being on the configured
federation whitelist.
"""
remote_server_name: str
@attr.s(auto_attribs=True)
class FederationHttpRequestSendFailedException(Exception):
"""
Raised when there are problems connecting to the remote homeserver due to e.g.
DNS failures, connection timeouts, etc.
"""
remote_server_name: str
# Whether the request can be retried with a chance of success. This will be True
# if the failure occurred due to e.g. timeouts or a disruption in the connection.
# It will be False in the case of e.g. a malformed response from the remote homeserver.
can_retry: bool
__all__ = [
"Codes",
"InvalidClientCredentialsError",
@@ -32,4 +84,8 @@ __all__ = [
"ConfigError",
"InvalidRuleException",
"RuleNotFoundException",
"FederationHttpResponseException",
"FederationHttpNotRetryingDestinationException",
"FederationHttpDeniedException",
"FederationHttpRequestSendFailedException",
]

View File

@@ -276,7 +276,7 @@ class BulkPushRuleEvaluator:
if related_event is not None:
related_events[relation_type] = _flatten_dict(
related_event,
msc3873_escape_event_match_key=self.hs.config.experimental.msc3873_escape_event_match_key,
msc3783_escape_event_match_key=self.hs.config.experimental.msc3783_escape_event_match_key,
)
reply_event_id = (
@@ -294,7 +294,7 @@ class BulkPushRuleEvaluator:
if related_event is not None:
related_events["m.in_reply_to"] = _flatten_dict(
related_event,
msc3873_escape_event_match_key=self.hs.config.experimental.msc3873_escape_event_match_key,
msc3783_escape_event_match_key=self.hs.config.experimental.msc3783_escape_event_match_key,
)
# indicate that this is from a fallback relation.
@@ -412,7 +412,7 @@ class BulkPushRuleEvaluator:
evaluator = PushRuleEvaluator(
_flatten_dict(
event,
msc3873_escape_event_match_key=self.hs.config.experimental.msc3873_escape_event_match_key,
msc3783_escape_event_match_key=self.hs.config.experimental.msc3783_escape_event_match_key,
),
has_mentions,
user_mentions,
@@ -507,7 +507,7 @@ def _flatten_dict(
prefix: Optional[List[str]] = None,
result: Optional[Dict[str, JsonValue]] = None,
*,
msc3873_escape_event_match_key: bool = False,
msc3783_escape_event_match_key: bool = False,
) -> Dict[str, JsonValue]:
"""
Given a JSON dictionary (or event) which might contain sub dictionaries,
@@ -536,7 +536,7 @@ def _flatten_dict(
if result is None:
result = {}
for key, value in d.items():
if msc3873_escape_event_match_key:
if msc3783_escape_event_match_key:
# Escape periods in the key with a backslash (and backslashes with an
# extra backslash). This is since a period is used as a separator between
# nested fields.
@@ -552,7 +552,7 @@ def _flatten_dict(
value,
prefix=(prefix + [key]),
result=result,
msc3873_escape_event_match_key=msc3873_escape_event_match_key,
msc3783_escape_event_match_key=msc3783_escape_event_match_key,
)
# `room_version` should only ever be set when looking at the top level of an event
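A worked example (hypothetical event content, not from this diff) of what the escape option changes:
# With escaping on:  {"content": {"org.example.tag": 1}} -> {"content.org\.example\.tag": 1}
# With escaping off: the key becomes "content.org.example.tag", which is
# indistinguishable from a genuinely nested org -> example -> tag structure.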

View File

@@ -238,24 +238,6 @@ class ReplicationStreamer:
except Exception:
logger.exception("Failed to replicate")
# The last token we send may not match the current
# token, in which case we want to send out a `POSITION`
# to tell other workers the actual current position.
if updates[-1][0] < current_token:
logger.info(
"Sending position: %s -> %s",
stream.NAME,
current_token,
)
self.command_handler.send_command(
PositionCommand(
stream.NAME,
self._instance_name,
updates[-1][0],
current_token,
)
)
logger.debug("No more pending updates, breaking poke loop")
finally:
self.pending_updates = False

View File

@@ -108,7 +108,8 @@ class ClientRestResource(JsonResource):
if is_main_process:
logout.register_servlets(hs, client_resource)
sync.register_servlets(hs, client_resource)
filter.register_servlets(hs, client_resource)
if is_main_process:
filter.register_servlets(hs, client_resource)
account.register_servlets(hs, client_resource)
register.register_servlets(hs, client_resource)
if is_main_process:

View File

@@ -53,11 +53,11 @@ class EventReportsRestServlet(RestServlet):
PATTERNS = admin_patterns("/event_reports$")
def __init__(self, hs: "HomeServer"):
self._auth = hs.get_auth()
self._store = hs.get_datastores().main
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self._auth, request)
await assert_requester_is_admin(self.auth, request)
start = parse_integer(request, "from", default=0)
limit = parse_integer(request, "limit", default=100)
@@ -79,7 +79,7 @@ class EventReportsRestServlet(RestServlet):
errcode=Codes.INVALID_PARAM,
)
event_reports, total = await self._store.get_event_reports_paginate(
event_reports, total = await self.store.get_event_reports_paginate(
start, limit, direction, user_id, room_id
)
ret = {"event_reports": event_reports, "total": total}
@@ -108,13 +108,13 @@ class EventReportDetailRestServlet(RestServlet):
PATTERNS = admin_patterns("/event_reports/(?P<report_id>[^/]*)$")
def __init__(self, hs: "HomeServer"):
self._auth = hs.get_auth()
self._store = hs.get_datastores().main
self.auth = hs.get_auth()
self.store = hs.get_datastores().main
async def on_GET(
self, request: SynapseRequest, report_id: str
) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self._auth, request)
await assert_requester_is_admin(self.auth, request)
message = (
"The report_id parameter must be a string representing a positive integer."
@@ -131,33 +131,8 @@ class EventReportDetailRestServlet(RestServlet):
HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
)
ret = await self._store.get_event_report(resolved_report_id)
ret = await self.store.get_event_report(resolved_report_id)
if not ret:
raise NotFoundError("Event report not found")
return HTTPStatus.OK, ret
async def on_DELETE(
self, request: SynapseRequest, report_id: str
) -> Tuple[int, JsonDict]:
await assert_requester_is_admin(self._auth, request)
message = (
"The report_id parameter must be a string representing a positive integer."
)
try:
resolved_report_id = int(report_id)
except ValueError:
raise SynapseError(
HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
)
if resolved_report_id < 0:
raise SynapseError(
HTTPStatus.BAD_REQUEST, message, errcode=Codes.INVALID_PARAM
)
if await self._store.delete_event_report(resolved_report_id):
return HTTPStatus.OK, {}
raise NotFoundError("Event report not found")
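For reference, a hedged example (assumed homeserver URL, admin token and report ID; the /_synapse/admin/v1 prefix is what admin_patterns produces) of exercising the DELETE handler shown above with the `requests` library:
import requests
resp = requests.delete(
    "https://synapse.example.org/_synapse/admin/v1/event_reports/123",
    headers={"Authorization": "Bearer <admin access token>"},
)
# 200 with an empty JSON body on success; 404 if no such report exists.
print(resp.status_code, resp.json())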

View File

@@ -304,20 +304,13 @@ class UserRestServletV2(RestServlet):
# remove old threepids
for medium, address in del_threepids:
try:
# Attempt to remove any known bindings of this third-party ID
# and user ID from identity servers.
await self.hs.get_identity_handler().try_unbind_threepid(
user_id, medium, address, id_server=None
await self.auth_handler.delete_threepid(
user_id, medium, address, None
)
except Exception:
logger.exception("Failed to remove threepids")
raise SynapseError(500, "Failed to remove threepids")
# Delete the local association of this user ID and third-party ID.
await self.auth_handler.delete_local_threepid(
user_id, medium, address
)
# add new threepids
current_time = self.hs.get_clock().time_msec()
for medium, address in add_threepids:

View File

@@ -768,9 +768,7 @@ class ThreepidDeleteRestServlet(RestServlet):
user_id = requester.user.to_string()
try:
# Attempt to remove any known bindings of this third-party ID
# and user ID from identity servers.
ret = await self.hs.get_identity_handler().try_unbind_threepid(
ret = await self.auth_handler.delete_threepid(
user_id, body.medium, body.address, body.id_server
)
except Exception:
@@ -785,11 +783,6 @@ class ThreepidDeleteRestServlet(RestServlet):
else:
id_server_unbind_result = "no-support"
# Delete the local association of this user ID and third-party ID.
await self.auth_handler.delete_local_threepid(
user_id, body.medium, body.address
)
return 200, {"id_server_unbind_result": id_server_unbind_result}

View File

@@ -16,7 +16,7 @@ import logging
from collections import defaultdict
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from synapse.api.constants import AccountDataTypes, EduTypes, Membership, PresenceState
from synapse.api.constants import EduTypes, Membership, PresenceState
from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.api.filtering import FilterCollection
from synapse.api.presence import UserPresenceState
@@ -139,28 +139,7 @@ class SyncRestServlet(RestServlet):
device_id,
)
# Stream position of the last ignored users account data event for this user,
# if we're initial syncing.
# We include this in the request key to invalidate an initial sync
# in the response cache once the set of ignored users has changed.
# (We filter out ignored users from timeline events, so our sync response
# is invalid once the set of ignored users changes.)
last_ignore_accdata_streampos: Optional[int] = None
if not since:
# No `since`, so this is an initial sync.
last_ignore_accdata_streampos = await self.store.get_latest_stream_id_for_global_account_data_by_type_for_user(
user.to_string(), AccountDataTypes.IGNORED_USER_LIST
)
request_key = (
user,
timeout,
since,
filter_id,
full_state,
device_id,
last_ignore_accdata_streampos,
)
request_key = (user, timeout, since, filter_id, full_state, device_id)
if filter_id is None:
filter_collection = self.filtering.DEFAULT_FILTER_COLLECTION

View File

@@ -1,93 +0,0 @@
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from synapse.config._base import ConfigError
from synapse.http.server import UnrecognizedRequestResource
from .config_resource import MediaConfigResource
from .download_resource import DownloadResource
from .preview_url_resource import PreviewUrlResource
from .thumbnail_resource import ThumbnailResource
from .upload_resource import UploadResource
if TYPE_CHECKING:
from synapse.server import HomeServer
class MediaRepositoryResource(UnrecognizedRequestResource):
"""File uploading and downloading.
Uploads are POSTed to a resource which returns a token which is used to GET
the download::
=> POST /_matrix/media/r0/upload HTTP/1.1
Content-Type: <media-type>
Content-Length: <content-length>
<media>
<= HTTP/1.1 200 OK
Content-Type: application/json
{ "content_uri": "mxc://<server-name>/<media-id>" }
=> GET /_matrix/media/r0/download/<server-name>/<media-id> HTTP/1.1
<= HTTP/1.1 200 OK
Content-Type: <media-type>
Content-Disposition: attachment;filename=<upload-filename>
<media>
Clients can get thumbnails by supplying a desired width and height and
thumbnailing method::
=> GET /_matrix/media/r0/thumbnail/<server_name>
/<media-id>?width=<w>&height=<h>&method=<m> HTTP/1.1
<= HTTP/1.1 200 OK
Content-Type: image/jpeg or image/png
<thumbnail>
The thumbnail methods are "crop" and "scale". "scale" tries to return an
image where either the width or the height is smaller than the requested
size. The client should then scale and letterbox the image if it needs to
fit within a given rectangle. "crop" tries to return an image where the
width and height are close to the requested size and the aspect matches
the requested size. The client should scale the image if it needs to fit
within a given rectangle.
"""
def __init__(self, hs: "HomeServer"):
# If we're not configured to use it, raise if we somehow got here.
if not hs.config.media.can_load_media_repo:
raise ConfigError("Synapse is not configured to use a media repo.")
super().__init__()
media_repo = hs.get_media_repository()
self.putChild(b"upload", UploadResource(hs, media_repo))
self.putChild(b"download", DownloadResource(hs, media_repo))
self.putChild(
b"thumbnail", ThumbnailResource(hs, media_repo, media_repo.media_storage)
)
if hs.config.media.url_preview_enabled:
self.putChild(
b"preview_url",
PreviewUrlResource(hs, media_repo, media_repo.media_storage),
)
self.putChild(b"config", MediaConfigResource(hs))

View File

@@ -1,4 +1,5 @@
# Copyright 2023 The Matrix.org Foundation C.I.C.
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,7 +12,468 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This exists purely for backwards compatibility with media providers and spam checkers.
from synapse.media._base import FileInfo, Responder # noqa: F401
import logging
import os
import urllib
from abc import ABC, abstractmethod
from types import TracebackType
from typing import Awaitable, Dict, Generator, List, Optional, Tuple, Type
import attr
from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender
from twisted.web.server import Request
from synapse.api.errors import Codes, SynapseError, cs_error
from synapse.http.server import finish_request, respond_with_json
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.util.stringutils import is_ascii, parse_and_validate_server_name
logger = logging.getLogger(__name__)
# list all text content types that will have the charset default to UTF-8 when
# none is given
TEXT_CONTENT_TYPES = [
"text/css",
"text/csv",
"text/html",
"text/calendar",
"text/plain",
"text/javascript",
"application/json",
"application/ld+json",
"application/rtf",
"image/svg+xml",
"text/xml",
]
def parse_media_id(request: Request) -> Tuple[str, str, Optional[str]]:
"""Parses the server name, media ID and optional file name from the request URI
Also performs some rough validation on the server name.
Args:
request: The `Request`.
Returns:
A tuple containing the parsed server name, media ID and optional file name.
Raises:
SynapseError(404): if parsing or validation fail for any reason
"""
try:
# The type on postpath seems incorrect in Twisted 21.2.0.
postpath: List[bytes] = request.postpath # type: ignore
assert postpath
# This allows users to append e.g. /test.png to the URL. Useful for
# clients that parse the URL to see content type.
server_name_bytes, media_id_bytes = postpath[:2]
server_name = server_name_bytes.decode("utf-8")
media_id = media_id_bytes.decode("utf8")
# Validate the server name, raising if invalid
parse_and_validate_server_name(server_name)
file_name = None
if len(postpath) > 2:
try:
file_name = urllib.parse.unquote(postpath[-1].decode("utf-8"))
except UnicodeDecodeError:
pass
return server_name, media_id, file_name
except Exception:
raise SynapseError(
404, "Invalid media id token %r" % (request.postpath,), Codes.UNKNOWN
)
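A worked example (hypothetical URL, not part of the diff) of the parsing above:
#   GET /_matrix/media/r0/download/example.org/abc123/hello.png
#   request.postpath == [b"example.org", b"abc123", b"hello.png"]
#   parse_media_id(request) -> ("example.org", "abc123", "hello.png")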
def respond_404(request: SynapseRequest) -> None:
respond_with_json(
request,
404,
cs_error("Not found %r" % (request.postpath,), code=Codes.NOT_FOUND),
send_cors=True,
)
async def respond_with_file(
request: SynapseRequest,
media_type: str,
file_path: str,
file_size: Optional[int] = None,
upload_name: Optional[str] = None,
) -> None:
logger.debug("Responding with %r", file_path)
if os.path.isfile(file_path):
if file_size is None:
stat = os.stat(file_path)
file_size = stat.st_size
add_file_headers(request, media_type, file_size, upload_name)
with open(file_path, "rb") as f:
await make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
finish_request(request)
else:
respond_404(request)
def add_file_headers(
request: Request,
media_type: str,
file_size: Optional[int],
upload_name: Optional[str],
) -> None:
"""Adds the correct response headers in preparation for responding with the
media.
Args:
request
media_type: The media/content type.
file_size: Size in bytes of the media, if known.
upload_name: The name of the requested file, if any.
"""
def _quote(x: str) -> str:
return urllib.parse.quote(x.encode("utf-8"))
# Default to a UTF-8 charset for text content types.
# ex, uses UTF-8 for 'text/css' but not 'text/css; charset=UTF-16'
if media_type.lower() in TEXT_CONTENT_TYPES:
content_type = media_type + "; charset=UTF-8"
else:
content_type = media_type
request.setHeader(b"Content-Type", content_type.encode("UTF-8"))
if upload_name:
# RFC6266 section 4.1 [1] defines both `filename` and `filename*`.
#
# `filename` is defined to be a `value`, which is defined by RFC2616
# section 3.6 [2] to be a `token` or a `quoted-string`, where a `token`
# is (essentially) a single US-ASCII word, and a `quoted-string` is a
# US-ASCII string surrounded by double-quotes, using backslash as an
# escape character. Note that %-encoding is *not* permitted.
#
# `filename*` is defined to be an `ext-value`, which is defined in
# RFC5987 section 3.2.1 [3] to be `charset "'" [ language ] "'" value-chars`,
# where `value-chars` is essentially a %-encoded string in the given charset.
#
# [1]: https://tools.ietf.org/html/rfc6266#section-4.1
# [2]: https://tools.ietf.org/html/rfc2616#section-3.6
# [3]: https://tools.ietf.org/html/rfc5987#section-3.2.1
# We avoid the quoted-string version of `filename`, because (a) synapse didn't
# correctly interpret those as of 0.99.2 and (b) they are a bit of a pain and we
# may as well just do the filename* version.
if _can_encode_filename_as_token(upload_name):
disposition = "inline; filename=%s" % (upload_name,)
else:
disposition = "inline; filename*=utf-8''%s" % (_quote(upload_name),)
request.setHeader(b"Content-Disposition", disposition.encode("ascii"))
# cache for at least a day.
# XXX: we might want to turn this off for data we don't want to
# recommend caching as it's sensitive or private - or at least
# select private. don't bother setting Expires as all our
# clients are smart enough to be happy with Cache-Control
request.setHeader(b"Cache-Control", b"public,max-age=86400,s-maxage=86400")
if file_size is not None:
request.setHeader(b"Content-Length", b"%d" % (file_size,))
# Tell web crawlers to not index, archive, or follow links in media. This
# should help to prevent things in the media repo from showing up in web
# search results.
request.setHeader(b"X-Robots-Tag", "noindex, nofollow, noarchive, noimageindex")
# separators as defined in RFC2616. SP and HT are handled separately.
# see _can_encode_filename_as_token.
_FILENAME_SEPARATOR_CHARS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
}
def _can_encode_filename_as_token(x: str) -> bool:
for c in x:
# from RFC2616:
#
# token = 1*<any CHAR except CTLs or separators>
#
# separators = "(" | ")" | "<" | ">" | "@"
# | "," | ";" | ":" | "\" | <">
# | "/" | "[" | "]" | "?" | "="
# | "{" | "}" | SP | HT
#
# CHAR = <any US-ASCII character (octets 0 - 127)>
#
# CTL = <any US-ASCII control character
# (octets 0 - 31) and DEL (127)>
#
if ord(c) >= 127 or ord(c) <= 32 or c in _FILENAME_SEPARATOR_CHARS:
return False
return True
async def respond_with_responder(
request: SynapseRequest,
responder: "Optional[Responder]",
media_type: str,
file_size: Optional[int],
upload_name: Optional[str] = None,
) -> None:
"""Responds to the request with given responder. If responder is None then
returns 404.
Args:
request
responder
media_type: The media/content type.
file_size: Size in bytes of the media. If not known it should be None
upload_name: The name of the requested file, if any.
"""
if not responder:
respond_404(request)
return
# If we have a responder we *must* use it as a context manager.
with responder:
if request._disconnected:
logger.warning(
"Not sending response to request %s, already disconnected.", request
)
return
logger.debug("Responding to media request with responder %s", responder)
add_file_headers(request, media_type, file_size, upload_name)
try:
await responder.write_to_consumer(request)
except Exception as e:
# The majority of the time this will be due to the client having gone
# away. Unfortunately, Twisted simply throws a generic exception at us
# in that case.
logger.warning("Failed to write to consumer: %s %s", type(e), e)
# Unregister the producer, if it has one, so Twisted doesn't complain
if request.producer:
request.unregisterProducer()
finish_request(request)
class Responder(ABC):
"""Represents a response that can be streamed to the requester.
Responder is a context manager which *must* be used, so that any resources
held can be cleaned up.
"""
@abstractmethod
def write_to_consumer(self, consumer: IConsumer) -> Awaitable:
"""Stream response into consumer
Args:
consumer: The consumer to stream into.
Returns:
Resolves once the response has finished being written
"""
raise NotImplementedError()
def __enter__(self) -> None: # noqa: B027
pass
def __exit__( # noqa: B027
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
@attr.s(slots=True, frozen=True, auto_attribs=True)
class ThumbnailInfo:
"""Details about a generated thumbnail."""
width: int
height: int
method: str
# Content type of thumbnail, e.g. image/png
type: str
# The size of the media file, in bytes.
length: Optional[int] = None
@attr.s(slots=True, frozen=True, auto_attribs=True)
class FileInfo:
"""Details about a requested/uploaded file."""
# The server name where the media originated from, or None if local.
server_name: Optional[str]
# The local ID of the file. For local files this is the same as the media_id
file_id: str
# If the file is for the url preview cache
url_cache: bool = False
# Whether the file is a thumbnail or not.
thumbnail: Optional[ThumbnailInfo] = None
# The below properties exist to maintain compatibility with third-party modules.
@property
def thumbnail_width(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.width
@property
def thumbnail_height(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.height
@property
def thumbnail_method(self) -> Optional[str]:
if not self.thumbnail:
return None
return self.thumbnail.method
@property
def thumbnail_type(self) -> Optional[str]:
if not self.thumbnail:
return None
return self.thumbnail.type
@property
def thumbnail_length(self) -> Optional[int]:
if not self.thumbnail:
return None
return self.thumbnail.length
def get_filename_from_headers(headers: Dict[bytes, List[bytes]]) -> Optional[str]:
"""
Get the filename of the downloaded file by inspecting the
Content-Disposition HTTP header.
Args:
headers: The HTTP request headers.
Returns:
The filename, or None.
"""
content_disposition = headers.get(b"Content-Disposition", [b""])
# No header, bail out.
if not content_disposition[0]:
return None
_, params = _parse_header(content_disposition[0])
upload_name = None
# First check if there is a valid UTF-8 filename
upload_name_utf8 = params.get(b"filename*", None)
if upload_name_utf8:
if upload_name_utf8.lower().startswith(b"utf-8''"):
upload_name_utf8 = upload_name_utf8[7:]
# We have a filename*= section. This MUST be ASCII, and any UTF-8
# bytes are %-quoted.
try:
# Once it is decoded, we can then unquote the %-encoded
# parts strictly into a unicode string.
upload_name = urllib.parse.unquote(
upload_name_utf8.decode("ascii"), errors="strict"
)
except UnicodeDecodeError:
# Incorrect UTF-8.
pass
# If there isn't one, check for an ASCII name.
if not upload_name:
upload_name_ascii = params.get(b"filename", None)
if upload_name_ascii and is_ascii(upload_name_ascii):
upload_name = upload_name_ascii.decode("ascii")
# This may be None here, indicating we did not find a matching name.
return upload_name
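A quick sketch (assumed header value, not part of the diff) of the corresponding round trip on the receiving side:
headers = {b"Content-Disposition": [b"inline; filename*=utf-8''caf%C3%A9.png"]}
assert get_filename_from_headers(headers) == "café.png"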
def _parse_header(line: bytes) -> Tuple[bytes, Dict[bytes, bytes]]:
"""Parse a Content-type like header.
Cargo-culted from `cgi`, but works on bytes rather than strings.
Args:
line: header to be parsed
Returns:
The main content-type, followed by the parameter dictionary
"""
parts = _parseparam(b";" + line)
key = next(parts)
pdict = {}
for p in parts:
i = p.find(b"=")
if i >= 0:
name = p[:i].strip().lower()
value = p[i + 1 :].strip()
# strip double-quotes
if len(value) >= 2 and value[0:1] == value[-1:] == b'"':
value = value[1:-1]
value = value.replace(b"\\\\", b"\\").replace(b'\\"', b'"')
pdict[name] = value
return key, pdict
def _parseparam(s: bytes) -> Generator[bytes, None, None]:
"""Generator which splits the input on ;, respecting double-quoted sequences
Cargo-culted from `cgi`, but works on bytes rather than strings.
Args:
s: header to be parsed
Returns:
The split input
"""
while s[:1] == b";":
s = s[1:]
# look for the next ;
end = s.find(b";")
# if there is an odd number of " marks between here and the next ;, skip to the
# next ; instead
while end > 0 and (s.count(b'"', 0, end) - s.count(b'\\"', 0, end)) % 2:
end = s.find(b";", end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]

View File

@@ -22,10 +22,11 @@ from synapse.http.server import (
)
from synapse.http.servlet import parse_boolean
from synapse.http.site import SynapseRequest
from synapse.media._base import parse_media_id, respond_404
from ._base import parse_media_id, respond_404
if TYPE_CHECKING:
from synapse.media.media_repository import MediaRepository
from synapse.rest.media.v1.media_repository import MediaRepository
from synapse.server import HomeServer
logger = logging.getLogger(__name__)

View File

@@ -32,10 +32,18 @@ from synapse.api.errors import (
RequestSendFailed,
SynapseError,
)
from synapse.config._base import ConfigError
from synapse.config.repository import ThumbnailRequirement
from synapse.http.server import UnrecognizedRequestResource
from synapse.http.site import SynapseRequest
from synapse.logging.context import defer_to_thread
from synapse.media._base import (
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import random_string
from ._base import (
FileInfo,
Responder,
ThumbnailInfo,
@@ -43,15 +51,15 @@ from synapse.media._base import (
respond_404,
respond_with_responder,
)
from synapse.media.filepath import MediaFilePaths
from synapse.media.media_storage import MediaStorage
from synapse.media.storage_provider import StorageProviderWrapper
from synapse.media.thumbnailer import Thumbnailer, ThumbnailError
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.types import UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.retryutils import NotRetryingDestination
from synapse.util.stringutils import random_string
from .config_resource import MediaConfigResource
from .download_resource import DownloadResource
from .filepath import MediaFilePaths
from .media_storage import MediaStorage
from .preview_url_resource import PreviewUrlResource
from .storage_provider import StorageProviderWrapper
from .thumbnail_resource import ThumbnailResource
from .thumbnailer import Thumbnailer, ThumbnailError
from .upload_resource import UploadResource
if TYPE_CHECKING:
from synapse.server import HomeServer
@@ -1036,3 +1044,69 @@ class MediaRepository:
removed_media.append(media_id)
return removed_media, len(removed_media)
class MediaRepositoryResource(UnrecognizedRequestResource):
"""File uploading and downloading.
Uploads are POSTed to a resource which returns a token which is used to GET
the download::
=> POST /_matrix/media/r0/upload HTTP/1.1
Content-Type: <media-type>
Content-Length: <content-length>
<media>
<= HTTP/1.1 200 OK
Content-Type: application/json
{ "content_uri": "mxc://<server-name>/<media-id>" }
=> GET /_matrix/media/r0/download/<server-name>/<media-id> HTTP/1.1
<= HTTP/1.1 200 OK
Content-Type: <media-type>
Content-Disposition: attachment;filename=<upload-filename>
<media>
Clients can get thumbnails by supplying a desired width and height and
thumbnailing method::
=> GET /_matrix/media/r0/thumbnail/<server_name>
/<media-id>?width=<w>&height=<h>&method=<m> HTTP/1.1
<= HTTP/1.1 200 OK
Content-Type: image/jpeg or image/png
<thumbnail>
The thumbnail methods are "crop" and "scale". "scale" tries to return an
image where either the width or the height is smaller than the requested
size. The client should then scale and letterbox the image if it needs to
fit within a given rectangle. "crop" tries to return an image where the
width and height are close to the requested size and the aspect matches
the requested size. The client should scale the image if it needs to fit
within a given rectangle.
"""
def __init__(self, hs: "HomeServer"):
# If we're not configured to use it, raise if we somehow got here.
if not hs.config.media.can_load_media_repo:
raise ConfigError("Synapse is not configured to use a media repo.")
super().__init__()
media_repo = hs.get_media_repository()
self.putChild(b"upload", UploadResource(hs, media_repo))
self.putChild(b"download", DownloadResource(hs, media_repo))
self.putChild(
b"thumbnail", ThumbnailResource(hs, media_repo, media_repo.media_storage)
)
if hs.config.media.url_preview_enabled:
self.putChild(
b"preview_url",
PreviewUrlResource(hs, media_repo, media_repo.media_storage),
)
self.putChild(b"config", MediaConfigResource(hs))

View File

@@ -1,4 +1,4 @@
# Copyright 2023 The Matrix.org Foundation C.I.C.
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,7 +11,364 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import contextlib
import logging
import os
import shutil
from types import TracebackType
from typing import (
IO,
TYPE_CHECKING,
Any,
Awaitable,
BinaryIO,
Callable,
Generator,
Optional,
Sequence,
Tuple,
Type,
)
# This exists purely for backwards compatibility with spam checkers.
from synapse.media.media_storage import ReadableFileWrapper # noqa: F401
import attr
from twisted.internet.defer import Deferred
from twisted.internet.interfaces import IConsumer
from twisted.protocols.basic import FileSender
import synapse
from synapse.api.errors import NotFoundError
from synapse.logging.context import defer_to_thread, make_deferred_yieldable
from synapse.util import Clock
from synapse.util.file_consumer import BackgroundFileConsumer
from ._base import FileInfo, Responder
from .filepath import MediaFilePaths
if TYPE_CHECKING:
from synapse.rest.media.v1.storage_provider import StorageProvider
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class MediaStorage:
"""Responsible for storing/fetching files from local sources.
Args:
hs
local_media_directory: Base path where we store media on disk
filepaths
storage_providers: List of StorageProvider that are used to fetch and store files.
"""
def __init__(
self,
hs: "HomeServer",
local_media_directory: str,
filepaths: MediaFilePaths,
storage_providers: Sequence["StorageProvider"],
):
self.hs = hs
self.reactor = hs.get_reactor()
self.local_media_directory = local_media_directory
self.filepaths = filepaths
self.storage_providers = storage_providers
self.spam_checker = hs.get_spam_checker()
self.clock = hs.get_clock()
async def store_file(self, source: IO, file_info: FileInfo) -> str:
"""Write `source` to the on disk media store, and also any other
configured storage providers
Args:
source: A file like object that should be written
file_info: Info about the file to store
Returns:
the file path written to in the primary media store
"""
with self.store_into_file(file_info) as (f, fname, finish_cb):
# Write to the main repository
await self.write_to_file(source, f)
await finish_cb()
return fname
async def write_to_file(self, source: IO, output: IO) -> None:
"""Asynchronously write the `source` to `output`."""
await defer_to_thread(self.reactor, _write_file_synchronously, source, output)
@contextlib.contextmanager
def store_into_file(
self, file_info: FileInfo
) -> Generator[Tuple[BinaryIO, str, Callable[[], Awaitable[None]]], None, None]:
"""Context manager used to get a file like object to write into, as
described by file_info.
Actually yields a 3-tuple (file, fname, finish_cb), where file is a file
like object that can be written to, fname is the absolute path of file
on disk, and finish_cb is a function that returns an awaitable.
fname can be used to read the contents from after upload, e.g. to
generate thumbnails.
finish_cb must be called and waited on after the file has been
successfully written to. Should not be called if there was an
error.
Args:
file_info: Info about the file to store
Example:
with media_storage.store_into_file(info) as (f, fname, finish_cb):
# .. write into f ...
await finish_cb()
"""
path = self._file_info_to_path(file_info)
fname = os.path.join(self.local_media_directory, path)
dirname = os.path.dirname(fname)
os.makedirs(dirname, exist_ok=True)
finished_called = [False]
try:
with open(fname, "wb") as f:
async def finish() -> None:
# Ensure that all writes have been flushed and close the
# file.
f.flush()
f.close()
spam_check = await self.spam_checker.check_media_file_for_spam(
ReadableFileWrapper(self.clock, fname), file_info
)
if spam_check != synapse.module_api.NOT_SPAM:
logger.info("Blocking media due to spam checker")
# Note that we'll delete the stored media, due to the
# try/except below. The media also won't be stored in
# the DB.
# We currently ignore any additional field returned by
# the spam-check API.
raise SpamMediaException(errcode=spam_check[0])
for provider in self.storage_providers:
await provider.store_file(path, file_info)
finished_called[0] = True
yield f, fname, finish
except Exception as e:
try:
os.remove(fname)
except Exception:
pass
raise e from None
if not finished_called[0]:
raise Exception("Finished callback not called")
async def fetch_media(self, file_info: FileInfo) -> Optional[Responder]:
"""Attempts to fetch media described by file_info from the local cache
and configured storage providers.
Args:
file_info
Returns:
Returns a Responder if the file was found, otherwise None.
"""
paths = [self._file_info_to_path(file_info)]
# fallback for remote thumbnails with no method in the filename
if file_info.thumbnail and file_info.server_name:
paths.append(
self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
)
for path in paths:
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
logger.debug("responding with local file %s", local_path)
return FileResponder(open(local_path, "rb"))
logger.debug("local file %s did not exist", local_path)
for provider in self.storage_providers:
for path in paths:
res: Any = await provider.fetch(path, file_info)
if res:
logger.debug("Streaming %s from %s", path, provider)
return res
logger.debug("%s not found on %s", path, provider)
return None
async def ensure_media_is_in_local_cache(self, file_info: FileInfo) -> str:
"""Ensures that the given file is in the local cache. Attempts to
download it from storage providers if it isn't.
Args:
file_info
Returns:
Full path to local file
"""
path = self._file_info_to_path(file_info)
local_path = os.path.join(self.local_media_directory, path)
if os.path.exists(local_path):
return local_path
# Fallback for paths without method names
# Should be removed in the future
if file_info.thumbnail and file_info.server_name:
legacy_path = self.filepaths.remote_media_thumbnail_rel_legacy(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
)
legacy_local_path = os.path.join(self.local_media_directory, legacy_path)
if os.path.exists(legacy_local_path):
return legacy_local_path
dirname = os.path.dirname(local_path)
os.makedirs(dirname, exist_ok=True)
for provider in self.storage_providers:
res: Any = await provider.fetch(path, file_info)
if res:
with res:
consumer = BackgroundFileConsumer(
open(local_path, "wb"), self.reactor
)
await res.write_to_consumer(consumer)
await consumer.wait()
return local_path
raise NotFoundError()
def _file_info_to_path(self, file_info: FileInfo) -> str:
"""Converts file_info into a relative path.
The path is suitable for storing files under a directory, e.g. used to
store files on local FS under the base media repository directory.
"""
if file_info.url_cache:
if file_info.thumbnail:
return self.filepaths.url_cache_thumbnail_rel(
media_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.url_cache_filepath_rel(file_info.file_id)
if file_info.server_name:
if file_info.thumbnail:
return self.filepaths.remote_media_thumbnail_rel(
server_name=file_info.server_name,
file_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.remote_media_filepath_rel(
file_info.server_name, file_info.file_id
)
if file_info.thumbnail:
return self.filepaths.local_media_thumbnail_rel(
media_id=file_info.file_id,
width=file_info.thumbnail.width,
height=file_info.thumbnail.height,
content_type=file_info.thumbnail.type,
method=file_info.thumbnail.method,
)
return self.filepaths.local_media_filepath_rel(file_info.file_id)
def _write_file_synchronously(source: IO, dest: IO) -> None:
"""Write `source` to the file like `dest` synchronously. Should be called
from a thread.
Args:
source: A file like object that's to be written
dest: A file like object to be written to
"""
source.seek(0) # Ensure we read from the start of the file
shutil.copyfileobj(source, dest)
class FileResponder(Responder):
"""Wraps an open file that can be sent to a request.
Args:
open_file: A file like object to be streamed to the client; it is
closed when finished streaming.
"""
def __init__(self, open_file: IO):
self.open_file = open_file
def write_to_consumer(self, consumer: IConsumer) -> Deferred:
return make_deferred_yieldable(
FileSender().beginFileTransfer(self.open_file, consumer)
)
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.open_file.close()
class SpamMediaException(NotFoundError):
"""The media was blocked by a spam checker, so we simply 404 the request (in
the same way as if it was quarantined).
"""
@attr.s(slots=True, auto_attribs=True)
class ReadableFileWrapper:
"""Wrapper that allows reading a file in chunks, yielding to the reactor,
and writing to a callback.
This is simplified `FileSender` that takes an IO object rather than an
`IConsumer`.
"""
CHUNK_SIZE = 2**14
clock: Clock
path: str
async def write_chunks_to(self, callback: Callable[[bytes], object]) -> None:
"""Reads the file in chunks and calls the callback with each chunk."""
with open(self.path, "rb") as file:
while True:
chunk = file.read(self.CHUNK_SIZE)
if not chunk:
break
callback(chunk)
# We yield to the reactor by sleeping for 0 seconds.
await self.clock.sleep(0)
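A minimal sketch (assumed clock and path, not part of the diff) of consuming a file through the wrapper, e.g. hashing it without hogging the reactor:
import hashlib
async def hash_media_file(clock: Clock, path: str) -> str:
    digest = hashlib.sha256()
    await ReadableFileWrapper(clock, path).write_chunks_to(digest.update)
    return digest.hexdigest()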

View File

@@ -18,7 +18,7 @@ from typing import TYPE_CHECKING, List, Optional
import attr
from synapse.media.preview_html import parse_html_description
from synapse.rest.media.v1.preview_html import parse_html_description
from synapse.types import JsonDict
from synapse.util import json_decoder

View File

@@ -40,19 +40,21 @@ from synapse.http.server import (
from synapse.http.servlet import parse_integer, parse_string
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.media._base import FileInfo, get_filename_from_headers
from synapse.media.media_storage import MediaStorage
from synapse.media.oembed import OEmbedProvider
from synapse.media.preview_html import decode_body, parse_html_to_open_graph
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.rest.media.v1._base import get_filename_from_headers
from synapse.rest.media.v1.media_storage import MediaStorage
from synapse.rest.media.v1.oembed import OEmbedProvider
from synapse.rest.media.v1.preview_html import decode_body, parse_html_to_open_graph
from synapse.types import JsonDict, UserID
from synapse.util import json_encoder
from synapse.util.async_helpers import ObservableDeferred
from synapse.util.caches.expiringcache import ExpiringCache
from synapse.util.stringutils import random_string
from ._base import FileInfo
if TYPE_CHECKING:
from synapse.media.media_repository import MediaRepository
from synapse.rest.media.v1.media_repository import MediaRepository
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -161,10 +163,6 @@ class PreviewUrlResource(DirectServeJsonResource):
7. Stores the result in the database cache.
4. Returns the result.
If any additional request (e.g. from oEmbed autodiscovery, step 5.3 or
image thumbnailing, step 5.4 or 6.4) fails then the URL preview as a whole
does not fail. As much information as possible is returned.
The in-memory cache expires after 1 hour.
Expired entries in the database cache (and their associated media files) are
@@ -366,25 +364,16 @@ class PreviewUrlResource(DirectServeJsonResource):
oembed_url = self._oembed.autodiscover_from_html(tree)
og_from_oembed: JsonDict = {}
if oembed_url:
try:
oembed_info = await self._handle_url(
oembed_url, user, allow_data_urls=True
)
except Exception as e:
# Fetching the oEmbed info failed, don't block the entire URL preview.
logger.warning(
"oEmbed fetch failed during URL preview: %s errored with %s",
oembed_url,
e,
)
else:
(
og_from_oembed,
author_name,
expiration_ms,
) = await self._handle_oembed_response(
url, oembed_info, expiration_ms
)
oembed_info = await self._handle_url(
oembed_url, user, allow_data_urls=True
)
(
og_from_oembed,
author_name,
expiration_ms,
) = await self._handle_oembed_response(
url, oembed_info, expiration_ms
)
# Parse Open Graph information from the HTML in case the oEmbed
# response failed or is incomplete.

View File

@@ -1,4 +1,4 @@
# Copyright 2023 The Matrix.org Foundation C.I.C.
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,7 +11,171 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This exists purely for backwards compatibility with media providers.
from synapse.media.storage_provider import StorageProvider # noqa: F401
import abc
import logging
import os
import shutil
from typing import TYPE_CHECKING, Callable, Optional
from synapse.config._base import Config
from synapse.logging.context import defer_to_thread, run_in_background
from synapse.util.async_helpers import maybe_awaitable
from ._base import FileInfo, Responder
from .media_storage import FileResponder
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from synapse.server import HomeServer
class StorageProvider(metaclass=abc.ABCMeta):
"""A storage provider is a service that can store uploaded media and
retrieve them.
"""
@abc.abstractmethod
async def store_file(self, path: str, file_info: FileInfo) -> None:
"""Store the file described by file_info. The actual contents can be
retrieved by reading the file in file_info.upload_path.
Args:
path: Relative path of file in local cache
file_info: The metadata of the file.
"""
@abc.abstractmethod
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
"""Attempt to fetch the file described by file_info and stream it
into the writer.
Args:
path: Relative path of file in local cache
file_info: The metadata of the file.
Returns:
Returns a Responder if the provider has the file, otherwise returns None.
"""
class StorageProviderWrapper(StorageProvider):
"""Wraps a storage provider and provides various config options
Args:
backend: The storage provider to wrap.
store_local: Whether to store new local files or not.
store_synchronous: Whether to wait for the file to be successfully
uploaded, or to do the upload in the background.
store_remote: Whether remote media should be uploaded
"""
def __init__(
self,
backend: StorageProvider,
store_local: bool,
store_synchronous: bool,
store_remote: bool,
):
self.backend = backend
self.store_local = store_local
self.store_synchronous = store_synchronous
self.store_remote = store_remote
def __str__(self) -> str:
return "StorageProviderWrapper[%s]" % (self.backend,)
async def store_file(self, path: str, file_info: FileInfo) -> None:
if not file_info.server_name and not self.store_local:
return None
if file_info.server_name and not self.store_remote:
return None
if file_info.url_cache:
# The URL preview cache is short lived and not worth offloading or
# backing up.
return None
if self.store_synchronous:
# store_file is supposed to return an Awaitable, but guard
# against improper implementations.
await maybe_awaitable(self.backend.store_file(path, file_info)) # type: ignore
else:
# TODO: Handle errors.
async def store() -> None:
try:
return await maybe_awaitable(
self.backend.store_file(path, file_info)
)
except Exception:
logger.exception("Error storing file")
run_in_background(store)
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
if file_info.url_cache:
# Files in the URL preview cache definitely aren't stored here,
# so avoid any potentially slow I/O or network access.
return None
# fetch is supposed to return an Awaitable, but guard
# against improper implementations.
return await maybe_awaitable(self.backend.fetch(path, file_info))
class FileStorageProviderBackend(StorageProvider):
"""A storage provider that stores files in a directory on a filesystem.
Args:
hs
config: The config returned by `parse_config`.
"""
def __init__(self, hs: "HomeServer", config: str):
self.hs = hs
self.cache_directory = hs.config.media.media_store_path
self.base_directory = config
def __str__(self) -> str:
return "FileStorageProviderBackend[%s]" % (self.base_directory,)
async def store_file(self, path: str, file_info: FileInfo) -> None:
"""See StorageProvider.store_file"""
primary_fname = os.path.join(self.cache_directory, path)
backup_fname = os.path.join(self.base_directory, path)
dirname = os.path.dirname(backup_fname)
os.makedirs(dirname, exist_ok=True)
# mypy needs help inferring the type of the second parameter, which is generic
shutil_copyfile: Callable[[str, str], str] = shutil.copyfile
await defer_to_thread(
self.hs.get_reactor(),
shutil_copyfile,
primary_fname,
backup_fname,
)
async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
"""See StorageProvider.fetch"""
backup_fname = os.path.join(self.base_directory, path)
if os.path.isfile(backup_fname):
return FileResponder(open(backup_fname, "rb"))
return None
@staticmethod
def parse_config(config: dict) -> str:
"""Called on startup to parse config supplied. This should parse
the config and raise if there is a problem.
The returned value is passed into the constructor.
In this case we only care about a single param, the directory, so let's
just pull that out.
"""
return Config.ensure_directory(config["directory"])
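A minimal sketch (hypothetical backend, not part of the diff) showing the shape a third-party StorageProvider implementation takes; it discards stores and never serves fetches:
class NullStorageProviderBackend(StorageProvider):
    """Skeleton provider: accepts no config, stores nothing, serves nothing."""

    def __init__(self, hs: "HomeServer", config: None):
        self.hs = hs

    def __str__(self) -> str:
        return "NullStorageProviderBackend"

    async def store_file(self, path: str, file_info: FileInfo) -> None:
        logger.info("Discarding %s", path)

    async def fetch(self, path: str, file_info: FileInfo) -> Optional[Responder]:
        return None

    @staticmethod
    def parse_config(config: dict) -> None:
        return None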

View File

@@ -27,7 +27,9 @@ from synapse.http.server import (
)
from synapse.http.servlet import parse_integer, parse_string
from synapse.http.site import SynapseRequest
from synapse.media._base import (
from synapse.rest.media.v1.media_storage import MediaStorage
from ._base import (
FileInfo,
ThumbnailInfo,
parse_media_id,
@@ -35,10 +37,9 @@ from synapse.media._base import (
respond_with_file,
respond_with_responder,
)
from synapse.media.media_storage import MediaStorage
if TYPE_CHECKING:
from synapse.media.media_repository import MediaRepository
from synapse.rest.media.v1.media_repository import MediaRepository
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
@@ -68,8 +69,7 @@ class ThumbnailResource(DirectServeJsonResource):
width = parse_integer(request, "width", required=True)
height = parse_integer(request, "height", required=True)
method = parse_string(request, "method", "scale")
# TODO Parse the Accept header to get an prioritised list of thumbnail types.
m_type = "image/png"
m_type = parse_string(request, "type", "image/png")
if server_name == self.server_name:
if self.dynamic_thumbnails:

View File

@@ -20,10 +20,10 @@ from synapse.api.errors import Codes, SynapseError
from synapse.http.server import DirectServeJsonResource, respond_with_json
from synapse.http.servlet import parse_bytes_from_args
from synapse.http.site import SynapseRequest
from synapse.media.media_storage import SpamMediaException
from synapse.rest.media.v1.media_storage import SpamMediaException
if TYPE_CHECKING:
from synapse.media.media_repository import MediaRepository
from synapse.rest.media.v1.media_repository import MediaRepository
from synapse.server import HomeServer
logger = logging.getLogger(__name__)

View File

@@ -105,7 +105,6 @@ from synapse.handlers.typing import FollowerTypingHandler, TypingWriterHandler
from synapse.handlers.user_directory import UserDirectoryHandler
from synapse.http.client import InsecureInterceptableContextFactory, SimpleHttpClient
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
from synapse.media.media_repository import MediaRepository
from synapse.metrics.common_usage_metrics import CommonUsageMetricsManager
from synapse.module_api import ModuleApi
from synapse.notifier import Notifier, ReplicationNotifier
@@ -116,7 +115,10 @@ from synapse.replication.tcp.external_cache import ExternalCache
from synapse.replication.tcp.handler import ReplicationCommandHandler
from synapse.replication.tcp.resource import ReplicationStreamer
from synapse.replication.tcp.streams import STREAMS_MAP, Stream
from synapse.rest.media.media_repository_resource import MediaRepositoryResource
from synapse.rest.media.v1.media_repository import (
MediaRepository,
MediaRepositoryResource,
)
from synapse.server_notices.server_notices_manager import ServerNoticesManager
from synapse.server_notices.server_notices_sender import ServerNoticesSender
from synapse.server_notices.worker_server_notices_sender import (

View File

@@ -43,7 +43,7 @@ from .event_federation import EventFederationStore
from .event_push_actions import EventPushActionsStore
from .events_bg_updates import EventsBackgroundUpdatesStore
from .events_forward_extremities import EventForwardExtremitiesStore
from .filtering import FilteringWorkerStore
from .filtering import FilteringStore
from .keys import KeyStore
from .lock import LockStore
from .media_repository import MediaRepositoryStore
@@ -99,7 +99,7 @@ class DataStore(
EventFederationStore,
MediaRepositoryStore,
RejectionsStore,
FilteringWorkerStore,
FilteringStore,
PusherStore,
PushRuleStore,
ApplicationServiceTransactionStore,

View File

@@ -237,37 +237,6 @@ class AccountDataWorkerStore(PushRulesWorkerStore, CacheInvalidationWorkerStore)
else:
return None
async def get_latest_stream_id_for_global_account_data_by_type_for_user(
self, user_id: str, data_type: str
) -> Optional[int]:
"""
Returns:
The stream ID of the account data,
or None if there is no such account data.
"""
def get_latest_stream_id_for_global_account_data_by_type_for_user_txn(
txn: LoggingTransaction,
) -> Optional[int]:
sql = """
SELECT stream_id FROM account_data
WHERE user_id = ? AND account_data_type = ?
ORDER BY stream_id DESC
LIMIT 1
"""
txn.execute(sql, (user_id, data_type))
row = txn.fetchone()
if row:
return row[0]
else:
return None
return await self.db_pool.runInteraction(
"get_latest_stream_id_for_global_account_data_by_type_for_user",
get_latest_stream_id_for_global_account_data_by_type_for_user_txn,
)
@cached(num_args=2, tree=True)
async def get_account_data_for_room(
self, user_id: str, room_id: str

View File

@@ -266,6 +266,9 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
if relates_to:
self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,))
self._attempt_to_invalidate_cache("get_references_for_event", (relates_to,))
self._attempt_to_invalidate_cache(
"get_aggregation_groups_for_event", (relates_to,)
)
self._attempt_to_invalidate_cache("get_applicable_edit", (relates_to,))
self._attempt_to_invalidate_cache("get_thread_summary", (relates_to,))
self._attempt_to_invalidate_cache("get_thread_participated", (relates_to,))

View File

@@ -2024,6 +2024,10 @@ class PersistEventsStore:
self.store._invalidate_cache_and_stream(
txn, self.store.get_relations_for_event, (redacted_relates_to,)
)
if rel_type == RelationTypes.ANNOTATION:
self.store._invalidate_cache_and_stream(
txn, self.store.get_aggregation_groups_for_event, (redacted_relates_to,)
)
if rel_type == RelationTypes.REFERENCE:
self.store._invalidate_cache_and_stream(
txn, self.store.get_references_for_event, (redacted_relates_to,)

View File

@@ -1219,6 +1219,9 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
self._invalidate_cache_and_stream( # type: ignore[attr-defined]
txn, self.get_relations_for_event, cache_tuple # type: ignore[attr-defined]
)
self._invalidate_cache_and_stream( # type: ignore[attr-defined]
txn, self.get_aggregation_groups_for_event, cache_tuple # type: ignore[attr-defined]
)
self._invalidate_cache_and_stream( # type: ignore[attr-defined]
txn, self.get_thread_summary, cache_tuple # type: ignore[attr-defined]
)

View File

@@ -17,7 +17,7 @@ from typing import Optional, Tuple, Union, cast
from canonicaljson import encode_canonical_json
from synapse.api.errors import Codes, StoreError, SynapseError
from synapse.api.errors import Codes, SynapseError
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import LoggingTransaction
from synapse.types import JsonDict
@@ -46,6 +46,8 @@ class FilteringWorkerStore(SQLBaseStore):
return db_to_json(def_json)
class FilteringStore(FilteringWorkerStore):
async def add_user_filter(self, user_localpart: str, user_filter: JsonDict) -> int:
def_json = encode_canonical_json(user_filter)
@@ -77,23 +79,4 @@ class FilteringWorkerStore(SQLBaseStore):
return filter_id
attempts = 0
while True:
# Try a few times.
# This is technically needed if a user tries to create two filters at once,
# leading to two concurrent transactions.
# The failure case would be:
# - SELECT filter_id ... filter_json = ? → both transactions return no rows
# - SELECT MAX(filter_id) ... → both transactions return e.g. 5
# - INSERT INTO ... → both transactions insert filter_id = 6
# One of the transactions will commit. The other will get a unique key
# constraint violation error (IntegrityError). This is not the same as a
# serialisability violation, which would be automatically retried by
# `runInteraction`.
try:
return await self.db_pool.runInteraction("add_user_filter", _do_txn)
except self.db_pool.engine.module.IntegrityError:
attempts += 1
if attempts >= 5:
raise StoreError(500, "Couldn't generate a filter ID.")
return await self.db_pool.runInteraction("add_user_filter", _do_txn)
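The comment block above spells out why filter-ID allocation can race: two concurrent transactions can both see no existing row, both compute MAX(filter_id) = 5, and both try to insert 6; one then fails with a unique-constraint IntegrityError rather than a serialisation error that the database layer would retry automatically. A self-contained sketch of the bounded-retry pattern that loop implements, with stand-in names rather than Synapse's database API:

class FakeIntegrityError(Exception):
    """Stands in for the driver's IntegrityError raised on a unique-key collision."""

def make_insert_txn(collisions: int):
    """Return a fake transaction that loses the race `collisions` times, then succeeds."""
    state = {"left": collisions}

    def insert_filter_txn() -> int:
        if state["left"] > 0:
            state["left"] -= 1
            raise FakeIntegrityError("duplicate filter_id")
        return 6

    return insert_filter_txn

def add_user_filter(run_txn, max_attempts: int = 5) -> int:
    attempts = 0
    while True:
        try:
            return run_txn()
        except FakeIntegrityError:
            attempts += 1
            if attempts >= max_attempts:
                raise RuntimeError("Couldn't generate a filter ID.")

assert add_user_filter(make_insert_txn(collisions=2)) == 6  # third attempt wins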

Some files were not shown because too many files have changed in this diff.