Compare commits

6 Commits

Author         | SHA1       | Message                                           | Date
Travis Ralston | 09649fce2a | Merge branch 'develop' into travis/auth-always-on | 2024-11-18 09:19:18 -07:00
Travis Ralston | 0f8d3a66a6 | Merge branch 'develop' into travis/auth-always-on | 2024-11-07 09:57:41 -07:00
Travis Ralston | e90e6b80a8 | Merge branch 'develop' into travis/auth-always-on | 2024-10-30 11:00:13 -06:00
Travis Ralston | 597fd92c98 | appease linter?                                   | 2024-10-30 10:58:34 -06:00
Travis Ralston | 0f65bcc26b | changelog                                         | 2024-10-30 10:56:24 -06:00
Travis Ralston | 4a47ad6638 | Enable authenticated media by default             | 2024-10-30 09:59:28 -06:00
33 changed files with 64 additions and 571 deletions

Cargo.lock (generated)

@@ -505,9 +505,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.133"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = [
"itoa",
"memchr",

@@ -1 +1 @@
Enforce authenticated media by default. Administrators can revert this by configuring `enable_authenticated_media` to `false`. In a future release of Synapse, this option will be removed and become always-on.
Set `enable_authenticated_media` to `true` by default. In a future release of Synapse, this option will be removed and become always-on.
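For administrators who want to keep the old behaviour, a minimal opt-out sketch (the option name is exactly the one referenced in the changelog lines above):

```yaml
# homeserver.yaml: revert to serving media over the unauthenticated endpoints
enable_authenticated_media: false
```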

@@ -1 +0,0 @@
Support new package name of PyPI package `python-multipart` 0.0.13 so that distro packagers do not need to work around name conflict with PyPI package `multipart`.

@@ -1 +0,0 @@
Add a one-off task to delete old one-time-keys, to guard against us having old OTKs in the database that the client has long forgotten about.

@@ -1 +0,0 @@
Raise setuptools_rust version cap to 1.10.2.

@@ -1 +0,0 @@
Enable encrypted appservice related experimental features in the complement docker image.

@@ -1 +0,0 @@
Speed up slow initial sliding syncs on large servers.

@@ -1 +0,0 @@
Add experimental option to pass through unsigned data in `/keys/query` responses.

@@ -104,18 +104,6 @@ experimental_features:
msc3967_enabled: true
# Expose a room summary for public rooms
msc3266_enabled: true
# Send to-device messages to application services
msc2409_to_device_messages_enabled: true
# Allow application services to masquerade devices
msc3202_device_masquerading: true
# Sending device list changes, one-time key counts and fallback key usage to application services
msc3202_transaction_extensions: true
# Proxy OTK claim requests to exclusive ASes
msc3983_appservice_otk_claims: true
# Proxy key queries to exclusive ASes
msc3984_appservice_key_query: true
# Pass through unsigned device data in /keys/query
msc4229_enabled: true
server_notices:
system_mxid_localpart: _server

@@ -128,29 +128,6 @@ removing the experimental support for it in this release.
The `experimental_features.msc3886_endpoint` configuration option has
been removed.

## Authenticated media is now enforced by default

The [`enable_authenticated_media`] configuration option now defaults to true.
This means that clients and remote (federated) homeservers now need to use
the authenticated media endpoints in order to download media from your
homeserver.

As an exception, existing media that was stored on the server prior to
this option changing to `true` will still be accessible over the
unauthenticated endpoints.

The matrix.org homeserver has already been running with this option enabled
since September 2024, so most common clients and homeservers should already
be compatible.

With that said, administrators who wish to disable this feature for broader
compatibility can still do so by manually configuring
`enable_authenticated_media: False`.

[`enable_authenticated_media`]: usage/configuration/config_documentation.md#enable_authenticated_media

# Upgrading to v1.119.0

## Minimum supported Python version
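In practice, "authenticated media" means clients fetch media via the newer client-server endpoint with an access token, rather than the legacy unauthenticated endpoint; both URL forms appear verbatim in the test changes later in this compare. A rough sketch (hostname, media ID, and token are placeholders):

```bash
# Legacy, unauthenticated endpoint (no longer served for newly-uploaded media by default):
curl "https://example.org/_matrix/media/v3/download/example.org/<mediaId>"

# Authenticated replacement, which requires a client access token:
curl -H "Authorization: Bearer <access_token>" \
  "https://example.org/_matrix/client/v1/media/download/example.org/<mediaId>"
```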

@@ -1909,8 +1909,6 @@ will perpetually be available over the legacy, unauthenticated endpoint, even af
This is for backwards compatibility with older clients and homeservers that do not yet support requesting authenticated media;
those older clients or homeservers will not be cut off from media they can already see.

_Changed in Synapse 1.120:_ This option now defaults to `True` when not set, whereas before this version it defaulted to `False`.

Example configuration:
```yaml
enable_authenticated_media: false
```

flake.lock (generated)

@@ -56,6 +56,24 @@
"type": "github"
}
},
"flake-utils_2": {
"inputs": {
"systems": "systems_2"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"gitignore": {
"inputs": {
"nixpkgs": [
@@ -184,11 +202,11 @@
},
"nixpkgs_3": {
"locked": {
"lastModified": 1728538411,
"narHash": "sha256-f0SBJz1eZ2yOuKUr5CA9BHULGXVSn6miBuUWdTyhUhU=",
"lastModified": 1681358109,
"narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b69de56fac8c2b6f8fd27f2eca01dcda8e0a4221",
"rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9",
"type": "github"
},
"original": {
@@ -231,19 +249,20 @@
"devenv": "devenv",
"nixpkgs": "nixpkgs_2",
"rust-overlay": "rust-overlay",
"systems": "systems_2"
"systems": "systems_3"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_3"
},
"locked": {
"lastModified": 1731897198,
"narHash": "sha256-Ou7vLETSKwmE/HRQz4cImXXJBr/k9gp4J4z/PF8LzTE=",
"lastModified": 1693966243,
"narHash": "sha256-a2CA1aMIPE67JWSVIGoGtD3EGlFdK9+OlJQs0FOWCKY=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "0be641045af6d8666c11c2c40e45ffc9667839b5",
"rev": "a8b4bb4cbb744baaabc3e69099f352f99164e2c1",
"type": "github"
},
"original": {
@@ -281,6 +300,21 @@
"repo": "default",
"type": "github"
}
},
"systems_3": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",

@@ -82,7 +82,7 @@
#
# NOTE: We currently need to set the Rust version unnecessarily high
# in order to work around https://github.com/matrix-org/synapse/issues/15939
(rust-bin.stable."1.82.0".default.override {
(rust-bin.stable."1.71.1".default.override {
# Additionally install the "rust-src" extension to allow diving into the
# Rust source code in an IDE (rust-analyzer will also make use of it).
extensions = [ "rust-src" ];
@@ -205,7 +205,7 @@
# corresponding Nix packages on https://search.nixos.org/packages.
#
# This was done until `./install-deps.pl --dryrun` produced no output.
env.PERL5LIB = "${with pkgs.perl538Packages; makePerlPath [
env.PERL5LIB = "${with pkgs.perl536Packages; makePerlPath [
DBI
ClassMethodModifiers
CryptEd25519

@@ -370,7 +370,7 @@ tomli = ">=1.2.3"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.10.2"]
requires = ["poetry-core>=1.1.0,<=1.9.1", "setuptools_rust>=1.3,<=1.8.1"]
build-backend = "poetry.core.masonry.api"

@@ -225,7 +225,6 @@ test_packages=(
./tests/msc3902
./tests/msc3967
./tests/msc4140
./tests/msc4229
)
# Enable dirty runs, so tests will reuse the same container where possible.

@@ -88,7 +88,6 @@ from synapse.storage.databases.main.relations import RelationsWorkerStore
from synapse.storage.databases.main.room import RoomBackgroundUpdateStore
from synapse.storage.databases.main.roommember import RoomMemberBackgroundUpdateStore
from synapse.storage.databases.main.search import SearchBackgroundUpdateStore
from synapse.storage.databases.main.sliding_sync import SlidingSyncStore
from synapse.storage.databases.main.state import MainStateBackgroundUpdateStore
from synapse.storage.databases.main.stats import StatsStore
from synapse.storage.databases.main.user_directory import (
@@ -256,7 +255,6 @@ class Store(
ReceiptsBackgroundUpdateStore,
RelationsWorkerStore,
EventFederationWorkerStore,
SlidingSyncStore,
):
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

@@ -448,6 +448,3 @@ class ExperimentalConfig(Config):
# MSC4222: Adding `state_after` to sync v2
self.msc4222_enabled: bool = experimental.get("msc4222_enabled", False)
# MSC4229: Pass through `unsigned` data from `/keys/upload` to `/keys/query`
self.msc4229_enabled: bool = experimental.get("msc4229_enabled", False)

@@ -39,8 +39,6 @@ from synapse.replication.http.devices import ReplicationUploadKeysForUserRestSer
from synapse.types import (
JsonDict,
JsonMapping,
ScheduledTask,
TaskStatus,
UserID,
get_domain_from_id,
get_verify_key_from_cross_signing_key,
@@ -72,7 +70,6 @@ class E2eKeysHandler:
self.is_mine = hs.is_mine
self.clock = hs.get_clock()
self._worker_lock_handler = hs.get_worker_locks_handler()
self._task_scheduler = hs.get_task_scheduler()
federation_registry = hs.get_federation_registry()
@@ -119,10 +116,6 @@ class E2eKeysHandler:
hs.config.experimental.msc3984_appservice_key_query
)
self._task_scheduler.register_action(
self._delete_old_one_time_keys_task, "delete_old_otks"
)
@trace
@cancellable
async def query_devices(
@@ -542,9 +535,7 @@ class E2eKeysHandler:
result_dict[user_id] = {}
results = await self.store.get_e2e_device_keys_for_cs_api(
local_query,
include_displaynames,
include_uploaded_unsigned_data=self.config.experimental.msc4229_enabled,
local_query, include_displaynames
)
# Check if the application services have any additional results.
@@ -1583,45 +1574,6 @@ class E2eKeysHandler:
return True
return False
async def _delete_old_one_time_keys_task(
self, task: ScheduledTask
) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
"""Scheduler task to delete old one time keys.
Until Synapse 1.119, Synapse used to issue one-time-keys in a random order, leading to the possibility
that it could still have old OTKs that the client has dropped. This task is scheduled exactly once
by a database schema delta file, and it clears out old one-time-keys that look like they came from libolm.
"""
last_user = task.result.get("from_user", "") if task.result else ""
while True:
# We process users in batches of 100
users, rowcount = await self.store.delete_old_otks_for_next_user_batch(
last_user, 100
)
if len(users) == 0:
# We're done!
return TaskStatus.COMPLETE, None, None
logger.debug(
"Deleted %i old one-time-keys for users '%s'..'%s'",
rowcount,
users[0],
users[-1],
)
last_user = users[-1]
# Store our progress
await self._task_scheduler.update_task(
task.id, result={"from_user": last_user}
)
# Sleep a little before doing the next user.
#
# matrix.org has about 15M users in the e2e_one_time_keys_json table
# (comprising 20M devices). We want this to take about a week, so we need
# to do about one batch of 100 users every 4 seconds.
await self.clock.sleep(4)
def _check_cross_signing_key(
key: JsonDict, user_id: str, key_type: str, signing_key: Optional[VerifyKey] = None
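The pacing figures in the removed comment above check out; a quick sanity-check sketch (numbers taken from the comment itself):

```python
users = 15_000_000   # approximate users in e2e_one_time_keys_json (per the comment)
batch_size = 100     # users processed per batch
sleep_secs = 4       # pause between batches

batches = users / batch_size           # 150,000 batches
days = batches * sleep_secs / 86_400   # ~6.9 days, i.e. "about a week"
print(f"{days:.1f} days")
```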

@@ -36,6 +36,7 @@ from typing import (
)
import attr
import multipart
import treq
from canonicaljson import encode_canonical_json
from netaddr import AddrFormatError, IPAddress, IPSet
@@ -92,20 +93,6 @@ from synapse.util.async_helpers import timeout_deferred
if TYPE_CHECKING:
from synapse.server import HomeServer
# Support both import names for the `python-multipart` (PyPI) library,
# which renamed its import package from `multipart` to `python_multipart`
# in 0.0.13 (though it still supports the old import name for compatibility).
# Note that the `multipart` import name conflicts with the separate `multipart`
# package on PyPI, so we should prefer importing from `python_multipart` when possible.
try:
from python_multipart import MultipartParser
if TYPE_CHECKING:
from python_multipart import multipart
except ImportError:
from multipart import MultipartParser # type: ignore[no-redef]
logger = logging.getLogger(__name__)
outgoing_requests_counter = Counter("synapse_http_client_requests", "", ["method"])
@@ -1052,7 +1039,7 @@ class _MultipartParserProtocol(protocol.Protocol):
self.deferred = deferred
self.boundary = boundary
self.max_length = max_length
self.parser: Optional[MultipartParser] = None
self.parser: Optional[multipart.MultipartParser] = None
self.multipart_response = MultipartResponse()
self.has_redirect = False
self.in_json = False
@@ -1110,12 +1097,12 @@ class _MultipartParserProtocol(protocol.Protocol):
self.deferred.errback()
self.file_length += end - start
callbacks: "multipart.MultipartCallbacks" = {
callbacks: "multipart.multipart.MultipartCallbacks" = {
"on_header_field": on_header_field,
"on_header_value": on_header_value,
"on_part_data": on_part_data,
}
self.parser = MultipartParser(self.boundary, callbacks)
self.parser = multipart.MultipartParser(self.boundary, callbacks)
self.total_length += len(incoming_data)
if self.max_length is not None and self.total_length >= self.max_length:

@@ -220,15 +220,12 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
self,
query_list: Collection[Tuple[str, Optional[str]]],
include_displaynames: bool = True,
include_uploaded_unsigned_data: bool = False,
) -> Dict[str, Dict[str, JsonDict]]:
"""Fetch a list of device keys, formatted suitably for the C/S API.
Args:
query_list: List of pairs of user_ids and device_ids.
include_displaynames: Whether to include the displayname of returned devices
(if one exists).
include_uploaded_unsigned_data: Whether to include uploaded `unsigned` data
in the response
Returns:
Dict mapping from user-id to dict mapping from device_id to
key data. The key data will be a dict in the same format as the
@@ -250,13 +247,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
if r is None:
continue
# If there was already an `unsigned` dict in the uploaded key, keep it.
# Otherwise, create a new one.
if not include_uploaded_unsigned_data or not isinstance(
r.get("unsigned"), dict
):
r["unsigned"] = {}
r["unsigned"] = {}
if include_displaynames:
# Include the device's display name in the "unsigned" dictionary
display_name = device_info.display_name
@@ -1462,54 +1453,6 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
impl,
)
async def delete_old_otks_for_next_user_batch(
self, after_user_id: str, number_of_users: int
) -> Tuple[List[str], int]:
"""Deletes old OTKs belonging to the next batch of users
Returns:
`(users, rows)`, where:
* `users` is the user IDs of the updated users. An empty list if we are done.
* `rows` is the number of deleted rows
"""
def impl(txn: LoggingTransaction) -> Tuple[List[str], int]:
# Find a batch of users
txn.execute(
"""
SELECT DISTINCT(user_id) FROM e2e_one_time_keys_json
WHERE user_id > ?
ORDER BY user_id
LIMIT ?
""",
(after_user_id, number_of_users),
)
users = [row[0] for row in txn.fetchall()]
if len(users) == 0:
return users, 0
# Delete any old OTKs belonging to those users.
#
# We only actually consider OTKs whose key ID is 6 characters long. These
# keys were likely made by libolm rather than Vodozemac; libolm only kept
# 100 private OTKs, so was far more vulnerable than Vodozemac to throwing
# away keys prematurely.
clause, args = make_in_list_sql_clause(
txn.database_engine, "user_id", users
)
sql = f"""
DELETE FROM e2e_one_time_keys_json
WHERE {clause} AND ts_added_ms < ? AND length(key_id) = 6
"""
args.append(self._clock.time_msec() - (7 * 24 * 3600 * 1000))
txn.execute(sql, args)
return users, txn.rowcount
return await self.db_pool.runInteraction(
"delete_old_otks_for_next_user_batch", impl
)
class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
def __init__(

@@ -21,11 +21,7 @@ import attr
from synapse.api.errors import SlidingSyncUnknownPosition
from synapse.logging.opentracing import log_kv
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import (
DatabasePool,
LoggingDatabaseConnection,
LoggingTransaction,
)
from synapse.storage.database import LoggingTransaction
from synapse.types import MultiWriterStreamToken, RoomStreamToken
from synapse.types.handlers.sliding_sync import (
HaveSentRoom,
@@ -39,28 +35,12 @@ from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached
if TYPE_CHECKING:
from synapse.server import HomeServer
from synapse.storage.databases.main import DataStore
logger = logging.getLogger(__name__)
class SlidingSyncStore(SQLBaseStore):
def __init__(
self,
database: DatabasePool,
db_conn: LoggingDatabaseConnection,
hs: "HomeServer",
):
super().__init__(database, db_conn, hs)
self.db_pool.updates.register_background_index_update(
update_name="sliding_sync_connection_room_configs_required_state_id_idx",
index_name="sliding_sync_connection_room_configs_required_state_id_idx",
table="sliding_sync_connection_room_configs",
columns=("required_state_id",),
)
async def get_latest_bump_stamp_for_room(
self,
room_id: str,

@@ -1,19 +0,0 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2024 New Vector, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.
-- Until Synapse 1.119, Synapse used to issue one-time-keys in a random order, leading to the possibility
-- that it could still have old OTKs that the client has dropped.
--
-- We create a scheduled task which will drop old OTKs, to flush them out.
INSERT INTO scheduled_tasks(id, action, status, timestamp)
VALUES ('delete_old_otks_task', 'delete_old_otks', 'scheduled', extract(epoch from current_timestamp) * 1000);

@@ -1,19 +0,0 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2024 New Vector, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.
-- Until Synapse 1.119, Synapse used to issue one-time-keys in a random order, leading to the possibility
-- that it could still have old OTKs that the client has dropped.
--
-- We create a scheduled task which will drop old OTKs, to flush them out.
INSERT INTO scheduled_tasks(id, action, status, timestamp)
VALUES ('delete_old_otks_task', 'delete_old_otks', 'scheduled', strftime('%s', 'now') * 1000);
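Both delta files above schedule the same `delete_old_otks` task; only the current-time-in-milliseconds expression differs by database dialect. Side by side (note that SQLite's `strftime('%s', ...)` has whole-second precision only):

```sql
-- Postgres: seconds since epoch (with fraction), scaled to milliseconds
SELECT extract(epoch from current_timestamp) * 1000;

-- SQLite: whole seconds since epoch, scaled to milliseconds
SELECT strftime('%s', 'now') * 1000;
```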

@@ -1,20 +0,0 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2024 New Vector, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.
-- Add an index on sliding_sync_connection_room_configs(required_state_id), so
-- that when we delete entries in `sliding_sync_connection_required_state` it's
-- efficient for Postgres to check they've been deleted from
-- `sliding_sync_connection_room_configs` too
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
(8805, 'sliding_sync_connection_room_configs_required_state_id_idx', '{}');
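This background update pairs with the `register_background_index_update` call removed from `SlidingSyncStore` earlier in this compare; once the update runs, its effect should be roughly the DDL below (a sketch: on Postgres, Synapse builds such indexes `CONCURRENTLY`):

```sql
CREATE INDEX sliding_sync_connection_room_configs_required_state_id_idx
    ON sliding_sync_connection_room_configs (required_state_id);
```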

@@ -19,7 +19,6 @@
# [This file includes modifications made by New Vector Limited]
#
#
import time
from typing import Dict, Iterable
from unittest import mock
@@ -1827,72 +1826,3 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase):
)
self.assertIs(exists, True)
self.assertIs(replaceable_without_uia, False)
def test_delete_old_one_time_keys(self) -> None:
"""Test the db migration that clears out old OTKs"""
# We upload two sets of keys, one just over a week ago, and one just less than
# a week ago. Each batch contains some keys that match the deletion pattern
# (key IDs of 6 chars), and some that do not.
#
# Finally, set the scheduled task going, and check what gets deleted.
user_id = "@user000:" + self.hs.hostname
device_id = "xyz"
# The scheduled task should be for "now" in real, wallclock time, so
# set the test reactor to just over a week ago.
self.reactor.advance(time.time() - 7.5 * 24 * 3600)
# Upload some keys
self.get_success(
self.handler.upload_keys_for_user(
user_id,
device_id,
{
"one_time_keys": {
# some keys to delete
"alg1:AAAAAA": "key1",
"alg2:AAAAAB": {"key": "key2", "signatures": {"k1": "sig1"}},
# A key to *not* delete
"alg2:AAAAAAAAAA": {"key": "key3"},
}
},
)
)
# A day passes
self.reactor.advance(24 * 3600)
# Upload some more keys
self.get_success(
self.handler.upload_keys_for_user(
user_id,
device_id,
{
"one_time_keys": {
# some keys which match the pattern
"alg1:BAAAAA": "key1",
"alg2:BAAAAB": {"key": "key2", "signatures": {"k1": "sig1"}},
# A key to *not* delete
"alg2:BAAAAAAAAA": {"key": "key3"},
}
},
)
)
# The rest of the week passes, which should set the scheduled task going.
self.reactor.advance(6.5 * 24 * 3600)
# Check what we're left with in the database
remaining_key_ids = {
row[0]
for row in self.get_success(
self.handler.store.db_pool.simple_select_list(
"e2e_one_time_keys_json", None, ["key_id"]
)
)
}
self.assertEqual(
remaining_key_ids, {"AAAAAAAAAA", "BAAAAA", "BAAAAB", "BAAAAAAAAA"}
)

@@ -419,11 +419,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
return channel
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_handle_missing_content_type(self) -> None:
channel = self._req(
b"attachment; filename=out" + self.test_image.extension,
@@ -435,11 +430,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
headers.getRawHeaders(b"Content-Type"), [b"application/octet-stream"]
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_disposition_filename_ascii(self) -> None:
"""
If the filename is filename=<ascii> then Synapse will decode it as an
@@ -460,11 +450,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
],
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_disposition_filenamestar_utf8escaped(self) -> None:
"""
If the filename is filename=*utf8''<utf8 escaped> then Synapse will
@@ -490,11 +475,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
],
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_disposition_none(self) -> None:
"""
If there is no filename, Content-Disposition should only
@@ -511,11 +491,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
[b"inline" if self.test_image.is_inline else b"attachment"],
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_thumbnail_crop(self) -> None:
"""Test that a cropped remote thumbnail is available."""
self._test_thumbnail(
@@ -525,11 +500,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
unable_to_thumbnail=self.test_image.unable_to_thumbnail,
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_thumbnail_scale(self) -> None:
"""Test that a scaled remote thumbnail is available."""
self._test_thumbnail(
@@ -539,11 +509,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
unable_to_thumbnail=self.test_image.unable_to_thumbnail,
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_invalid_type(self) -> None:
"""An invalid thumbnail type is never available."""
self._test_thumbnail(
@@ -554,10 +519,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
)
@unittest.override_config(
{
"thumbnail_sizes": [{"width": 32, "height": 32, "method": "scale"}],
"enable_authenticated_media": False,
},
{"thumbnail_sizes": [{"width": 32, "height": 32, "method": "scale"}]}
)
def test_no_thumbnail_crop(self) -> None:
"""
@@ -571,10 +533,7 @@ class MediaRepoTests(unittest.HomeserverTestCase):
)
@unittest.override_config(
{
"thumbnail_sizes": [{"width": 32, "height": 32, "method": "crop"}],
"enable_authenticated_media": False,
}
{"thumbnail_sizes": [{"width": 32, "height": 32, "method": "crop"}]}
)
def test_no_thumbnail_scale(self) -> None:
"""
@@ -587,11 +546,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
unable_to_thumbnail=self.test_image.unable_to_thumbnail,
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_thumbnail_repeated_thumbnail(self) -> None:
"""Test that fetching the same thumbnail works, and deleting the on disk
thumbnail regenerates it.
@@ -766,11 +720,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
)
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_x_robots_tag_header(self) -> None:
"""
Tests that the `X-Robots-Tag` header is present, which informs web crawlers
@@ -784,11 +733,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
[b"noindex, nofollow, noarchive, noimageindex"],
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_cross_origin_resource_policy_header(self) -> None:
"""
Test that the Cross-Origin-Resource-Policy header is set to "cross-origin"
@@ -803,11 +747,6 @@ class MediaRepoTests(unittest.HomeserverTestCase):
[b"cross-origin"],
)
@unittest.override_config(
{
"enable_authenticated_media": False,
}
)
def test_unknown_v3_endpoint(self) -> None:
"""
If the v3 endpoint fails, try the r0 one.
@@ -1046,11 +985,6 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
d.callback(52428800)
return d
@override_config(
{
"enable_authenticated_media": False,
}
)
@patch(
"synapse.http.matrixfederationclient.read_body_with_max_size",
read_body_with_max_size_30MiB,
@@ -1126,7 +1060,6 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
{
"remote_media_download_per_second": "50M",
"remote_media_download_burst_count": "50M",
"enable_authenticated_media": False,
}
)
@patch(
@@ -1186,12 +1119,7 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
)
assert channel.code == 200
@override_config(
{
"remote_media_download_burst_count": "87M",
"enable_authenticated_media": False,
}
)
@override_config({"remote_media_download_burst_count": "87M"})
@patch(
"synapse.http.matrixfederationclient.read_body_with_max_size",
read_body_with_max_size_30MiB,
@@ -1231,7 +1159,7 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
)
assert channel2.code == 429
@override_config({"max_upload_size": "29M", "enable_authenticated_media": False})
@override_config({"max_upload_size": "29M"})
@patch(
"synapse.http.matrixfederationclient.read_body_with_max_size",
read_body_with_max_size_30MiB,

@@ -40,7 +40,6 @@ from tests.http import (
from tests.replication._base import BaseMultiWorkerStreamTestCase
from tests.server import FakeChannel, FakeTransport, make_request
from tests.test_utils import SMALL_PNG
from tests.unittest import override_config
logger = logging.getLogger(__name__)
@@ -149,7 +148,6 @@ class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase):
return channel, request
@override_config({"enable_authenticated_media": False})
def test_basic(self) -> None:
"""Test basic fetching of remote media from a single worker."""
hs1 = self.make_worker_hs("synapse.app.generic_worker")
@@ -166,7 +164,6 @@ class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase):
self.assertEqual(channel.code, 200)
self.assertEqual(channel.result["body"], b"Hello!")
@override_config({"enable_authenticated_media": False})
def test_download_simple_file_race(self) -> None:
"""Test that fetching remote media from two different processes at the
same time works.
@@ -206,7 +203,6 @@ class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase):
# We expect only one new file to have been persisted.
self.assertEqual(start_count + 1, self._count_remote_media())
@override_config({"enable_authenticated_media": False})
def test_download_image_race(self) -> None:
"""Test that fetching remote *images* from two different processes at
the same time works.

@@ -30,7 +30,7 @@ from twisted.web.resource import Resource
import synapse.rest.admin
from synapse.http.server import JsonResource
from synapse.rest.admin import VersionServlet
from synapse.rest.client import login, media, room
from synapse.rest.client import login, room
from synapse.server import HomeServer
from synapse.util import Clock
@@ -60,7 +60,6 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,
login.register_servlets,
media.register_servlets,
room.register_servlets,
]
@@ -75,7 +74,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
"""Ensure a piece of media is quarantined when trying to access it."""
channel = self.make_request(
"GET",
f"/_matrix/client/v1/media/download/{server_and_media_id}",
f"/_matrix/media/v3/download/{server_and_media_id}",
shorthand=False,
access_token=admin_user_tok,
)
@@ -132,7 +131,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
# Attempt to access the media
channel = self.make_request(
"GET",
f"/_matrix/client/v1/media/download/{server_name_and_media_id}",
f"/_matrix/media/v3/download/{server_name_and_media_id}",
shorthand=False,
access_token=non_admin_user_tok,
)
@@ -296,7 +295,7 @@ class QuarantineMediaTestCase(unittest.HomeserverTestCase):
# Attempt to access each piece of media
channel = self.make_request(
"GET",
f"/_matrix/client/v1/media/download/{server_and_media_id_2}",
f"/_matrix/media/v3/download/{server_and_media_id_2}",
shorthand=False,
access_token=non_admin_user_tok,
)

@@ -36,7 +36,6 @@ from synapse.util import Clock
from tests import unittest
from tests.test_utils import SMALL_PNG
from tests.unittest import override_config
VALID_TIMESTAMP = 1609459200000 # 2021-01-01 in milliseconds
INVALID_TIMESTAMP_IN_S = 1893456000 # 2030-01-01 in seconds
@@ -127,7 +126,6 @@ class DeleteMediaByIDTestCase(_AdminMediaTests):
self.assertEqual(400, channel.code, msg=channel.json_body)
self.assertEqual("Can only delete local media", channel.json_body["error"])
@override_config({"enable_authenticated_media": False})
def test_delete_media(self) -> None:
"""
Tests that deleting media succeeds
@@ -373,7 +371,6 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self._access_media(server_and_media_id, False)
@override_config({"enable_authenticated_media": False})
def test_keep_media_by_date(self) -> None:
"""
Tests that media is not deleted if it is newer than `before_ts`
@@ -411,7 +408,6 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self._access_media(server_and_media_id, False)
@override_config({"enable_authenticated_media": False})
def test_keep_media_by_size(self) -> None:
"""
Tests that media is not deleted if its size is smaller than or equal
@@ -447,7 +443,6 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self._access_media(server_and_media_id, False)
@override_config({"enable_authenticated_media": False})
def test_keep_media_by_user_avatar(self) -> None:
"""
Tests that we do not delete media if it is used as a user avatar
@@ -492,7 +487,6 @@ class DeleteMediaByDateSizeTestCase(_AdminMediaTests):
self._access_media(server_and_media_id, False)
@override_config({"enable_authenticated_media": False})
def test_keep_media_by_room_avatar(self) -> None:
"""
Tests that we do not delete media if it is used as a room avatar

@@ -45,7 +45,6 @@ from synapse.rest.client import (
devices,
login,
logout,
media,
profile,
register,
room,
@@ -3518,7 +3517,6 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,
media.register_servlets,
]
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
@@ -4025,7 +4023,7 @@ class UserMediaRestTestCase(unittest.HomeserverTestCase):
# Try to access a media and to create `last_access_ts`
channel = self.make_request(
"GET",
f"/_matrix/client/v1/media/download/{server_and_media_id}",
f"/_matrix/media/v3/download/{server_and_media_id}",
shorthand=False,
access_token=user_token,
)

@@ -19,7 +19,6 @@
#
#
import urllib.parse
from copy import deepcopy
from http import HTTPStatus
from unittest.mock import patch
@@ -206,141 +205,6 @@ class KeyQueryTestCase(unittest.HomeserverTestCase):
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
class UnsignedKeyDataTestCase(unittest.HomeserverTestCase):
servlets = [
keys.register_servlets,
admin.register_servlets_for_client_rest_resource,
login.register_servlets,
]
def default_config(self) -> JsonDict:
config = super().default_config()
config["experimental_features"] = {"msc4229_enabled": True}
return config
def make_key_data(self, user_id: str, device_id: str) -> JsonDict:
return {
"algorithms": ["m.olm.v1.curve25519-aes-sha2", "m.megolm.v1.aes-sha2"],
"device_id": device_id,
"keys": {
f"curve25519:{device_id}": "keykeykey",
f"ed25519:{device_id}": "keykeykey",
},
"signatures": {user_id: {f"ed25519:{device_id}": "sigsigsig"}},
"user_id": user_id,
}
def test_unsigned_uploaded_data_returned_in_keys_query(self) -> None:
password = "wonderland"
device_id = "ABCDEFGHI"
alice_id = self.register_user("alice", password)
alice_token = self.login(
"alice",
password,
device_id=device_id,
additional_request_fields={"initial_device_display_name": "mydevice"},
)
# Alice uploads some keys, with a bit of unsigned data
keys1 = self.make_key_data(alice_id, device_id)
keys1["unsigned"] = {"a": "b"}
channel = self.make_request(
"POST",
"/_matrix/client/v3/keys/upload",
{"device_keys": keys1},
alice_token,
)
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
# /keys/query should return the unsigned data, with the device display name merged in.
channel = self.make_request(
"POST",
"/_matrix/client/v3/keys/query",
{"device_keys": {alice_id: []}},
alice_token,
)
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
device_response = channel.json_body["device_keys"][alice_id][device_id]
expected_device_response = deepcopy(keys1)
expected_device_response["unsigned"]["device_display_name"] = "mydevice"
self.assertEqual(device_response, expected_device_response)
# /_matrix/federation/v1/user/devices/{userId} should return the unsigned data too
fed_response = self.get_success(
self.hs.get_device_handler().on_federation_query_user_devices(alice_id)
)
self.assertEqual(
fed_response["devices"][0],
{"device_id": device_id, "keys": keys1},
)
# so should /_matrix/federation/v1/user/keys/query
fed_response = self.get_success(
self.hs.get_e2e_keys_handler().on_federation_query_client_keys(
{"device_keys": {alice_id: []}}
)
)
fed_device_response = fed_response["device_keys"][alice_id][device_id]
self.assertEqual(fed_device_response, keys1)
def test_non_dict_unsigned_is_ignored(self) -> None:
password = "wonderland"
device_id = "ABCDEFGHI"
alice_id = self.register_user("alice", password)
alice_token = self.login(
"alice",
password,
device_id=device_id,
additional_request_fields={"initial_device_display_name": "mydevice"},
)
# Alice uploads some keys, with a malformed unsigned data
keys1 = self.make_key_data(alice_id, device_id)
keys1["unsigned"] = ["a", "b"] # a list!
channel = self.make_request(
"POST",
"/_matrix/client/v3/keys/upload",
{"device_keys": keys1},
alice_token,
)
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
# /keys/query should return the unsigned data, with the device display name merged in.
channel = self.make_request(
"POST",
"/_matrix/client/v3/keys/query",
{"device_keys": {alice_id: []}},
alice_token,
)
self.assertEqual(channel.code, HTTPStatus.OK, channel.result)
device_response = channel.json_body["device_keys"][alice_id][device_id]
expected_device_response = deepcopy(keys1)
expected_device_response["unsigned"] = {"device_display_name": "mydevice"}
self.assertEqual(device_response, expected_device_response)
# /_matrix/federation/v1/user/devices/{userId} should return the unsigned data too
fed_response = self.get_success(
self.hs.get_device_handler().on_federation_query_user_devices(alice_id)
)
self.assertEqual(
fed_response["devices"][0],
{"device_id": device_id, "keys": keys1},
)
# so should /_matrix/federation/v1/user/keys/query
fed_response = self.get_success(
self.hs.get_e2e_keys_handler().on_federation_query_client_keys(
{"device_keys": {alice_id: []}}
)
)
fed_device_response = fed_response["device_keys"][alice_id][device_id]
expected_device_response = deepcopy(keys1)
expected_device_response["unsigned"] = {}
self.assertEqual(fed_device_response, expected_device_response)
class SigningKeyUploadServletTestCase(unittest.HomeserverTestCase):
servlets = [
admin.register_servlets,

@@ -91,8 +91,7 @@ class MediaDomainBlockingTests(unittest.HomeserverTestCase):
{
# Disable downloads from a domain we won't be requesting downloads from.
# This proves we haven't broken anything.
"prevent_media_downloads_from": ["not-listed.com"],
"enable_authenticated_media": False,
"prevent_media_downloads_from": ["not-listed.com"]
}
)
def test_remote_media_normally_unblocked(self) -> None:
@@ -133,7 +132,6 @@ class MediaDomainBlockingTests(unittest.HomeserverTestCase):
# This proves we haven't broken anything.
"prevent_media_downloads_from": ["not-listed.com"],
"dynamic_thumbnails": True,
"enable_authenticated_media": False,
}
)
def test_remote_media_thumbnail_normally_unblocked(self) -> None:

@@ -42,7 +42,6 @@ from synapse.util.stringutils import parse_and_validate_mxc_uri
from tests import unittest
from tests.server import FakeTransport
from tests.test_utils import SMALL_PNG
from tests.unittest import override_config
try:
import lxml
@@ -1260,7 +1259,6 @@ class URLPreviewTests(unittest.HomeserverTestCase):
self.assertIsNone(_port)
return host, media_id
@override_config({"enable_authenticated_media": False})
def test_storage_providers_exclude_files(self) -> None:
"""Test that files are not stored in or fetched from storage providers."""
host, media_id = self._download_image()
@@ -1303,7 +1301,6 @@ class URLPreviewTests(unittest.HomeserverTestCase):
"URL cache file was unexpectedly retrieved from a storage provider",
)
@override_config({"enable_authenticated_media": False})
def test_storage_providers_exclude_thumbnails(self) -> None:
"""Test that thumbnails are not stored in or fetched from storage providers."""
host, media_id = self._download_image()