1
0

Compare commits

..

6 Commits

22 changed files with 38 additions and 301 deletions

View File

@@ -1 +0,0 @@
Drop unused db tables `room_stats_historical` and `user_stats_historical`.

1
changelog.d/11333.misc Normal file
View File

@@ -0,0 +1 @@
Remove legacy code related to the deprecated "trust_identity_server_for_password_resets" config flag.

View File

@@ -1 +0,0 @@
Support the stable version of [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778): the `m.login.application_service` login type. Contributed by @tulir.

View File

@@ -1 +0,0 @@
Add type hints to storage classes.

View File

@@ -1 +0,0 @@
Add type hints to storage classes.

View File

@@ -1 +0,0 @@
Add a development script for visualising the storage class inheritance hierarchy.

View File

@@ -28,6 +28,7 @@ exclude = (?x)
|synapse/storage/databases/main/account_data.py
|synapse/storage/databases/main/cache.py
|synapse/storage/databases/main/devices.py
|synapse/storage/databases/main/directory.py
|synapse/storage/databases/main/e2e_room_keys.py
|synapse/storage/databases/main/end_to_end_keys.py
|synapse/storage/databases/main/event_federation.py
@@ -38,6 +39,7 @@ exclude = (?x)
|synapse/storage/databases/main/metrics.py
|synapse/storage/databases/main/monthly_active_users.py
|synapse/storage/databases/main/presence.py
|synapse/storage/databases/main/profile.py
|synapse/storage/databases/main/purge_events.py
|synapse/storage/databases/main/push_rule.py
|synapse/storage/databases/main/receipts.py
@@ -175,15 +177,9 @@ disallow_untyped_defs = True
[mypy-synapse.storage.databases.main.client_ips]
disallow_untyped_defs = True
[mypy-synapse.storage.databases.main.directory]
disallow_untyped_defs = True
[mypy-synapse.storage.databases.main.room_batch]
disallow_untyped_defs = True
[mypy-synapse.storage.databases.main.profile]
disallow_untyped_defs = True
[mypy-synapse.storage.databases.main.state_deltas]
disallow_untyped_defs = True
@@ -286,9 +282,6 @@ disallow_untyped_defs = True
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True
[mypy-tests.storage.test_profile]
disallow_untyped_defs = True
[mypy-tests.storage.test_user_directory]
disallow_untyped_defs = True

View File

@@ -1,179 +0,0 @@
#! /usr/bin/env python3
import argparse
import os
import re
import subprocess
import sys
import tempfile
from typing import Iterable, Optional, Set
import networkx
def scrape_storage_classes() -> str:
"""Grep the for classes ending with "Store" and extract their list of parents.
Returns the stdout from `rg` as a single string."""
# TODO: this is a big hack which assumes that each Store class has a unique name.
# That assumption is wrong: there are two DirectoryStores, one in
# synapse/replication/slave/storage/directory.py and the other in
# synapse/storage/databases/main/directory.py
# Would be nice to have a way to account for this.
return subprocess.check_output(
[
"rg",
"-o",
"--no-line-number",
"--no-filename",
"--multiline",
r"class .*Store\((.|\n)*?\):$",
"synapse",
"tests",
],
).decode()
oneline_class_pattern = re.compile(r"^class (.*)\((.*)\):$")
opening_class_pattern = re.compile(r"^class (.*)\($")
def load_graph(lines: Iterable[str]) -> networkx.DiGraph:
"""Process the output of scrape_storage_classes to build an inheritance graph.
Every time a class C is created that explicitly inherits from a parent P, we add an
edge C -> P.
"""
G = networkx.DiGraph()
child: Optional[str] = None
for line in lines:
line = line.strip()
if not line or line.startswith("#"):
continue
if (match := oneline_class_pattern.match(line)) is not None:
child, parents = match.groups()
for parent in parents.split(", "):
if "metaclass" not in parent:
G.add_edge(child, parent)
child = None
elif (match := opening_class_pattern.match(line)) is not None:
(child,) = match.groups()
elif line == "):":
child = None
else:
assert child is not None, repr(line)
parent = line.strip(",")
if "metaclass" not in parent:
G.add_edge(child, parent)
return G
def select_vertices_of_interest(G: networkx.DiGraph, target: Optional[str]) -> Set[str]:
"""Find all nodes we want to visualise.
If no TARGET is given, we visualise all of G. Otherwise we visualise a given
TARGET, its parents, and all of their parents recursively.
Requires that G is a DAG.
If not None, the TARGET must belong to G.
"""
assert networkx.is_directed_acyclic_graph(G)
if target is not None:
component: Set[str] = networkx.descendants(G, target)
component.add(target)
else:
component = set(G.nodes)
return component
def generate_dot_source(G: networkx.DiGraph, nodes: Set[str]) -> str:
output = """\
strict digraph {
rankdir="LR";
node [shape=box];
"""
for (child, parent) in G.edges:
if child in nodes and parent in nodes:
output += f" {child} -> {parent};\n"
output += "}\n"
return output
def render_png(dot_source: str, destination: Optional[str]) -> str:
if destination is None:
handle, destination = tempfile.mkstemp()
os.close(handle)
print("Warning: writing to", destination, "which will persist", file=sys.stderr)
subprocess.run(
[
"dot",
"-o",
destination,
"-Tpng",
],
input=dot_source,
encoding="utf-8",
check=True,
)
return destination
def show_graph(location: str) -> None:
subprocess.run(
["xdg-open", location],
check=True,
)
def main(parser: argparse.ArgumentParser, args: argparse.Namespace) -> int:
if not (args.output or args.show):
parser.print_help(file=sys.stderr)
print("Must either --output or --show, or both.", file=sys.stderr)
return os.EX_USAGE
lines = scrape_storage_classes().split("\n")
G = load_graph(lines)
nodes = select_vertices_of_interest(G, args.target)
dot_source = generate_dot_source(G, nodes)
output_location = render_png(dot_source, args.output)
if args.show:
show_graph(output_location)
return os.EX_OK
def build_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description="Visualise the inheritance of Synapse's storage classes. Requires "
"ripgrep (https://github.com/BurntSushi/ripgrep) as 'rg'; graphviz "
"(https://graphviz.org/) for the 'dot' program; and networkx "
"(https://networkx.org/). Requires Python 3.8+ for the walrus"
"operator."
)
parser.add_argument(
"target",
nargs="?",
help="Show only TARGET and its ancestors. Otherwise, show the entire hierarchy.",
)
parser.add_argument(
"--output",
nargs=1,
help="Render inheritance graph to a png file.",
)
parser.add_argument(
"--show",
action="store_true",
help="Open the inheritance graph in an image viewer.",
)
return parser
if __name__ == "__main__":
parser = build_parser()
args = parser.parse_args()
sys.exit(main(parser, args))

View File

@@ -135,8 +135,6 @@ CONDITIONAL_REQUIREMENTS["dev"] = (
# The following are executed as commands by the release script.
"twine",
"towncrier",
# For storage_inheritance script
"networkx==2.6.3",
]
)

View File

@@ -137,33 +137,14 @@ class EmailConfig(Config):
if self.root.registration.account_threepid_delegate_email
else ThreepidBehaviour.LOCAL
)
# Prior to Synapse v1.4.0, there was another option that defined whether Synapse would
# use an identity server to password reset tokens on its behalf. We now warn the user
# if they have this set and tell them to use the updated option, while using a default
# identity server in the process.
self.using_identity_server_from_trusted_list = False
if (
not self.root.registration.account_threepid_delegate_email
and config.get("trust_identity_server_for_password_resets", False) is True
):
# Use the first entry in self.trusted_third_party_id_servers instead
if self.trusted_third_party_id_servers:
# XXX: It's a little confusing that account_threepid_delegate_email is modified
# both in RegistrationConfig and here. We should factor this bit out
first_trusted_identity_server = self.trusted_third_party_id_servers[0]
# trusted_third_party_id_servers does not contain a scheme whereas
# account_threepid_delegate_email is expected to. Presume https
self.root.registration.account_threepid_delegate_email = (
"https://" + first_trusted_identity_server
)
self.using_identity_server_from_trusted_list = True
else:
raise ConfigError(
"Attempted to use an identity server from"
'"trusted_third_party_id_servers" but it is empty.'
)
if config.get("trust_identity_server_for_password_resets"):
raise ConfigError(
'The config option "trust_identity_server_for_password_resets" '
'has been replaced by "account_threepid_delegate". '
"Please consult the sample config at docs/sample_config.yaml for "
"details and update your config file."
)
self.local_threepid_handling_disabled_due_to_email_config = False
if (

View File

@@ -39,9 +39,7 @@ class RegistrationConfig(Config):
self.registration_shared_secret = config.get("registration_shared_secret")
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
self.trusted_third_party_id_servers = config.get(
"trusted_third_party_id_servers", ["matrix.org", "vector.im"]
)
account_threepid_delegates = config.get("account_threepid_delegates") or {}
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")

View File

@@ -464,15 +464,6 @@ class IdentityHandler:
if next_link:
params["next_link"] = next_link
if self.hs.config.email.using_identity_server_from_trusted_list:
# Warn that a deprecated config option is in use
logger.warning(
'The config option "trust_identity_server_for_password_resets" '
'has been replaced by "account_threepid_delegate". '
"Please consult the sample config at docs/sample_config.yaml for "
"details and update your config file."
)
try:
data = await self.http_client.post_json_get_json(
id_server + "/_matrix/identity/api/v1/validate/email/requestToken",
@@ -517,15 +508,6 @@ class IdentityHandler:
if next_link:
params["next_link"] = next_link
if self.hs.config.email.using_identity_server_from_trusted_list:
# Warn that a deprecated config option is in use
logger.warning(
'The config option "trust_identity_server_for_password_resets" '
'has been replaced by "account_threepid_delegate". '
"Please consult the sample config at docs/sample_config.yaml for "
"details and update your config file."
)
try:
data = await self.http_client.post_json_get_json(
id_server + "/_matrix/identity/api/v1/validate/msisdn/requestToken",

View File

@@ -61,8 +61,7 @@ class LoginRestServlet(RestServlet):
TOKEN_TYPE = "m.login.token"
JWT_TYPE = "org.matrix.login.jwt"
JWT_TYPE_DEPRECATED = "m.login.jwt"
APPSERVICE_TYPE = "m.login.application_service"
APPSERVICE_TYPE_UNSTABLE = "uk.half-shot.msc2778.login.application_service"
APPSERVICE_TYPE = "uk.half-shot.msc2778.login.application_service"
REFRESH_TOKEN_PARAM = "org.matrix.msc2918.refresh_token"
def __init__(self, hs: "HomeServer"):
@@ -144,7 +143,6 @@ class LoginRestServlet(RestServlet):
flows.extend({"type": t} for t in self.auth_handler.get_supported_login_types())
flows.append({"type": LoginRestServlet.APPSERVICE_TYPE})
flows.append({"type": LoginRestServlet.APPSERVICE_TYPE_UNSTABLE})
return 200, {"flows": flows}
@@ -161,10 +159,7 @@ class LoginRestServlet(RestServlet):
should_issue_refresh_token = False
try:
if login_submission["type"] in (
LoginRestServlet.APPSERVICE_TYPE,
LoginRestServlet.APPSERVICE_TYPE_UNSTABLE,
):
if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE:
appservice = self.auth.get_appservice_by_req(request)
if appservice.is_rate_limited():

View File

@@ -154,7 +154,6 @@ class DataStore(
db_conn, "local_group_updates", "stream_id"
)
self._cache_id_gen: Optional[MultiWriterIdGenerator]
if isinstance(self.database_engine, PostgresEngine):
# We set the `writers` to an empty list here as we don't care about
# missing updates over restarts, as we'll not have anything in our

View File

@@ -13,18 +13,18 @@
# limitations under the License.
from collections import namedtuple
from typing import Iterable, List, Optional, Tuple
from typing import Iterable, List, Optional
from synapse.api.errors import SynapseError
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import LoggingTransaction
from synapse.storage.databases.main import CacheInvalidationWorkerStore
from synapse.types import RoomAlias
from synapse.util.caches.descriptors import cached
RoomAliasMapping = namedtuple("RoomAliasMapping", ("room_id", "room_alias", "servers"))
class DirectoryWorkerStore(CacheInvalidationWorkerStore):
class DirectoryWorkerStore(SQLBaseStore):
async def get_association_from_room_alias(
self, room_alias: RoomAlias
) -> Optional[RoomAliasMapping]:
@@ -92,7 +92,7 @@ class DirectoryWorkerStore(CacheInvalidationWorkerStore):
creator: Optional user_id of creator.
"""
def alias_txn(txn: LoggingTransaction) -> None:
def alias_txn(txn):
self.db_pool.simple_insert_txn(
txn,
"room_aliases",
@@ -176,9 +176,9 @@ class DirectoryStore(DirectoryWorkerStore):
If None, the creator will be left unchanged.
"""
def _update_aliases_for_room_txn(txn: LoggingTransaction) -> None:
def _update_aliases_for_room_txn(txn):
update_creator_sql = ""
sql_params: Tuple[str, ...] = (new_room_id, old_room_id)
sql_params = (new_room_id, old_room_id)
if creator:
update_creator_sql = ", creator = ?"
sql_params = (new_room_id, creator, old_room_id)

View File

@@ -15,7 +15,6 @@ from typing import Any, Dict, List, Optional
from synapse.api.errors import StoreError
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import LoggingTransaction
from synapse.storage.databases.main.roommember import ProfileInfo
@@ -105,7 +104,7 @@ class ProfileWorkerStore(SQLBaseStore):
desc="update_remote_profile_cache",
)
async def maybe_delete_remote_profile_cache(self, user_id: str) -> None:
async def maybe_delete_remote_profile_cache(self, user_id):
"""Check if we still care about the remote user's profile, and if we
don't then remove their profile from the cache
"""
@@ -117,9 +116,9 @@ class ProfileWorkerStore(SQLBaseStore):
desc="delete_remote_profile_cache",
)
async def is_subscribed_remote_profile_for_user(self, user_id: str) -> bool:
async def is_subscribed_remote_profile_for_user(self, user_id):
"""Check whether we are interested in a remote user's profile."""
res: Optional[str] = await self.db_pool.simple_select_one_onecol(
res = await self.db_pool.simple_select_one_onecol(
table="group_users",
keyvalues={"user_id": user_id},
retcol="user_id",
@@ -140,16 +139,13 @@ class ProfileWorkerStore(SQLBaseStore):
if res:
return True
return False
async def get_remote_profile_cache_entries_that_expire(
self, last_checked: int
) -> List[Dict[str, str]]:
"""Get all users who haven't been checked since `last_checked`"""
def _get_remote_profile_cache_entries_that_expire_txn(
txn: LoggingTransaction,
) -> List[Dict[str, str]]:
def _get_remote_profile_cache_entries_that_expire_txn(txn):
sql = """
SELECT user_id, displayname, avatar_url
FROM remote_profile_cache

View File

@@ -45,13 +45,10 @@ Changes in SCHEMA_VERSION = 64:
Changes in SCHEMA_VERSION = 65:
- MSC2716: Remove unique event_id constraint from insertion_event_edges
because an insertion event can have multiple edges.
- Remove unused tables `user_stats_historical` and `room_stats_historical`.
"""
SCHEMA_COMPAT_VERSION = (
61 # 61: Remove unused tables `user_stats_historical` and `room_stats_historical`
)
SCHEMA_COMPAT_VERSION = 60 # 60: "outlier" not in internal_metadata.
"""Limit on how far the synapse codebase can be rolled back without breaking db compat
This value is stored in the database, and checked on startup. If the value in the

View File

@@ -1,19 +0,0 @@
/* Copyright 2021 The Matrix.org Foundation C.I.C
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- Remove unused tables room_stats_historical and user_stats_historical
-- which have not been read or written since schema version 61.
DROP TABLE IF EXISTS room_stats_historical;
DROP TABLE IF EXISTS user_stats_historical;

View File

@@ -94,3 +94,12 @@ class ConfigLoadingFileTestCase(ConfigFileTestCase):
# The default Metrics Flags are off by default.
config = HomeServerConfig.load_config("", ["-c", self.config_file])
self.assertFalse(config.metrics.metrics_flags.known_servers)
def test_depreciated_identity_server_flag_throws_error(self):
self.generate_config()
# Needed to ensure that actual key/value pair added below don't end up on a line with a comment
self.add_lines_to_config([" "])
# Check that presence of "trust_identity_server_for_password" throws config error
self.add_lines_to_config(["trust_identity_server_for_password_resets: true"])
with self.assertRaises(ConfigError):
HomeServerConfig.load_config("", ["-c", self.config_file])

View File

@@ -31,10 +31,7 @@ from tests.unittest import override_config
# (possibly experimental) login flows we expect to appear in the list after the normal
# ones
ADDITIONAL_LOGIN_FLOWS = [
{"type": "m.login.application_service"},
{"type": "uk.half-shot.msc2778.login.application_service"},
]
ADDITIONAL_LOGIN_FLOWS = [{"type": "uk.half-shot.msc2778.login.application_service"}]
# a mock instance which the dummy auth providers delegate to, so we can see what's going
# on

View File

@@ -79,10 +79,7 @@ EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [("<ab c>", ""), ('q" =+"', '"fö&=o"')]
# (possibly experimental) login flows we expect to appear in the list after the normal
# ones
ADDITIONAL_LOGIN_FLOWS = [
{"type": "m.login.application_service"},
{"type": "uk.half-shot.msc2778.login.application_service"},
]
ADDITIONAL_LOGIN_FLOWS = [{"type": "uk.half-shot.msc2778.login.application_service"}]
class LoginRestServletTestCase(unittest.HomeserverTestCase):

View File

@@ -11,22 +11,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.test.proto_helpers import MemoryReactor
from synapse.server import HomeServer
from synapse.types import UserID
from synapse.util import Clock
from tests import unittest
class ProfileStoreTestCase(unittest.HomeserverTestCase):
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
def prepare(self, reactor, clock, hs):
self.store = hs.get_datastore()
self.u_frank = UserID.from_string("@frank:test")
def test_displayname(self) -> None:
def test_displayname(self):
self.get_success(self.store.create_profile(self.u_frank.localpart))
self.get_success(
@@ -51,7 +48,7 @@ class ProfileStoreTestCase(unittest.HomeserverTestCase):
self.get_success(self.store.get_profile_displayname(self.u_frank.localpart))
)
def test_avatar_url(self) -> None:
def test_avatar_url(self):
self.get_success(self.store.create_profile(self.u_frank.localpart))
self.get_success(