Compare commits: release-v1...rei/flake (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c27fe41858 | |
| | 9c8a9d8c51 | |
| | 197fbb123b | |
| | 5e024a0645 | |
| | ae69d69525 | |
| | cb8e274c07 | |
1  .gitignore  vendored
@@ -18,6 +18,7 @@ __pycache__/
 # We do want the poetry and cargo lockfile.
 !poetry.lock
 !Cargo.lock
+!flake.lock

 # stuff that is likely to exist when you run a server locally
 /*.db
@@ -1,9 +1,3 @@
-Synapse 1.82.0 (2023-04-25)
-===========================
-
-No significant changes since 1.82.0rc1.
-
-
 Synapse 1.82.0rc1 (2023-04-18)
 ==============================
12  book.toml
@@ -34,14 +34,6 @@ additional-css = [
     "docs/website_files/table-of-contents.css",
     "docs/website_files/remove-nav-buttons.css",
     "docs/website_files/indent-section-headers.css",
-    "docs/website_files/version-picker.css",
 ]
-additional-js = [
-    "docs/website_files/table-of-contents.js",
-    "docs/website_files/version-picker.js",
-    "docs/website_files/version.js",
-]
-theme = "docs/website_files/theme"
-
-[preprocessor.schema_versions]
-command = "./scripts-dev/schema_versions.py"
+additional-js = ["docs/website_files/table-of-contents.js"]
+theme = "docs/website_files/theme"
1  changelog.d/15284.misc  Normal file
@@ -0,0 +1 @@
+Speedup tests by caching HomeServerConfig instances.
1  changelog.d/15417.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a long-standing bug where cached key results which were directly fetched would not be properly re-used.
1  changelog.d/15418.misc  Normal file
@@ -0,0 +1 @@
+Always use multi-user device resync replication endpoints.
1  changelog.d/15492.misc  Normal file
@@ -0,0 +1 @@
+Add a Nix flake for use as a development environment.
6  debian/changelog  vendored
@@ -1,9 +1,3 @@
-matrix-synapse-py3 (1.82.0) stable; urgency=medium
-
-  * New Synapse release 1.82.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Apr 2023 11:56:06 +0100
-
 matrix-synapse-py3 (1.82.0~rc1) stable; urgency=medium

   * New Synapse release 1.82.0rc1.
@@ -24,11 +24,6 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting the
 slightly so that they are more visually distinguishable from the section headers
 (the bold titles). This is done through the `indent-section-headers.css` file.
-
-In addition to these modifications, we have added a version picker to the documentation.
-Users can switch between documentations for different versions of Synapse.
-This functionality was implemented through the `version-picker.js` and
-`version-picker.css` files.

 More information can be found in mdbook's official documentation for
 [injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
 and
@@ -131,18 +131,6 @@
                 <i class="fa fa-search"></i>
             </button>
             {{/if}}
-            <div class="version-picker">
-                <div class="dropdown">
-                    <div class="select">
-                        <span></span>
-                        <i class="fa fa-chevron-down"></i>
-                    </div>
-                    <input type="hidden" name="version">
-                    <ul class="dropdown-menu">
-                        <!-- Versions will be added dynamically in version-picker.js -->
-                    </ul>
-                </div>
-            </div>

             <h1 class="menu-title">{{ book_title }}</h1>
@@ -321,4 +309,4 @@
         {{/if}}

     </body>
-</html>
+</html>
@@ -1,78 +0,0 @@
-.version-picker {
-    display: flex;
-    align-items: center;
-}
-
-.version-picker .dropdown {
-    width: 130px;
-    max-height: 29px;
-    margin-left: 10px;
-    display: inline-block;
-    border-radius: 4px;
-    border: 1px solid var(--theme-popup-border);
-    position: relative;
-    font-size: 13px;
-    color: var(--fg);
-    height: 100%;
-    text-align: left;
-}
-.version-picker .dropdown .select {
-    cursor: pointer;
-    display: block;
-    padding: 5px 2px 5px 15px;
-}
-.version-picker .dropdown .select > i {
-    font-size: 10px;
-    color: var(--fg);
-    cursor: pointer;
-    float: right;
-    line-height: 20px !important;
-}
-.version-picker .dropdown:hover {
-    border: 1px solid var(--theme-popup-border);
-}
-.version-picker .dropdown:active {
-    background-color: var(--theme-popup-bg);
-}
-.version-picker .dropdown.active:hover,
-.version-picker .dropdown.active {
-    border: 1px solid var(--theme-popup-border);
-    border-radius: 2px 2px 0 0;
-    background-color: var(--theme-popup-bg);
-}
-.version-picker .dropdown.active .select > i {
-    transform: rotate(-180deg);
-}
-.version-picker .dropdown .dropdown-menu {
-    position: absolute;
-    background-color: var(--theme-popup-bg);
-    width: 100%;
-    left: -1px;
-    right: 1px;
-    margin-top: 1px;
-    border: 1px solid var(--theme-popup-border);
-    border-radius: 0 0 4px 4px;
-    overflow: hidden;
-    display: none;
-    max-height: 300px;
-    overflow-y: auto;
-    z-index: 9;
-}
-.version-picker .dropdown .dropdown-menu li {
-    font-size: 12px;
-    padding: 6px 20px;
-    cursor: pointer;
-}
-.version-picker .dropdown .dropdown-menu {
-    padding: 0;
-    list-style: none;
-}
-.version-picker .dropdown .dropdown-menu li:hover {
-    background-color: var(--theme-hover);
-}
-.version-picker .dropdown .dropdown-menu li.active::before {
-    display: inline-block;
-    content: "✓";
-    margin-inline-start: -14px;
-    width: 14px;
-}
@@ -1,127 +0,0 @@
-const dropdown = document.querySelector('.version-picker .dropdown');
-const dropdownMenu = dropdown.querySelector('.dropdown-menu');
-
-fetchVersions(dropdown, dropdownMenu).then(() => {
-    initializeVersionDropdown(dropdown, dropdownMenu);
-});
-
-/**
- * Initialize the dropdown functionality for version selection.
- *
- * @param {Element} dropdown - The dropdown element.
- * @param {Element} dropdownMenu - The dropdown menu element.
- */
-function initializeVersionDropdown(dropdown, dropdownMenu) {
-    // Toggle the dropdown menu on click
-    dropdown.addEventListener('click', function () {
-        this.setAttribute('tabindex', 1);
-        this.classList.toggle('active');
-        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
-    });
-
-    // Remove the 'active' class and hide the dropdown menu on focusout
-    dropdown.addEventListener('focusout', function () {
-        this.classList.remove('active');
-        dropdownMenu.style.display = 'none';
-    });
-
-    // Handle item selection within the dropdown menu
-    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
-    dropdownMenuItems.forEach(function (item) {
-        item.addEventListener('click', function () {
-            dropdownMenuItems.forEach(function (item) {
-                item.classList.remove('active');
-            });
-            this.classList.add('active');
-            dropdown.querySelector('span').textContent = this.textContent;
-            dropdown.querySelector('input').value = this.getAttribute('id');
-
-            window.location.href = changeVersion(window.location.href, this.textContent);
-        });
-    });
-};
-
-/**
- * This function fetches the available versions from a GitHub repository
- * and inserts them into the version picker.
- *
- * @param {Element} dropdown - The dropdown element.
- * @param {Element} dropdownMenu - The dropdown menu element.
- * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
- */
-function fetchVersions(dropdown, dropdownMenu) {
-    return new Promise((resolve, reject) => {
-        window.addEventListener("load", () => {
-
-            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
-                cache: "force-cache",
-            }).then(res =>
-                res.json()
-            ).then(resObject => {
-                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
-                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
-                const versions = tree.map(item => item.path).sort(sortVersions);
-
-                // Create a list of <li> items for versions
-                versions.forEach((version) => {
-                    const li = document.createElement("li");
-                    li.textContent = version;
-                    li.id = version;
-
-                    if (window.SYNAPSE_VERSION === version) {
-                        li.classList.add('active');
-                        dropdown.querySelector('span').textContent = version;
-                        dropdown.querySelector('input').value = version;
-                    }
-
-                    dropdownMenu.appendChild(li);
-                });
-
-                resolve(versions);
-
-            }).catch(ex => {
-                console.error("Failed to fetch version data", ex);
-                reject(ex);
-            })
-        });
-    });
-}
-
-/**
- * Custom sorting function to sort an array of version strings.
- *
- * @param {string} a - The first version string to compare.
- * @param {string} b - The second version string to compare.
- * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
- */
-function sortVersions(a, b) {
-    // Put 'develop' and 'latest' at the top
-    if (a === 'develop' || a === 'latest') return -1;
-    if (b === 'develop' || b === 'latest') return 1;
-
-    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
-    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];
-
-    return versionB.localeCompare(versionA);
-}
-
-/**
- * Change the version in a URL path.
- *
- * @param {string} url - The original URL to be modified.
- * @param {string} newVersion - The new version to replace the existing version in the URL.
- * @returns {string} The updated URL with the new version.
- */
-function changeVersion(url, newVersion) {
-    const parsedURL = new URL(url);
-    const pathSegments = parsedURL.pathname.split('/');
-
-    // Modify the version
-    pathSegments[2] = newVersion;
-
-    // Reconstruct the URL
-    parsedURL.pathname = pathSegments.join('/');
-
-    return parsedURL.href;
-}
@@ -1 +0,0 @@
-window.SYNAPSE_VERSION = 'v1.82';
58  flake.lock  generated  Normal file
@@ -0,0 +1,58 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1677075010,
+        "narHash": "sha256-X+UmR1AkdR//lPVcShmLy8p1n857IGf7y+cyCArp8bU=",
+        "path": "/nix/store/b1vy558z7lxph5mbg7n50b5njp393ia9-source",
+        "rev": "c95bf18beba4290af25c60cbaaceea1110d0f727",
+        "type": "path"
+      },
+      "original": {
+        "id": "nixpkgs",
+        "type": "indirect"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs",
+        "utils": "utils"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    },
+    "utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1681202837,
+        "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
17  flake.nix  Normal file
@@ -0,0 +1,17 @@
+{
+  description = "Synapse (development)";
+
+  inputs = {
+    utils.url = "github:numtide/flake-utils";
+  };
+
+  outputs = { self, nixpkgs, utils }:
+    utils.lib.eachDefaultSystem (system: let
+      pkgs = nixpkgs.legacyPackages."${system}";
+    in rec {
+      # `nix develop`
+      devShell = pkgs.mkShell {
+        nativeBuildInputs = with pkgs; [ rustc cargo python sqlite poetry postgresql icu clang ];
+      };
+    });
+}
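As the `# nix develop` comment in the flake above suggests, the expected workflow (assuming a Nix install with flakes enabled, which the diff itself does not show) is to enter the development shell from the repository root:

    nix develop

which provides the listed nativeBuildInputs (rustc, cargo, python, sqlite, poetry, postgresql, icu, clang).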
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"
 [tool.poetry]
 name = "matrix-synapse"
-version = "1.82.0"
+version = "1.82.0rc1"
 description = "Homeserver for the Matrix decentralised comms protocol"
 authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
 license = "Apache-2.0"
@@ -150,18 +150,19 @@ class Keyring:
     def __init__(
         self, hs: "HomeServer", key_fetchers: "Optional[Iterable[KeyFetcher]]" = None
     ):
         self.clock = hs.get_clock()

         if key_fetchers is None:
-            key_fetchers = (
-                # Fetch keys from the database.
-                StoreKeyFetcher(hs),
-                # Fetch keys from a configured Perspectives server.
-                PerspectivesKeyFetcher(hs),
-                # Fetch keys from the origin server directly.
-                ServerKeyFetcher(hs),
-            )
-        self._key_fetchers = key_fetchers
+            # Always fetch keys from the database.
+            mutable_key_fetchers: List[KeyFetcher] = [StoreKeyFetcher(hs)]
+            # Fetch keys from configured trusted key servers, if any exist.
+            key_servers = hs.config.key.key_servers
+            if key_servers:
+                mutable_key_fetchers.append(PerspectivesKeyFetcher(hs))
+            # Finally, fetch keys from the origin server directly.
+            mutable_key_fetchers.append(ServerKeyFetcher(hs))
+
+            self._key_fetchers: Iterable[KeyFetcher] = tuple(mutable_key_fetchers)
+        else:
+            self._key_fetchers = key_fetchers

         self._fetch_keys_queue: BatchingQueue[
             _FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]]
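The hunk above changes the default key-fetcher ordering: the local store is always consulted first, trusted key servers are used only when some are configured, and the origin server is tried last. A minimal standalone sketch of that ordering (hypothetical illustration, not code from the diff):

    # Hypothetical illustration of the new default fetcher order.
    from typing import List

    def fetcher_order(has_trusted_key_servers: bool) -> List[str]:
        order = ["StoreKeyFetcher"]
        if has_trusted_key_servers:
            order.append("PerspectivesKeyFetcher")
        order.append("ServerKeyFetcher")
        return order

    assert fetcher_order(False) == ["StoreKeyFetcher", "ServerKeyFetcher"]
    assert fetcher_order(True) == [
        "StoreKeyFetcher",
        "PerspectivesKeyFetcher",
        "ServerKeyFetcher",
    ]

This pairs with the `"trusted_key_servers": []` test default added near the end of this diff, which keeps unit tests from reaching out to matrix.org.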
@@ -510,7 +511,7 @@ class StoreKeyFetcher(KeyFetcher):
                 for key_id in queue_value.key_ids
             )

-        res = await self.store.get_server_verify_keys(key_ids_to_fetch)
+        res = await self.store.get_server_keys_json(key_ids_to_fetch)

         keys: Dict[str, Dict[str, FetchKeyResult]] = {}
         for (server_name, key_id), key in res.items():
             keys.setdefault(server_name, {})[key_id] = key
@@ -522,7 +523,6 @@ class BaseV2KeyFetcher(KeyFetcher):
         super().__init__(hs)

         self.store = hs.get_datastores().main
-        self.config = hs.config

     async def process_v2_response(
         self, from_server: str, response_json: JsonDict, time_added_ms: int
@@ -626,7 +626,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
         super().__init__(hs)
         self.clock = hs.get_clock()
         self.client = hs.get_federation_http_client()
-        self.key_servers = self.config.key.key_servers
+        self.key_servers = hs.config.key.key_servers

     async def _fetch_keys(
         self, keys_to_fetch: List[_FetchKeyRequest]
@@ -775,7 +775,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
         keys.setdefault(server_name, {}).update(processed_response)

-        await self.store.store_server_verify_keys(
+        await self.store.store_server_signature_keys(
             perspective_name, time_now_ms, added_keys
         )
@@ -14,7 +14,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import logging
-from http import HTTPStatus
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -921,12 +920,8 @@ class DeviceListWorkerUpdater:
     def __init__(self, hs: "HomeServer"):
         from synapse.replication.http.devices import (
             ReplicationMultiUserDevicesResyncRestServlet,
-            ReplicationUserDevicesResyncRestServlet,
         )

-        self._user_device_resync_client = (
-            ReplicationUserDevicesResyncRestServlet.make_client(hs)
-        )
         self._multi_user_device_resync_client = (
             ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
         )
@@ -948,37 +943,7 @@ class DeviceListWorkerUpdater:
             # Shortcut empty requests
             return {}

-        try:
-            return await self._multi_user_device_resync_client(user_ids=user_ids)
-        except SynapseError as err:
-            if not (
-                err.code == HTTPStatus.NOT_FOUND and err.errcode == Codes.UNRECOGNIZED
-            ):
-                raise
-
-            # Fall back to single requests
-            result: Dict[str, Optional[JsonDict]] = {}
-            for user_id in user_ids:
-                result[user_id] = await self._user_device_resync_client(user_id=user_id)
-            return result
-
-    async def user_device_resync(
-        self, user_id: str, mark_failed_as_stale: bool = True
-    ) -> Optional[JsonDict]:
-        """Fetches all devices for a user and updates the device cache with them.
-
-        Args:
-            user_id: The user's id whose device_list will be updated.
-            mark_failed_as_stale: Whether to mark the user's device list as stale
-                if the attempt to resync failed.
-        Returns:
-            A dict with device info as under the "devices" in the result of this
-            request:
-            https://matrix.org/docs/spec/server_server/r0.1.2#get-matrix-federation-v1-user-devices-userid
-            None when we weren't able to fetch the device info for some reason,
-            e.g. due to a connection problem.
-        """
-        return (await self.multi_user_device_resync([user_id]))[user_id]
+        return await self._multi_user_device_resync_client(user_ids=user_ids)


 class DeviceListUpdater(DeviceListWorkerUpdater):
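With the single-user `user_device_resync` removed here, callers pass a one-element list to `multi_user_device_resync` and index the returned dict, which is the pattern the remaining hunks in this file adopt. A hedged sketch of the call site (the `updater` instance is assumed, not shown in the diff):

    # Hypothetical call site mirroring the hunks below; `updater` is assumed
    # to be a DeviceListUpdater instance.
    async def resync_one(updater, user_id: str):
        # The per-user result is None when the resync failed, per the
        # removed docstring above.
        return (await updater.multi_user_device_resync([user_id]))[user_id]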
@@ -1131,7 +1096,7 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
         )

         if resync:
-            await self.user_device_resync(user_id)
+            await self.multi_user_device_resync([user_id])
         else:
             # Simply update the single device, since we know that is the only
             # change (because of the single prev_id matching the current cache)
@@ -1198,10 +1163,9 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
         for user_id in need_resync:
             try:
                 # Try to resync the current user's devices list.
-                result = await self.user_device_resync(
-                    user_id=user_id,
-                    mark_failed_as_stale=False,
-                )
+                result = (await self.multi_user_device_resync([user_id], False))[
+                    user_id
+                ]

                 # user_device_resync only returns a result if it managed to
                 # successfully resync and update the database. Updating the table
@@ -1260,18 +1224,6 @@ class DeviceListUpdater(DeviceListWorkerUpdater):
         return result

-    async def user_device_resync(
-        self, user_id: str, mark_failed_as_stale: bool = True
-    ) -> Optional[JsonDict]:
-        result, failed = await self._user_device_resync_returning_failed(user_id)
-
-        if failed and mark_failed_as_stale:
-            # Mark the remote user's device list as stale so we know we need to retry
-            # it later.
-            await self.store.mark_remote_users_device_caches_as_stale((user_id,))
-
-        return result
-
     async def _user_device_resync_returning_failed(
         self, user_id: str
     ) -> Tuple[Optional[JsonDict], bool]:
@@ -25,7 +25,9 @@ from synapse.logging.opentracing import (
     log_kv,
     set_tag,
 )
-from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
+from synapse.replication.http.devices import (
+    ReplicationMultiUserDevicesResyncRestServlet,
+)
 from synapse.types import JsonDict, Requester, StreamKeyType, UserID, get_domain_from_id
 from synapse.util import json_encoder
 from synapse.util.stringutils import random_string
@@ -71,12 +73,12 @@ class DeviceMessageHandler:
         # sync. We do all device list resyncing on the master instance, so if
         # we're on a worker we hit the device resync replication API.
         if hs.config.worker.worker_app is None:
-            self._user_device_resync = (
-                hs.get_device_handler().device_list_updater.user_device_resync
+            self._multi_user_device_resync = (
+                hs.get_device_handler().device_list_updater.multi_user_device_resync
             )
         else:
-            self._user_device_resync = (
-                ReplicationUserDevicesResyncRestServlet.make_client(hs)
+            self._multi_user_device_resync = (
+                ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
             )

         # a rate limiter for room key requests. The keys are
@@ -198,7 +200,7 @@ class DeviceMessageHandler:
         await self.store.mark_remote_users_device_caches_as_stale((sender_user_id,))

         # Immediately attempt a resync in the background
-        run_in_background(self._user_device_resync, user_id=sender_user_id)
+        run_in_background(self._multi_user_device_resync, user_ids=[sender_user_id])

     async def send_device_message(
         self,
@@ -70,7 +70,9 @@ from synapse.logging.opentracing import (
     trace,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet
+from synapse.replication.http.devices import (
+    ReplicationMultiUserDevicesResyncRestServlet,
+)
 from synapse.replication.http.federation import (
     ReplicationFederationSendEventsRestServlet,
 )
@@ -167,8 +169,8 @@ class FederationEventHandler:
         self._send_events = ReplicationFederationSendEventsRestServlet.make_client(hs)
         if hs.config.worker.worker_app:
-            self._user_device_resync = (
-                ReplicationUserDevicesResyncRestServlet.make_client(hs)
+            self._multi_user_device_resync = (
+                ReplicationMultiUserDevicesResyncRestServlet.make_client(hs)
             )
         else:
             self._device_list_updater = hs.get_device_handler().device_list_updater
@@ -1487,9 +1489,11 @@ class FederationEventHandler:
                 # Immediately attempt a resync in the background
                 if self._config.worker.worker_app:
-                    await self._user_device_resync(user_id=sender)
+                    await self._multi_user_device_resync(user_ids=[sender])
                 else:
-                    await self._device_list_updater.user_device_resync(sender)
+                    await self._device_list_updater.multi_user_device_resync(
+                        user_ids=[sender]
+                    )
             except Exception:
                 logger.exception("Failed to resync device for %s", sender)
@@ -28,62 +28,6 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-class ReplicationUserDevicesResyncRestServlet(ReplicationEndpoint):
-    """Ask master to resync the device list for a user by contacting their
-    server.
-
-    This must happen on master so that the results can be correctly cached in
-    the database and streamed to workers.
-
-    Request format:
-
-        POST /_synapse/replication/user_device_resync/:user_id
-
-        {}
-
-    Response is equivalent to ` /_matrix/federation/v1/user/devices/:user_id`
-    response, e.g.:
-
-        {
-            "user_id": "@alice:example.org",
-            "devices": [
-                {
-                    "device_id": "JLAFKJWSCS",
-                    "keys": { ... },
-                    "device_display_name": "Alice's Mobile Phone"
-                }
-            ]
-        }
-    """
-
-    NAME = "user_device_resync"
-    PATH_ARGS = ("user_id",)
-    CACHE = False
-
-    def __init__(self, hs: "HomeServer"):
-        super().__init__(hs)
-
-        from synapse.handlers.device import DeviceHandler
-
-        handler = hs.get_device_handler()
-        assert isinstance(handler, DeviceHandler)
-        self.device_list_updater = handler.device_list_updater
-
-        self.store = hs.get_datastores().main
-        self.clock = hs.get_clock()
-
-    @staticmethod
-    async def _serialize_payload(user_id: str) -> JsonDict:  # type: ignore[override]
-        return {}
-
-    async def _handle_request(  # type: ignore[override]
-        self, request: Request, content: JsonDict, user_id: str
-    ) -> Tuple[int, Optional[JsonDict]]:
-        user_devices = await self.device_list_updater.user_device_resync(user_id)
-
-        return 200, user_devices
-
-
 class ReplicationMultiUserDevicesResyncRestServlet(ReplicationEndpoint):
     """Ask master to resync the device list for multiple users from the same
     remote server by contacting their server.
@@ -216,6 +160,5 @@ class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint):
 def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
-    ReplicationUserDevicesResyncRestServlet(hs).register(http_server)
     ReplicationMultiUserDevicesResyncRestServlet(hs).register(http_server)
     ReplicationUploadKeysForUserRestServlet(hs).register(http_server)
@@ -155,7 +155,7 @@ class RemoteKey(RestServlet):
         for key_id in key_ids:
             store_queries.append((server_name, key_id, None))

-        cached = await self.store.get_server_keys_json(store_queries)
+        cached = await self.store.get_server_keys_json_for_remote(store_queries)

         json_results: Set[bytes] = set()
@@ -14,10 +14,12 @@
 # limitations under the License.

 import itertools
+import json
 import logging
 from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple

 from signedjson.key import decode_verify_key_bytes
+from unpaddedbase64 import decode_base64

 from synapse.storage._base import SQLBaseStore
 from synapse.storage.database import LoggingTransaction
@@ -36,15 +38,16 @@ class KeyStore(SQLBaseStore):
     """Persistence for signature verification keys"""

     @cached()
-    def _get_server_verify_key(
+    def _get_server_signature_key(
         self, server_name_and_key_id: Tuple[str, str]
     ) -> FetchKeyResult:
         raise NotImplementedError()

     @cachedList(
-        cached_method_name="_get_server_verify_key", list_name="server_name_and_key_ids"
+        cached_method_name="_get_server_signature_key",
+        list_name="server_name_and_key_ids",
     )
-    async def get_server_verify_keys(
+    async def get_server_signature_keys(
         self, server_name_and_key_ids: Iterable[Tuple[str, str]]
     ) -> Dict[Tuple[str, str], FetchKeyResult]:
         """
@@ -62,10 +65,12 @@ class KeyStore(SQLBaseStore):
             """Processes a batch of keys to fetch, and adds the result to `keys`."""

             # batch_iter always returns tuples so it's safe to do len(batch)
-            sql = (
-                "SELECT server_name, key_id, verify_key, ts_valid_until_ms "
-                "FROM server_signature_keys WHERE 1=0"
-            ) + " OR (server_name=? AND key_id=?)" * len(batch)
+            sql = """
+                SELECT server_name, key_id, verify_key, ts_valid_until_ms
+                FROM server_signature_keys WHERE 1=0
+            """ + " OR (server_name=? AND key_id=?)" * len(
+                batch
+            )

             txn.execute(sql, tuple(itertools.chain.from_iterable(batch)))
@@ -89,9 +94,9 @@ class KeyStore(SQLBaseStore):
             _get_keys(txn, batch)
             return keys

-        return await self.db_pool.runInteraction("get_server_verify_keys", _txn)
+        return await self.db_pool.runInteraction("get_server_signature_keys", _txn)

-    async def store_server_verify_keys(
+    async def store_server_signature_keys(
         self,
         from_server: str,
         ts_added_ms: int,
@@ -119,7 +124,7 @@ class KeyStore(SQLBaseStore):
             )
         )
         # invalidate takes a tuple corresponding to the params of
-        # _get_server_verify_key. _get_server_verify_key only takes one
+        # _get_server_signature_key. _get_server_signature_key only takes one
         # param, which is itself the 2-tuple (server_name, key_id).
         invalidations.append((server_name, key_id))
@@ -134,10 +139,10 @@ class KeyStore(SQLBaseStore):
                 "verify_key",
             ),
             value_values=value_values,
-            desc="store_server_verify_keys",
+            desc="store_server_signature_keys",
         )

-        invalidate = self._get_server_verify_key.invalidate
+        invalidate = self._get_server_signature_key.invalidate
         for i in invalidations:
             invalidate((i,))
@@ -180,7 +185,75 @@ class KeyStore(SQLBaseStore):
             desc="store_server_keys_json",
         )

+        # invalidate takes a tuple corresponding to the params of
+        # _get_server_keys_json. _get_server_keys_json only takes one
+        # param, which is itself the 2-tuple (server_name, key_id).
+        self._get_server_keys_json.invalidate(((server_name, key_id),))
+
+    @cached()
+    def _get_server_keys_json(
+        self, server_name_and_key_id: Tuple[str, str]
+    ) -> FetchKeyResult:
+        raise NotImplementedError()
+
+    @cachedList(
+        cached_method_name="_get_server_keys_json", list_name="server_name_and_key_ids"
+    )
+    async def get_server_keys_json(
+        self, server_name_and_key_ids: Iterable[Tuple[str, str]]
+    ) -> Dict[Tuple[str, str], FetchKeyResult]:
+        """
+        Args:
+            server_name_and_key_ids:
+                iterable of (server_name, key-id) tuples to fetch keys for
+
+        Returns:
+            A map from (server_name, key_id) -> FetchKeyResult, or None if the
+            key is unknown
+        """
+        keys = {}
+
+        def _get_keys(txn: Cursor, batch: Tuple[Tuple[str, str], ...]) -> None:
+            """Processes a batch of keys to fetch, and adds the result to `keys`."""
+
+            # batch_iter always returns tuples so it's safe to do len(batch)
+            sql = """
+                SELECT server_name, key_id, key_json, ts_valid_until_ms
+                FROM server_keys_json WHERE 1=0
+            """ + " OR (server_name=? AND key_id=?)" * len(
+                batch
+            )
+
+            txn.execute(sql, tuple(itertools.chain.from_iterable(batch)))
+
+            for server_name, key_id, key_json_bytes, ts_valid_until_ms in txn:
+                if ts_valid_until_ms is None:
+                    # Old keys may be stored with a ts_valid_until_ms of null,
+                    # in which case we treat this as if it was set to `0`, i.e.
+                    # it won't match key requests that define a minimum
+                    # `ts_valid_until_ms`.
+                    ts_valid_until_ms = 0
+
+                # The entire signed JSON response is stored in server_keys_json,
+                # fetch out the bits needed.
+                key_json = json.loads(bytes(key_json_bytes))
+                key_base64 = key_json["verify_keys"][key_id]["key"]
+
+                keys[(server_name, key_id)] = FetchKeyResult(
+                    verify_key=decode_verify_key_bytes(
+                        key_id, decode_base64(key_base64)
+                    ),
+                    valid_until_ts=ts_valid_until_ms,
+                )
+
+        def _txn(txn: Cursor) -> Dict[Tuple[str, str], FetchKeyResult]:
+            for batch in batch_iter(server_name_and_key_ids, 50):
+                _get_keys(txn, batch)
+            return keys
+
+        return await self.db_pool.runInteraction("get_server_keys_json", _txn)
+
-    async def get_server_keys_json(
+    async def get_server_keys_json_for_remote(
         self, server_keys: Iterable[Tuple[str, Optional[str], Optional[str]]]
     ) -> Dict[Tuple[str, Optional[str], Optional[str]], List[Dict[str, Any]]]:
         """Retrieve the key json for a list of server_keys and key ids.
@@ -188,8 +261,10 @@ class KeyStore(SQLBaseStore):
         that server, key_id, and source triplet entry will be an empty list.
         The JSON is returned as a byte array so that it can be efficiently
         used in an HTTP response.

+        Args:
+            server_keys: List of (server_name, key_id, source) triplets.
+
         Returns:
             A mapping from (server_name, key_id, source) triplets to a list of dicts
         """
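After this change the store exposes two distinct lookups: the new cached `get_server_keys_json` parses the stored response into `FetchKeyResult`s for local verification, while the renamed `get_server_keys_json_for_remote` keeps returning the raw stored JSON for serving federation key queries. A hedged usage sketch (the `store` instance, server name, and key id are assumed for illustration):

    # Hypothetical usage; `store` is assumed to be the main datastore.
    async def lookup_examples(store):
        # Parsed keys for local signature verification:
        parsed = await store.get_server_keys_json([("example.org", "ed25519:abc")])
        result = parsed.get(("example.org", "ed25519:abc"))  # FetchKeyResult, or absent

        # Raw stored JSON for a federation key-query response:
        raw = await store.get_server_keys_json_for_remote(
            [("example.org", "ed25519:abc", None)]
        )
        return result, raw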
@@ -190,10 +190,23 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         kr = keyring.Keyring(self.hs)

         key1 = signedjson.key.generate_signing_key("1")
-        r = self.hs.get_datastores().main.store_server_verify_keys(
+        r = self.hs.get_datastores().main.store_server_keys_json(
             "server9",
-            int(time.time() * 1000),
-            {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), 1000)},
+            get_key_id(key1),
+            from_server="test",
+            ts_now_ms=int(time.time() * 1000),
+            ts_expires_ms=1000,
+            # The entire response gets signed & stored, just include the bits we
+            # care about.
+            key_json_bytes=canonicaljson.encode_canonical_json(
+                {
+                    "verify_keys": {
+                        get_key_id(key1): {
+                            "key": encode_verify_key_base64(get_verify_key(key1))
+                        }
+                    }
+                }
+            ),
         )
         self.get_success(r)
@@ -280,17 +293,13 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         mock_fetcher = Mock()
         mock_fetcher.get_keys = Mock(return_value=make_awaitable({}))

-        kr = keyring.Keyring(
-            self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
-        )
-
         key1 = signedjson.key.generate_signing_key("1")
-        r = self.hs.get_datastores().main.store_server_verify_keys(
+        r = self.hs.get_datastores().main.store_server_signature_keys(
             "server9",
             int(time.time() * 1000),
             # None is not a valid value in FetchKeyResult, but we're abusing this
             # API to insert null values into the database. The nulls get converted
-            # to 0 when fetched in KeyStore.get_server_verify_keys.
+            # to 0 when fetched in KeyStore.get_server_signature_keys.
             {("server9", get_key_id(key1)): FetchKeyResult(get_verify_key(key1), None)},  # type: ignore[arg-type]
         )
         self.get_success(r)
@@ -298,27 +307,12 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         json1: JsonDict = {}
         signedjson.sign.sign_json(json1, "server9", key1)

-        # should fail immediately on an unsigned object
-        d = kr.verify_json_for_server("server9", {}, 0)
-        self.get_failure(d, SynapseError)
-
-        # should fail on a signed object with a non-zero minimum_valid_until_ms,
-        # as it tries to refetch the keys and fails.
-        d = kr.verify_json_for_server("server9", json1, 500)
-        self.get_failure(d, SynapseError)
-
-        # We expect the keyring tried to refetch the key once.
-        mock_fetcher.get_keys.assert_called_once_with(
-            "server9", [get_key_id(key1)], 500
-        )
-
-        # should succeed on a signed object with a 0 minimum_valid_until_ms
-        d = kr.verify_json_for_server(
-            "server9",
-            json1,
-            0,
+        d = self.hs.get_datastores().main.get_server_signature_keys(
+            [("server9", get_key_id(key1))]
         )
-        self.get_success(d)
+        result = self.get_success(d)
+        self.assertEquals(result[("server9", get_key_id(key1))].valid_until_ts, 0)

     def test_verify_json_dedupes_key_requests(self) -> None:
         """Two requests for the same key should be deduped."""
@@ -464,7 +458,9 @@ class ServerKeyFetcherTestCase(unittest.HomeserverTestCase):
         # check that the perspectives store is correctly updated
         lookup_triplet = (SERVER_NAME, testverifykey_id, None)
         key_json = self.get_success(
-            self.hs.get_datastores().main.get_server_keys_json([lookup_triplet])
+            self.hs.get_datastores().main.get_server_keys_json_for_remote(
+                [lookup_triplet]
+            )
         )
         res_keys = key_json[lookup_triplet]
         self.assertEqual(len(res_keys), 1)
@@ -582,7 +578,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         # check that the perspectives store is correctly updated
         lookup_triplet = (SERVER_NAME, testverifykey_id, None)
         key_json = self.get_success(
-            self.hs.get_datastores().main.get_server_keys_json([lookup_triplet])
+            self.hs.get_datastores().main.get_server_keys_json_for_remote(
+                [lookup_triplet]
+            )
         )
         res_keys = key_json[lookup_triplet]
         self.assertEqual(len(res_keys), 1)
@@ -703,7 +701,9 @@ class PerspectivesKeyFetcherTestCase(unittest.HomeserverTestCase):
         # check that the perspectives store is correctly updated
         lookup_triplet = (SERVER_NAME, testverifykey_id, None)
         key_json = self.get_success(
-            self.hs.get_datastores().main.get_server_keys_json([lookup_triplet])
+            self.hs.get_datastores().main.get_server_keys_json_for_remote(
+                [lookup_triplet]
+            )
         )
         res_keys = key_json[lookup_triplet]
         self.assertEqual(len(res_keys), 1)
@@ -37,13 +37,13 @@ KEY_2 = decode_verify_key_base64(
 class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
-    def test_get_server_verify_keys(self) -> None:
+    def test_get_server_signature_keys(self) -> None:
         store = self.hs.get_datastores().main

         key_id_1 = "ed25519:key1"
         key_id_2 = "ed25519:KEY_ID_2"
         self.get_success(
-            store.store_server_verify_keys(
+            store.store_server_signature_keys(
                 "from_server",
                 10,
                 {
@@ -54,7 +54,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
         )

         res = self.get_success(
-            store.get_server_verify_keys(
+            store.get_server_signature_keys(
                 [
                     ("server1", key_id_1),
                     ("server1", key_id_2),
@@ -87,7 +87,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
         key_id_2 = "ed25519:key2"

         self.get_success(
-            store.store_server_verify_keys(
+            store.store_server_signature_keys(
                 "from_server",
                 0,
                 {
@@ -98,7 +98,7 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
         )

         res = self.get_success(
-            store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
+            store.get_server_signature_keys([("srv1", key_id_1), ("srv1", key_id_2)])
         )
         self.assertEqual(len(res.keys()), 2)
@@ -111,20 +111,20 @@ class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
         self.assertEqual(res2.valid_until_ts, 200)

         # we should be able to look up the same thing again without a db hit
-        res = self.get_success(store.get_server_verify_keys([("srv1", key_id_1)]))
+        res = self.get_success(store.get_server_signature_keys([("srv1", key_id_1)]))
         self.assertEqual(len(res.keys()), 1)
         self.assertEqual(res[("srv1", key_id_1)].verify_key, KEY_1)

         new_key_2 = signedjson.key.get_verify_key(
             signedjson.key.generate_signing_key("key2")
         )
-        d = store.store_server_verify_keys(
+        d = store.store_server_signature_keys(
             "from_server", 10, {("srv1", key_id_2): FetchKeyResult(new_key_2, 300)}
         )
         self.get_success(d)

         res = self.get_success(
-            store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
+            store.get_server_signature_keys([("srv1", key_id_1), ("srv1", key_id_2)])
         )
         self.assertEqual(len(res.keys()), 2)
@@ -267,7 +267,9 @@ class MessageAcceptTests(unittest.HomeserverTestCase):
         # Resync the device list.
         device_handler = self.hs.get_device_handler()
         self.get_success(
-            device_handler.device_list_updater.user_device_resync(remote_user_id),
+            device_handler.device_list_updater.multi_user_device_resync(
+                [remote_user_id]
+            ),
         )

         # Retrieve the cross-signing keys for this user.
@@ -16,6 +16,7 @@
 import gc
 import hashlib
 import hmac
+import json
 import logging
 import secrets
 import time
@@ -53,6 +54,7 @@ from twisted.web.server import Request
 from synapse import events
 from synapse.api.constants import EventTypes
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
+from synapse.config._base import Config, RootConfig
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.crypto.event_signing import add_hashes_and_signatures
@@ -67,7 +69,6 @@ from synapse.logging.context import (
 )
 from synapse.rest import RegisterServletsFunc
 from synapse.server import HomeServer
-from synapse.storage.keys import FetchKeyResult
 from synapse.types import JsonDict, Requester, UserID, create_requester
 from synapse.util import Clock
 from synapse.util.httpresourcetree import create_resource_tree
@@ -124,6 +125,63 @@ def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]:
     return _around


+_TConfig = TypeVar("_TConfig", Config, RootConfig)
+
+
+def deepcopy_config(config: _TConfig) -> _TConfig:
+    new_config: _TConfig
+
+    if isinstance(config, RootConfig):
+        new_config = config.__class__(config.config_files)  # type: ignore[arg-type]
+    else:
+        new_config = config.__class__(config.root)
+
+    for attr_name in config.__dict__:
+        if attr_name.startswith("__") or attr_name == "root":
+            continue
+        attr = getattr(config, attr_name)
+        if isinstance(attr, Config):
+            new_attr = deepcopy_config(attr)
+        else:
+            new_attr = attr
+
+        setattr(new_config, attr_name, new_attr)
+
+    return new_config
+
+
+_make_homeserver_config_obj_cache: Dict[str, Union[RootConfig, Config]] = {}
+
+
+def make_homeserver_config_obj(config: Dict[str, Any]) -> RootConfig:
+    """Creates a :class:`HomeServerConfig` instance with the given configuration dict.
+
+    This is equivalent to::
+
+        config_obj = HomeServerConfig()
+        config_obj.parse_config_dict(config, "", "")
+
+    but it keeps a cache of `HomeServerConfig` instances and deepcopies them as needed,
+    to avoid validating the whole configuration every time.
+    """
+    cache_key = json.dumps(config)
+
+    if cache_key in _make_homeserver_config_obj_cache:
+        # Cache hit: reuse the existing instance
+        config_obj = _make_homeserver_config_obj_cache[cache_key]
+    else:
+        # Cache miss; create the actual instance
+        config_obj = HomeServerConfig()
+        config_obj.parse_config_dict(config, "", "")
+
+        # Add to the cache
+        _make_homeserver_config_obj_cache[cache_key] = config_obj
+
+    assert isinstance(config_obj, RootConfig)
+
+    return deepcopy_config(config_obj)
+
+
 class TestCase(unittest.TestCase):
     """A subclass of twisted.trial's TestCase which looks for 'loglevel'
     attributes on both itself and its individual test methods, to override the
@@ -528,8 +586,7 @@ class HomeserverTestCase(TestCase):
         config = kwargs["config"]

         # Parse the config from a config dict into a HomeServerConfig
-        config_obj = HomeServerConfig()
-        config_obj.parse_config_dict(config, "", "")
+        config_obj = make_homeserver_config_obj(config)
         kwargs["config"] = config_obj

         async def run_bg_updates() -> None:
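Taken together with the helpers added above, config parsing is now validated once per distinct config dict and deep-copied per test. A brief hedged illustration (hypothetical test-style code, assuming the helpers from the previous hunks are importable):

    # Hypothetical illustration of the caching behaviour.
    config = {"server_name": "test", "report_stats": False}
    first = make_homeserver_config_obj(config)
    second = make_homeserver_config_obj(config)
    # Parsed and validated once, but each caller gets an independent copy,
    # so one test cannot mutate another test's config.
    assert first is not second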
@@ -790,15 +847,23 @@ class FederatingHomeserverTestCase(HomeserverTestCase):
         verify_key_id = "%s:%s" % (verify_key.alg, verify_key.version)

         self.get_success(
-            hs.get_datastores().main.store_server_verify_keys(
+            hs.get_datastores().main.store_server_keys_json(
                 self.OTHER_SERVER_NAME,
+                verify_key_id,
                 from_server=self.OTHER_SERVER_NAME,
-                ts_added_ms=clock.time_msec(),
-                verify_keys={
-                    (self.OTHER_SERVER_NAME, verify_key_id): FetchKeyResult(
-                        verify_key=verify_key,
-                        valid_until_ts=clock.time_msec() + 10000,
-                    ),
-                },
+                ts_now_ms=clock.time_msec(),
+                ts_expires_ms=clock.time_msec() + 10000,
+                key_json_bytes=canonicaljson.encode_canonical_json(
+                    {
+                        "verify_keys": {
+                            verify_key_id: {
+                                "key": signedjson.key.encode_verify_key_base64(
+                                    verify_key
+                                )
+                            }
+                        }
+                    }
+                ),
             )
         )
@@ -131,6 +131,9 @@ def default_config(
     # the test signing key is just an arbitrary ed25519 key to keep the config
     # parser happy
     "signing_key": "ed25519 a_lPym qvioDNmfExFBRPgdTU+wtFYKq4JfwFRv7sYVgWvmgJg",
+    # Disable trusted key servers, otherwise unit tests might try to actually
+    # reach out to matrix.org.
+    "trusted_key_servers": [],
     "event_cache_size": 1,
     "enable_registration": True,
     "enable_registration_captcha": False,