Compare commits

23 Commits

mv/key_req ... release-v1

| Author | SHA1 | Date |
|---|---|---|
| | 86a83bbc15 | |
| | 1294d10c70 | |
| | 718d7dfef2 | |
| | 664ba14080 | |
| | 649848627c | |
| | 13fc89148c | |
| | 10ed3e233e | |
| | 472c2c72f6 | |
| | 78cfa55dad | |
| | 14c1bfd534 | |
| | 70dc44f667 | |
| | 25c55a9d22 | |
| | 52d8131e87 | |
| | 53ea381ec3 | |
| | 6e65ca0b36 | |
| | d535473520 | |
| | e0c39d6bb5 | |
| | 289ce3b8d9 | |
| | 6c749c5124 | |
| | 496f73103d | |
| | 1fcefd8f3e | |
| | 7d3da399dd | |
| | 6a5cf1a759 | |
.github/workflows/tests.yml (4 changes, vendored)

@@ -399,8 +399,8 @@ jobs:
      env:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.job.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') && 1}}
        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') && 1 }}
        MULTI_POSTGRES: ${{ (matrix.job.postgres == 'multi-postgres') || '' }}
        ASYNCIO_REACTOR: ${{ (matrix.job.reactor == 'asyncio') || '' }}
        WORKERS: ${{ matrix.job.workers && 1 }}
        BLACKLIST: ${{ matrix.job.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}
CHANGES.md (2840 changes): file diff suppressed because it is too large.
Cargo.lock (4 changes, generated)

@@ -340,9 +340,9 @@ dependencies = [

[[package]]
name = "serde_json"
version = "1.0.97"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a"
checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3"
dependencies = [
 "itoa",
 "ryu",
book.toml (12 changes)

@@ -34,6 +34,14 @@ additional-css = [
    "docs/website_files/table-of-contents.css",
    "docs/website_files/remove-nav-buttons.css",
    "docs/website_files/indent-section-headers.css",
    "docs/website_files/version-picker.css",
]
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
additional-js = [
    "docs/website_files/table-of-contents.js",
    "docs/website_files/version-picker.js",
    "docs/website_files/version.js",
]
theme = "docs/website_files/theme"

[preprocessor.schema_versions]
command = "./scripts-dev/schema_versions.py"
@@ -1 +0,0 @@
Replace `EventContext` fields `prev_group` and `delta_ids` with field `state_group_deltas`.

@@ -1 +0,0 @@
Fix a long-standing bug where media files were served in an unsafe manner. Contributed by @joshqou.

@@ -1 +0,0 @@
Improve `/messages` response time by avoiding backfill when we already have messages to return.

@@ -1 +0,0 @@
Regularly try to send transactions to other servers after they failed instead of waiting for a new event to be available before trying.

@@ -1 +0,0 @@
Remove experimental [MSC2716](https://github.com/matrix-org/matrix-spec-proposals/pull/2716) implementation to incrementally import history into existing rooms.

@@ -1 +0,0 @@
Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983).

@@ -1 +0,0 @@
Avoid invalidating a cache that was just prefilled.

@@ -1 +0,0 @@
Fix requesting multiple keys at once over federation, related to [MSC3983](https://github.com/matrix-org/matrix-spec-proposals/pull/3983).

@@ -1 +0,0 @@
Document `looping_call()` functionality that will wait for the given function to finish before scheduling another.

@@ -1 +0,0 @@
Fix joining rooms through aliases where the alias server isn't a real homeserver. Contributed by @tulir @ Beeper.

@@ -1 +0,0 @@
Fix a bug in push rules handling leading to an invalid (per spec) `is_user_mention` rule sent to clients. Also fix wrong rule names for `is_user_mention` and `is_room_mention`.

@@ -1 +0,0 @@
Fix a bug introduced in 1.57.0 where the wrong table would be locked on updating database rows when using SQLite as the database backend.

@@ -1 +0,0 @@
Fix a typo in the [Admin API](https://matrix-org.github.io/synapse/latest/usage/administration/admin_api/index.html).

@@ -1 +0,0 @@
Switch from `matrix://` to `matrix-federation://` scheme for internal Synapse routing of outbound federation traffic.

@@ -1 +0,0 @@
Ignore key request if the device inbox is already big.
debian/changelog (18 changes, vendored)

@@ -1,3 +1,21 @@
matrix-synapse-py3 (1.87.0) stable; urgency=medium

  * New Synapse release 1.87.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Jul 2023 16:24:00 +0100

matrix-synapse-py3 (1.87.0~rc1) stable; urgency=medium

  * New synapse release 1.87.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 27 Jun 2023 15:27:04 +0000

matrix-synapse-py3 (1.86.0) stable; urgency=medium

  * New Synapse release 1.86.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Jun 2023 17:22:46 +0200

matrix-synapse-py3 (1.86.0~rc2) stable; urgency=medium

  * New Synapse release 1.86.0rc2.
docs/changelogs/CHANGES-2022.md (2766 changes, new file): file diff suppressed because it is too large.
@@ -6,7 +6,7 @@ This is a work-in-progress set of notes with two goals:

See also [MSC3902](https://github.com/matrix-org/matrix-spec-proposals/pull/3902).

The key idea is described by [MSC706](https://github.com/matrix-org/matrix-spec-proposals/pull/3902). This allows servers to
The key idea is described by [MSC3706](https://github.com/matrix-org/matrix-spec-proposals/pull/3706). This allows servers to
request a lightweight response to the federation `/send_join` endpoint.
This is called a **faster join**, also known as a **partial join**. In these
notes we'll usually use the word "partial" as it matches the database schema.
@@ -348,6 +348,42 @@ callback returns `False`, Synapse falls through to the next one. The value of th
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.

### `check_login_for_spam`

_First introduced in Synapse v1.87.0_

```python
async def check_login_for_spam(
    user_id: str,
    device_id: Optional[str],
    initial_display_name: Optional[str],
    request_info: Collection[Tuple[Optional[str], str]],
    auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```

Called when a user logs in.

The arguments passed to this callback are:

* `user_id`: The user ID the user is logging in with.
* `device_id`: The device ID the user is re-logging into.
* `initial_display_name`: The device display name, if any.
* `request_info`: A collection of tuples, whose first item is a user agent and whose
  second item is an IP address. These user agents and IP addresses are the ones that were
  used during the login process.
* `auth_provider_id`: The identifier of the SSO authentication provider, if any.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `synapse.module_api.NOT_SPAM`, Synapse falls through to the next one.
The value of the first callback that does not return `synapse.module_api.NOT_SPAM` will
be used. If this happens, Synapse will not call any of the subsequent implementations of
this callback.

*Note:* This will not be called when a user registers.


## Example

The example below is a module that implements the spam checker callback
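The module implementing that example is not shown in this hunk. A minimal sketch of such a module, assuming the standard `(config, api)` module constructor and the `register_spam_checker_callbacks` API extended later in this diff, might look like the following; the class name and the `blocked_user_agents` option are hypothetical:

```python
from typing import Collection, Optional, Tuple, Union

import synapse.module_api
from synapse.module_api import ModuleApi
from synapse.module_api.errors import Codes


class ExampleLoginSpamChecker:
    """Illustrative sketch only, not the example from the official docs."""

    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        # Hypothetical module option: substrings of user agents we refuse to log in.
        self._blocked_user_agents = config.get("blocked_user_agents", [])
        api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union["synapse.module_api.NOT_SPAM", Codes]:
        # request_info is a collection of (user_agent, ip) tuples seen during login.
        for user_agent, _ip in request_info:
            if user_agent and any(
                blocked in user_agent for blocked in self._blocked_user_agents
            ):
                return Codes.FORBIDDEN
        return synapse.module_api.NOT_SPAM
```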
@@ -1196,6 +1196,32 @@ Example configuration:
allow_device_name_lookup_over_federation: true
```
---
### `federation`

The federation section defines some sub-options related to federation.

The following options are related to configuring timeout and retry logic for one request,
independently of the others.
The short retry algorithm is used when something or someone will wait for the request to have an
answer, while the long retry algorithm is used for requests that happen in the background,
like sending a federation transaction.

* `client_timeout`: timeout for the federation requests. Defaults to 60s.
* `max_short_retry_delay`: maximum delay to be used for the short retry algo. Defaults to 2s.
* `max_long_retry_delay`: maximum delay to be used for the long retry algo. Defaults to 60s.
* `max_short_retries`: maximum number of retries for the short retry algo. Defaults to 3 attempts.
* `max_long_retries`: maximum number of retries for the long retry algo. Defaults to 10 attempts.

Example configuration:
```yaml
federation:
  client_timeout: 180s
  max_short_retry_delay: 7s
  max_long_retry_delay: 100s
  max_short_retries: 5
  max_long_retries: 20
```
---
## Caching

Options related to caching.
@@ -24,6 +24,11 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting the
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.

In addition to these modifications, we have added a version picker to the documentation.
Users can switch between the documentation for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.

More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and
@@ -131,6 +131,18 @@
                <i class="fa fa-search"></i>
            </button>
        {{/if}}
        <div class="version-picker">
            <div class="dropdown">
                <div class="select">
                    <span></span>
                    <i class="fa fa-chevron-down"></i>
                </div>
                <input type="hidden" name="version">
                <ul class="dropdown-menu">
                    <!-- Versions will be added dynamically in version-picker.js -->
                </ul>
            </div>
        </div>
    </div>

    <h1 class="menu-title">{{ book_title }}</h1>
@@ -309,4 +321,4 @@
{{/if}}

</body>
</html>
</html>
docs/website_files/version-picker.css (78 lines, new file)

@@ -0,0 +1,78 @@
.version-picker {
    display: flex;
    align-items: center;
}

.version-picker .dropdown {
    width: 130px;
    max-height: 29px;
    margin-left: 10px;
    display: inline-block;
    border-radius: 4px;
    border: 1px solid var(--theme-popup-border);
    position: relative;
    font-size: 13px;
    color: var(--fg);
    height: 100%;
    text-align: left;
}
.version-picker .dropdown .select {
    cursor: pointer;
    display: block;
    padding: 5px 2px 5px 15px;
}
.version-picker .dropdown .select > i {
    font-size: 10px;
    color: var(--fg);
    cursor: pointer;
    float: right;
    line-height: 20px !important;
}
.version-picker .dropdown:hover {
    border: 1px solid var(--theme-popup-border);
}
.version-picker .dropdown:active {
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active:hover,
.version-picker .dropdown.active {
    border: 1px solid var(--theme-popup-border);
    border-radius: 2px 2px 0 0;
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active .select > i {
    transform: rotate(-180deg);
}
.version-picker .dropdown .dropdown-menu {
    position: absolute;
    background-color: var(--theme-popup-bg);
    width: 100%;
    left: -1px;
    right: 1px;
    margin-top: 1px;
    border: 1px solid var(--theme-popup-border);
    border-radius: 0 0 4px 4px;
    overflow: hidden;
    display: none;
    max-height: 300px;
    overflow-y: auto;
    z-index: 9;
}
.version-picker .dropdown .dropdown-menu li {
    font-size: 12px;
    padding: 6px 20px;
    cursor: pointer;
}
.version-picker .dropdown .dropdown-menu {
    padding: 0;
    list-style: none;
}
.version-picker .dropdown .dropdown-menu li:hover {
    background-color: var(--theme-hover);
}
.version-picker .dropdown .dropdown-menu li.active::before {
    display: inline-block;
    content: "✓";
    margin-inline-start: -14px;
    width: 14px;
}
docs/website_files/version-picker.js (127 lines, new file)

@@ -0,0 +1,127 @@

const dropdown = document.querySelector('.version-picker .dropdown');
const dropdownMenu = dropdown.querySelector('.dropdown-menu');

fetchVersions(dropdown, dropdownMenu).then(() => {
    initializeVersionDropdown(dropdown, dropdownMenu);
});

/**
 * Initialize the dropdown functionality for version selection.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 */
function initializeVersionDropdown(dropdown, dropdownMenu) {
    // Toggle the dropdown menu on click
    dropdown.addEventListener('click', function () {
        this.setAttribute('tabindex', 1);
        this.classList.toggle('active');
        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
    });

    // Remove the 'active' class and hide the dropdown menu on focusout
    dropdown.addEventListener('focusout', function () {
        this.classList.remove('active');
        dropdownMenu.style.display = 'none';
    });

    // Handle item selection within the dropdown menu
    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
    dropdownMenuItems.forEach(function (item) {
        item.addEventListener('click', function () {
            dropdownMenuItems.forEach(function (item) {
                item.classList.remove('active');
            });
            this.classList.add('active');
            dropdown.querySelector('span').textContent = this.textContent;
            dropdown.querySelector('input').value = this.getAttribute('id');

            window.location.href = changeVersion(window.location.href, this.textContent);
        });
    });
};

/**
 * This function fetches the available versions from a GitHub repository
 * and inserts them into the version picker.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
 */
function fetchVersions(dropdown, dropdownMenu) {
    return new Promise((resolve, reject) => {
        window.addEventListener("load", () => {

            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
                cache: "force-cache",
            }).then(res =>
                res.json()
            ).then(resObject => {
                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
                const versions = tree.map(item => item.path).sort(sortVersions);

                // Create a list of <li> items for versions
                versions.forEach((version) => {
                    const li = document.createElement("li");
                    li.textContent = version;
                    li.id = version;

                    if (window.SYNAPSE_VERSION === version) {
                        li.classList.add('active');
                        dropdown.querySelector('span').textContent = version;
                        dropdown.querySelector('input').value = version;
                    }

                    dropdownMenu.appendChild(li);
                });

                resolve(versions);

            }).catch(ex => {
                console.error("Failed to fetch version data", ex);
                reject(ex);
            })
        });
    });
}

/**
 * Custom sorting function to sort an array of version strings.
 *
 * @param {string} a - The first version string to compare.
 * @param {string} b - The second version string to compare.
 * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
 */
function sortVersions(a, b) {
    // Put 'develop' and 'latest' at the top
    if (a === 'develop' || a === 'latest') return -1;
    if (b === 'develop' || b === 'latest') return 1;

    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];

    return versionB.localeCompare(versionA);
}

/**
 * Change the version in a URL path.
 *
 * @param {string} url - The original URL to be modified.
 * @param {string} newVersion - The new version to replace the existing version in the URL.
 * @returns {string} The updated URL with the new version.
 */
function changeVersion(url, newVersion) {
    const parsedURL = new URL(url);
    const pathSegments = parsedURL.pathname.split('/');

    // Modify the version
    pathSegments[2] = newVersion;

    // Reconstruct the URL
    parsedURL.pathname = pathSegments.join('/');

    return parsedURL.href;
}
docs/website_files/version.js (1 line, new file)

@@ -0,0 +1 @@
window.SYNAPSE_VERSION = 'v1.87';
poetry.lock (182 changes, generated)

@@ -465,30 +465,30 @@ files = [

[[package]]
name = "cryptography"
version = "40.0.2"
version = "41.0.1"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
|
||||
{file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"},
|
||||
{file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"},
|
||||
{file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"},
|
||||
{file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"},
|
||||
{file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"},
|
||||
{file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"},
|
||||
{file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"},
|
||||
{file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"},
|
||||
{file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"},
|
||||
{file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"},
|
||||
{file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"},
|
||||
{file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"},
|
||||
{file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"},
|
||||
{file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"},
|
||||
{file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"},
|
||||
{file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"},
|
||||
{file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"},
|
||||
{file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"},
|
||||
{file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"},
|
||||
{file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"},
|
||||
{file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"},
|
||||
]

[package.dependencies]
@@ -497,12 +497,12 @@ cffi = ">=1.12"
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
pep8test = ["black", "check-manifest", "mypy", "ruff"]
sdist = ["setuptools-rust (>=0.11.4)"]
nox = ["nox"]
pep8test = ["black", "check-sdist", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"]
test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
tox = ["tox"]

[[package]]
name = "defusedxml"
@@ -1829,47 +1829,47 @@ files = [

[[package]]
name = "pydantic"
version = "1.10.9"
version = "1.10.10"
description = "Data validation and settings management using python type hints"
optional = false
python-versions = ">=3.7"
files = [
|
||||
{file = "pydantic-1.10.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e692dec4a40bfb40ca530e07805b1208c1de071a18d26af4a2a0d79015b352ca"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c52eb595db83e189419bf337b59154bdcca642ee4b2a09e5d7797e41ace783f"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939328fd539b8d0edf244327398a667b6b140afd3bf7e347cf9813c736211896"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b48d3d634bca23b172f47f2335c617d3fcb4b3ba18481c96b7943a4c634f5c8d"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f0b7628fb8efe60fe66fd4adadd7ad2304014770cdc1f4934db41fe46cc8825f"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e1aa5c2410769ca28aa9a7841b80d9d9a1c5f223928ca8bec7e7c9a34d26b1d4"},
|
||||
{file = "pydantic-1.10.9-cp310-cp310-win_amd64.whl", hash = "sha256:eec39224b2b2e861259d6f3c8b6290d4e0fbdce147adb797484a42278a1a486f"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d111a21bbbfd85c17248130deac02bbd9b5e20b303338e0dbe0faa78330e37e0"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e9aec8627a1a6823fc62fb96480abe3eb10168fd0d859ee3d3b395105ae19a7"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07293ab08e7b4d3c9d7de4949a0ea571f11e4557d19ea24dd3ae0c524c0c334d"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee829b86ce984261d99ff2fd6e88f2230068d96c2a582f29583ed602ef3fc2c"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b466a23009ff5cdd7076eb56aca537c745ca491293cc38e72bf1e0e00de5b91"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7847ca62e581e6088d9000f3c497267868ca2fa89432714e21a4fb33a04d52e8"},
|
||||
{file = "pydantic-1.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:7845b31959468bc5b78d7b95ec52fe5be32b55d0d09983a877cca6aedc51068f"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:517a681919bf880ce1dac7e5bc0c3af1e58ba118fd774da2ffcd93c5f96eaece"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67195274fd27780f15c4c372f4ba9a5c02dad6d50647b917b6a92bf00b3d301a"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2196c06484da2b3fded1ab6dbe182bdabeb09f6318b7fdc412609ee2b564c49a"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6257bb45ad78abacda13f15bde5886efd6bf549dd71085e64b8dcf9919c38b60"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3283b574b01e8dbc982080d8287c968489d25329a463b29a90d4157de4f2baaf"},
|
||||
{file = "pydantic-1.10.9-cp37-cp37m-win_amd64.whl", hash = "sha256:5f8bbaf4013b9a50e8100333cc4e3fa2f81214033e05ac5aa44fa24a98670a29"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9cd67fb763248cbe38f0593cd8611bfe4b8ad82acb3bdf2b0898c23415a1f82"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f50e1764ce9353be67267e7fd0da08349397c7db17a562ad036aa7c8f4adfdb6"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73ef93e5e1d3c8e83f1ff2e7fdd026d9e063c7e089394869a6e2985696693766"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:128d9453d92e6e81e881dd7e2484e08d8b164da5507f62d06ceecf84bf2e21d3"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ad428e92ab68798d9326bb3e5515bc927444a3d71a93b4a2ca02a8a5d795c572"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fab81a92f42d6d525dd47ced310b0c3e10c416bbfae5d59523e63ea22f82b31e"},
|
||||
{file = "pydantic-1.10.9-cp38-cp38-win_amd64.whl", hash = "sha256:963671eda0b6ba6926d8fc759e3e10335e1dc1b71ff2a43ed2efd6996634dafb"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:970b1bdc6243ef663ba5c7e36ac9ab1f2bfecb8ad297c9824b542d41a750b298"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7e1d5290044f620f80cf1c969c542a5468f3656de47b41aa78100c5baa2b8276"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83fcff3c7df7adff880622a98022626f4f6dbce6639a88a15a3ce0f96466cb60"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0da48717dc9495d3a8f215e0d012599db6b8092db02acac5e0d58a65248ec5bc"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0a2aabdc73c2a5960e87c3ffebca6ccde88665616d1fd6d3db3178ef427b267a"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9863b9420d99dfa9c064042304868e8ba08e89081428a1c471858aa2af6f57c4"},
|
||||
{file = "pydantic-1.10.9-cp39-cp39-win_amd64.whl", hash = "sha256:e7c9900b43ac14110efa977be3da28931ffc74c27e96ee89fbcaaf0b0fe338e1"},
|
||||
{file = "pydantic-1.10.9-py3-none-any.whl", hash = "sha256:6cafde02f6699ce4ff643417d1a9223716ec25e228ddc3b436fe7e2d25a1f305"},
|
||||
{file = "pydantic-1.10.9.tar.gz", hash = "sha256:95c70da2cd3b6ddf3b9645ecaa8d98f3d80c606624b6d245558d202cd23ea3be"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adad1ee4ab9888f12dac2529276704e719efcf472e38df7813f5284db699b4ec"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a7db03339893feef2092ff7b1afc9497beed15ebd4af84c3042a74abce02d48"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b3714b97ff84b2689654851c2426389bcabfac9080617bcf4306c69db606f6"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edfdf0a5abc5c9bf2052ebaec20e67abd52e92d257e4f2d30e02c354ed3e6030"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a3b30fd255eeeb63caa9483502ba96b7795ce5bf895c6a179b3d909d9f53a6"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db4c7f7e60ca6f7d6c1785070f3e5771fcb9b2d88546e334d2f2c3934d949028"},
|
||||
{file = "pydantic-1.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:a2d5be50ac4a0976817144c7d653e34df2f9436d15555189f5b6f61161d64183"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:566a04ba755e8f701b074ffb134ddb4d429f75d5dced3fbd829a527aafe74c71"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f79db3652ed743309f116ba863dae0c974a41b688242482638b892246b7db21d"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62376890b819bebe3c717a9ac841a532988372b7e600e76f75c9f7c128219d5"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4870f13a4fafd5bc3e93cff3169222534fad867918b188e83ee0496452978437"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:990027e77cda6072a566e433b6962ca3b96b4f3ae8bd54748e9d62a58284d9d7"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8c40964596809eb616d94f9c7944511f620a1103d63d5510440ed2908fc410af"},
|
||||
{file = "pydantic-1.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:ea9eebc2ebcba3717e77cdeee3f6203ffc0e78db5f7482c68b1293e8cc156e5e"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:762aa598f79b4cac2f275d13336b2dd8662febee2a9c450a49a2ab3bec4b385f"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dab5219659f95e357d98d70577b361383057fb4414cfdb587014a5f5c595f7b"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3d4ee957a727ccb5a36f1b0a6dbd9fad5dedd2a41eada99a8df55c12896e18d"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b69f9138dec566962ec65623c9d57bee44412d2fc71065a5f3ebb3820bdeee96"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7aa75d1bd9cc275cf9782f50f60cddaf74cbaae19b6ada2a28e737edac420312"},
|
||||
{file = "pydantic-1.10.10-cp37-cp37m-win_amd64.whl", hash = "sha256:9f62a727f5c590c78c2d12fda302d1895141b767c6488fe623098f8792255fe5"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aac218feb4af73db8417ca7518fb3bade4534fcca6e3fb00f84966811dd94450"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88546dc10a40b5b52cae87d64666787aeb2878f9a9b37825aedc2f362e7ae1da"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c41bbaae89e32fc582448e71974de738c055aef5ab474fb25692981a08df808a"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b71bd504d1573b0b722ae536e8ffb796bedeef978979d076bf206e77dcc55a5"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e088e3865a2270ecbc369924cd7d9fbc565667d9158e7f304e4097ebb9cf98dd"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3403a090db45d4027d2344859d86eb797484dfda0706cf87af79ace6a35274ef"},
|
||||
{file = "pydantic-1.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:e0014e29637125f4997c174dd6167407162d7af0da73414a9340461ea8573252"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9965e49c6905840e526e5429b09e4c154355b6ecc0a2f05492eda2928190311d"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:748d10ab6089c5d196e1c8be9de48274f71457b01e59736f7a09c9dc34f51887"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86936c383f7c38fd26d35107eb669c85d8f46dfceae873264d9bab46fe1c7dde"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a26841be620309a9697f5b1ffc47dce74909e350c5315ccdac7a853484d468a"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:409b810f387610cc7405ab2fa6f62bdf7ea485311845a242ebc0bd0496e7e5ac"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ce937a2a2c020bcad1c9fde02892392a1123de6dda906ddba62bfe8f3e5989a2"},
|
||||
{file = "pydantic-1.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:37ebddef68370e6f26243acc94de56d291e01227a67b2ace26ea3543cf53dd5f"},
|
||||
{file = "pydantic-1.10.10-py3-none-any.whl", hash = "sha256:a5939ec826f7faec434e2d406ff5e4eaf1716eb1f247d68cd3d0b3612f7b4c8a"},
|
||||
{file = "pydantic-1.10.10.tar.gz", hash = "sha256:3b8d5bd97886f9eb59260594207c9f57dce14a6f869c6ceea90188715d29921a"},
|
||||
]

[package.dependencies]
@@ -2245,28 +2245,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

[[package]]
name = "ruff"
version = "0.0.272"
version = "0.0.275"
description = "An extremely fast Python linter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
|
||||
{file = "ruff-0.0.272-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:ae9b57546e118660175d45d264b87e9b4c19405c75b587b6e4d21e6a17bf4fdf"},
|
||||
{file = "ruff-0.0.272-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1609b864a8d7ee75a8c07578bdea0a7db75a144404e75ef3162e0042bfdc100d"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee76b4f05fcfff37bd6ac209d1370520d509ea70b5a637bdf0a04d0c99e13dff"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48eccf225615e106341a641f826b15224b8a4240b84269ead62f0afd6d7e2d95"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:677284430ac539bb23421a2b431b4ebc588097ef3ef918d0e0a8d8ed31fea216"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9c4bfb75456a8e1efe14c52fcefb89cfb8f2a0d31ed8d804b82c6cf2dc29c42c"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86bc788245361a8148ff98667da938a01e1606b28a45e50ac977b09d3ad2c538"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b2ea68d2aa69fff1b20b67636b1e3e22a6a39e476c880da1282c3e4bf6ee5a"},
|
||||
{file = "ruff-0.0.272-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd2bbe337a3f84958f796c77820d55ac2db1e6753f39d1d1baed44e07f13f96d"},
|
||||
{file = "ruff-0.0.272-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d5a208f8ef0e51d4746930589f54f9f92f84bb69a7d15b1de34ce80a7681bc00"},
|
||||
{file = "ruff-0.0.272-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:905ff8f3d6206ad56fcd70674453527b9011c8b0dc73ead27618426feff6908e"},
|
||||
{file = "ruff-0.0.272-py3-none-musllinux_1_2_i686.whl", hash = "sha256:19643d448f76b1eb8a764719072e9c885968971bfba872e14e7257e08bc2f2b7"},
|
||||
{file = "ruff-0.0.272-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:691d72a00a99707a4e0b2846690961157aef7b17b6b884f6b4420a9f25cd39b5"},
|
||||
{file = "ruff-0.0.272-py3-none-win32.whl", hash = "sha256:dc406e5d756d932da95f3af082814d2467943631a587339ee65e5a4f4fbe83eb"},
|
||||
{file = "ruff-0.0.272-py3-none-win_amd64.whl", hash = "sha256:a37ec80e238ead2969b746d7d1b6b0d31aa799498e9ba4281ab505b93e1f4b28"},
|
||||
{file = "ruff-0.0.272-py3-none-win_arm64.whl", hash = "sha256:06b8ee4eb8711ab119db51028dd9f5384b44728c23586424fd6e241a5b9c4a3b"},
|
||||
{file = "ruff-0.0.272.tar.gz", hash = "sha256:273a01dc8c3c4fd4c2af7ea7a67c8d39bb09bce466e640dd170034da75d14cab"},
|
||||
{file = "ruff-0.0.275-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5e6554a072e7ce81eb6f0bec1cebd3dcb0e358652c0f4900d7d630d61691e914"},
|
||||
{file = "ruff-0.0.275-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1cc599022fe5ffb143a965b8d659eb64161ab8ab4433d208777eab018a1aab67"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5206fc1cd8c1c1deadd2e6360c0dbcd690f1c845da588ca9d32e4a764a402c60"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c4e6468da26f77b90cae35319d310999f471a8c352998e9b39937a23750149e"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dbdea02942131dbc15dd45f431d152224f15e1dd1859fcd0c0487b658f60f1a"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:22efd9f41af27ef8fb9779462c46c35c89134d33e326c889971e10b2eaf50c63"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c09662112cfa22d7467a19252a546291fd0eae4f423e52b75a7a2000a1894db"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80043726662144876a381efaab88841c88e8df8baa69559f96b22d4fa216bef1"},
|
||||
{file = "ruff-0.0.275-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5859ee543b01b7eb67835dfd505faa8bb7cc1550f0295c92c1401b45b42be399"},
|
||||
{file = "ruff-0.0.275-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c8ace4d40a57b5ea3c16555f25a6b16bc5d8b2779ae1912ce2633543d4e9b1da"},
|
||||
{file = "ruff-0.0.275-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8347fc16aa185aae275906c4ac5b770e00c896b6a0acd5ba521f158801911998"},
|
||||
{file = "ruff-0.0.275-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ec43658c64bfda44fd84bbea9da8c7a3b34f65448192d1c4dd63e9f4e7abfdd4"},
|
||||
{file = "ruff-0.0.275-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:508b13f7ca37274cceaba4fb3ea5da6ca192356323d92acf39462337c33ad14e"},
|
||||
{file = "ruff-0.0.275-py3-none-win32.whl", hash = "sha256:6afb1c4422f24f361e877937e2a44b3f8176774a476f5e33845ebfe887dd5ec2"},
|
||||
{file = "ruff-0.0.275-py3-none-win_amd64.whl", hash = "sha256:d9b264d78621bf7b698b6755d4913ab52c19bd28bee1a16001f954d64c1a1220"},
|
||||
{file = "ruff-0.0.275-py3-none-win_arm64.whl", hash = "sha256:a19ce3bea71023eee5f0f089dde4a4272d088d5ac0b675867e074983238ccc65"},
|
||||
{file = "ruff-0.0.275.tar.gz", hash = "sha256:a63a0b645da699ae5c758fce19188e901b3033ec54d862d93fcd042addf7f38d"},
|
||||
]

[[package]]
@@ -2711,21 +2711,21 @@ files = [

[[package]]
name = "towncrier"
version = "22.12.0"
version = "23.6.0"
description = "Building newsfiles for your project."
optional = false
python-versions = ">=3.7"
files = [
    {file = "towncrier-22.12.0-py3-none-any.whl", hash = "sha256:9767a899a4d6856950f3598acd9e8f08da2663c49fdcda5ea0f9e6ba2afc8eea"},
    {file = "towncrier-22.12.0.tar.gz", hash = "sha256:9c49d7e75f646a9aea02ae904c0bc1639c8fd14a01292d2b123b8d307564034d"},
    {file = "towncrier-23.6.0-py3-none-any.whl", hash = "sha256:da552f29192b3c2b04d630133f194c98e9f14f0558669d427708e203fea4d0a5"},
    {file = "towncrier-23.6.0.tar.gz", hash = "sha256:fc29bd5ab4727c8dacfbe636f7fb5dc53b99805b62da1c96b214836159ff70c1"},
]

[package.dependencies]
click = "*"
click-default-group = "*"
importlib-resources = {version = ">=5", markers = "python_version < \"3.10\""}
incremental = "*"
jinja2 = "*"
setuptools = "*"
tomli = {version = "*", markers = "python_version < \"3.11\""}

[package.extras]
@@ -2931,13 +2931,13 @@ files = [

[[package]]
name = "types-opentracing"
version = "2.4.10.4"
version = "2.4.10.5"
description = "Typing stubs for opentracing"
optional = false
python-versions = "*"
files = [
    {file = "types-opentracing-2.4.10.4.tar.gz", hash = "sha256:347040c9da4ada7d3c795659912c95d98c5651e242e8eaa0344815fee5bb97e2"},
    {file = "types_opentracing-2.4.10.4-py3-none-any.whl", hash = "sha256:73c9b958eea3df6c4906ebf3865608a562dd9981c1bbc75a373a583c613bed56"},
    {file = "types-opentracing-2.4.10.5.tar.gz", hash = "sha256:852d13ab1324832835d50c00cfd58b9267f0e79ec3189e5664c2a90c26880fd4"},
    {file = "types_opentracing-2.4.10.5-py3-none-any.whl", hash = "sha256:8f12ab4dce3e298a8e6655da9a6d52171e7a275357eae4cec22a1663d94023a7"},
]

[[package]]
@@ -3003,13 +3003,13 @@ types-urllib3 = "*"

[[package]]
name = "types-setuptools"
version = "67.8.0.0"
version = "68.0.0.0"
description = "Typing stubs for setuptools"
optional = false
python-versions = "*"
files = [
    {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"},
    {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"},
    {file = "types-setuptools-68.0.0.0.tar.gz", hash = "sha256:fc958b4123b155ffc069a66d3af5fe6c1f9d0600c35c0c8444b2ab4147112641"},
    {file = "types_setuptools-68.0.0.0-py3-none-any.whl", hash = "sha256:cc00e09ba8f535362cbe1ea8b8407d15d14b59c57f4190cceaf61a9e57616446"},
]

[[package]]
@@ -3294,4 +3294,4 @@ user-search = ["pyicu"]

[metadata]
lock-version = "2.0"
python-versions = "^3.7.1"
content-hash = "090924370b17fd265407b5a3f9cbc00997308f575b455399b39a48e3ca1a5a8e"
content-hash = "7f31754a1009d7b6c9a1bd7221a0b243ffd510f362c28f0da417aaac16757a87"
@@ -89,7 +89,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.86.0rc2"
version = "1.87.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"
@@ -207,7 +207,8 @@ packaging = ">=16.1"
# which shipped in Python 3.8. This corresponds to version 1.4 of the backport.
importlib_metadata = { version = ">=1.4", python = "<3.8" }
# This is the most recent version of Pydantic with available on common distros.
pydantic = ">=1.7.4"
# We are currently incompatible with >=2.0.0: (https://github.com/matrix-org/synapse/issues/15858)
pydantic = "^1.7.4"

# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
@@ -311,7 +312,7 @@ all = [
# We pin black so that our tests don't start failing on new releases.
isort = ">=5.10.1"
black = ">=22.3.0"
ruff = "0.0.272"
ruff = "0.0.275"

# Typechecking
lxml-stubs = ">=0.4.0"
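For context on the tightened Pydantic constraint: in Poetry, `^1.7.4` is shorthand for `>=1.7.4,<2.0.0`, which is what keeps the incompatible 2.x series out. A quick, illustrative check with the `packaging` library:

```python
from packaging.specifiers import SpecifierSet

# Poetry's caret constraint ^1.7.4 expands to this range for a >=1.0 version.
caret_1_7_4 = SpecifierSet(">=1.7.4,<2.0.0")

print(caret_1_7_4.contains("1.10.10"))  # True  - the version locked in poetry.lock above
print(caret_1_7_4.contains("2.0.0"))    # False - the major release Synapse is not yet compatible with
```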
@@ -1369,6 +1369,9 @@ def main() -> None:
        sys.stderr.write("Database must use the 'psycopg2' connector.\n")
        sys.exit(3)

    # Don't run the background tasks that get started by the data stores.
    hs_config["run_background_tasks_on"] = "some_other_process"

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")
@@ -22,6 +22,8 @@ class FederationConfig(Config):
    section = "federation"

    def read_config(self, config: JsonDict, **kwargs: Any) -> None:
        federation_config = config.setdefault("federation", {})

        # FIXME: federation_domain_whitelist needs sytests
        self.federation_domain_whitelist: Optional[dict] = None
        federation_domain_whitelist = config.get("federation_domain_whitelist", None)
@@ -49,5 +51,19 @@
            "allow_device_name_lookup_over_federation", False
        )

        # Allow for the configuration of timeout, max request retries
        # and min/max retry delays in the matrix federation client.
        self.client_timeout_ms = Config.parse_duration(
            federation_config.get("client_timeout", "60s")
        )
        self.max_long_retry_delay_ms = Config.parse_duration(
            federation_config.get("max_long_retry_delay", "60s")
        )
        self.max_short_retry_delay_ms = Config.parse_duration(
            federation_config.get("max_short_retry_delay", "2s")
        )
        self.max_long_retries = federation_config.get("max_long_retries", 10)
        self.max_short_retries = federation_config.get("max_short_retries", 3)


_METRICS_FOR_DOMAINS_SCHEMA = {"type": "array", "items": {"type": "string"}}
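As a rough illustration of how the new options above are consumed (assuming, as the attribute names suggest, that `Config.parse_duration` turns strings such as `"60s"` into milliseconds; the helper below is an illustrative stand-in, not Synapse's implementation):

```python
# Illustrative stand-in for Config.parse_duration; not Synapse's actual helper.
def parse_duration_ms(value: str) -> int:
    units = {"ms": 1, "s": 1000, "m": 60_000, "h": 3_600_000}
    for suffix, factor in units.items():
        if value.endswith(suffix) and value[: -len(suffix)].isdigit():
            return int(value[: -len(suffix)]) * factor
    return int(value)  # bare integers are treated as milliseconds


federation_config = {"client_timeout": "180s", "max_short_retry_delay": "7s"}

client_timeout_ms = parse_duration_ms(federation_config.get("client_timeout", "60s"))
max_short_retry_delay_ms = parse_duration_ms(
    federation_config.get("max_short_retry_delay", "2s")
)

assert client_timeout_ms == 180_000
assert max_short_retry_delay_ms == 7_000
```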
@@ -39,9 +39,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


INBOX_SIZE_LIMIT_FOR_KEY_REQUEST = 100


class DeviceMessageHandler:
    def __init__(self, hs: "HomeServer"):
        """
@@ -169,7 +166,7 @@
        found marks the remote cache for the user as stale.
        """

        if message_type != ToDeviceEventTypes.RoomKeyRequest:
        if message_type != "m.room_key_request":
            return

        # Get the sending device IDs
@@ -289,16 +286,10 @@
            "org.matrix.opentracing_context": json_encoder.encode(context),
        }

        device_inbox_size_limit = None
        if message_type == ToDeviceEventTypes.RoomKeyRequest and self.is_mine(
            UserID.from_string(user_id)
        ):
            device_inbox_size_limit = INBOX_SIZE_LIMIT_FOR_KEY_REQUEST

        # Add messages to the database.
        # Retrieve the stream id of the last-processed to-device message.
        last_stream_id = await self.store.add_messages_to_device_inbox(
            local_messages, remote_edu_contents, device_inbox_size_limit
            local_messages, remote_edu_contents
        )

        # Notify listeners that there are new to-device messages to process,
@@ -95,8 +95,6 @@ incoming_responses_counter = Counter(
)


MAX_LONG_RETRIES = 10
MAX_SHORT_RETRIES = 3
MAXINT = sys.maxsize


@@ -413,7 +411,16 @@ class MatrixFederationHttpClient:
        self.clock = hs.get_clock()
        self._store = hs.get_datastores().main
        self.version_string_bytes = hs.version_string.encode("ascii")
        self.default_timeout = 60
        self.default_timeout_seconds = hs.config.federation.client_timeout_ms / 1000

        self.max_long_retry_delay_seconds = (
            hs.config.federation.max_long_retry_delay_ms / 1000
        )
        self.max_short_retry_delay_seconds = (
            hs.config.federation.max_short_retry_delay_ms / 1000
        )
        self.max_long_retries = hs.config.federation.max_long_retries
        self.max_short_retries = hs.config.federation.max_short_retries

        self._cooperator = Cooperator(scheduler=_make_scheduler(self.reactor))

@@ -542,10 +549,10 @@
            logger.exception(f"Invalid destination: {request.destination}.")
            raise FederationDeniedError(request.destination)

        if timeout:
        if timeout is not None:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout
            _sec_timeout = self.default_timeout_seconds

        if (
            self.hs.config.federation.federation_domain_whitelist is not None
@@ -590,9 +597,9 @@
        # XXX: Would be much nicer to retry only at the transaction-layer
        # (once we have reliable transactions in place)
        if long_retries:
            retries_left = MAX_LONG_RETRIES
            retries_left = self.max_long_retries
        else:
            retries_left = MAX_SHORT_RETRIES
            retries_left = self.max_short_retries

        url_bytes = request.uri
        url_str = url_bytes.decode("ascii")
@@ -737,24 +744,34 @@

                if retries_left and not timeout:
                    if long_retries:
                        delay = 4 ** (MAX_LONG_RETRIES + 1 - retries_left)
                        delay = min(delay, 60)
                        delay *= random.uniform(0.8, 1.4)
                        delay_seconds = 4 ** (
                            self.max_long_retries + 1 - retries_left
                        )
                        delay_seconds = min(
                            delay_seconds, self.max_long_retry_delay_seconds
                        )
                        delay_seconds *= random.uniform(0.8, 1.4)
                    else:
                        delay = 0.5 * 2 ** (MAX_SHORT_RETRIES - retries_left)
                        delay = min(delay, 2)
                        delay *= random.uniform(0.8, 1.4)
                        delay_seconds = 0.5 * 2 ** (
                            self.max_short_retries - retries_left
                        )
                        delay_seconds = min(
                            delay_seconds, self.max_short_retry_delay_seconds
                        )
                        delay_seconds *= random.uniform(0.8, 1.4)

                    logger.debug(
                        "{%s} [%s] Waiting %ss before re-sending...",
                        request.txn_id,
                        request.destination,
                        delay,
                        delay_seconds,
                    )

                    # Sleep for the calculated delay, or wake up immediately
                    # if we get notified that the server is back up.
                    await self._sleeper.sleep(request.destination, delay * 1000)
                    await self._sleeper.sleep(
                        request.destination, delay_seconds * 1000
                    )
                    retries_left -= 1
                else:
                    raise
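To make the new retry schedule concrete, here is a small self-contained sketch that mirrors the arithmetic in the hunk above (deterministic part only; the real code additionally multiplies each delay by a random jitter factor drawn from [0.8, 1.4]):

```python
from typing import List


def backoff_schedule(
    long_retries: bool,
    max_long_retries: int = 10,
    max_short_retries: int = 3,
    max_long_retry_delay_seconds: float = 60.0,
    max_short_retry_delay_seconds: float = 2.0,
) -> List[float]:
    """Return the per-attempt delays (in seconds), before jitter is applied."""
    delays = []
    retries_left = max_long_retries if long_retries else max_short_retries
    while retries_left > 0:
        if long_retries:
            delay = 4 ** (max_long_retries + 1 - retries_left)
            delay = min(delay, max_long_retry_delay_seconds)
        else:
            delay = 0.5 * 2 ** (max_short_retries - retries_left)
            delay = min(delay, max_short_retry_delay_seconds)
        delays.append(delay)
        retries_left -= 1
    return delays


# With the defaults: long retries wait 4, 16, then 60 seconds per attempt (capped),
# short retries wait 0.5, 1.0, 2.0 seconds.
print(backoff_schedule(long_retries=True)[:4])  # [4, 16, 60.0, 60.0]
print(backoff_schedule(long_retries=False))     # [0.5, 1.0, 2.0]
```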
@@ -953,7 +970,7 @@
        if timeout is not None:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout
            _sec_timeout = self.default_timeout_seconds

        if parser is None:
            parser = cast(ByteParser[T], JsonParser())
@@ -1031,10 +1048,10 @@
            ignore_backoff=ignore_backoff,
        )

        if timeout:
        if timeout is not None:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout
            _sec_timeout = self.default_timeout_seconds

        body = await _handle_response(
            self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser()
@@ -1142,7 +1159,7 @@
        if timeout is not None:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout
            _sec_timeout = self.default_timeout_seconds

        if parser is None:
            parser = cast(ByteParser[T], JsonParser())
@@ -1218,7 +1235,7 @@
        if timeout is not None:
            _sec_timeout = timeout / 1000
        else:
            _sec_timeout = self.default_timeout
            _sec_timeout = self.default_timeout_seconds

        body = await _handle_response(
            self.reactor, _sec_timeout, request, response, start_ms, parser=JsonParser()
@@ -1270,7 +1287,7 @@

        try:
            d = read_body_with_max_size(response, output_stream, max_size)
            d.addTimeout(self.default_timeout, self.reactor)
            d.addTimeout(self.default_timeout_seconds, self.reactor)
            length = await make_deferred_yieldable(d)
        except BodyExceededMaxSize:
            msg = "Requested file is too large > %r bytes" % (max_size,)
@@ -521,6 +521,11 @@ class SynapseRequest(Request):
else:
return self.getClientAddress().host

def request_info(self) -> "RequestInfo":
h = self.getHeader(b"User-Agent")
user_agent = h.decode("ascii", "replace") if h else None
return RequestInfo(user_agent=user_agent, ip=self.get_client_ip_if_available())


class XForwardedForRequest(SynapseRequest):
"""Request object which honours proxy headers
@@ -661,3 +666,9 @@ class SynapseSite(Site):

def log(self, request: SynapseRequest) -> None:
pass


@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
user_agent: Optional[str]
ip: str
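
For illustration only (the values below are invented), the new RequestInfo added above is a frozen attrs container built once per request by SynapseRequest.request_info() and then handed down to the login code:

import attr
from typing import Optional

@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    user_agent: Optional[str]
    ip: str

# Hypothetical values; real instances come from SynapseRequest.request_info().
info = RequestInfo(user_agent="Mozilla/5.0", ip="192.0.2.1")
assert (info.user_agent, info.ip) == ("Mozilla/5.0", "192.0.2.1")
# Assigning to info.ip afterwards would raise attr.exceptions.FrozenInstanceError.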

@@ -80,6 +80,7 @@ from synapse.module_api.callbacks.account_validity_callbacks import (
)
from synapse.module_api.callbacks.spamchecker_callbacks import (
CHECK_EVENT_FOR_SPAM_CALLBACK,
CHECK_LOGIN_FOR_SPAM_CALLBACK,
CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK,
CHECK_REGISTRATION_FOR_SPAM_CALLBACK,
CHECK_USERNAME_FOR_SPAM_CALLBACK,
@@ -302,6 +303,7 @@ class ModuleApi:
CHECK_REGISTRATION_FOR_SPAM_CALLBACK
] = None,
check_media_file_for_spam: Optional[CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK] = None,
check_login_for_spam: Optional[CHECK_LOGIN_FOR_SPAM_CALLBACK] = None,
) -> None:
"""Registers callbacks for spam checking capabilities.

@@ -319,6 +321,7 @@ class ModuleApi:
check_username_for_spam=check_username_for_spam,
check_registration_for_spam=check_registration_for_spam,
check_media_file_for_spam=check_media_file_for_spam,
check_login_for_spam=check_login_for_spam,
)

def register_account_validity_callbacks(

@@ -196,6 +196,26 @@ CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK = Callable[
]
],
]
CHECK_LOGIN_FOR_SPAM_CALLBACK = Callable[
[
str,
Optional[str],
Optional[str],
Collection[Tuple[Optional[str], str]],
Optional[str],
],
Awaitable[
Union[
Literal["NOT_SPAM"],
Codes,
# Highly experimental, not officially part of the spamchecker API, may
# disappear without warning depending on the results of ongoing
# experiments.
# Use this to return additional information as part of an error.
Tuple[Codes, JsonDict],
]
],
]
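
The new CHECK_LOGIN_FOR_SPAM_CALLBACK type above is what a module registers via register_spam_checker_callbacks (the test classes near the end of this diff do exactly that). As a standalone, hedged sketch, with the module name and the IP block list purely hypothetical:

from typing import Collection, Literal, Optional, Tuple, Union

from synapse.module_api import ModuleApi
from synapse.module_api.errors import Codes


class ExampleLoginSpamChecker:
    """Hypothetical module: rejects logins coming from a blocked IP range."""

    def __init__(self, config: None, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_login_for_spam=self.check_login_for_spam,
        )

    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union[Literal["NOT_SPAM"], Tuple[Codes, dict]]:
        # request_info is a collection of (user_agent, ip) pairs for the login.
        for _user_agent, ip in request_info:
            if ip.startswith("203.0.113."):  # illustrative block list
                return Codes.FORBIDDEN, {"reason": "blocked network"}
        return "NOT_SPAM"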


def load_legacy_spam_checkers(hs: "synapse.server.HomeServer") -> None:
@@ -315,6 +335,7 @@ class SpamCheckerModuleApiCallbacks:
self._check_media_file_for_spam_callbacks: List[
CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK
] = []
self._check_login_for_spam_callbacks: List[CHECK_LOGIN_FOR_SPAM_CALLBACK] = []

def register_callbacks(
self,
@@ -335,6 +356,7 @@ class SpamCheckerModuleApiCallbacks:
CHECK_REGISTRATION_FOR_SPAM_CALLBACK
] = None,
check_media_file_for_spam: Optional[CHECK_MEDIA_FILE_FOR_SPAM_CALLBACK] = None,
check_login_for_spam: Optional[CHECK_LOGIN_FOR_SPAM_CALLBACK] = None,
) -> None:
"""Register callbacks from module for each hook."""
if check_event_for_spam is not None:
@@ -378,6 +400,9 @@ class SpamCheckerModuleApiCallbacks:
if check_media_file_for_spam is not None:
self._check_media_file_for_spam_callbacks.append(check_media_file_for_spam)

if check_login_for_spam is not None:
self._check_login_for_spam_callbacks.append(check_login_for_spam)

@trace
async def check_event_for_spam(
self, event: "synapse.events.EventBase"
@@ -819,3 +844,58 @@ class SpamCheckerModuleApiCallbacks:
return synapse.api.errors.Codes.FORBIDDEN, {}

return self.NOT_SPAM

async def check_login_for_spam(
self,
user_id: str,
device_id: Optional[str],
initial_display_name: Optional[str],
request_info: Collection[Tuple[Optional[str], str]],
auth_provider_id: Optional[str] = None,
) -> Union[Tuple[Codes, dict], Literal["NOT_SPAM"]]:
"""Checks if we should allow the given login request.

Args:
user_id: The request user ID
request_info: List of tuples of user agent and IP that
were used during the login process.
auth_provider_id: The SSO IdP the user used, e.g "oidc", "saml",
"cas". If any. Note this does not include users registered
via a password provider.

Returns:
Enum for how the request should be handled
"""

for callback in self._check_login_for_spam_callbacks:
with Measure(
self.clock, "{}.{}".format(callback.__module__, callback.__qualname__)
):
res = await delay_cancellation(
callback(
user_id,
device_id,
initial_display_name,
request_info,
auth_provider_id,
)
)
# Normalize return values to `Codes` or `"NOT_SPAM"`.
if res is self.NOT_SPAM:
continue
elif isinstance(res, synapse.api.errors.Codes):
return res, {}
elif (
isinstance(res, tuple)
and len(res) == 2
and isinstance(res[0], synapse.api.errors.Codes)
and isinstance(res[1], dict)
):
return res
else:
logger.warning(
"Module returned invalid value, rejecting login as spam"
)
return synapse.api.errors.Codes.FORBIDDEN, {}

return self.NOT_SPAM

@@ -50,7 +50,7 @@ from synapse.http.servlet import (
parse_json_object_from_request,
parse_string,
)
from synapse.http.site import SynapseRequest
from synapse.http.site import RequestInfo, SynapseRequest
from synapse.rest.client._base import client_patterns
from synapse.rest.well_known import WellKnownBuilder
from synapse.types import JsonDict, UserID
@@ -114,6 +114,7 @@ class LoginRestServlet(RestServlet):
self.auth_handler = self.hs.get_auth_handler()
self.registration_handler = hs.get_registration_handler()
self._sso_handler = hs.get_sso_handler()
self._spam_checker = hs.get_module_api_callbacks().spam_checker

self._well_known_builder = WellKnownBuilder(hs)
self._address_ratelimiter = Ratelimiter(
@@ -197,6 +198,8 @@ class LoginRestServlet(RestServlet):
self._refresh_tokens_enabled and client_requested_refresh_token
)

request_info = request.request_info()

try:
if login_submission["type"] == LoginRestServlet.APPSERVICE_TYPE:
requester = await self.auth.get_user_by_req(request)
@@ -216,6 +219,7 @@ class LoginRestServlet(RestServlet):
login_submission,
appservice,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
elif (
self.jwt_enabled
@@ -227,6 +231,7 @@ class LoginRestServlet(RestServlet):
result = await self._do_jwt_login(
login_submission,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
elif login_submission["type"] == LoginRestServlet.TOKEN_TYPE:
await self._address_ratelimiter.ratelimit(
@@ -235,6 +240,7 @@ class LoginRestServlet(RestServlet):
result = await self._do_token_login(
login_submission,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
else:
await self._address_ratelimiter.ratelimit(
@@ -243,6 +249,7 @@ class LoginRestServlet(RestServlet):
result = await self._do_other_login(
login_submission,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
except KeyError:
raise SynapseError(400, "Missing JSON keys.")
@@ -265,6 +272,8 @@ class LoginRestServlet(RestServlet):
login_submission: JsonDict,
appservice: ApplicationService,
should_issue_refresh_token: bool = False,
*,
request_info: RequestInfo,
) -> LoginResponse:
identifier = login_submission.get("identifier")
logger.info("Got appservice login request with identifier: %r", identifier)
@@ -300,10 +309,15 @@ class LoginRestServlet(RestServlet):
# The user represented by an appservice's configured sender_localpart
# is not actually created in Synapse.
should_check_deactivated=qualified_user_id != appservice.sender,
request_info=request_info,
)

async def _do_other_login(
self, login_submission: JsonDict, should_issue_refresh_token: bool = False
self,
login_submission: JsonDict,
should_issue_refresh_token: bool = False,
*,
request_info: RequestInfo,
) -> LoginResponse:
"""Handle non-token/saml/jwt logins

@@ -333,6 +347,7 @@ class LoginRestServlet(RestServlet):
login_submission,
callback,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
return result

@@ -347,6 +362,8 @@ class LoginRestServlet(RestServlet):
should_issue_refresh_token: bool = False,
auth_provider_session_id: Optional[str] = None,
should_check_deactivated: bool = True,
*,
request_info: RequestInfo,
) -> LoginResponse:
"""Called when we've successfully authed the user and now need to
actually login them in (e.g. create devices). This gets called on
@@ -371,6 +388,7 @@ class LoginRestServlet(RestServlet):

This exists purely for appservice's configured sender_localpart
which doesn't have an associated user in the database.
request_info: The user agent/IP address of the user.

Returns:
Dictionary of account information after successful login.
@@ -417,6 +435,22 @@ class LoginRestServlet(RestServlet):
)

initial_display_name = login_submission.get("initial_device_display_name")
spam_check = await self._spam_checker.check_login_for_spam(
user_id,
device_id=device_id,
initial_display_name=initial_display_name,
request_info=[(request_info.user_agent, request_info.ip)],
auth_provider_id=auth_provider_id,
)
if spam_check != self._spam_checker.NOT_SPAM:
logger.info("Blocking login due to spam checker")
raise SynapseError(
403,
msg="Login was blocked by the server",
errcode=spam_check[0],
additional_fields=spam_check[1],
)

(
device_id,
access_token,
@@ -451,7 +485,11 @@ class LoginRestServlet(RestServlet):
return result

async def _do_token_login(
self, login_submission: JsonDict, should_issue_refresh_token: bool = False
self,
login_submission: JsonDict,
should_issue_refresh_token: bool = False,
*,
request_info: RequestInfo,
) -> LoginResponse:
"""
Handle token login.
@@ -474,10 +512,15 @@ class LoginRestServlet(RestServlet):
auth_provider_id=res.auth_provider_id,
should_issue_refresh_token=should_issue_refresh_token,
auth_provider_session_id=res.auth_provider_session_id,
request_info=request_info,
)

async def _do_jwt_login(
self, login_submission: JsonDict, should_issue_refresh_token: bool = False
self,
login_submission: JsonDict,
should_issue_refresh_token: bool = False,
*,
request_info: RequestInfo,
) -> LoginResponse:
"""
Handle the custom JWT login.
@@ -496,6 +539,7 @@ class LoginRestServlet(RestServlet):
login_submission,
create_non_existent_users=True,
should_issue_refresh_token=should_issue_refresh_token,
request_info=request_info,
)
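
A rough sketch (not part of the diff) of the client-visible effect of the new check in _complete_login above: when a module returns a (Codes, dict) pair, the login fails with HTTP 403 and the extra fields are merged into the error body, which is what the deny test at the bottom of this diff asserts.

# Assuming a module returned (Codes.FORBIDDEN, {"reason": "blocked network"}):
blocked_login_response = {
    "errcode": "M_FORBIDDEN",  # spam_check[0]
    "error": "Login was blocked by the server",  # msg passed to SynapseError
    "reason": "blocked network",  # merged in from spam_check[1]
}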

@@ -289,6 +289,17 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
)
self._attempt_to_invalidate_cache("get_rooms_for_user", (state_key,))

self._attempt_to_invalidate_cache(
"did_forget",
(
state_key,
room_id,
),
)
self._attempt_to_invalidate_cache(
"get_forgotten_rooms_for_user", (state_key,)
)

if relates_to:
self._attempt_to_invalidate_cache("get_relations_for_event", (relates_to,))
self._attempt_to_invalidate_cache("get_references_for_event", (relates_to,))
@@ -336,6 +347,8 @@ class CacheInvalidationWorkerStore(SQLBaseStore):
"get_rooms_for_user_with_stream_ordering", None
)
self._attempt_to_invalidate_cache("get_rooms_for_user", None)
self._attempt_to_invalidate_cache("did_forget", None)
self._attempt_to_invalidate_cache("get_forgotten_rooms_for_user", None)
self._attempt_to_invalidate_cache("get_references_for_event", None)
self._attempt_to_invalidate_cache("get_thread_summary", None)
self._attempt_to_invalidate_cache("get_thread_participated", None)

@@ -650,7 +650,6 @@ class DeviceInboxWorkerStore(SQLBaseStore):
self,
local_messages_by_user_then_device: Dict[str, Dict[str, JsonDict]],
remote_messages_by_destination: Dict[str, JsonDict],
size_limit: Optional[int] = None,
) -> int:
"""Used to send messages from this server.

@@ -667,14 +666,11 @@ class DeviceInboxWorkerStore(SQLBaseStore):
assert self._can_write_to_device

def add_messages_txn(
txn: LoggingTransaction,
now_ms: int,
stream_id: int,
size_limit: Optional[int],
txn: LoggingTransaction, now_ms: int, stream_id: int
) -> None:
# Add the local messages directly to the local inbox.
self._add_messages_to_local_device_inbox_txn(
txn, stream_id, local_messages_by_user_then_device, size_limit
txn, stream_id, local_messages_by_user_then_device
)

# Add the remote messages to the federation outbox.
@@ -735,11 +731,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
async with self._device_inbox_id_gen.get_next() as stream_id:
now_ms = self._clock.time_msec()
await self.db_pool.runInteraction(
"add_messages_to_device_inbox",
add_messages_txn,
now_ms,
stream_id,
size_limit,
"add_messages_to_device_inbox", add_messages_txn, now_ms, stream_id
)
for user_id in local_messages_by_user_then_device.keys():
self._device_inbox_stream_cache.entity_has_changed(user_id, stream_id)
@@ -810,23 +802,11 @@ class DeviceInboxWorkerStore(SQLBaseStore):
txn: LoggingTransaction,
stream_id: int,
messages_by_user_then_device: Dict[str, Dict[str, JsonDict]],
size_limit: Optional[int] = None,
) -> None:
assert self._can_write_to_device

local_by_user_then_device = {}
for user_id, messages_by_device in messages_by_user_then_device.items():
inbox_sizes = {}
if size_limit:
sql = """
SELECT device_id, COUNT(*) FROM device_inbox
WHERE user_id = ?
GROUP BY device_id
"""
txn.execute(sql, (user_id,))
for r in txn:
inbox_sizes[r[0]] = r[1]

messages_json_for_user = {}
devices = list(messages_by_device.keys())
if len(devices) == 1 and devices[0] == "*":
@@ -842,10 +822,9 @@ class DeviceInboxWorkerStore(SQLBaseStore):

message_json = json_encoder.encode(messages_by_device["*"])
for device_id in devices:
if size_limit is None or inbox_sizes.get(device_id, 0) < size_limit:
# Add the message for all devices for this user on this
# server.
messages_json_for_user[device_id] = message_json
# Add the message for all devices for this user on this
# server.
messages_json_for_user[device_id] = message_json
else:
if not devices:
continue
@@ -878,8 +857,7 @@ class DeviceInboxWorkerStore(SQLBaseStore):
)
message_json = json_encoder.encode(msg)

if size_limit is None or inbox_sizes.get(device_id, 0) < size_limit:
messages_json_for_user[device_id] = message_json
messages_json_for_user[device_id] = message_json

if messages_json_for_user:
local_by_user_then_device[user_id] = messages_json_for_user
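
For context, these deviceinbox hunks remove the per-device size cap exercised by the key-request inbox limit test removed further down. A minimal sketch of the check being removed (the helper name is invented; inbox_sizes corresponds to the COUNT(*) query shown above):

from typing import Dict, Optional

def filter_by_inbox_size(
    messages_by_device: Dict[str, str],
    inbox_sizes: Dict[str, int],
    size_limit: Optional[int],
) -> Dict[str, str]:
    """Only keep messages for devices whose inbox is still under the limit."""
    if size_limit is None:
        return dict(messages_by_device)
    return {
        device_id: message_json
        for device_id, message_json in messages_by_device.items()
        if inbox_sizes.get(device_id, 0) < size_limit
    }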

@@ -61,9 +61,7 @@ def run_upgrade(
full_user_id text NOT NULL,
user_id text NOT NULL,
filter_id bigint NOT NULL,
filter_json bytea NOT NULL,
UNIQUE (full_user_id),
UNIQUE (user_id)
filter_json bytea NOT NULL
)
"""
cur.execute(create_sql)

@@ -0,0 +1,65 @@
# Copyright 2023 The Matrix.org Foundation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.config.homeserver import HomeServerConfig
from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine


def run_update(
cur: LoggingTransaction,
database_engine: BaseDatabaseEngine,
config: HomeServerConfig,
) -> None:
"""
Fix to drop unused indexes caused by incorrectly adding UNIQUE constraint to
columns `user_id` and `full_user_id` of table `user_filters` in previous migration.
"""

if isinstance(database_engine, Sqlite3Engine):
cur.execute("DROP TABLE IF EXISTS temp_user_filters")
create_sql = """
CREATE TABLE temp_user_filters (
full_user_id text NOT NULL,
user_id text NOT NULL,
filter_id bigint NOT NULL,
filter_json bytea NOT NULL
)
"""
cur.execute(create_sql)

copy_sql = """
INSERT INTO temp_user_filters (
user_id,
filter_id,
filter_json,
full_user_id)
SELECT user_id, filter_id, filter_json, full_user_id FROM user_filters
"""
cur.execute(copy_sql)

drop_sql = """
DROP TABLE user_filters
"""
cur.execute(drop_sql)

rename_sql = """
ALTER TABLE temp_user_filters RENAME to user_filters
"""
cur.execute(rename_sql)

index_sql = """
CREATE UNIQUE INDEX IF NOT EXISTS user_filters_unique ON
user_filters (user_id, filter_id)
"""
cur.execute(index_sql)
@@ -0,0 +1,25 @@
# Copyright 2023 The Matrix.org Foundation C.I.C
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.storage.database import LoggingTransaction
from synapse.storage.engines import BaseDatabaseEngine, Sqlite3Engine


def run_create(cur: LoggingTransaction, database_engine: BaseDatabaseEngine) -> None:
if isinstance(database_engine, Sqlite3Engine):
idx_sql = """
CREATE UNIQUE INDEX IF NOT EXISTS user_filters_full_user_id_unique ON
user_filters (full_user_id, filter_id)
"""
cur.execute(idx_sql)
@@ -23,28 +23,20 @@ from synapse.api.constants import RoomEncryptionAlgorithms
from synapse.api.errors import NotFoundError, SynapseError
from synapse.appservice import ApplicationService
from synapse.handlers.device import MAX_DEVICE_DISPLAY_NAME_LEN, DeviceHandler
from synapse.handlers.devicemessage import INBOX_SIZE_LIMIT_FOR_KEY_REQUEST
from synapse.server import HomeServer
from synapse.storage.databases.main.appservice import _make_exclusive_regex
from synapse.types import JsonDict, create_requester
from synapse.types import JsonDict
from synapse.util import Clock

from tests import unittest
from tests.test_utils import make_awaitable
from tests.unittest import override_config

import synapse

user1 = "@boris:aaa"
user2 = "@theresa:bbb"


class DeviceTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.client.login.register_servlets,
]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
self.appservice_api = mock.Mock()
hs = self.setup_test_homeserver(
@@ -55,8 +47,6 @@ class DeviceTestCase(unittest.HomeserverTestCase):
handler = hs.get_device_handler()
assert isinstance(handler, DeviceHandler)
self.handler = handler
self.msg_handler = hs.get_device_message_handler()
self.event_sources = hs.get_event_sources()
self.store = hs.get_datastores().main
return hs

@@ -408,79 +398,6 @@ class DeviceTestCase(unittest.HomeserverTestCase):
],
)

def test_room_key_request_limit(self) -> None:
store = self.hs.get_datastores().main

myuser = self.register_user("myuser", "pass")
self.login("myuser", "pass", "device")
self.login("myuser", "pass", "device2")

requester = requester = create_requester(myuser)

from_token = self.event_sources.get_current_token()

# for i in range(0, INBOX_SIZE_LIMIT_FOR_KEY_REQUEST * 2):
# self.get_success(
# self.msg_handler.send_device_message(
# requester,
# "m.room_key",
# {
# myuser2: {
# "device": {
# "algorithm": "m.megolm.v1.aes-sha2",
# "room_id": "!Cuyf34gef24t:localhost",
# "session_id": "X3lUlvLELLYxeTx4yOVu6UDpasGEVO0Jbu+QFnm0cKQ",
# "session_key": "AgAAAADxKHa9uFxcXzwYoNueL5Xqi69IkD4sni8LlfJL7qNBEY..."

# }
# }
# },
# )
# )

# to_token = self.event_sources.get_current_token()

# res = self.get_success(self.store.get_messages_for_device(
# myuser2,
# "device",
# from_token.to_device_key,
# to_token.to_device_key,
# INBOX_SIZE_LIMIT_FOR_KEY_REQUEST * 5,
# ))
# self.assertEqual(len(res[0]), INBOX_SIZE_LIMIT_FOR_KEY_REQUEST * 2)

# from_token = to_token

for i in range(0, INBOX_SIZE_LIMIT_FOR_KEY_REQUEST * 2):
self.get_success(
self.msg_handler.send_device_message(
requester,
"m.room_key_request",
{
myuser: {
"device2": {
"action": "request",
"request_id": f"request_id_{i}",
"requesting_device_id": "device",
}
}
},
)
)

to_token = self.event_sources.get_current_token()

res = self.get_success(
self.store.get_messages_for_device(
myuser,
"device2",
from_token.to_device_key,
to_token.to_device_key,
INBOX_SIZE_LIMIT_FOR_KEY_REQUEST * 5,
)
)
self.assertEqual(len(res[0]), INBOX_SIZE_LIMIT_FOR_KEY_REQUEST)


class DehydrationTestCase(unittest.HomeserverTestCase):
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:

@@ -333,6 +333,27 @@ class RoomMemberMasterHandlerTestCase(HomeserverTestCase):
self.get_success(self.store.is_locally_forgotten_room(self.room_id))
)

def test_leave_and_unforget(self) -> None:
"""Tests if rejoining a room unforgets the room, so that it shows up in sync again."""
self.helper.join(self.room_id, user=self.bob, tok=self.bob_token)

# alice is not the last room member that leaves and forgets the room
self.helper.leave(self.room_id, user=self.alice, tok=self.alice_token)
self.get_success(self.handler.forget(self.alice_ID, self.room_id))
self.assertTrue(
self.get_success(self.store.did_forget(self.alice, self.room_id))
)

self.helper.join(self.room_id, user=self.alice, tok=self.alice_token)
self.assertFalse(
self.get_success(self.store.did_forget(self.alice, self.room_id))
)

# the server has not forgotten the room
self.assertFalse(
self.get_success(self.store.is_locally_forgotten_room(self.room_id))
)

@override_config({"forget_rooms_on_leave": True})
def test_leave_and_auto_forget(self) -> None:
"""Tests the `forget_rooms_on_leave` config option."""

@@ -40,7 +40,7 @@ from synapse.server import HomeServer
from synapse.util import Clock

from tests.server import FakeTransport
from tests.unittest import HomeserverTestCase
from tests.unittest import HomeserverTestCase, override_config


def check_logcontext(context: LoggingContextOrSentinel) -> None:
@@ -640,3 +640,21 @@ class FederationClientTests(HomeserverTestCase):
self.cl.build_auth_headers(
b"", b"GET", b"https://example.com", destination_is=b""
)

@override_config(
{
"federation": {
"client_timeout": "180s",
"max_long_retry_delay": "100s",
"max_short_retry_delay": "7s",
"max_long_retries": 20,
"max_short_retries": 5,
}
}
)
def test_configurable_retry_and_delay_values(self) -> None:
self.assertEqual(self.cl.default_timeout_seconds, 180)
self.assertEqual(self.cl.max_long_retry_delay_seconds, 100)
self.assertEqual(self.cl.max_short_retry_delay_seconds, 7)
self.assertEqual(self.cl.max_long_retries, 20)
self.assertEqual(self.cl.max_short_retries, 5)

@@ -13,11 +13,12 @@
# limitations under the License.
import time
import urllib.parse
from typing import Any, Dict, List, Optional
from typing import Any, Collection, Dict, List, Optional, Tuple, Union
from unittest.mock import Mock
from urllib.parse import urlencode

import pymacaroons
from typing_extensions import Literal

from twisted.test.proto_helpers import MemoryReactor
from twisted.web.resource import Resource
@@ -26,11 +27,12 @@ import synapse.rest.admin
from synapse.api.constants import ApprovalNoticeMedium, LoginType
from synapse.api.errors import Codes
from synapse.appservice import ApplicationService
from synapse.module_api import ModuleApi
from synapse.rest.client import devices, login, logout, register
from synapse.rest.client.account import WhoamiRestServlet
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.server import HomeServer
from synapse.types import create_requester
from synapse.types import JsonDict, create_requester
from synapse.util import Clock

from tests import unittest
@@ -88,6 +90,56 @@ ADDITIONAL_LOGIN_FLOWS = [
]


class TestSpamChecker:
def __init__(self, config: None, api: ModuleApi):
api.register_spam_checker_callbacks(
check_login_for_spam=self.check_login_for_spam,
)

@staticmethod
def parse_config(config: JsonDict) -> None:
return None

async def check_login_for_spam(
self,
user_id: str,
device_id: Optional[str],
initial_display_name: Optional[str],
request_info: Collection[Tuple[Optional[str], str]],
auth_provider_id: Optional[str] = None,
) -> Union[
Literal["NOT_SPAM"],
Tuple["synapse.module_api.errors.Codes", JsonDict],
]:
return "NOT_SPAM"


class DenyAllSpamChecker:
def __init__(self, config: None, api: ModuleApi):
api.register_spam_checker_callbacks(
check_login_for_spam=self.check_login_for_spam,
)

@staticmethod
def parse_config(config: JsonDict) -> None:
return None

async def check_login_for_spam(
self,
user_id: str,
device_id: Optional[str],
initial_display_name: Optional[str],
request_info: Collection[Tuple[Optional[str], str]],
auth_provider_id: Optional[str] = None,
) -> Union[
Literal["NOT_SPAM"],
Tuple["synapse.module_api.errors.Codes", JsonDict],
]:
# Return an odd set of values to ensure that they get correctly passed
# to the client.
return Codes.LIMIT_EXCEEDED, {"extra": "value"}


class LoginRestServletTestCase(unittest.HomeserverTestCase):
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
@@ -469,6 +521,58 @@ class LoginRestServletTestCase(unittest.HomeserverTestCase):
],
)

@override_config(
{
"modules": [
{
"module": TestSpamChecker.__module__
+ "."
+ TestSpamChecker.__qualname__
}
]
}
)
def test_spam_checker_allow(self) -> None:
"""Check that adding a spam checker doesn't break login."""
self.register_user("kermit", "monkey")

body = {"type": "m.login.password", "user": "kermit", "password": "monkey"}

channel = self.make_request(
"POST",
"/_matrix/client/r0/login",
body,
)
self.assertEqual(channel.code, 200, channel.result)

@override_config(
{
"modules": [
{
"module": DenyAllSpamChecker.__module__
+ "."
+ DenyAllSpamChecker.__qualname__
}
]
}
)
def test_spam_checker_deny(self) -> None:
"""Check that a login can be blocked by a spam checker callback."""

self.register_user("kermit", "monkey")

body = {"type": "m.login.password", "user": "kermit", "password": "monkey"}

channel = self.make_request(
"POST",
"/_matrix/client/r0/login",
body,
)
self.assertEqual(channel.code, 403, channel.result)
self.assertDictContainsSubset(
{"errcode": Codes.LIMIT_EXCEEDED, "extra": "value"}, channel.json_body
)


@skip_unless(has_saml2 and HAS_OIDC, "Requires SAML2 and OIDC")
class MultiSSOTestCase(unittest.HomeserverTestCase):
