Compare commits: v1.69.0rc1...release-v1 (23 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 549e1acd05 |  |
|  | 6b097a3e17 |  |
|  | b43be004b4 |  |
|  | d241a1350d |  |
|  | 6e0dde3215 |  |
|  | 424d1d28cc |  |
|  | f3f303aa22 |  |
|  | 29ee4b6698 |  |
|  | 16c5d95b59 |  |
|  | 821f74a8c0 |  |
|  | 19eb23bf32 |  |
|  | 02086e1da0 |  |
|  | 422cff7df6 |  |
|  | d94bcbced3 |  |
|  | f1673866ed |  |
|  | a98ac3cc1e |  |
|  | b42177f94f |  |
|  | bb69dbf3e3 |  |
|  | e9a0419c8d |  |
|  | 720b12c209 |  |
|  | 79c592cec6 |  |
|  | f6f6bdc7b3 |  |
|  | e3d4755454 |  |
@@ -9,6 +9,7 @@
!pyproject.toml
!poetry.lock
!Cargo.lock
!Cargo.toml
!build_rust.py

rust/target
.github/workflows/docker.yml (7 lines changed)

@@ -27,7 +27,7 @@ jobs:
      - name: Inspect builder
        run: docker buildx inspect

      - name: Log in to DockerHub
        uses: docker/login-action@v2
        with:

@@ -55,3 +55,8 @@ jobs:
          tags: "${{ steps.set-tag.outputs.tags }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64

          # arm64 builds OOM without the git fetch setting. c.f.
          # https://github.com/rust-lang/cargo/issues/10583
          build-args: |
            CARGO_NET_GIT_FETCH_WITH_CLI=true
CHANGES.md (60 lines changed)

@@ -1,12 +1,66 @@
Synapse 1.69.0rc1 (2022-10-04)
==============================

Synapse 1.69.0 (2022-10-17)
===========================

Please note that legacy Prometheus metric names are now deprecated and will be removed in Synapse 1.73.0.
Server administrators should update their dashboards and alerting rules to avoid using the deprecated metric names.
See the [upgrade notes](https://matrix-org.github.io/synapse/v1.69/upgrade.html#upgrading-to-v1690) for more details.

No significant changes since 1.69.0rc4.


Synapse 1.69.0rc4 (2022-10-14)
==============================

Bugfixes
--------

- Fix poor performance of the `event_push_backfill_thread_id` background update, which was introduced in Synapse 1.68.0rc1. ([\#14172](https://github.com/matrix-org/synapse/issues/14172), [\#14181](https://github.com/matrix-org/synapse/issues/14181))


Updates to the Docker image
---------------------------

- Fix docker build OOMing in CI for arm64 builds. ([\#14173](https://github.com/matrix-org/synapse/issues/14173))


Synapse 1.69.0rc3 (2022-10-12)
==============================

Bugfixes
--------

- Fix an issue with Docker images causing the Rust dependencies to not be pinned correctly. Introduced in v1.68.0 ([\#14129](https://github.com/matrix-org/synapse/issues/14129))
- Fix a bug introduced in Synapse 1.69.0rc1 which would cause registration replication requests to fail if the worker sending the request is not running Synapse 1.69. ([\#14135](https://github.com/matrix-org/synapse/issues/14135))
- Fix error in background update when rotating existing notifications. Introduced in v1.69.0rc2. ([\#14138](https://github.com/matrix-org/synapse/issues/14138))


Internal Changes
----------------

- Rename the `url_preview` extra to `url-preview`, for compatibility with poetry-core 1.3.0 and [PEP 685](https://peps.python.org/pep-0685/). From-source installations using this extra will need to install using the new name. ([\#14085](https://github.com/matrix-org/synapse/issues/14085))


Synapse 1.69.0rc2 (2022-10-06)
==============================

Deprecations and Removals
-------------------------

- Deprecate the `generate_short_term_login_token` method in favor of an async `create_login_token` method in the Module API. ([\#13842](https://github.com/matrix-org/synapse/issues/13842))


Internal Changes
----------------

- Ensure Synapse v1.69 works with upcoming database changes in v1.70. ([\#14045](https://github.com/matrix-org/synapse/issues/14045))
- Fix a bug introduced in Synapse v1.68.0 where messages could not be sent in rooms with non-integer `notifications` power level. ([\#14073](https://github.com/matrix-org/synapse/issues/14073))
- Temporarily pin build-system requirements to workaround an incompatibility with poetry-core 1.3.0. This will be reverted before the v1.69.0 release proper, see [\#14079](https://github.com/matrix-org/synapse/issues/14079). ([\#14080](https://github.com/matrix-org/synapse/issues/14080))


Synapse 1.69.0rc1 (2022-10-04)
==============================

Features
--------
book.toml (12 lines changed)

@@ -34,6 +34,14 @@ additional-css = [
    "docs/website_files/table-of-contents.css",
    "docs/website_files/remove-nav-buttons.css",
    "docs/website_files/indent-section-headers.css",
    "docs/website_files/version-picker.css",
]
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
additional-js = [
    "docs/website_files/table-of-contents.js",
    "docs/website_files/version-picker.js",
    "docs/website_files/version.js",
]
theme = "docs/website_files/theme"

[preprocessor.schema_versions]
command = "./scripts-dev/schema_versions.py"
debian/changelog (24 lines changed)

@@ -1,3 +1,27 @@
matrix-synapse-py3 (1.69.0) stable; urgency=medium

  * New Synapse release 1.69.0.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 17 Oct 2022 11:31:03 +0100

matrix-synapse-py3 (1.69.0~rc4) stable; urgency=medium

  * New Synapse release 1.69.0rc4.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 14 Oct 2022 15:04:47 +0100

matrix-synapse-py3 (1.69.0~rc3) stable; urgency=medium

  * New Synapse release 1.69.0rc3.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 12 Oct 2022 13:24:04 +0100

matrix-synapse-py3 (1.69.0~rc2) stable; urgency=medium

  * New Synapse release 1.69.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 06 Oct 2022 14:45:00 +0100

matrix-synapse-py3 (1.69.0~rc1) stable; urgency=medium

  * The man page for the hash_password script has been updated to reflect
@@ -108,6 +108,12 @@ RUN mkdir /rust /cargo

RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable


# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI

# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be
# used while you develop on the source

@@ -121,7 +127,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \

COPY synapse /synapse/synapse/
COPY rust /synapse/rust/
# ... and what we need to `pip install`.
COPY pyproject.toml README.rst build_rust.py /synapse/
COPY pyproject.toml README.rst build_rust.py Cargo.toml Cargo.lock /synapse/

# Repeat of earlier build argument declaration, as this is a new build stage.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
@@ -128,6 +128,39 @@ you may specify `enable_legacy_metrics: false` in your homeserver configuration.
A list of affected metrics is available on the [Metrics How-to page](https://matrix-org.github.io/synapse/v1.69/metrics-howto.html?highlight=metrics%20deprecated#renaming-of-metrics--deprecation-of-old-names-in-12).


## Deprecation of the `generate_short_term_login_token` module API method

The following method of the module API has been deprecated, and is scheduled to
be removed in v1.71.0:

```python
def generate_short_term_login_token(
    self,
    user_id: str,
    duration_in_ms: int = (2 * 60 * 1000),
    auth_provider_id: str = "",
    auth_provider_session_id: Optional[str] = None,
) -> str:
    ...
```

It has been replaced by an asynchronous equivalent:

```python
async def create_login_token(
    self,
    user_id: str,
    duration_in_ms: int = (2 * 60 * 1000),
    auth_provider_id: Optional[str] = None,
    auth_provider_session_id: Optional[str] = None,
) -> str:
    ...
```

Synapse will log a warning when a module uses the deprecated method, to help
administrators find modules using it.
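As a rough sketch of the migration a module author might make (not part of this changeset; the module class, its constructor, and the `issue_token` helper are illustrative assumptions, while the `create_login_token` signature is taken from the diff above):

```python
from typing import Optional

from synapse.module_api import ModuleApi


class MyAuthModule:
    """Hypothetical module showing the deprecated call and its async replacement."""

    def __init__(self, config: dict, api: ModuleApi) -> None:
        self._api = api

    async def issue_token(self, user_id: str, idp_id: Optional[str] = None) -> str:
        # Deprecated (synchronous; Synapse 1.69+ logs a warning when it is used):
        #   token = self._api.generate_short_term_login_token(user_id, 2 * 60 * 1000)
        # Replacement (async, added in Synapse 1.69):
        return await self._api.create_login_token(
            user_id,
            duration_in_ms=2 * 60 * 1000,
            auth_provider_id=idp_id,
        )
```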
# Upgrading to v1.68.0

Two changes announced in the upgrade notes for v1.67.0 have now landed in v1.68.0.
@@ -24,6 +24,11 @@ Finally, we also stylise the chapter titles in the left sidebar by indenting them
slightly so that they are more visually distinguishable from the section headers
(the bold titles). This is done through the `indent-section-headers.css` file.

In addition to these modifications, we have added a version picker to the documentation.
Users can switch between documentations for different versions of Synapse.
This functionality was implemented through the `version-picker.js` and
`version-picker.css` files.

More information can be found in mdbook's official documentation for
[injecting page JS/CSS](https://rust-lang.github.io/mdBook/format/config.html)
and
@@ -131,6 +131,18 @@
                <i class="fa fa-search"></i>
            </button>
        {{/if}}
        <div class="version-picker">
            <div class="dropdown">
                <div class="select">
                    <span></span>
                    <i class="fa fa-chevron-down"></i>
                </div>
                <input type="hidden" name="version">
                <ul class="dropdown-menu">
                    <!-- Versions will be added dynamically in version-picker.js -->
                </ul>
            </div>
        </div>
    </div>

    <h1 class="menu-title">{{ book_title }}</h1>

@@ -309,4 +321,4 @@
{{/if}}

</body>
</html>
</html>
docs/website_files/version-picker.css (new file, 78 lines)

@@ -0,0 +1,78 @@
.version-picker {
    display: flex;
    align-items: center;
}

.version-picker .dropdown {
    width: 130px;
    max-height: 29px;
    margin-left: 10px;
    display: inline-block;
    border-radius: 4px;
    border: 1px solid var(--theme-popup-border);
    position: relative;
    font-size: 13px;
    color: var(--fg);
    height: 100%;
    text-align: left;
}
.version-picker .dropdown .select {
    cursor: pointer;
    display: block;
    padding: 5px 2px 5px 15px;
}
.version-picker .dropdown .select > i {
    font-size: 10px;
    color: var(--fg);
    cursor: pointer;
    float: right;
    line-height: 20px !important;
}
.version-picker .dropdown:hover {
    border: 1px solid var(--theme-popup-border);
}
.version-picker .dropdown:active {
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active:hover,
.version-picker .dropdown.active {
    border: 1px solid var(--theme-popup-border);
    border-radius: 2px 2px 0 0;
    background-color: var(--theme-popup-bg);
}
.version-picker .dropdown.active .select > i {
    transform: rotate(-180deg);
}
.version-picker .dropdown .dropdown-menu {
    position: absolute;
    background-color: var(--theme-popup-bg);
    width: 100%;
    left: -1px;
    right: 1px;
    margin-top: 1px;
    border: 1px solid var(--theme-popup-border);
    border-radius: 0 0 4px 4px;
    overflow: hidden;
    display: none;
    max-height: 300px;
    overflow-y: auto;
    z-index: 9;
}
.version-picker .dropdown .dropdown-menu li {
    font-size: 12px;
    padding: 6px 20px;
    cursor: pointer;
}
.version-picker .dropdown .dropdown-menu {
    padding: 0;
    list-style: none;
}
.version-picker .dropdown .dropdown-menu li:hover {
    background-color: var(--theme-hover);
}
.version-picker .dropdown .dropdown-menu li.active::before {
    display: inline-block;
    content: "✓";
    margin-inline-start: -14px;
    width: 14px;
}
docs/website_files/version-picker.js (new file, 127 lines)

@@ -0,0 +1,127 @@

const dropdown = document.querySelector('.version-picker .dropdown');
const dropdownMenu = dropdown.querySelector('.dropdown-menu');

fetchVersions(dropdown, dropdownMenu).then(() => {
    initializeVersionDropdown(dropdown, dropdownMenu);
});

/**
 * Initialize the dropdown functionality for version selection.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 */
function initializeVersionDropdown(dropdown, dropdownMenu) {
    // Toggle the dropdown menu on click
    dropdown.addEventListener('click', function () {
        this.setAttribute('tabindex', 1);
        this.classList.toggle('active');
        dropdownMenu.style.display = (dropdownMenu.style.display === 'block') ? 'none' : 'block';
    });

    // Remove the 'active' class and hide the dropdown menu on focusout
    dropdown.addEventListener('focusout', function () {
        this.classList.remove('active');
        dropdownMenu.style.display = 'none';
    });

    // Handle item selection within the dropdown menu
    const dropdownMenuItems = dropdownMenu.querySelectorAll('li');
    dropdownMenuItems.forEach(function (item) {
        item.addEventListener('click', function () {
            dropdownMenuItems.forEach(function (item) {
                item.classList.remove('active');
            });
            this.classList.add('active');
            dropdown.querySelector('span').textContent = this.textContent;
            dropdown.querySelector('input').value = this.getAttribute('id');

            window.location.href = changeVersion(window.location.href, this.textContent);
        });
    });
};

/**
 * This function fetches the available versions from a GitHub repository
 * and inserts them into the version picker.
 *
 * @param {Element} dropdown - The dropdown element.
 * @param {Element} dropdownMenu - The dropdown menu element.
 * @returns {Promise<Array<string>>} A promise that resolves with an array of available versions.
 */
function fetchVersions(dropdown, dropdownMenu) {
    return new Promise((resolve, reject) => {
        window.addEventListener("load", () => {

            fetch("https://api.github.com/repos/matrix-org/synapse/git/trees/gh-pages", {
                cache: "force-cache",
            }).then(res =>
                res.json()
            ).then(resObject => {
                const excluded = ['dev-docs', 'v1.91.0', 'v1.80.0', 'v1.69.0'];
                const tree = resObject.tree.filter(item => item.type === "tree" && !excluded.includes(item.path));
                const versions = tree.map(item => item.path).sort(sortVersions);

                // Create a list of <li> items for versions
                versions.forEach((version) => {
                    const li = document.createElement("li");
                    li.textContent = version;
                    li.id = version;

                    if (window.SYNAPSE_VERSION === version) {
                        li.classList.add('active');
                        dropdown.querySelector('span').textContent = version;
                        dropdown.querySelector('input').value = version;
                    }

                    dropdownMenu.appendChild(li);
                });

                resolve(versions);

            }).catch(ex => {
                console.error("Failed to fetch version data", ex);
                reject(ex);
            })
        });
    });
}

/**
 * Custom sorting function to sort an array of version strings.
 *
 * @param {string} a - The first version string to compare.
 * @param {string} b - The second version string to compare.
 * @returns {number} - A negative number if a should come before b, a positive number if b should come before a, or 0 if they are equal.
 */
function sortVersions(a, b) {
    // Put 'develop' and 'latest' at the top
    if (a === 'develop' || a === 'latest') return -1;
    if (b === 'develop' || b === 'latest') return 1;

    const versionA = (a.match(/v\d+(\.\d+)+/) || [])[0];
    const versionB = (b.match(/v\d+(\.\d+)+/) || [])[0];

    return versionB.localeCompare(versionA);
}

/**
 * Change the version in a URL path.
 *
 * @param {string} url - The original URL to be modified.
 * @param {string} newVersion - The new version to replace the existing version in the URL.
 * @returns {string} The updated URL with the new version.
 */
function changeVersion(url, newVersion) {
    const parsedURL = new URL(url);
    const pathSegments = parsedURL.pathname.split('/');

    // Modify the version
    pathSegments[2] = newVersion;

    // Reconstruct the URL
    parsedURL.pathname = pathSegments.join('/');

    return parsedURL.href;
}
docs/website_files/version.js (new file, 1 line)

@@ -0,0 +1 @@
window.SYNAPSE_VERSION = 'v1.69';
mypy.ini (3 lines changed)

@@ -106,6 +106,9 @@ disallow_untyped_defs = False
[mypy-tests.handlers.test_user_directory]
disallow_untyped_defs = True

[mypy-tests.push.test_bulk_push_rule_evaluator]
disallow_untyped_defs = True

[mypy-tests.test_server]
disallow_untyped_defs = True
@@ -57,7 +57,7 @@ manifest-path = "rust/Cargo.toml"

[tool.poetry]
name = "matrix-synapse"
version = "1.69.0rc1"
version = "1.69.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "Apache-2.0"

@@ -219,7 +219,7 @@ oidc = ["authlib"]
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
systemd = ["systemd-python"]
url_preview = ["lxml"]
url-preview = ["lxml"]
sentry = ["sentry-sdk"]
opentracing = ["jaeger-client", "opentracing"]
jwt = ["authlib"]

@@ -250,7 +250,7 @@ all = [
    "pysaml2",
    # oidc and jwt
    "authlib",
    # url_preview
    # url-preview
    "lxml",
    # sentry
    "sentry-sdk",

@@ -307,7 +307,12 @@ twine = "*"
towncrier = ">=18.6.0rc1"

[build-system]
requires = ["poetry-core>=1.0.0", "setuptools_rust>=1.3"]
# The upper bounds here are defensive, intended to prevent situations like
# #13849 and #14079 where we see buildtime or runtime errors caused by build
# system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=1.0.0,<=1.3.1", "setuptools_rust>=1.3,<=1.5.2"]
build-backend = "poetry.core.masonry.api"
@@ -205,7 +205,7 @@ class ContentRepositoryConfig(Config):
        )
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            check_requirements("url_preview")
            check_requirements("url-preview")

        proxy_env = getproxies_environment()
        if "url_preview_ip_range_blacklist" not in config:
@@ -748,6 +748,40 @@ class ModuleApi:
            )
        )

    async def create_login_token(
        self,
        user_id: str,
        duration_in_ms: int = (2 * 60 * 1000),
        auth_provider_id: Optional[str] = None,
        auth_provider_session_id: Optional[str] = None,
    ) -> str:
        """Create a login token suitable for m.login.token authentication

        Added in Synapse v1.69.0.

        Args:
            user_id: gives the ID of the user that the token is for

            duration_in_ms: the time that the token will be valid for

            auth_provider_id: the ID of the SSO IdP that the user used to authenticate
                to get this token, if any. This is encoded in the token so that
                /login can report stats on number of successful logins by IdP.

            auth_provider_session_id: The session ID got during login from the SSO IdP,
                if any.
        """
        # The deprecated `generate_short_term_login_token` method defaulted to an empty
        # string for the `auth_provider_id` because of how the underlying macaroon was
        # generated. This will change to a proper NULL-able field when the tokens get
        # moved to the database.
        return self._hs.get_macaroon_generator().generate_short_term_login_token(
            user_id,
            auth_provider_id or "",
            auth_provider_session_id,
            duration_in_ms,
        )

    def generate_short_term_login_token(
        self,
        user_id: str,

@@ -759,6 +793,9 @@ class ModuleApi:

        Added in Synapse v1.9.0.

        This was deprecated in Synapse v1.69.0 in favor of create_login_token, and will
        be removed in Synapse 1.71.0.

        Args:
            user_id: gives the ID of the user that the token is for

@@ -768,6 +805,11 @@
                to get this token, if any. This is encoded in the token so that
                /login can report stats on number of successful logins by IdP.
        """
        logger.warn(
            "A module configured on this server uses ModuleApi.generate_short_term_login_token(), "
            "which is deprecated in favor of ModuleApi.create_login_token(), and will be removed in "
            "Synapse 1.71.0",
        )
        return self._hs.get_macaroon_generator().generate_short_term_login_token(
            user_id,
            auth_provider_id,
@@ -289,11 +289,18 @@ class BulkPushRuleEvaluator:
        if relation.rel_type == RelationTypes.THREAD:
            thread_id = relation.parent_id

        # It's possible that old room versions have non-integer power levels (floats or
        # strings). Workaround this by explicitly converting to int.
        notification_levels = power_levels.get("notifications", {})
        if not event.room_version.msc3667_int_only_power_levels:
            for user_id, level in notification_levels.items():
                notification_levels[user_id] = int(level)

        evaluator = PushRuleEvaluator(
            _flatten_dict(event),
            room_member_count,
            sender_power_level,
            power_levels.get("notifications", {}),
            notification_levels,
            relations,
            self._relations_match_enabled,
        )
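For illustration only (not part of the diff above; the dictionary contents are made up), the coercion introduced here simply normalises `notifications` power-level values that old room versions may store as strings or floats before they reach the strict integer comparisons in the push-rule evaluator:

```python
# Hypothetical power-level values as they might appear in an old room version.
notification_levels = {"room": "100", "other": 50.0}

# The same normalisation as in the change above: coerce each value to int.
for user_id, level in notification_levels.items():
    notification_levels[user_id] = int(level)

assert notification_levels == {"room": 100, "other": 50}
```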
@@ -39,6 +39,16 @@ class ReplicationRegisterServlet(ReplicationEndpoint):
        self.store = hs.get_datastores().main
        self.registration_handler = hs.get_registration_handler()

        # Default value if the worker that sent the replication request did not include
        # an 'approved' property.
        if (
            hs.config.experimental.msc3866.enabled
            and hs.config.experimental.msc3866.require_approval_for_new_accounts
        ):
            self._approval_default = False
        else:
            self._approval_default = True

    @staticmethod
    async def _serialize_payload(  # type: ignore[override]
        user_id: str,

@@ -92,6 +102,12 @@ class ReplicationRegisterServlet(ReplicationEndpoint):

        await self.registration_handler.check_registration_ratelimit(content["address"])

        # Always default admin users to approved (since it means they were created by
        # an admin).
        approved_default = self._approval_default
        if content["admin"]:
            approved_default = True

        await self.registration_handler.register_with_store(
            user_id=user_id,
            password_hash=content["password_hash"],

@@ -103,7 +119,7 @@ class ReplicationRegisterServlet(ReplicationEndpoint):
            user_type=content["user_type"],
            address=content["address"],
            shadow_banned=content["shadow_banned"],
            approved=content["approved"],
            approved=content.get("approved", approved_default),
        )

        return 200, {}
@@ -269,11 +269,11 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
        event_push_actions_done = progress.get("event_push_actions_done", False)

        def add_thread_id_txn(
            txn: LoggingTransaction, table_name: str, start_stream_ordering: int
            txn: LoggingTransaction, start_stream_ordering: int
        ) -> int:
            sql = f"""
            sql = """
                SELECT stream_ordering
                FROM {table_name}
                FROM event_push_actions
                WHERE
                    thread_id IS NULL
                    AND stream_ordering > ?

@@ -285,7 +285,7 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            # No more rows to process.
            rows = txn.fetchall()
            if not rows:
                progress[f"{table_name}_done"] = True
                progress["event_push_actions_done"] = True
                self.db_pool.updates._background_update_progress_txn(
                    txn, "event_push_backfill_thread_id", progress
                )

@@ -294,16 +294,65 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            # Update the thread ID for any of those rows.
            max_stream_ordering = rows[-1][0]

            sql = f"""
                UPDATE {table_name}
            sql = """
                UPDATE event_push_actions
                SET thread_id = 'main'
                WHERE stream_ordering <= ? AND thread_id IS NULL
                WHERE ? < stream_ordering AND stream_ordering <= ? AND thread_id IS NULL
            """
            txn.execute(sql, (max_stream_ordering,))
            txn.execute(
                sql,
                (
                    start_stream_ordering,
                    max_stream_ordering,
                ),
            )

            # Update progress.
            processed_rows = txn.rowcount
            progress[f"max_{table_name}_stream_ordering"] = max_stream_ordering
            progress["max_event_push_actions_stream_ordering"] = max_stream_ordering
            self.db_pool.updates._background_update_progress_txn(
                txn, "event_push_backfill_thread_id", progress
            )

            return processed_rows

        def add_thread_id_summary_txn(txn: LoggingTransaction) -> int:
            min_user_id = progress.get("max_summary_user_id", "")
            min_room_id = progress.get("max_summary_room_id", "")

            # Slightly overcomplicated query for getting the Nth user ID / room
            # ID tuple, or the last if there are less than N remaining.
            sql = """
                SELECT user_id, room_id FROM (
                    SELECT user_id, room_id FROM event_push_summary
                    WHERE (user_id, room_id) > (?, ?)
                        AND thread_id IS NULL
                    ORDER BY user_id, room_id
                    LIMIT ?
                ) AS e
                ORDER BY user_id DESC, room_id DESC
                LIMIT 1
            """

            txn.execute(sql, (min_user_id, min_room_id, batch_size))
            row = txn.fetchone()
            if not row:
                return 0

            max_user_id, max_room_id = row

            sql = """
                UPDATE event_push_summary
                SET thread_id = 'main'
                WHERE
                    (?, ?) < (user_id, room_id) AND (user_id, room_id) <= (?, ?)
                    AND thread_id IS NULL
            """
            txn.execute(sql, (min_user_id, min_room_id, max_user_id, max_room_id))
            processed_rows = txn.rowcount

            progress["max_summary_user_id"] = max_user_id
            progress["max_summary_room_id"] = max_room_id
            self.db_pool.updates._background_update_progress_txn(
                txn, "event_push_backfill_thread_id", progress
            )

@@ -319,15 +368,12 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            result = await self.db_pool.runInteraction(
                "event_push_backfill_thread_id",
                add_thread_id_txn,
                "event_push_actions",
                progress.get("max_event_push_actions_stream_ordering", 0),
            )
        else:
            result = await self.db_pool.runInteraction(
                "event_push_backfill_thread_id",
                add_thread_id_txn,
                "event_push_summary",
                progress.get("max_event_push_summary_stream_ordering", 0),
                add_thread_id_summary_txn,
            )

        # Only done after the event_push_summary table is done.

@@ -1103,19 +1149,28 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas
            txn, room_id, user_id, stream_ordering, old_rotate_stream_ordering
        )

        # First ensure that the existing rows have an updated thread_id field.
        txn.execute(
            """
            UPDATE event_push_summary
            SET thread_id = ?
            WHERE room_id = ? AND user_id = ? AND thread_id is NULL
            """,
            ("main", room_id, user_id),
        )

        # Replace the previous summary with the new counts.
        #
        # TODO(threads): Upsert per-thread instead of setting them all to main.
        self.db_pool.simple_upsert_txn(
            txn,
            table="event_push_summary",
            keyvalues={"room_id": room_id, "user_id": user_id},
            keyvalues={"room_id": room_id, "user_id": user_id, "thread_id": "main"},
            values={
                "notif_count": notif_count,
                "unread_count": unread_count,
                "stream_ordering": old_rotate_stream_ordering,
                "last_receipt_stream_ordering": stream_ordering,
                "thread_id": "main",
            },
        )

@@ -1264,20 +1319,33 @@ class EventPushActionsWorkerStore(ReceiptsWorkerStore, StreamWorkerStore, SQLBas

        logger.info("Rotating notifications, handling %d rows", len(summaries))

        # Ensure that any updated threads have an updated thread_id.
        txn.execute_batch(
            """
            UPDATE event_push_summary
            SET thread_id = ?
            WHERE room_id = ? AND user_id = ? AND thread_id is NULL
            """,
            [("main", room_id, user_id) for user_id, room_id in summaries],
        )
        self.db_pool.simple_update_many_txn(
            txn,
            table="event_push_summary",
            key_names=("user_id", "room_id", "thread_id"),
            key_values=[(user_id, room_id, None) for user_id, room_id in summaries],
            value_names=("thread_id",),
            value_values=[("main",) for _ in summaries],
        )

        # TODO(threads): Update on a per-thread basis.
        self.db_pool.simple_upsert_many_txn(
            txn,
            table="event_push_summary",
            key_names=("user_id", "room_id"),
            key_values=[(user_id, room_id) for user_id, room_id in summaries],
            value_names=("notif_count", "unread_count", "stream_ordering", "thread_id"),
            key_names=("user_id", "room_id", "thread_id"),
            key_values=[(user_id, room_id, "main") for user_id, room_id in summaries],
            value_names=("notif_count", "unread_count", "stream_ordering"),
            value_values=[
                (
                    summary.notif_count,
                    summary.unread_count,
                    summary.stream_ordering,
                    "main",
                )
                (summary.notif_count, summary.unread_count, summary.stream_ordering)
                for summary in summaries.values()
            ],
        )
tests/push/test_bulk_push_rule_evaluator.py (new file, 74 lines)

@@ -0,0 +1,74 @@
from unittest.mock import patch

from synapse.api.room_versions import RoomVersions
from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator
from synapse.rest import admin
from synapse.rest.client import login, register, room
from synapse.types import create_requester

from tests import unittest


class TestBulkPushRuleEvaluator(unittest.HomeserverTestCase):

    servlets = [
        admin.register_servlets_for_client_rest_resource,
        room.register_servlets,
        login.register_servlets,
        register.register_servlets,
    ]

    def test_action_for_event_by_user_handles_noninteger_power_levels(self) -> None:
        """We should convert floats and strings to integers before passing to Rust.

        Reproduces #14060.

        A lack of validation: the gift that keeps on giving.
        """
        # Create a new user and room.
        alice = self.register_user("alice", "pass")
        token = self.login(alice, "pass")

        room_id = self.helper.create_room_as(
            alice, room_version=RoomVersions.V9.identifier, tok=token
        )

        # Alter the power levels in that room to include stringy and floaty levels.
        # We need to suppress the validation logic or else it will reject these dodgy
        # values. (Presumably this validation was not always present.)
        event_creation_handler = self.hs.get_event_creation_handler()
        requester = create_requester(alice)
        with patch("synapse.events.validator.validate_canonicaljson"), patch(
            "synapse.events.validator.jsonschema.validate"
        ):
            self.helper.send_state(
                room_id,
                "m.room.power_levels",
                {
                    "users": {alice: "100"},  # stringy
                    "notifications": {"room": 100.0},  # float
                },
                token,
                state_key="",
            )

        # Create a new message event, and try to evaluate it under the dodgy
        # power level event.
        event, context = self.get_success(
            event_creation_handler.create_event(
                requester,
                {
                    "type": "m.room.message",
                    "room_id": room_id,
                    "content": {
                        "msgtype": "m.text",
                        "body": "helo",
                    },
                    "sender": alice,
                },
            )
        )

        bulk_evaluator = BulkPushRuleEvaluator(self.hs)
        # should not raise
        self.get_success(bulk_evaluator.action_for_event_by_user(event, context))