Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 8a69993893 | |||
| f9eff0766d | |||
| d18edf67d6 | |||
| fd5d3d852d | |||
| ea376126a0 | |||
| 74be5cfdbc | |||
| f2ca2e31f7 | |||
| 6dc1ecd359 | |||
| 2965c9970c |
+7
-5
@@ -253,15 +253,17 @@ Alongside all that, join our developer community on Matrix:
|
||||
Copyright and Licensing
|
||||
=======================
|
||||
|
||||
Copyright 2014-2017 OpenMarket Ltd
|
||||
Copyright 2017 Vector Creations Ltd
|
||||
Copyright 2017-2025 New Vector Ltd
|
||||
| Copyright 2014-2017 OpenMarket Ltd
|
||||
| Copyright 2017 Vector Creations Ltd
|
||||
| Copyright 2017-2025 New Vector Ltd
|
||||
|
|
||||
|
||||
This software is dual-licensed by New Vector Ltd (Element). It can be used either:
|
||||
|
||||
|
||||
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
|
||||
|
||||
|
||||
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
Stop auto-provisioning missing users & devices when delegating auth to Matrix Authentication Service. Requires MAS 0.13.0 or later.
|
||||
@@ -0,0 +1 @@
|
||||
Add an Admin API endpoint `GET /_synapse/admin/v1/scheduled_tasks` to fetch scheduled tasks.
|
||||
@@ -0,0 +1 @@
|
||||
Improve formatting of the README file.
|
||||
@@ -0,0 +1 @@
|
||||
Don't validate the `at_hash` (access token hash) field in OIDC ID Tokens if we don't end up actually using the OIDC Access Token.
|
||||
@@ -0,0 +1 @@
|
||||
Add `/_matrix/federation/v1/version` to list of federation endpoints that can be handled by workers.
|
||||
@@ -1 +0,0 @@
|
||||
Allow client & media admin apis to coexist.
|
||||
@@ -0,0 +1 @@
|
||||
Add an Admin API endpoint `GET /_synapse/admin/v1/scheduled_tasks` to fetch scheduled tasks.
|
||||
@@ -0,0 +1 @@
|
||||
Don't validate the `at_hash` (access token hash) field in OIDC ID Tokens if we don't end up actually using the OIDC Access Token.
|
||||
@@ -0,0 +1 @@
|
||||
Migrate from deprecated `poetry.dev-dependencies` -> `poetry.group.dev.dependencies` in pyproject.toml.
|
||||
@@ -202,6 +202,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
|
||||
"app": "synapse.app.generic_worker",
|
||||
"listener_resources": ["federation"],
|
||||
"endpoint_patterns": [
|
||||
"^/_matrix/federation/v1/version$",
|
||||
"^/_matrix/federation/(v1|v2)/event/",
|
||||
"^/_matrix/federation/(v1|v2)/state/",
|
||||
"^/_matrix/federation/(v1|v2)/state_ids/",
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
# Show scheduled tasks
|
||||
|
||||
This API returns information about scheduled tasks.
|
||||
|
||||
To use it, you will need to authenticate by providing an `access_token`
|
||||
for a server admin: see [Admin API](../usage/administration/admin_api/).
|
||||
|
||||
The API is:
|
||||
```
|
||||
GET /_synapse/admin/v1/scheduled_tasks
|
||||
```
|
||||
|
||||
It returns a JSON body like the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"scheduled_tasks": [
|
||||
{
|
||||
"id": "GSA124oegf1",
|
||||
"action": "shutdown_room",
|
||||
"status": "complete",
|
||||
"timestamp_ms": 23423523,
|
||||
"resource_id": "!roomid",
|
||||
"result": "some result",
|
||||
"error": null
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Query parameters:**
|
||||
|
||||
* `action_name`: string - Is optional. Returns only the scheduled tasks with the given action name.
|
||||
* `resource_id`: string - Is optional. Returns only the scheduled tasks with the given resource id.
|
||||
* `job_status`: string - Is optional. Returns only the scheduled tasks matching the given status, one of
|
||||
- "scheduled" - Task is scheduled but not active
|
||||
- "active" - Task is active and probably running, and if not will be run on next scheduler loop run
|
||||
- "complete" - Task has completed successfully
|
||||
- "failed" - Task is over and either returned a failed status, or had an exception
|
||||
|
||||
* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp earlier than the specified one.
|
||||
|
||||
**Response**
|
||||
|
||||
The following fields are returned in the JSON response body along with a `200` HTTP status code:
|
||||
|
||||
* `id`: string - ID of scheduled task.
|
||||
* `action`: string - The name of the scheduled task's action.
|
||||
* `status`: string - The status of the scheduled task.
|
||||
* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task - If the status is "scheduled" then this represents when it should be launched.
|
||||
Otherwise it represents the last time this task got a change of state.
|
||||
* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one
|
||||
* `result`: Optional Json - Any result of the scheduled task, if given
|
||||
* `error`: Optional string - If the task has the status "failed", the error associated with this failure
|
||||
@@ -117,6 +117,16 @@ each upgrade are complete before moving on to the next upgrade, to avoid
|
||||
stacking them up. You can monitor the currently running background updates with
|
||||
[the Admin API](usage/administration/admin_api/background_updates.html#status).
|
||||
|
||||
# Upgrading to v1.130.0
|
||||
|
||||
## Documented endpoint which can be delegated to a federation worker
|
||||
|
||||
The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation
|
||||
worker. This is not new behaviour, but had not been documented yet. The
|
||||
[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has
|
||||
been updated to include it. Make sure to check your reverse proxy rules if you
|
||||
are using workers.
|
||||
|
||||
# Upgrading to v1.126.0
|
||||
|
||||
## Room list publication rules change
|
||||
|
||||
@@ -200,6 +200,7 @@ information.
|
||||
^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$
|
||||
|
||||
# Federation requests
|
||||
^/_matrix/federation/v1/version$
|
||||
^/_matrix/federation/v1/event/
|
||||
^/_matrix/federation/v1/state/
|
||||
^/_matrix/federation/v1/state_ids/
|
||||
|
||||
+1
-1
@@ -315,7 +315,7 @@ all = [
|
||||
# - systemd: this is a system-based requirement
|
||||
]
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
# We pin development dependencies in poetry.lock so that our tests don't start
|
||||
# failing on new releases. Keeping lower bounds loose here means that dependabot
|
||||
# can bump versions without having to update the content-hash in the lockfile.
|
||||
|
||||
@@ -39,7 +39,6 @@ from synapse.api.errors import (
|
||||
HttpResponseException,
|
||||
InvalidClientTokenError,
|
||||
OAuthInsufficientScopeError,
|
||||
StoreError,
|
||||
SynapseError,
|
||||
UnrecognizedRequestError,
|
||||
)
|
||||
@@ -512,7 +511,7 @@ class MSC3861DelegatedAuth(BaseAuth):
|
||||
raise InvalidClientTokenError("No scope in token granting user rights")
|
||||
|
||||
# Match via the sub claim
|
||||
sub: Optional[str] = introspection_result.get_sub()
|
||||
sub = introspection_result.get_sub()
|
||||
if sub is None:
|
||||
raise InvalidClientTokenError(
|
||||
"Invalid sub claim in the introspection result"
|
||||
@@ -525,29 +524,20 @@ class MSC3861DelegatedAuth(BaseAuth):
|
||||
# If we could not find a user via the external_id, it either does not exist,
|
||||
# or the external_id was never recorded
|
||||
|
||||
# TODO: claim mapping should be configurable
|
||||
username: Optional[str] = introspection_result.get_username()
|
||||
if username is None or not isinstance(username, str):
|
||||
username = introspection_result.get_username()
|
||||
if username is None:
|
||||
raise AuthError(
|
||||
500,
|
||||
"Invalid username claim in the introspection result",
|
||||
)
|
||||
user_id = UserID(username, self._hostname)
|
||||
|
||||
# First try to find a user from the username claim
|
||||
# Try to find a user from the username claim
|
||||
user_info = await self.store.get_user_by_id(user_id=user_id.to_string())
|
||||
if user_info is None:
|
||||
# If the user does not exist, we should create it on the fly
|
||||
# TODO: we could use SCIM to provision users ahead of time and listen
|
||||
# for SCIM SET events if those ever become standard:
|
||||
# https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00
|
||||
|
||||
# TODO: claim mapping should be configurable
|
||||
# If present, use the name claim as the displayname
|
||||
name: Optional[str] = introspection_result.get_name()
|
||||
|
||||
await self.store.register_user(
|
||||
user_id=user_id.to_string(), create_profile_with_displayname=name
|
||||
raise AuthError(
|
||||
500,
|
||||
"User not found",
|
||||
)
|
||||
|
||||
# And record the sub as external_id
|
||||
@@ -587,17 +577,10 @@ class MSC3861DelegatedAuth(BaseAuth):
|
||||
"Invalid device ID in introspection result",
|
||||
)
|
||||
|
||||
# Create the device on the fly if it does not exist
|
||||
try:
|
||||
await self.store.get_device(
|
||||
user_id=user_id.to_string(), device_id=device_id
|
||||
)
|
||||
except StoreError:
|
||||
await self.store.store_device(
|
||||
user_id=user_id.to_string(),
|
||||
device_id=device_id,
|
||||
initial_device_display_name="OIDC-native client",
|
||||
)
|
||||
# Make sure the device exists
|
||||
await self.store.get_device(
|
||||
user_id=user_id.to_string(), device_id=device_id
|
||||
)
|
||||
|
||||
# TODO: there is a few things missing in the requester here, which still need
|
||||
# to be figured out, like:
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
#
|
||||
import logging
|
||||
import sys
|
||||
from typing import Dict, List, cast
|
||||
from typing import Dict, List
|
||||
|
||||
from twisted.web.resource import Resource
|
||||
|
||||
@@ -51,7 +51,7 @@ from synapse.http.server import JsonResource, OptionsResource
|
||||
from synapse.logging.context import LoggingContext
|
||||
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
|
||||
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
|
||||
from synapse.rest import ClientRestResource, admin
|
||||
from synapse.rest import ClientRestResource
|
||||
from synapse.rest.admin import AdminRestResource, register_servlets_for_media_repo
|
||||
from synapse.rest.health import HealthResource
|
||||
from synapse.rest.key.v2 import KeyResource
|
||||
@@ -190,11 +190,8 @@ class GenericWorkerServer(HomeServer):
|
||||
|
||||
resources.update(build_synapse_client_resource_tree(self))
|
||||
resources["/.well-known"] = well_known_resource(self)
|
||||
admin_res = resources.get("/_synapse/admin")
|
||||
if admin_res is not None:
|
||||
admin.register_servlets(self, cast(JsonResource, admin_res))
|
||||
else:
|
||||
resources["/_synapse/admin"] = AdminRestResource(self)
|
||||
resources["/_synapse/admin"] = AdminRestResource(self)
|
||||
|
||||
elif name == "federation":
|
||||
resources[FEDERATION_PREFIX] = TransportLayerServer(self)
|
||||
elif name == "media":
|
||||
@@ -203,21 +200,15 @@ class GenericWorkerServer(HomeServer):
|
||||
|
||||
# We need to serve the admin servlets for media on the
|
||||
# worker.
|
||||
admin_res = resources.get("/_synapse/admin")
|
||||
if admin_res is not None:
|
||||
register_servlets_for_media_repo(
|
||||
self, cast(JsonResource, admin_res)
|
||||
)
|
||||
else:
|
||||
admin_resource = JsonResource(self, canonical_json=False)
|
||||
register_servlets_for_media_repo(self, admin_resource)
|
||||
resources["/_synapse/admin"] = admin_resource
|
||||
admin_resource = JsonResource(self, canonical_json=False)
|
||||
register_servlets_for_media_repo(self, admin_resource)
|
||||
|
||||
resources.update(
|
||||
{
|
||||
MEDIA_R0_PREFIX: media_repo,
|
||||
MEDIA_V3_PREFIX: media_repo,
|
||||
LEGACY_MEDIA_PREFIX: media_repo,
|
||||
"/_synapse/admin": admin_resource,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -586,6 +586,24 @@ class OidcProvider:
|
||||
or self._user_profile_method == "userinfo_endpoint"
|
||||
)
|
||||
|
||||
@property
|
||||
def _uses_access_token(self) -> bool:
|
||||
"""Return True if the `access_token` will be used during the login process.
|
||||
|
||||
This is useful to determine whether the access token
|
||||
returned by the identity provider, and
|
||||
any related metadata (such as the `at_hash` field in
|
||||
the ID token), should be validated.
|
||||
"""
|
||||
# Currently, Synapse only uses the access_token to fetch user metadata
|
||||
# from the userinfo endpoint. Therefore we only have a single criteria
|
||||
# to check right now but this may change in the future and this function
|
||||
# should be updated if more usages are introduced.
|
||||
#
|
||||
# For example, if we start to use the access_token given to us by the
|
||||
# IdP for more things, such as accessing Resource Server APIs.
|
||||
return self._uses_userinfo
|
||||
|
||||
@property
|
||||
def issuer(self) -> str:
|
||||
"""The issuer identifying this provider."""
|
||||
@@ -957,9 +975,16 @@ class OidcProvider:
|
||||
"nonce": nonce,
|
||||
"client_id": self._client_auth.client_id,
|
||||
}
|
||||
if "access_token" in token:
|
||||
if self._uses_access_token and "access_token" in token:
|
||||
# If we got an `access_token`, there should be an `at_hash` claim
|
||||
# in the `id_token` that we can check against.
|
||||
# in the `id_token` that we can check against. Setting this
|
||||
# instructs authlib to check the value of `at_hash` in the
|
||||
# ID token.
|
||||
#
|
||||
# We only need to verify the access token if we actually make
|
||||
# use of it. Which currently only happens when we need to fetch
|
||||
# the user's information from the userinfo_endpoint. Thus, this
|
||||
# check is also gated on self._uses_userinfo.
|
||||
claims_params["access_token"] = token["access_token"]
|
||||
|
||||
claims_options = {"iss": {"values": [metadata["issuer"]]}}
|
||||
|
||||
@@ -86,6 +86,7 @@ from synapse.rest.admin.rooms import (
|
||||
RoomStateRestServlet,
|
||||
RoomTimestampToEventRestServlet,
|
||||
)
|
||||
from synapse.rest.admin.scheduled_tasks import ScheduledTasksRestServlet
|
||||
from synapse.rest.admin.server_notice_servlet import SendServerNoticeServlet
|
||||
from synapse.rest.admin.statistics import (
|
||||
LargestRoomsStatistics,
|
||||
@@ -338,6 +339,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
|
||||
BackgroundUpdateStartJobRestServlet(hs).register(http_server)
|
||||
ExperimentalFeaturesRestServlet(hs).register(http_server)
|
||||
SuspendAccountRestServlet(hs).register(http_server)
|
||||
ScheduledTasksRestServlet(hs).register(http_server)
|
||||
|
||||
|
||||
def register_servlets_for_client_rest_resource(
|
||||
|
||||
@@ -0,0 +1,70 @@
|
||||
#
|
||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
#
|
||||
# Copyright (C) 2025 New Vector, Ltd
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# See the GNU Affero General Public License for more details:
|
||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
#
|
||||
#
|
||||
#
|
||||
from typing import TYPE_CHECKING, Tuple
|
||||
|
||||
from synapse.http.servlet import RestServlet, parse_integer, parse_string
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.rest.admin import admin_patterns, assert_requester_is_admin
|
||||
from synapse.types import JsonDict, TaskStatus
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.server import HomeServer
|
||||
|
||||
|
||||
class ScheduledTasksRestServlet(RestServlet):
    """Get a list of scheduled tasks and their statuses
    optionally filtered by action name, resource id, status, and max timestamp
    """

    # Registered under /_synapse/admin/v1/scheduled_tasks
    PATTERNS = admin_patterns("/scheduled_tasks$")

    def __init__(self, hs: "HomeServer"):
        self._auth = hs.get_auth()
        self._store = hs.get_datastores().main

    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
        """Handle GET: return the scheduled tasks matching the optional filters.

        Query parameters (all optional): `action_name`, `resource_id`,
        `job_status` and `max_timestamp`. Requires the requester to be a
        server admin; otherwise `assert_requester_is_admin` raises.

        Returns a 200 response with a `scheduled_tasks` JSON list.
        """
        await assert_requester_is_admin(self._auth, request)

        # extract query params
        action_name = parse_string(request, "action_name")
        resource_id = parse_string(request, "resource_id")
        # NOTE: the query parameter is named `job_status`, not `status`.
        status = parse_string(request, "job_status")
        max_timestamp = parse_integer(request, "max_timestamp")

        # The store API filters on lists of actions/statuses; wrap the single
        # provided value, or pass None to mean "no filter".
        actions = [action_name] if action_name else None
        # NOTE(review): an unrecognised status string makes TaskStatus() raise
        # ValueError here — presumably surfaced as a 500; confirm that is the
        # intended behaviour for bad input.
        statuses = [TaskStatus(status)] if status else None

        tasks = await self._store.get_scheduled_tasks(
            actions=actions,
            resource_id=resource_id,
            statuses=statuses,
            max_timestamp=max_timestamp,
        )

        # Serialise each task into the JSON shape documented in the admin API
        # (id/action/status/timestamp_ms/resource_id/result/error).
        json_tasks = []
        for task in tasks:
            result_task = {
                "id": task.id,
                "action": task.action,
                "status": task.status,
                "timestamp_ms": task.timestamp,
                "resource_id": task.resource_id,
                "result": task.result,
                "error": task.error,
            }
            json_tasks.append(result_task)

        return 200, {"scheduled_tasks": json_tasks}
|
||||
@@ -147,6 +147,16 @@ class MSC3861OAuthDelegation(HomeserverTestCase):
|
||||
|
||||
return hs
|
||||
|
||||
def prepare(
|
||||
self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
|
||||
) -> None:
|
||||
# Provision the user and the device we use in the tests.
|
||||
store = homeserver.get_datastores().main
|
||||
self.get_success(store.register_user(USER_ID))
|
||||
self.get_success(
|
||||
store.store_device(USER_ID, DEVICE, initial_device_display_name=None)
|
||||
)
|
||||
|
||||
def _assertParams(self) -> None:
|
||||
"""Assert that the request parameters are correct."""
|
||||
params = parse_qs(self.http_client.request.call_args[1]["data"].decode("utf-8"))
|
||||
|
||||
@@ -1029,6 +1029,50 @@ class OidcHandlerTestCase(HomeserverTestCase):
|
||||
args = parse_qs(kwargs["data"].decode("utf-8"))
|
||||
self.assertEqual(args["redirect_uri"], [TEST_REDIRECT_URI])
|
||||
|
||||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
**DEFAULT_CONFIG,
|
||||
"redirect_uri": TEST_REDIRECT_URI,
|
||||
}
|
||||
}
|
||||
)
|
||||
def test_code_exchange_ignores_access_token(self) -> None:
|
||||
"""
|
||||
Code exchange completes successfully and doesn't validate the `at_hash`
|
||||
(access token hash) field of an ID token when the access token isn't
|
||||
going to be used.
|
||||
|
||||
The access token won't be used in this test because Synapse (currently)
|
||||
only needs it to fetch a user's metadata if it isn't included in the ID
|
||||
token itself.
|
||||
|
||||
Because we have included "openid" in the requested scopes for this IdP
|
||||
(see `SCOPES`), user metadata is included in the ID token. Thus the
|
||||
access token isn't needed, and it's unnecessary for Synapse to validate
|
||||
the access token.
|
||||
|
||||
This is a regression test for a situation where an upstream identity
|
||||
provider was providing an invalid `at_hash` value, which Synapse errored
|
||||
on, yet Synapse wasn't using the access token for anything.
|
||||
"""
|
||||
# Exchange the code against the fake IdP.
|
||||
userinfo = {
|
||||
"sub": "foo",
|
||||
"username": "foo",
|
||||
"phone": "1234567",
|
||||
}
|
||||
with self.fake_server.id_token_override(
|
||||
{
|
||||
"at_hash": "invalid-hash",
|
||||
}
|
||||
):
|
||||
request, _ = self.start_authorization(userinfo)
|
||||
self.get_success(self.handler.handle_oidc_callback(request))
|
||||
|
||||
# If no error was rendered, then we have success.
|
||||
self.render_error.assert_not_called()
|
||||
|
||||
@override_config(
|
||||
{
|
||||
"oidc_config": {
|
||||
|
||||
@@ -0,0 +1,192 @@
|
||||
#
|
||||
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||
#
|
||||
# Copyright (C) 2025 New Vector, Ltd
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# See the GNU Affero General Public License for more details:
|
||||
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||
#
|
||||
#
|
||||
#
|
||||
from typing import Mapping, Optional, Tuple
|
||||
|
||||
from twisted.test.proto_helpers import MemoryReactor
|
||||
|
||||
import synapse.rest.admin
|
||||
from synapse.api.errors import Codes
|
||||
from synapse.rest.client import login
|
||||
from synapse.server import HomeServer
|
||||
from synapse.types import JsonMapping, ScheduledTask, TaskStatus
|
||||
from synapse.util import Clock
|
||||
|
||||
from tests import unittest
|
||||
|
||||
|
||||
class ScheduledTasksAdminApiTestCase(unittest.HomeserverTestCase):
    """Tests for the `GET /_synapse/admin/v1/scheduled_tasks` admin API."""

    servlets = [
        synapse.rest.admin.register_servlets,
        login.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        """Register an admin user and schedule one task in each status
        (active, complete, failed) for the tests to query.
        """
        self.store = hs.get_datastores().main
        self.admin_user = self.register_user("admin", "pass", admin=True)
        self.admin_user_tok = self.login("admin", "pass")
        self._task_scheduler = hs.get_task_scheduler()

        # create and schedule a few tasks
        async def _test_task(
            task: ScheduledTask,
        ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
            # Reports ACTIVE so the "active" status filter has a match.
            return TaskStatus.ACTIVE, None, None

        async def _finished_test_task(
            task: ScheduledTask,
        ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
            return TaskStatus.COMPLETE, None, None

        async def _failed_test_task(
            task: ScheduledTask,
        ) -> Tuple[TaskStatus, Optional[JsonMapping], Optional[str]]:
            return TaskStatus.FAILED, None, "Everything failed"

        self._task_scheduler.register_action(_test_task, "test_task")
        self.get_success(
            self._task_scheduler.schedule_task("test_task", resource_id="test")
        )

        self._task_scheduler.register_action(_finished_test_task, "finished_test_task")
        self.get_success(
            self._task_scheduler.schedule_task(
                "finished_test_task", resource_id="finished_task"
            )
        )

        self._task_scheduler.register_action(_failed_test_task, "failed_test_task")
        self.get_success(
            self._task_scheduler.schedule_task(
                "failed_test_task", resource_id="failed_task"
            )
        )

    def check_scheduled_tasks_response(self, scheduled_tasks: Mapping) -> list:
        """Return the subset of `scheduled_tasks` matching the tasks scheduled
        in `prepare`, asserting each has the expected status and action name.
        """
        result = []
        for task in scheduled_tasks:
            if task["resource_id"] == "test":
                self.assertEqual(task["status"], TaskStatus.ACTIVE)
                self.assertEqual(task["action"], "test_task")
                result.append(task)
            if task["resource_id"] == "finished_task":
                self.assertEqual(task["status"], TaskStatus.COMPLETE)
                self.assertEqual(task["action"], "finished_test_task")
                result.append(task)
            if task["resource_id"] == "failed_task":
                self.assertEqual(task["status"], TaskStatus.FAILED)
                self.assertEqual(task["action"], "failed_test_task")
                result.append(task)

        return result

    def test_requester_is_not_admin(self) -> None:
        """
        If the user is not a server admin, an error 403 is returned.
        """

        self.register_user("user", "pass", admin=False)
        other_user_tok = self.login("user", "pass")

        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks",
            content={},
            access_token=other_user_tok,
        )

        self.assertEqual(403, channel.code, msg=channel.json_body)
        self.assertEqual(Codes.FORBIDDEN, channel.json_body["errcode"])

    def test_scheduled_tasks(self) -> None:
        """
        Test that endpoint returns scheduled tasks.
        """

        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks",
            content={},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        scheduled_tasks = channel.json_body["scheduled_tasks"]

        # make sure we got back all the scheduled tasks
        found_tasks = self.check_scheduled_tasks_response(scheduled_tasks)
        self.assertEqual(len(found_tasks), 3)

    def test_filtering_scheduled_tasks(self) -> None:
        """
        Test that filtering the scheduled tasks response via query params works as expected.
        """
        # filter via job_status
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks?job_status=active",
            content={},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        scheduled_tasks = channel.json_body["scheduled_tasks"]
        found_tasks = self.check_scheduled_tasks_response(scheduled_tasks)

        # only the active task should have been returned
        self.assertEqual(len(found_tasks), 1)
        self.assertEqual(found_tasks[0]["status"], "active")

        # filter via action_name
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks?action_name=test_task",
            content={},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        scheduled_tasks = channel.json_body["scheduled_tasks"]

        # only test_task should have been returned
        found_tasks = self.check_scheduled_tasks_response(scheduled_tasks)
        self.assertEqual(len(found_tasks), 1)
        self.assertEqual(found_tasks[0]["action"], "test_task")

        # filter via max_timestamp
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks?max_timestamp=0",
            content={},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        scheduled_tasks = channel.json_body["scheduled_tasks"]
        found_tasks = self.check_scheduled_tasks_response(scheduled_tasks)

        # none should have been returned
        self.assertEqual(len(found_tasks), 0)

        # filter via resource id
        channel = self.make_request(
            "GET",
            "/_synapse/admin/v1/scheduled_tasks?resource_id=failed_task",
            content={},
            access_token=self.admin_user_tok,
        )
        self.assertEqual(200, channel.code, msg=channel.json_body)
        scheduled_tasks = channel.json_body["scheduled_tasks"]
        found_tasks = self.check_scheduled_tasks_response(scheduled_tasks)

        # only the task with the matching resource id should have been returned
        self.assertEqual(len(found_tasks), 1)
        self.assertEqual(found_tasks[0]["resource_id"], "failed_task")
|
||||
@@ -20,7 +20,9 @@
|
||||
#
|
||||
|
||||
|
||||
import base64
|
||||
import json
|
||||
from hashlib import sha256
|
||||
from typing import Any, ContextManager, Dict, List, Optional, Tuple
|
||||
from unittest.mock import Mock, patch
|
||||
from urllib.parse import parse_qs
|
||||
@@ -154,10 +156,23 @@ class FakeOidcServer:
|
||||
json_payload = json.dumps(payload)
|
||||
return jws.serialize_compact(protected, json_payload, self._key).decode("utf-8")
|
||||
|
||||
def generate_id_token(self, grant: FakeAuthorizationGrant) -> str:
|
||||
def generate_id_token(
|
||||
self, grant: FakeAuthorizationGrant, access_token: str
|
||||
) -> str:
|
||||
# Generate a hash of the access token for the optional
|
||||
# `at_hash` field in an ID Token.
|
||||
#
|
||||
# 3.1.3.6. ID Token, https://openid.net/specs/openid-connect-core-1_0.html#CodeIDToken
|
||||
at_hash = (
|
||||
base64.urlsafe_b64encode(sha256(access_token.encode("ascii")).digest()[:16])
|
||||
.rstrip(b"=")
|
||||
.decode("ascii")
|
||||
)
|
||||
|
||||
now = int(self._clock.time())
|
||||
id_token = {
|
||||
**grant.userinfo,
|
||||
"at_hash": at_hash,
|
||||
"iss": self.issuer,
|
||||
"aud": grant.client_id,
|
||||
"iat": now,
|
||||
@@ -243,7 +258,7 @@ class FakeOidcServer:
|
||||
}
|
||||
|
||||
if "openid" in grant.scope:
|
||||
token["id_token"] = self.generate_id_token(grant)
|
||||
token["id_token"] = self.generate_id_token(grant, access_token)
|
||||
|
||||
return dict(token)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user