1
0

Compare commits

...

8 Commits

Author SHA1 Message Date
Hugh Nimmo-Smith d9058a9182 state_after WIP 2024-10-25 16:14:22 +01:00
Hugh Nimmo-Smith 96425d4071 Test cases for sync of state from DAG branches 2024-10-25 16:13:46 +01:00
dependabot[bot] 69e9b75373 Bump types-setuptools from 75.1.0.20241014 to 75.2.0.20241019 (#17856)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:44:12 +01:00
dependabot[bot] 5d0514f29b Bump serde_json from 1.0.128 to 1.0.132 (#17857)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:43:40 +01:00
dependabot[bot] 4e5410fdae Bump types-psycopg2 from 2.9.21.20240819 to 2.9.21.20241019 (#17855)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:42:38 +01:00
dependabot[bot] 12d65a6778 Bump cryptography from 43.0.1 to 43.0.3 (#17853)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:40:58 +01:00
dependabot[bot] 1006c12eb2 Bump anyhow from 1.0.89 to 1.0.90 (#17858)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-10-25 10:35:37 +01:00
Andrew Morgan 57efc8c03e Add media tests for a CMYK JPEG image (#17786) 2024-10-23 18:26:01 +01:00
9 changed files with 493 additions and 63 deletions
Generated
+4 -4
View File
@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.89"
version = "1.0.90"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
checksum = "37bf3594c4c988a53154954629820791dde498571819ae4ca50ca811e060cc95"
[[package]]
name = "arc-swap"
@@ -505,9 +505,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.128"
version = "1.0.132"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03"
dependencies = [
"itoa",
"memchr",
+1
View File
@@ -0,0 +1 @@
Add a test for downloading and thumbnailing a CMYK JPEG.
Generated
+35 -35
View File
@@ -360,38 +360,38 @@ files = [
[[package]]
name = "cryptography"
version = "43.0.1"
version = "43.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"},
{file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"},
{file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"},
{file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"},
{file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"},
{file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"},
{file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"},
{file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"},
{file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"},
{file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"},
{file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"},
{file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"},
{file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"},
{file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
{file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
{file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
{file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
{file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
{file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
{file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
{file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
{file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
{file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
{file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
]
[package.dependencies]
@@ -404,7 +404,7 @@ nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -2783,13 +2783,13 @@ files = [
[[package]]
name = "types-psycopg2"
version = "2.9.21.20240819"
version = "2.9.21.20241019"
description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-psycopg2-2.9.21.20240819.tar.gz", hash = "sha256:4ed6b47464d6374fa64e5e3b234cea0f710e72123a4596d67ab50b7415a84666"},
{file = "types_psycopg2-2.9.21.20240819-py3-none-any.whl", hash = "sha256:c9192311c27d7ad561eef705f1b2df1074f2cdcf445a98a6a2fcaaaad43278cf"},
{file = "types-psycopg2-2.9.21.20241019.tar.gz", hash = "sha256:bca89b988d2ebd19bcd08b177d22a877ea8b841decb10ed130afcf39404612fa"},
{file = "types_psycopg2-2.9.21.20241019-py3-none-any.whl", hash = "sha256:44d091e67732d16a941baae48cd7b53bf91911bc36888652447cf1ef0c1fb3f6"},
]
[[package]]
@@ -2834,13 +2834,13 @@ urllib3 = ">=2"
[[package]]
name = "types-setuptools"
version = "75.1.0.20241014"
version = "75.2.0.20241019"
description = "Typing stubs for setuptools"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-setuptools-75.1.0.20241014.tar.gz", hash = "sha256:29b0560a8d4b4a91174be085847002c69abfcb048e20b33fc663005aedf56804"},
{file = "types_setuptools-75.1.0.20241014-py3-none-any.whl", hash = "sha256:caab58366741fb99673d0138b6e2d760717f154cfb981b74fea5e8de40f0b703"},
{file = "types-setuptools-75.2.0.20241019.tar.gz", hash = "sha256:86ea31b5f6df2c6b8f2dc8ae3f72b213607f62549b6fa2ed5866e5299f968694"},
{file = "types_setuptools-75.2.0.20241019-py3-none-any.whl", hash = "sha256:2e48ff3acd4919471e80d5e3f049cce5c177e108d5d36d2d4cee3fa4d4104258"},
]
[[package]]
+51 -21
View File
@@ -171,6 +171,7 @@ class JoinedSyncResult:
room_id: str
timeline: TimelineBatch
state: StateMap[EventBase]
state_after: StateMap[EventBase]
ephemeral: List[JsonDict]
account_data: List[JsonDict]
unread_notifications: JsonDict
@@ -194,6 +195,7 @@ class ArchivedSyncResult:
room_id: str
timeline: TimelineBatch
state: StateMap[EventBase]
state_after: StateMap[EventBase]
account_data: List[JsonDict]
def __bool__(self) -> bool:
@@ -1141,7 +1143,7 @@ class SyncHandler:
since_token: Optional[StreamToken],
end_token: StreamToken,
full_state: bool,
) -> MutableStateMap[EventBase]:
) -> Tuple[MutableStateMap[EventBase], MutableStateMap[EventBase]]:
"""Works out the difference in state between the end of the previous sync and
the start of the timeline.
@@ -1157,7 +1159,7 @@ class SyncHandler:
`lazy_load_members` still applies when `full_state` is `True`.
Returns:
The state to return in the sync response for the room.
The `state` and `state_after` to return in the sync response for the room.
Clients will overlay this onto the state at the end of the previous sync to
arrive at the state at the start of the timeline.
@@ -1224,11 +1226,15 @@ class SyncHandler:
# sync's timeline and the start of the current sync's timeline.
# See the docstring above for details.
state_ids: StateMap[str]
state_after_ids: StateMap[str]
# We need to know whether the state we fetch may be partial, so check
# whether the room is partial stated *before* fetching it.
is_partial_state_room = await self.store.is_partial_state_room(room_id)
if full_state:
state_ids = await self._compute_state_delta_for_full_sync(
[
state_ids,
state_after_ids,
] = await self._compute_state_delta_for_full_sync(
room_id,
sync_config.user,
batch,
@@ -1242,7 +1248,10 @@ class SyncHandler:
# is indeed the case.
assert since_token is not None
state_ids = await self._compute_state_delta_for_incremental_sync(
[
state_ids,
state_after_ids,
] = await self._compute_state_delta_for_incremental_sync(
room_id,
batch,
since_token,
@@ -1258,6 +1267,7 @@ class SyncHandler:
assert members_to_fetch is not None
assert first_event_by_sender_map is not None
# TODO: would this need to take account of state_after_ids?
additional_state_ids = (
await self._find_missing_partial_state_memberships(
room_id, members_to_fetch, first_event_by_sender_map, state_ids
@@ -1304,14 +1314,26 @@ class SyncHandler:
state: Dict[str, EventBase] = {}
if state_ids:
state = await self.store.get_events(list(state_ids.values()))
state_after: Dict[str, EventBase] = {}
if state_after_ids:
state_after = await self.store.get_events(list(state_after_ids.values()))
return {
(e.type, e.state_key): e
for e in await sync_config.filter_collection.filter_room_state(
list(state.values())
)
if e.type != EventTypes.Aliases # until MSC2261 or alternative solution
}
return [
{
(e.type, e.state_key): e
for e in await sync_config.filter_collection.filter_room_state(
list(state.values())
)
if e.type != EventTypes.Aliases # until MSC2261 or alternative solution
},
{
(e.type, e.state_key): e
for e in await sync_config.filter_collection.filter_room_state(
list(state_after.values())
)
if e.type != EventTypes.Aliases # until MSC2261 or alternative solution
},
]
async def _compute_state_delta_for_full_sync(
self,
@@ -1321,7 +1343,7 @@ class SyncHandler:
end_token: StreamToken,
members_to_fetch: Optional[Set[str]],
timeline_state: StateMap[str],
) -> StateMap[str]:
) -> Tuple[StateMap[str], StateMap[str]]:
"""Calculate the state events to be included in a full sync response.
As with `_compute_state_delta_for_incremental_sync`, the result will include
@@ -1341,7 +1363,7 @@ class SyncHandler:
Returns:
A map from (type, state_key) to event_id, for each event that we believe
should be included in the `state` part of the sync response.
should be included in the `state` and `state_after` part of the sync response.
"""
if members_to_fetch is not None:
# Lazy-loading of membership events is enabled.
@@ -1410,7 +1432,7 @@ class SyncHandler:
end_token: StreamToken,
members_to_fetch: Optional[Set[str]],
timeline_state: StateMap[str],
) -> StateMap[str]:
) -> Tuple[StateMap[str], StateMap[str]]:
"""Calculate the state events to be included in an incremental sync response.
If lazy-loading of membership events is enabled (as indicated by
@@ -1433,7 +1455,7 @@ class SyncHandler:
Returns:
A map from (type, state_key) to event_id, for each event that we believe
should be included in the `state` part of the sync response.
should be included in the `state` and `state_after` part of the sync response.
"""
if members_to_fetch is not None:
# Lazy-loading is enabled. Only return the state that is needed.
@@ -1491,7 +1513,7 @@ class SyncHandler:
await_full_state=False,
)
)
return state_ids
return [state_ids, {}]
if batch:
state_at_timeline_start = (
@@ -2860,7 +2882,7 @@ class SyncHandler:
return
if not room_builder.out_of_band:
state = await self.compute_state_delta(
[state, state_after] = await self.compute_state_delta(
room_id,
batch,
sync_config,
@@ -2871,6 +2893,7 @@ class SyncHandler:
else:
# An out of band room won't have any state changes.
state = {}
state_after = {}
summary: Optional[JsonDict] = {}
@@ -2905,6 +2928,7 @@ class SyncHandler:
room_id=room_id,
timeline=batch,
state=state,
state_after=state_after,
ephemeral=ephemeral,
account_data=account_data_events,
unread_notifications=unread_notifications,
@@ -2957,6 +2981,7 @@ class SyncHandler:
room_id=room_id,
timeline=batch,
state=state,
state_after=state_after,
account_data=account_data_events,
)
if archived_room_sync or always_include:
@@ -2982,8 +3007,8 @@ def _calculate_state(
timeline_end: StateMap[str],
previous_timeline_end: StateMap[str],
lazy_load_members: bool,
) -> StateMap[str]:
"""Works out what state to include in a sync response.
) -> Tuple[StateMap[str], StateMap[str]]:
"""Works out what state and state_after to include in a sync response.
Args:
timeline_contains: state in the timeline
@@ -3080,13 +3105,18 @@ def _calculate_state(
# even try; it is ether omitted or plonked into `state` as if it were at the start
# of the timeline, depending on what else is in the timeline.)
state_ids = (
state_before_ids = (
(timeline_end_ids | timeline_start_ids)
- previous_timeline_end_ids
- timeline_contains_ids
)
return {event_id_to_state_key[e]: e for e in state_ids}
state_after_ids = timeline_end_ids - timeline_contains_ids - timeline_start_ids
return [
{event_id_to_state_key[e]: e for e in state_before_ids},
{event_id_to_state_key[e]: e for e in state_after_ids},
]
@attr.s(slots=True, auto_attribs=True)
+6
View File
@@ -521,9 +521,11 @@ class SyncRestServlet(RestServlet):
The room, encoded in our response format
"""
state_dict = room.state
state_after_dict = room.state_after
timeline_events = room.timeline.events
state_events = state_dict.values()
state_after_events = state_after_dict.values()
for event in itertools.chain(state_events, timeline_events):
# We've had bug reports that events were coming down under the
@@ -545,6 +547,9 @@ class SyncRestServlet(RestServlet):
config=serialize_options,
bundle_aggregations=room.timeline.bundled_aggregations,
)
serialized_state_after = await self._event_serializer.serialize_events(
state_after_events, time_now, config=serialize_options
)
account_data = room.account_data
@@ -555,6 +560,7 @@ class SyncRestServlet(RestServlet):
"limited": room.timeline.limited,
},
"state": {"events": serialized_state},
"state_after": {"events": serialized_state_after},
"account_data": {"events": account_data},
}
+311 -1
View File
@@ -571,6 +571,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
[e.event_id for e in room_sync.state.values()],
[],
)
self.assertEqual(room_sync.state_after, {})
# Now send another event that points to S2, but not E3.
with self._patch_get_latest_events([s2_event]):
@@ -602,6 +603,7 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
[e.event_id for e in room_sync.state.values()],
[s2_event],
)
self.assertEqual(room_sync.state_after, {})
def test_state_includes_changes_on_ungappy_syncs(self) -> None:
"""Test `state` where the sync is not gappy.
@@ -710,10 +712,318 @@ class SyncTestCase(tests.unittest.HomeserverTestCase):
[e.event_id for e in room_sync.timeline.events],
[e4_event, e5_event],
)
def test_state_after_on_branches_winner_at_end_of_timeline(self) -> None:
r"""Test `state` and `state_after` where not all information is in `state` + `timeline`.
-----|---------- initial sync
|
unrelated state event
|
S1
-----|---------- incremental sync 1
↗ ↖
| S2
--|------|------ incremental sync 2
E3 E4
--|------|------ incremental sync 3
| |
\ ↗ S2 wins
E5
-----|---------- incremental sync 4
The "interesting" sync is sync 3. At the end of sync 3 the server doesn't know which branch will win.
"""
alice = self.register_user("alice", "password")
alice_tok = self.login(alice, "password")
alice_requester = create_requester(alice)
room_id = self.helper.create_room_as(alice, is_public=True, tok=alice_tok)
# Do an initial sync to get a known starting point.
initial_sync_result = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
)
)
# Send an unrelated state event which doesn't change across the branches
unrelated_state_event = self.helper.send_state(
room_id, "m.something.else", {"node": "S1"}, tok=alice_tok
)["event_id"]
# Send S1
s1_event = self.helper.send_state(
room_id, "m.call.member", {"node": "S1"}, tok=alice_tok
)["event_id"]
# Incremental sync 1
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=initial_sync_result.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.state.values()],
[e.event_id for e in room_sync.timeline.events],
[unrelated_state_event, s1_event],
)
self.assertEqual(room_sync.state_after, {})
# Send S2 -> S1
s2_event = self.helper.send_state(
room_id, "m.call.member", {"node": "S2"}, tok=alice_tok
)["event_id"]
# Incremental sync 2
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[s2_event],
)
self.assertEqual(room_sync.state_after, {})
# Send two regular events on different branches:
# E3 -> S1
# E4 -> S2
with self._patch_get_latest_events([s1_event]):
e3_event = self.helper.send(room_id, "E3", tok=alice_tok)["event_id"]
with self._patch_get_latest_events([s2_event]):
e4_event = self.helper.send(room_id, "E4", tok=alice_tok)["event_id"]
# Incremental sync 3
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(
[e.event_id for e in room_sync.state.values()],
[
s1_event
], # S1 is repeated because it is the state at the start of the timeline (before E3)
)
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[
e3_event,
e4_event,
], # We have two events from different timelines neither of which are state events
)
self.assertEqual(
[e.event_id for e in room_sync.state_after.values()],
[
s2_event
], # S2 is repeated because it is the state at the end of the timeline (after E4)
)
# Send E5 which resolves the branches
e5_event = self.helper.send(room_id, "E5", tok=alice_tok)["event_id"]
# Incremental sync 4
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[e5_event],
)
self.assertEqual(room_sync.state_after, {})
# FIXED: S2 is the winning state event and the last state event that the client saw!
def test_state_after_on_branches_winner_at_start_of_timeline(self) -> None:
r"""Test `state` and `state_after` where not all information is in `state` + `timeline`.
-----|---------- initial sync
|
S1
-----|---------- incremental sync 1
↗ ↖
| S2
--|------|------ incremental sync 2
S3 E4
--|------|------ incremental sync 3
| |
↖ / S3 wins
E5
-----|---------- incremental sync 4
The "interesting" sync is sync 3. At the end of sync 3 the server doesn't know which branch will win.
"""
alice = self.register_user("alice", "password")
alice_tok = self.login(alice, "password")
alice_requester = create_requester(alice)
room_id = self.helper.create_room_as(alice, is_public=True, tok=alice_tok)
# Do an initial sync to get a known starting point.
initial_sync_result = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
)
)
# Send an unrelated state event which doesn't change across the branches
unrelated_state_event = self.helper.send_state(
room_id, "m.something.else", {"node": "S1"}, tok=alice_tok
)["event_id"]
# Send S1
s1_event = self.helper.send_state(
room_id, "m.call.member", {"node": "S1"}, tok=alice_tok
)["event_id"]
# Incremental sync 1
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=initial_sync_result.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[unrelated_state_event, s1_event],
)
self.assertEqual(room_sync.state_after, {})
# Send S2 -> S1
s2_event = self.helper.send_state(
room_id, "m.call.member", {"node": "S2"}, tok=alice_tok
)["event_id"]
# Incremental sync 2
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[s2_event],
)
self.assertEqual(room_sync.state_after, {})
# Send two events on different branches:
# S3 -> S1
# E4 -> S2
with self._patch_get_latest_events([s1_event]):
s3_event = self.helper.send_state(
room_id, "m.call.member", {"node": "S3"}, tok=alice_tok
)["event_id"]
with self._patch_get_latest_events([s2_event]):
e4_event = self.helper.send(room_id, "E4", tok=alice_tok)["event_id"]
# Incremental sync 3
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(room_sync.state, {})
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[
s3_event,
e4_event,
], # We have two events from different timelines
)
self.assertEqual(
[e.event_id for e in room_sync.state_after.values()],
[
s2_event
], # S2 is repeated because it is the state at the end of the timeline (after E4)
)
# Send E5 which resolves the branches with S3 winning
e5_event = self.helper.send(room_id, "E5", tok=alice_tok)["event_id"]
# Incremental sync 4
incremental_sync = self.get_success(
self.sync_handler.wait_for_sync_for_user(
alice_requester,
generate_sync_config(alice),
sync_version=SyncVersion.SYNC_V2,
request_key=generate_request_key(),
since_token=incremental_sync.next_batch,
)
)
room_sync = incremental_sync.joined[0]
self.assertEqual(room_sync.room_id, room_id)
self.assertEqual(
[e.event_id for e in room_sync.state.values()],
[s3_event], # S3 is the winning state event
)
self.assertEqual(
[e.event_id for e in room_sync.timeline.events],
[e5_event],
)
self.assertEqual(room_sync.state_after, {})
@parameterized.expand(
[
+63 -1
View File
@@ -60,7 +60,7 @@ from synapse.util import Clock
from tests import unittest
from tests.server import FakeChannel
from tests.test_utils import SMALL_PNG
from tests.test_utils import SMALL_CMYK_JPEG, SMALL_PNG
from tests.unittest import override_config
from tests.utils import default_config
@@ -187,6 +187,68 @@ small_png_with_transparency = TestImage(
# different versions of Pillow.
)
small_cmyk_jpeg = TestImage(
SMALL_CMYK_JPEG,
b"image/jpeg",
b".jpeg",
# These values were sourced simply by seeing what the tests produced at
# the time of writing. If this changes, the tests will fail.
unhexlify(
b"ffd8ffe000104a46494600010100000100010000ffdb00430006"
b"040506050406060506070706080a100a0a09090a140e0f0c1017"
b"141818171416161a1d251f1a1b231c1616202c20232627292a29"
b"191f2d302d283025282928ffdb0043010707070a080a130a0a13"
b"281a161a28282828282828282828282828282828282828282828"
b"2828282828282828282828282828282828282828282828282828"
b"2828ffc00011080020002003012200021101031101ffc4001f00"
b"0001050101010101010000000000000000010203040506070809"
b"0a0bffc400b5100002010303020403050504040000017d010203"
b"00041105122131410613516107227114328191a1082342b1c115"
b"52d1f02433627282090a161718191a25262728292a3435363738"
b"393a434445464748494a535455565758595a636465666768696a"
b"737475767778797a838485868788898a92939495969798999aa2"
b"a3a4a5a6a7a8a9aab2b3b4b5b6b7b8b9bac2c3c4c5c6c7c8c9ca"
b"d2d3d4d5d6d7d8d9dae1e2e3e4e5e6e7e8e9eaf1f2f3f4f5f6f7"
b"f8f9faffc4001f01000301010101010101010100000000000001"
b"02030405060708090a0bffc400b5110002010204040304070504"
b"0400010277000102031104052131061241510761711322328108"
b"144291a1b1c109233352f0156272d10a162434e125f11718191a"
b"262728292a35363738393a434445464748494a53545556575859"
b"5a636465666768696a737475767778797a82838485868788898a"
b"92939495969798999aa2a3a4a5a6a7a8a9aab2b3b4b5b6b7b8b9"
b"bac2c3c4c5c6c7c8c9cad2d3d4d5d6d7d8d9dae2e3e4e5e6e7e8"
b"e9eaf2f3f4f5f6f7f8f9faffda000c03010002110311003f00fa"
b"a68a28a0028a28a0028a28a0028a28a00fffd9"
),
unhexlify(
b"ffd8ffe000104a46494600010100000100010000ffdb00430006"
b"040506050406060506070706080a100a0a09090a140e0f0c1017"
b"141818171416161a1d251f1a1b231c1616202c20232627292a29"
b"191f2d302d283025282928ffdb0043010707070a080a130a0a13"
b"281a161a28282828282828282828282828282828282828282828"
b"2828282828282828282828282828282828282828282828282828"
b"2828ffc00011080001000103012200021101031101ffc4001f00"
b"0001050101010101010000000000000000010203040506070809"
b"0a0bffc400b5100002010303020403050504040000017d010203"
b"00041105122131410613516107227114328191a1082342b1c115"
b"52d1f02433627282090a161718191a25262728292a3435363738"
b"393a434445464748494a535455565758595a636465666768696a"
b"737475767778797a838485868788898a92939495969798999aa2"
b"a3a4a5a6a7a8a9aab2b3b4b5b6b7b8b9bac2c3c4c5c6c7c8c9ca"
b"d2d3d4d5d6d7d8d9dae1e2e3e4e5e6e7e8e9eaf1f2f3f4f5f6f7"
b"f8f9faffc4001f01000301010101010101010100000000000001"
b"02030405060708090a0bffc400b5110002010204040304070504"
b"0400010277000102031104052131061241510761711322328108"
b"144291a1b1c109233352f0156272d10a162434e125f11718191a"
b"262728292a35363738393a434445464748494a53545556575859"
b"5a636465666768696a737475767778797a82838485868788898a"
b"92939495969798999aa2a3a4a5a6a7a8a9aab2b3b4b5b6b7b8b9"
b"bac2c3c4c5c6c7c8c9cad2d3d4d5d6d7d8d9dae2e3e4e5e6e7e8"
b"e9eaf2f3f4f5f6f7f8f9faffda000c03010002110311003f00fa"
b"a68a28a00fffd9"
),
)
small_lossless_webp = TestImage(
unhexlify(
b"524946461a000000574542505650384c0d0000002f0000001007" b"1011118888fe0700"
+3 -1
View File
@@ -66,6 +66,7 @@ from tests.media.test_media_storage import (
SVG,
TestImage,
empty_file,
small_cmyk_jpeg,
small_lossless_webp,
small_png,
small_png_with_transparency,
@@ -1916,6 +1917,7 @@ class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
test_images = [
small_png,
small_png_with_transparency,
small_cmyk_jpeg,
small_lossless_webp,
empty_file,
SVG,
@@ -2400,7 +2402,7 @@ class DownloadAndThumbnailTestCase(unittest.HomeserverTestCase):
if expected_body is not None:
self.assertEqual(
channel.result["body"], expected_body, channel.result["body"]
channel.result["body"], expected_body, channel.result["body"].hex()
)
else:
# ensure that the result is at least some valid image
+19
View File
@@ -23,6 +23,7 @@
Utilities for running the unit tests
"""
import base64
import json
import sys
import warnings
@@ -138,3 +139,21 @@ SMALL_PNG = unhexlify(
b"0000001f15c4890000000a49444154789c63000100000500010d"
b"0a2db40000000049454e44ae426082"
)
# A small CMYK-encoded JPEG image used in some tests.
#
# Generated with:
# img = PIL.Image.new('CMYK', (1, 1), (0, 0, 0, 0))
# img.save('minimal_cmyk.jpg', 'JPEG')
#
# Resolution: 1x1, MIME type: image/jpeg, Extension: jpeg, Size: 4 KiB
SMALL_CMYK_JPEG = base64.b64decode("""
/9j/7gAOQWRvYmUAZAAAAAAA/9sAQwAIBgYHBgUIBwcHCQkICgwUDQwLCww
ZEhMPFB0aHx4dGhwcICQuJyAiLCMcHCg3KSwwMTQ0NB8nOT04MjwuMzQy/8
AAFAgAAQABBEMRAE0RAFkRAEsRAP/EAB8AAAEFAQEBAQEBAAAAAAAAAAABA
gMEBQYHCAkKC//EALUQAAIBAwMCBAMFBQQEAAABfQECAwAEEQUSITFBBhNR
YQcicRQygZGhCCNCscEVUtHwJDNicoIJChYXGBkaJSYnKCkqNDU2Nzg5OkN
ERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6g4SFhoeIiYqSk5SVlp
eYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2drh4uPk5
ebn6Onq8fLz9PX29/j5+v/aAA4EQwBNAFkASwAAPwD3+vf69/r3+v/Z
""")