diff --git a/CHANGES.md b/CHANGES.md index 972b7ee3ed02..7b71e32e2310 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,54 @@ +# Synapse 1.113.0 (2024-08-13) + +No significant changes since 1.113.0rc1. + + + + +# Synapse 1.113.0rc1 (2024-08-06) + +### Features + +- Track which rooms have been sent to clients in the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17447](https://github.com/element-hq/synapse/issues/17447)) +- Add account data extension support to the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17477](https://github.com/element-hq/synapse/issues/17477)) +- Add receipts extension support to the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17489](https://github.com/element-hq/synapse/issues/17489)) +- Add typing notification extension support to the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint. ([\#17505](https://github.com/element-hq/synapse/issues/17505)) + +### Bugfixes + +- Update the experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint to handle invite/knock rooms when filtering. ([\#17450](https://github.com/element-hq/synapse/issues/17450)) +- Fix a bug introduced in v1.110.0 which caused `/keys/query` to return incomplete results, leading to high network activity and CPU usage on Matrix clients. ([\#17499](https://github.com/element-hq/synapse/issues/17499)) + +### Improved Documentation + +- Update the [`allowed_local_3pids`](https://element-hq.github.io/synapse/v1.112/usage/configuration/config_documentation.html#allowed_local_3pids) config option's msisdn address to a working example. ([\#17476](https://github.com/element-hq/synapse/issues/17476)) + +### Internal Changes + +- Change sliding sync to use its own token format in preparation for storing per-connection state. ([\#17452](https://github.com/element-hq/synapse/issues/17452)) +- Ensure we don't send down a negative `bump_stamp` in the experimental sliding sync endpoint. ([\#17478](https://github.com/element-hq/synapse/issues/17478)) +- Do not send empty room entries down the experimental sliding sync endpoint. ([\#17479](https://github.com/element-hq/synapse/issues/17479)) +- Refactor Sliding Sync tests to better utilize `SlidingSyncBase`. ([\#17481](https://github.com/element-hq/synapse/issues/17481), [\#17482](https://github.com/element-hq/synapse/issues/17482)) +- Add some OpenTracing tags and logging to the experimental sliding sync implementation. ([\#17501](https://github.com/element-hq/synapse/issues/17501)) +- Split and move Sliding Sync tests to keep test file sizes sane. ([\#17504](https://github.com/element-hq/synapse/issues/17504)) +- Update the `limited` field description in the Sliding Sync response to accurately describe what it represents. ([\#17507](https://github.com/element-hq/synapse/issues/17507)) +- Make `timeline` assertions in Sliding Sync tests easier to understand. ([\#17511](https://github.com/element-hq/synapse/issues/17511)) +- Reset the sliding sync connection if we don't recognize the per-connection state position. ([\#17529](https://github.com/element-hq/synapse/issues/17529)) + + + +### Updates to locked dependencies + +* Bump bcrypt from 4.1.3 to 4.2.0. 
([\#17495](https://github.com/element-hq/synapse/issues/17495)) +* Bump black from 24.4.2 to 24.8.0. ([\#17522](https://github.com/element-hq/synapse/issues/17522)) +* Bump phonenumbers from 8.13.39 to 8.13.42. ([\#17521](https://github.com/element-hq/synapse/issues/17521)) +* Bump ruff from 0.5.4 to 0.5.5. ([\#17494](https://github.com/element-hq/synapse/issues/17494)) +* Bump serde_json from 1.0.120 to 1.0.121. ([\#17493](https://github.com/element-hq/synapse/issues/17493)) +* Bump serde_json from 1.0.121 to 1.0.122. ([\#17525](https://github.com/element-hq/synapse/issues/17525)) +* Bump towncrier from 23.11.0 to 24.7.1. ([\#17523](https://github.com/element-hq/synapse/issues/17523)) +* Bump types-pyopenssl from 24.1.0.20240425 to 24.1.0.20240722. ([\#17496](https://github.com/element-hq/synapse/issues/17496)) +* Bump types-setuptools from 70.1.0.20240627 to 71.1.0.20240726. ([\#17497](https://github.com/element-hq/synapse/issues/17497)) + # Synapse 1.112.0 (2024-07-30) This security release is to update our locked dependency on Twisted to 24.7.0rc1, which includes a security fix for [CVE-2024-41671 / GHSA-c8m8-j448-xjx7: Disordered HTTP pipeline response in twisted.web, again](https://github.com/twisted/twisted/security/advisories/GHSA-c8m8-j448-xjx7). @@ -81,6 +132,27 @@ The same security fix can be found in the full release of 1.112.0. * Bump ulid from 1.1.2 to 1.1.3. ([\#17442](https://github.com/element-hq/synapse/issues/17442)) * Bump zipp from 3.15.0 to 3.19.1. ([\#17427](https://github.com/element-hq/synapse/issues/17427)) + +# Synapse 1.111.1 (2024-07-30) + +This security release is to update our locked dependency on Twisted to 24.7.0rc1, which includes a security fix for [CVE-2024-41671 / GHSA-c8m8-j448-xjx7: Disordered HTTP pipeline response in twisted.web, again](https://github.com/twisted/twisted/security/advisories/GHSA-c8m8-j448-xjx7). + +This issue means that, if multiple HTTP requests are pipelined in the same TCP connection, Synapse can send responses to the wrong HTTP request. +If a reverse proxy was configured to use HTTP pipelining, this could result in responses being sent to the wrong user, severely harming confidentiality. + +With that said, despite being a high-severity issue, **we consider it unlikely that Synapse installations will be affected**. +The use of HTTP pipelining in this fashion would cause worse performance for clients (request-response latencies would be increased as users' responses would be artificially blocked behind other users' slow requests). Further, Nginx and HAProxy, two common reverse proxies, do not appear to support configuring their upstreams to use HTTP pipelining and thus would not be affected. For both of these reasons, we consider it unlikely that a Synapse deployment would be set up in such a configuration. + +Despite that, we cannot rule out that some installations may exist with this unusual setup and so we are releasing this security update today. + +**pip users:** Note that by default, upgrading Synapse using pip will not automatically upgrade Twisted. **Please manually install the new version of Twisted** using `pip install Twisted==24.7.0rc1`. Note also that even the `--upgrade-strategy=eager` flag to `pip install -U matrix-synapse` will not upgrade Twisted to a patched version because it is only a release candidate at this time. + + +### Internal Changes + +- Upgrade locked dependency on Twisted to 24.7.0rc1. 
([\#17502](https://github.com/element-hq/synapse/issues/17502)) + + # Synapse 1.111.0 (2024-07-16) No significant changes since 1.111.0rc2. diff --git a/Cargo.lock b/Cargo.lock index e9adfcbdc358..d50ce87d17d0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -505,11 +505,12 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.120" +version = "1.0.122" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da" dependencies = [ "itoa", + "memchr", "ryu", "serde", ] diff --git a/debian/changelog b/debian/changelog index 873fde44a8f4..2692ab621d85 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,15 @@ +matrix-synapse-py3 (1.113.0) stable; urgency=medium + + * New Synapse release 1.113.0. + + -- Synapse Packaging team Tue, 13 Aug 2024 14:36:56 +0100 + +matrix-synapse-py3 (1.113.0~rc1) stable; urgency=medium + + * New Synapse release 1.113.0rc1. + + -- Synapse Packaging team Tue, 06 Aug 2024 12:23:23 +0100 + matrix-synapse-py3 (1.112.0) stable; urgency=medium * New Synapse release 1.112.0. @@ -10,6 +22,12 @@ matrix-synapse-py3 (1.112.0~rc1) stable; urgency=medium -- Synapse Packaging team Tue, 23 Jul 2024 08:58:55 -0600 +matrix-synapse-py3 (1.111.1) stable; urgency=medium + + * New Synapse release 1.111.1. + + -- Synapse Packaging team Tue, 30 Jul 2024 16:13:52 +0100 + matrix-synapse-py3 (1.111.0) stable; urgency=medium * New Synapse release 1.111.0. diff --git a/debian/templates b/debian/templates index cab05715d0d6..7bfd3c2e9f10 100644 --- a/debian/templates +++ b/debian/templates @@ -5,7 +5,7 @@ _Description: Name of the server: servers via federation. This is normally the public hostname of the server running synapse, but can be different if you set up delegation. Please refer to the delegation documentation in this case: - https://github.com/element-hq/synapse/blob/master/docs/delegate.md. + https://element-hq.github.io/synapse/latest/delegate.html. Template: matrix-synapse/report-stats Type: boolean diff --git a/docs/usage/configuration/config_documentation.md b/docs/usage/configuration/config_documentation.md index 649f4f71c7df..40f64be85615 100644 --- a/docs/usage/configuration/config_documentation.md +++ b/docs/usage/configuration/config_documentation.md @@ -2386,7 +2386,7 @@ enable_registration_without_verification: true --- ### `registrations_require_3pid` -If this is set, users must provide all of the specified types of 3PID when registering an account. +If this is set, users must provide all of the specified types of [3PID](https://spec.matrix.org/latest/appendices/#3pid-types) when registering an account. Note that [`enable_registration`](#enable_registration) must also be set to allow account registration. @@ -2411,6 +2411,9 @@ disable_msisdn_registration: true Mandate that users are only allowed to associate certain formats of 3PIDs with accounts on this server, as specified by the `medium` and `pattern` sub-options. +`pattern` is a [Perl-like regular expression](https://docs.python.org/3/library/re.html#module-re). + +More information about 3PIDs, allowed `medium` types and their `address` syntax can be found [in the Matrix spec](https://spec.matrix.org/latest/appendices/#3pid-types). 
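The `pattern` note and the msisdn fix in this hunk are easy to verify by hand: per the linked spec appendices, msisdn 3PID addresses are stored in MSISDN format, i.e. the E.164 number without the leading `+`, which is why the old documented pattern `'\+44'` could never match. A minimal sanity check, assuming the pattern is applied with Python `re.match`-style semantics (anchored at the start of the address) — an illustration only, not Synapse's actual validation code:

```python
import re

# A UK number as it would appear as a stored msisdn 3PID address:
# E.164 digits in MSISDN format, i.e. without the leading "+".
address = "441234567890"

# The old documented pattern required a literal "+", so it never matched.
assert re.match(r"\+44", address) is None

# The corrected pattern from this diff matches the digits-only form and
# anchors both ends so it cannot match a longer, unrelated number.
assert re.match(r"^44\d{10}$", address) is not None
```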
Example configuration: ```yaml @@ -2420,7 +2423,7 @@ allowed_local_3pids: - medium: email pattern: '^[^@]+@vector\.im$' - medium: msisdn - pattern: '\+44' + pattern: '^44\d{10}$' ``` --- ### `enable_3pid_lookup` diff --git a/poetry.lock b/poetry.lock index 735993098344..278bd6cb6e8a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -67,38 +67,38 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "bcrypt" -version = "4.1.3" +version = "4.2.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, - {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, - {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, - {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, - {file = 
"bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, - {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, - {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, - {file = "bcrypt-4.1.3.tar.gz", hash = "sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = 
"bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, ] [package.extras] @@ -107,33 +107,33 @@ typecheck = ["mypy"] [[package]] name = "black" -version = "24.4.2" +version = "24.8.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, + {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, + {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, + {file = 
"black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, + {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, + {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, + {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, + {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, + {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, + {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, + {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, + {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, + {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, + {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, + {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, + {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, + {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, + {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, + {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, + {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, + {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, + {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, + {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, ] [package.dependencies] @@ -1516,13 +1516,13 @@ files = [ [[package]] name = "phonenumbers" -version = "8.13.39" +version = "8.13.42" description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
optional = false python-versions = "*" files = [ - {file = "phonenumbers-8.13.39-py2.py3-none-any.whl", hash = "sha256:3ad2d086fa71e7eef409001b9195ac54bebb0c6e3e752209b558ca192c9229a0"}, - {file = "phonenumbers-8.13.39.tar.gz", hash = "sha256:db7ca4970d206b2056231105300753b1a5b229f43416f8c2b3010e63fbb68d77"}, + {file = "phonenumbers-8.13.42-py2.py3-none-any.whl", hash = "sha256:18acc22ee03116d27b26e990f53806a1770a3e05f05e1620bc09ad187f889456"}, + {file = "phonenumbers-8.13.42.tar.gz", hash = "sha256:7137904f2db3b991701e853174ce8e1cb8f540b8bfdf27617540de04c0b7bed5"}, ] [[package]] @@ -2361,29 +2361,29 @@ files = [ [[package]] name = "ruff" -version = "0.5.4" +version = "0.5.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.4-py3-none-linux_armv6l.whl", hash = "sha256:82acef724fc639699b4d3177ed5cc14c2a5aacd92edd578a9e846d5b5ec18ddf"}, - {file = "ruff-0.5.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:da62e87637c8838b325e65beee485f71eb36202ce8e3cdbc24b9fcb8b99a37be"}, - {file = "ruff-0.5.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e98ad088edfe2f3b85a925ee96da652028f093d6b9b56b76fc242d8abb8e2059"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c55efbecc3152d614cfe6c2247a3054cfe358cefbf794f8c79c8575456efe19"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9b85eaa1f653abd0a70603b8b7008d9e00c9fa1bbd0bf40dad3f0c0bdd06793"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf497a47751be8c883059c4613ba2f50dd06ec672692de2811f039432875278"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:09c14ed6a72af9ccc8d2e313d7acf7037f0faff43cde4b507e66f14e812e37f7"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:628f6b8f97b8bad2490240aa84f3e68f390e13fabc9af5c0d3b96b485921cd60"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3520a00c0563d7a7a7c324ad7e2cde2355733dafa9592c671fb2e9e3cd8194c1"}, - {file = "ruff-0.5.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93789f14ca2244fb91ed481456f6d0bb8af1f75a330e133b67d08f06ad85b516"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:029454e2824eafa25b9df46882f7f7844d36fd8ce51c1b7f6d97e2615a57bbcc"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9492320eed573a13a0bc09a2957f17aa733fff9ce5bf00e66e6d4a88ec33813f"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6e1f62a92c645e2919b65c02e79d1f61e78a58eddaebca6c23659e7c7cb4ac7"}, - {file = "ruff-0.5.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:768fa9208df2bec4b2ce61dbc7c2ddd6b1be9fb48f1f8d3b78b3332c7d71c1ff"}, - {file = "ruff-0.5.4-py3-none-win32.whl", hash = "sha256:e1e7393e9c56128e870b233c82ceb42164966f25b30f68acbb24ed69ce9c3a4e"}, - {file = "ruff-0.5.4-py3-none-win_amd64.whl", hash = "sha256:58b54459221fd3f661a7329f177f091eb35cf7a603f01d9eb3eb11cc348d38c4"}, - {file = "ruff-0.5.4-py3-none-win_arm64.whl", hash = "sha256:bd53da65f1085fb5b307c38fd3c0829e76acf7b2a912d8d79cadcdb4875c1eb7"}, - {file = "ruff-0.5.4.tar.gz", hash = "sha256:2795726d5f71c4f4e70653273d1c23a8182f07dd8e48c12de5d867bfb7557eed"}, + {file = "ruff-0.5.5-py3-none-linux_armv6l.whl", hash = 
"sha256:605d589ec35d1da9213a9d4d7e7a9c761d90bba78fc8790d1c5e65026c1b9eaf"}, + {file = "ruff-0.5.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00817603822a3e42b80f7c3298c8269e09f889ee94640cd1fc7f9329788d7bf8"}, + {file = "ruff-0.5.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:187a60f555e9f865a2ff2c6984b9afeffa7158ba6e1eab56cb830404c942b0f3"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe26fc46fa8c6e0ae3f47ddccfbb136253c831c3289bba044befe68f467bfb16"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ad25dd9c5faac95c8e9efb13e15803cd8bbf7f4600645a60ffe17c73f60779b"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f70737c157d7edf749bcb952d13854e8f745cec695a01bdc6e29c29c288fc36e"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cfd7de17cef6ab559e9f5ab859f0d3296393bc78f69030967ca4d87a541b97a0"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09b43e02f76ac0145f86a08e045e2ea452066f7ba064fd6b0cdccb486f7c3e7"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0b856cb19c60cd40198be5d8d4b556228e3dcd545b4f423d1ad812bfdca5884"}, + {file = "ruff-0.5.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3687d002f911e8a5faf977e619a034d159a8373514a587249cc00f211c67a091"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ac9dc814e510436e30d0ba535f435a7f3dc97f895f844f5b3f347ec8c228a523"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:af9bdf6c389b5add40d89b201425b531e0a5cceb3cfdcc69f04d3d531c6be74f"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d40a8533ed545390ef8315b8e25c4bb85739b90bd0f3fe1280a29ae364cc55d8"}, + {file = "ruff-0.5.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cab904683bf9e2ecbbe9ff235bfe056f0eba754d0168ad5407832928d579e7ab"}, + {file = "ruff-0.5.5-py3-none-win32.whl", hash = "sha256:696f18463b47a94575db635ebb4c178188645636f05e934fdf361b74edf1bb2d"}, + {file = "ruff-0.5.5-py3-none-win_amd64.whl", hash = "sha256:50f36d77f52d4c9c2f1361ccbfbd09099a1b2ea5d2b2222c586ab08885cf3445"}, + {file = "ruff-0.5.5-py3-none-win_arm64.whl", hash = "sha256:3191317d967af701f1b73a31ed5788795936e423b7acce82a2b63e26eb3e89d6"}, + {file = "ruff-0.5.5.tar.gz", hash = "sha256:cc5516bdb4858d972fbc31d246bdb390eab8df1a26e2353be2dbc0c2d7f5421a"}, ] [[package]] @@ -2649,24 +2649,24 @@ files = [ [[package]] name = "towncrier" -version = "23.11.0" +version = "24.7.1" description = "Building newsfiles for your project." 
optional = false python-versions = ">=3.8" files = [ - {file = "towncrier-23.11.0-py3-none-any.whl", hash = "sha256:2e519ca619426d189e3c98c99558fe8be50c9ced13ea1fc20a4a353a95d2ded7"}, - {file = "towncrier-23.11.0.tar.gz", hash = "sha256:13937c247e3f8ae20ac44d895cf5f96a60ad46cfdcc1671759530d7837d9ee5d"}, + {file = "towncrier-24.7.1-py3-none-any.whl", hash = "sha256:685e2a94335b5dc47537b4d3b449a25b18571ea85b07dcf6e8df31ba40f692dd"}, + {file = "towncrier-24.7.1.tar.gz", hash = "sha256:57a057faedabcadf1a62f6f9bad726ae566c1f31a411338ddb8316993f583b3d"}, ] [package.dependencies] click = "*" +importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} importlib-resources = {version = ">=5", markers = "python_version < \"3.10\""} -incremental = "*" jinja2 = "*" tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] -dev = ["furo", "packaging", "sphinx (>=5)", "twisted"] +dev = ["furo (>=2024.05.06)", "nox", "packaging", "sphinx (>=5)", "twisted"] [[package]] name = "treq" @@ -2875,13 +2875,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "24.1.0.20240425" +version = "24.1.0.20240722" description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" files = [ - {file = "types-pyOpenSSL-24.1.0.20240425.tar.gz", hash = "sha256:0a7e82626c1983dc8dc59292bf20654a51c3c3881bcbb9b337c1da6e32f0204e"}, - {file = "types_pyOpenSSL-24.1.0.20240425-py3-none-any.whl", hash = "sha256:f51a156835555dd2a1f025621e8c4fbe7493470331afeef96884d1d29bf3a473"}, + {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, + {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, ] [package.dependencies] @@ -2915,13 +2915,13 @@ urllib3 = ">=2" [[package]] name = "types-setuptools" -version = "70.1.0.20240627" +version = "71.1.0.20240726" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-70.1.0.20240627.tar.gz", hash = "sha256:385907a47b5cf302b928ce07953cd91147d5de6f3da604c31905fdf0ec309e83"}, - {file = "types_setuptools-70.1.0.20240627-py3-none-any.whl", hash = "sha256:c7bdf05cd0a8b66868b4774c7b3c079d01ae025d8c9562bfc8bf2ff44d263c9c"}, + {file = "types-setuptools-71.1.0.20240726.tar.gz", hash = "sha256:85ba28e9461bb1be86ebba4db0f1c2408f2b11115b1966334ea9dc464e29303e"}, + {file = "types_setuptools-71.1.0.20240726-py3-none-any.whl", hash = "sha256:a7775376f36e0ff09bcad236bf265777590a66b11623e48c20bfc30f1444ea36"}, ] [[package]] @@ -3196,4 +3196,4 @@ user-search = ["pyicu"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "e65fbd044230964cae8810c84289bcf0bc43b27532ea5a5ef8843eab4f6514af" +content-hash = "c165cdc1f6612c9f1b5bfd8063c23e2d595d717dd8ac1a468519e902be2cdf93" diff --git a/pyproject.toml b/pyproject.toml index 521b2793906c..82369f90527c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust" [tool.poetry] name = "matrix-synapse" -version = "1.112.0" +version = "1.113.0" description = "Homeserver for the Matrix decentralised comms protocol" authors = ["Matrix.org Team and Contributors "] license = "AGPL-3.0-or-later" @@ -201,8 +201,8 @@ netaddr = ">=0.7.18" # add a lower bound to the Jinja2 dependency. Jinja2 = ">=3.0" bleach = ">=1.4.3" -# We use `Self`, which were added in `typing-extensions` 4.0. 
-typing-extensions = ">=4.0" +# We use `assert_never`, which was added in `typing-extensions` 4.1. +typing-extensions = ">=4.1" # We enforce that we have a `cryptography` version that bundles an `openssl` # with the latest security patches. cryptography = ">=3.4.7" @@ -322,7 +322,7 @@ all = [ # This helps prevent merge conflicts when running a batch of dependabot updates. isort = ">=5.10.1" black = ">=22.7.0" -ruff = "0.5.4" +ruff = "0.5.5" # Type checking only works with the pydantic.v1 compat module from pydantic v2 pydantic = "^2" diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 85001d967651..7dcb1e01fda4 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -225,6 +225,11 @@ class EventContentFields: # This is deprecated in MSC2175. ROOM_CREATOR: Final = "creator" + # The version of the room for `m.room.create` events. + ROOM_VERSION: Final = "room_version" + + ROOM_NAME: Final = "name" + # Used in m.room.guest_access events. GUEST_ACCESS: Final = "guest_access" @@ -237,6 +242,9 @@ class EventContentFields: # an unspecced field added to to-device messages to identify them uniquely-ish TO_DEVICE_MSGID: Final = "org.matrix.msgid" + # `m.room.encryption` algorithm field + ENCRYPTION_ALGORITHM: Final = "algorithm" + class EventUnsignedContentFields: """Fields found inside the 'unsigned' data on events""" diff --git a/synapse/api/errors.py b/synapse/api/errors.py index dd4a1ae70639..99fc7eab54b7 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -128,6 +128,10 @@ class Codes(str, Enum): # MSC2677 DUPLICATE_ANNOTATION = "M_DUPLICATE_ANNOTATION" + # MSC3575: we are telling the client they need to reset their sliding sync + # connection. + UNKNOWN_POS = "M_UNKNOWN_POS" + class CodeMessageException(RuntimeError): """An exception with integer code and message string attributes, and optional headers. @@ -847,3 +851,17 @@ def __init__(self) -> None: msg=PartialStateConflictError.message(), errcode=Codes.UNKNOWN, ) + + +class SlidingSyncUnknownPosition(SynapseError): + """An error that Synapse can return to signal to the client to expire their + sliding sync connection (i.e. send a new request without a `?pos=` + param). + """ + + def __init__(self) -> None: + super().__init__( + HTTPStatus.BAD_REQUEST, + msg="Unknown position", + errcode=Codes.UNKNOWN_POS, + ) diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 36e0f47e5141..2e56b671f06f 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -554,3 +554,22 @@ def relation_from_event(event: EventBase) -> Optional[_EventRelation]: aggregation_key = None return _EventRelation(parent_id, rel_type, aggregation_key) + + +@attr.s(slots=True, frozen=True, auto_attribs=True) +class StrippedStateEvent: + """ + A stripped down state event. Usually used for remote invite/knocks so the user can + make an informed decision on whether they want to join. + + Attributes: + type: Event `type` + state_key: Event `state_key` + sender: Event `sender` + content: Event `content` + """ + + type: str + state_key: str + sender: str + content: Dict[str, Any] diff --git a/synapse/events/utils.py b/synapse/events/utils.py index f937fd469803..54f94add4dcd 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -49,7 +49,7 @@ from synapse.api.room_versions import RoomVersion from synapse.types import JsonDict, Requester -from . import EventBase, make_event_from_dict +from . 
import EventBase, StrippedStateEvent, make_event_from_dict if TYPE_CHECKING: from synapse.handlers.relations import BundledAggregations @@ -854,3 +854,30 @@ def strip_event(event: EventBase) -> JsonDict: "content": event.content, "sender": event.sender, } + + +def parse_stripped_state_event(raw_stripped_event: Any) -> Optional[StrippedStateEvent]: + """ + Given a raw value from an event's `unsigned` field, attempt to parse it into a + `StrippedStateEvent`. + """ + if isinstance(raw_stripped_event, dict): + # All of these fields are required + type = raw_stripped_event.get("type") + state_key = raw_stripped_event.get("state_key") + sender = raw_stripped_event.get("sender") + content = raw_stripped_event.get("content") + if ( + isinstance(type, str) + and isinstance(state_key, str) + and isinstance(sender, str) + and isinstance(content, dict) + ): + return StrippedStateEvent( + type=type, + state_key=state_key, + sender=sender, + content=content, + ) + + return None diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 668cec513b6e..f78e66ad0a13 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -291,13 +291,20 @@ async def _query(destination: str) -> None: # Only try and fetch keys for destinations that are not marked as # down. - filtered_destinations = await filter_destinations_by_retry_limiter( - remote_queries_not_in_cache.keys(), - self.clock, - self.store, - # Let's give an arbitrary grace period for those hosts that are - # only recently down - retry_due_within_ms=60 * 1000, + unfiltered_destinations = remote_queries_not_in_cache.keys() + filtered_destinations = set( + await filter_destinations_by_retry_limiter( + unfiltered_destinations, + self.clock, + self.store, + # Let's give an arbitrary grace period for those hosts that are + # only recently down + retry_due_within_ms=60 * 1000, + ) + ) + failures.update( + (dest, _NOT_READY_FOR_RETRY_FAILURE) + for dest in (unfiltered_destinations - filtered_destinations) ) await concurrently_execute( @@ -1641,6 +1648,9 @@ def _check_device_signature( raise SynapseError(400, "Invalid signature", Codes.INVALID_SIGNATURE) +_NOT_READY_FOR_RETRY_FAILURE = {"status": 503, "message": "Not ready for retry"} + + def _exception_to_failure(e: Exception) -> JsonDict: if isinstance(e, SynapseError): return {"status": e.code, "errcode": e.errcode, "message": str(e)} @@ -1649,7 +1659,7 @@ def _exception_to_failure(e: Exception) -> JsonDict: return {"status": e.code, "message": str(e)} if isinstance(e, NotRetryingDestination): - return {"status": 503, "message": "Not ready for retry"} + return _NOT_READY_FOR_RETRY_FAILURE # include ConnectionRefused and other errors # diff --git a/synapse/handlers/receipts.py b/synapse/handlers/receipts.py index 8674a8fcdda0..c776654d12cd 100644 --- a/synapse/handlers/receipts.py +++ b/synapse/handlers/receipts.py @@ -286,8 +286,14 @@ async def get_new_events( room_ids: Iterable[str], is_guest: bool, explicit_room_id: Optional[str] = None, + to_key: Optional[MultiWriterStreamToken] = None, ) -> Tuple[List[JsonMapping], MultiWriterStreamToken]: - to_key = self.get_current_key() + """ + Find read receipts for given rooms (> `from_token` and <= `to_token`) + """ + + if to_key is None: + to_key = self.get_current_key() if from_key == to_key: return [], to_key diff --git a/synapse/handlers/sliding_sync.py b/synapse/handlers/sliding_sync.py index 554ab59bf3c2..193647134563 100644 --- a/synapse/handlers/sliding_sync.py +++ b/synapse/handlers/sliding_sync.py @@ -17,7 +17,9 @@ # 
[This file includes modifications made by New Vector Limited] # # +import enum import logging +from enum import Enum from itertools import chain from typing import ( TYPE_CHECKING, @@ -25,31 +27,50 @@ Dict, Final, List, + Literal, Mapping, Optional, Sequence, Set, Tuple, + Union, ) import attr from immutabledict import immutabledict - -from synapse.api.constants import AccountDataTypes, Direction, EventTypes, Membership -from synapse.events import EventBase -from synapse.events.utils import strip_event +from typing_extensions import assert_never + +from synapse.api.constants import ( + AccountDataTypes, + Direction, + EventContentFields, + EventTypes, + Membership, +) +from synapse.api.errors import SlidingSyncUnknownPosition +from synapse.events import EventBase, StrippedStateEvent +from synapse.events.utils import parse_stripped_state_event, strip_event from synapse.handlers.relations import BundledAggregations -from synapse.logging.opentracing import start_active_span, tag_args, trace +from synapse.logging.opentracing import log_kv, start_active_span, tag_args, trace from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary +from synapse.storage.databases.main.state import ( + ROOM_UNKNOWN_SENTINEL, + Sentinel as StateSentinel, +) from synapse.storage.databases.main.stream import CurrentStateDeltaMembership from synapse.storage.roommember import MemberSummary from synapse.types import ( DeviceListUpdates, JsonDict, + JsonMapping, + MultiWriterStreamToken, + MutableStateMap, PersistedEventPosition, Requester, RoomStreamToken, + SlidingSyncStreamToken, StateMap, + StrCollection, StreamKeyType, StreamToken, UserID, @@ -65,6 +86,12 @@ logger = logging.getLogger(__name__) +class Sentinel(enum.Enum): + # defining a sentinel in this way allows mypy to correctly handle the + # type of a dictionary lookup and subsequent type narrowing. + UNSET_SENTINEL = object() + + # The event types that clients should consider as new activity. DEFAULT_BUMP_EVENT_TYPES = { EventTypes.Create, @@ -356,13 +383,16 @@ def __init__(self, hs: "HomeServer"): self.event_sources = hs.get_event_sources() self.relations_handler = hs.get_relations_handler() self.device_handler = hs.get_device_handler() + self.push_rules_handler = hs.get_push_rules_handler() self.rooms_to_exclude_globally = hs.config.server.rooms_to_exclude_from_sync + self.connection_store = SlidingSyncConnectionStore() + async def wait_for_sync_for_user( self, requester: Requester, sync_config: SlidingSyncConfig, - from_token: Optional[StreamToken] = None, + from_token: Optional[SlidingSyncStreamToken] = None, timeout_ms: int = 0, ) -> SlidingSyncResult: """ @@ -393,7 +423,7 @@ async def wait_for_sync_for_user( # this returns false, it means we timed out waiting, and we should # just return an empty response. before_wait_ts = self.clock.time_msec() - if not await self.notifier.wait_for_stream_token(from_token): + if not await self.notifier.wait_for_stream_token(from_token.stream_token): logger.warning( "Timed out waiting for worker to catch up. 
Returning empty response" ) @@ -431,16 +461,17 @@ async def current_sync_callback( sync_config.user.to_string(), timeout_ms, current_sync_callback, - from_token=from_token, + from_token=from_token.stream_token, ) return result + @trace async def current_sync_for_user( self, sync_config: SlidingSyncConfig, to_token: StreamToken, - from_token: Optional[StreamToken] = None, + from_token: Optional[SlidingSyncStreamToken] = None, ) -> SlidingSyncResult: """ Generates the response body of a Sliding Sync result, represented as a @@ -461,6 +492,27 @@ async def current_sync_for_user( # See https://github.com/matrix-org/matrix-doc/issues/1144 raise NotImplementedError() + if from_token: + # Check that we recognize the connection position, if not tell the + # clients that they need to start again. + # + # If we don't do this and the client asks for the full range of + # rooms, we end up sending down all rooms and their state from + # scratch (which can be very slow). By expiring the connection we + # allow the client a chance to do an initial request with a smaller + # range of rooms to get them some results sooner but will end up + # taking the same amount of time (more with round-trips and + # re-processing) in the end to get everything again. + if not await self.connection_store.is_valid_token( + sync_config, from_token.connection_position + ): + raise SlidingSyncUnknownPosition() + + await self.connection_store.mark_token_seen( + sync_config=sync_config, + from_token=from_token, + ) + # Get all of the room IDs that the user should be able to see in the sync # response has_lists = sync_config.lists is not None and len(sync_config.lists) > 0 @@ -473,14 +525,13 @@ async def current_sync_for_user( await self.get_room_membership_for_user_at_to_token( user=sync_config.user, to_token=to_token, - from_token=from_token, + from_token=from_token.stream_token if from_token else None, ) ) # Assemble sliding window lists lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {} - # Keep track of the rooms that we're going to display and need to fetch more - # info about + # Keep track of the rooms that we can display and need to fetch more info about relevant_room_map: Dict[str, RoomSyncConfig] = {} if has_lists and sync_config.lists is not None: sync_room_map = await self.filter_rooms_relevant_for_sync( @@ -606,13 +657,60 @@ async def current_sync_for_user( # Fetch room data rooms: Dict[str, SlidingSyncResult.RoomResult] = {} + # Filter out rooms that haven't received updates and we've sent down + # previously. + # Keep track of the rooms that we're going to display and need to fetch more info about + relevant_rooms_to_send_map = relevant_room_map + if from_token: + rooms_should_send = set() + + # First we check if there are rooms that match a list/room + # subscription and have updates we need to send (i.e. either because + # we haven't sent the room down, or we have but there are missing + # updates). + for room_id in relevant_room_map: + status = await self.connection_store.have_sent_room( + sync_config, + from_token.connection_position, + room_id, + ) + if ( + # The room was never sent down before so the client needs to know + # about it regardless of any updates. + status.status == HaveSentRoomFlag.NEVER + # `PREVIOUSLY` literally means the "room was sent down before *AND* + # there are updates we haven't sent down" so we already know this + # room has updates. 
+ or status.status == HaveSentRoomFlag.PREVIOUSLY + ): + rooms_should_send.add(room_id) + elif status.status == HaveSentRoomFlag.LIVE: + # We know that we've sent all updates up until `from_token`, + # so we just need to check if there have been updates since + # then. + pass + else: + assert_never(status.status) + + # We only need to check for new events since any state changes + # will also come down as new events. + rooms_that_have_updates = self.store.get_rooms_that_might_have_updates( + relevant_room_map.keys(), from_token.stream_token.room_key + ) + rooms_should_send.update(rooms_that_have_updates) + relevant_rooms_to_send_map = { + room_id: room_sync_config + for room_id, room_sync_config in relevant_room_map.items() + if room_id in rooms_should_send + } + @trace @tag_args async def handle_room(room_id: str) -> None: room_sync_result = await self.get_room_sync_data( - user=sync_config.user, + sync_config=sync_config, room_id=room_id, - room_sync_config=relevant_room_map[room_id], + room_sync_config=relevant_rooms_to_send_map[room_id], room_membership_for_user_at_to_token=room_membership_for_user_map[ room_id ], @@ -620,19 +718,44 @@ async def handle_room(room_id: str) -> None: to_token=to_token, ) - rooms[room_id] = room_sync_result + # Filter out empty room results during incremental sync + if room_sync_result or not from_token: + rooms[room_id] = room_sync_result - with start_active_span("sliding_sync.generate_room_entries"): - await concurrently_execute(handle_room, relevant_room_map, 10) + if relevant_rooms_to_send_map: + with start_active_span("sliding_sync.generate_room_entries"): + await concurrently_execute(handle_room, relevant_rooms_to_send_map, 10) extensions = await self.get_extensions_response( sync_config=sync_config, + actual_lists=lists, + # We're purposely using `relevant_room_map` instead of + # `relevant_rooms_to_send_map` here. This needs to be all room_ids we could + # send regardless of whether they have an event update or not. The + # extensions care about more than just normal events in the rooms (like + # account data, read receipts, typing indicators, to-device messages, etc). + actual_room_ids=set(relevant_room_map.keys()), + actual_room_response_map=rooms, from_token=from_token, to_token=to_token, ) + if has_lists or has_room_subscriptions: + connection_position = await self.connection_store.record_rooms( + sync_config=sync_config, + from_token=from_token, + sent_room_ids=relevant_rooms_to_send_map.keys(), + # TODO: We need to calculate which rooms have had updates since the `from_token` but were not included in the `sent_room_ids` + unsent_room_ids=[], + ) + elif from_token: + connection_position = from_token.connection_position + else: + # Initial sync without a `from_token` starts at `0` + connection_position = 0 + return SlidingSyncResult( - next_pos=to_token, + next_pos=SlidingSyncStreamToken(to_token, connection_position), lists=lists, rooms=rooms, extensions=extensions, @@ -1086,6 +1209,266 @@ async def check_room_subscription_allowed_for_user( # return None + async def _bulk_get_stripped_state_for_rooms_from_sync_room_map( + self, + room_ids: StrCollection, + sync_room_map: Dict[str, _RoomMembershipForUser], + ) -> Dict[str, Optional[StateMap[StrippedStateEvent]]]: + """ + Fetch stripped state for a list of room IDs. Stripped state is only + applicable to invite/knock rooms. Other rooms will have `None` as their + stripped state. + + For invite rooms, we pull from `unsigned.invite_room_state`. 
+ For knock rooms, we pull from `unsigned.knock_room_state`. + + Args: + room_ids: Room IDs to fetch stripped state for + sync_room_map: Dictionary of room IDs to sort along with membership + information in the room at the time of `to_token`. + + Returns: + Mapping from room_id to mapping of (type, state_key) to stripped state + event. + """ + room_id_to_stripped_state_map: Dict[ + str, Optional[StateMap[StrippedStateEvent]] + ] = {} + + # Fetch what we haven't before + room_ids_to_fetch = [ + room_id + for room_id in room_ids + if room_id not in room_id_to_stripped_state_map + ] + + # Gather a list of event IDs we can grab stripped state from + invite_or_knock_event_ids: List[str] = [] + for room_id in room_ids_to_fetch: + if sync_room_map[room_id].membership in ( + Membership.INVITE, + Membership.KNOCK, + ): + event_id = sync_room_map[room_id].event_id + # If this is an invite/knock then there should be an event_id + assert event_id is not None + invite_or_knock_event_ids.append(event_id) + else: + room_id_to_stripped_state_map[room_id] = None + + invite_or_knock_events = await self.store.get_events(invite_or_knock_event_ids) + for invite_or_knock_event in invite_or_knock_events.values(): + room_id = invite_or_knock_event.room_id + membership = invite_or_knock_event.membership + + raw_stripped_state_events = None + if membership == Membership.INVITE: + invite_room_state = invite_or_knock_event.unsigned.get( + "invite_room_state" + ) + raw_stripped_state_events = invite_room_state + elif membership == Membership.KNOCK: + knock_room_state = invite_or_knock_event.unsigned.get( + "knock_room_state" + ) + raw_stripped_state_events = knock_room_state + else: + raise AssertionError( + f"Unexpected membership {membership} (this is a problem with Synapse itself)" + ) + + stripped_state_map: Optional[MutableStateMap[StrippedStateEvent]] = None + # Scrutinize unsigned things. `raw_stripped_state_events` should be a list + # of stripped events + if raw_stripped_state_events is not None: + stripped_state_map = {} + if isinstance(raw_stripped_state_events, list): + for raw_stripped_event in raw_stripped_state_events: + stripped_state_event = parse_stripped_state_event( + raw_stripped_event + ) + if stripped_state_event is not None: + stripped_state_map[ + ( + stripped_state_event.type, + stripped_state_event.state_key, + ) + ] = stripped_state_event + + room_id_to_stripped_state_map[room_id] = stripped_state_map + + return room_id_to_stripped_state_map + + async def _bulk_get_partial_current_state_content_for_rooms( + self, + content_type: Literal[ + # `content.type` from `EventTypes.Create`` + "room_type", + # `content.algorithm` from `EventTypes.RoomEncryption` + "room_encryption", + ], + room_ids: Set[str], + sync_room_map: Dict[str, _RoomMembershipForUser], + to_token: StreamToken, + room_id_to_stripped_state_map: Dict[ + str, Optional[StateMap[StrippedStateEvent]] + ], + ) -> Mapping[str, Union[Optional[str], StateSentinel]]: + """ + Get the given state event content for a list of rooms. First we check the + current state of the room, then fallback to stripped state if available, then + historical state. + + Args: + content_type: Which content to grab + room_ids: Room IDs to fetch the given content field for. + sync_room_map: Dictionary of room IDs to sort along with membership + information in the room at the time of `to_token`. + to_token: We filter based on the state of the room at this token + room_id_to_stripped_state_map: This does not need to be filled in before + calling this function. 
Mapping from room_id to mapping of (type, state_key) + to stripped state event. Modified in place when we fetch new rooms so we can + save work next time this function is called. + + Returns: + A mapping from room ID to the state event content if the room has + the given state event (event_type, ""), otherwise `None`. Rooms unknown to + this server will return `ROOM_UNKNOWN_SENTINEL`. + """ + room_id_to_content: Dict[str, Union[Optional[str], StateSentinel]] = {} + + # As a bulk shortcut, use the current state if the server is particpating in the + # room (meaning we have current state). Ideally, for leave/ban rooms, we would + # want the state at the time of the membership instead of current state to not + # leak anything but we consider the create/encryption stripped state events to + # not be a secret given they are often set at the start of the room and they are + # normally handed out on invite/knock. + # + # Be mindful to only use this for non-sensitive details. For example, even + # though the room name/avatar/topic are also stripped state, they seem a lot + # more senstive to leak the current state value of. + # + # Since this function is cached, we need to make a mutable copy via + # `dict(...)`. + event_type = "" + event_content_field = "" + if content_type == "room_type": + event_type = EventTypes.Create + event_content_field = EventContentFields.ROOM_TYPE + room_id_to_content = dict(await self.store.bulk_get_room_type(room_ids)) + elif content_type == "room_encryption": + event_type = EventTypes.RoomEncryption + event_content_field = EventContentFields.ENCRYPTION_ALGORITHM + room_id_to_content = dict( + await self.store.bulk_get_room_encryption(room_ids) + ) + else: + assert_never(content_type) + + room_ids_with_results = [ + room_id + for room_id, content_field in room_id_to_content.items() + if content_field is not ROOM_UNKNOWN_SENTINEL + ] + + # We might not have current room state for remote invite/knocks if we are + # the first person on our server to see the room. The best we can do is look + # in the optional stripped state from the invite/knock event. + room_ids_without_results = room_ids.difference( + chain( + room_ids_with_results, + [ + room_id + for room_id, stripped_state_map in room_id_to_stripped_state_map.items() + if stripped_state_map is not None + ], + ) + ) + room_id_to_stripped_state_map.update( + await self._bulk_get_stripped_state_for_rooms_from_sync_room_map( + room_ids_without_results, sync_room_map + ) + ) + + # Update our `room_id_to_content` map based on the stripped state + # (applies to invite/knock rooms) + rooms_ids_without_stripped_state: Set[str] = set() + for room_id in room_ids_without_results: + stripped_state_map = room_id_to_stripped_state_map.get( + room_id, Sentinel.UNSET_SENTINEL + ) + assert stripped_state_map is not Sentinel.UNSET_SENTINEL, ( + f"Stripped state left unset for room {room_id}. " + + "Make sure you're calling `_bulk_get_stripped_state_for_rooms_from_sync_room_map(...)` " + + "with that room_id. (this is a problem with Synapse itself)" + ) + + # If there is some stripped state, we assume the remote server passed *all* + # of the potential stripped state events for the room. 
+        # Update our `room_id_to_content` map based on the stripped state
+        # (applies to invite/knock rooms)
+        rooms_ids_without_stripped_state: Set[str] = set()
+        for room_id in room_ids_without_results:
+            stripped_state_map = room_id_to_stripped_state_map.get(
+                room_id, Sentinel.UNSET_SENTINEL
+            )
+            assert stripped_state_map is not Sentinel.UNSET_SENTINEL, (
+                f"Stripped state left unset for room {room_id}. "
+                + "Make sure you're calling `_bulk_get_stripped_state_for_rooms_from_sync_room_map(...)` "
+                + "with that room_id. (this is a problem with Synapse itself)"
+            )
+
+            # If there is some stripped state, we assume the remote server passed *all*
+            # of the potential stripped state events for the room.
+            if stripped_state_map is not None:
+                create_stripped_event = stripped_state_map.get((EventTypes.Create, ""))
+                stripped_event = stripped_state_map.get((event_type, ""))
+                # Sanity check that we at least have the create event
+                if create_stripped_event is not None:
+                    if stripped_event is not None:
+                        room_id_to_content[room_id] = stripped_event.content.get(
+                            event_content_field
+                        )
+                    else:
+                        # Didn't see the state event we're looking for in the stripped
+                        # state so we can assume the relevant content field is `None`.
+                        room_id_to_content[room_id] = None
+                else:
+                    rooms_ids_without_stripped_state.add(room_id)
+
+        # Last resort, we might not have current room state for rooms that the
+        # server has left (no one local is in the room) but we can look at the
+        # historical state.
+        #
+        # Update our `room_id_to_content` map based on the state at the time of
+        # the membership event.
+        for room_id in rooms_ids_without_stripped_state:
+            # TODO: It would be nice to look this up in a bulk way (N+1 queries)
+            #
+            # TODO: `get_state_at(...)` doesn't take into account the "current state".
+            room_state = await self.storage_controllers.state.get_state_at(
+                room_id=room_id,
+                stream_position=to_token.copy_and_replace(
+                    StreamKeyType.ROOM,
+                    sync_room_map[room_id].event_pos.to_room_stream_token(),
+                ),
+                state_filter=StateFilter.from_types(
+                    [
+                        (EventTypes.Create, ""),
+                        (event_type, ""),
+                    ]
+                ),
+                # Partially-stated rooms should have all state events except for
+                # remote membership events so we don't need to wait at all because
+                # we only want the create event and some non-member event.
+                await_full_state=False,
+            )
+            # We can use the create event as a canary to tell whether the server has
+            # seen the room before
+            create_event = room_state.get((EventTypes.Create, ""))
+            state_event = room_state.get((event_type, ""))
+
+            if create_event is None:
+                # Skip for unknown rooms
+                continue
+
+            if state_event is not None:
+                room_id_to_content[room_id] = state_event.content.get(
+                    event_content_field
+                )
+            else:
+                # Didn't see the state event we're looking for in the room state at
+                # the time of the membership event, so we can assume the relevant
+                # content field is `None`.
+                room_id_to_content[room_id] = None
+
+        return room_id_to_content
+
+    @trace
     async def filter_rooms(
         self,
         user: UserID,
@@ -1107,6 +1490,10 @@ async def filter_rooms(
             A filtered dictionary of room IDs along with membership information in the
             room at the time of `to_token`.
""" + room_id_to_stripped_state_map: Dict[ + str, Optional[StateMap[StrippedStateEvent]] + ] = {} + filtered_room_id_set = set(sync_room_map.keys()) # Filter for Direct-Message (DM) rooms @@ -1126,31 +1513,34 @@ async def filter_rooms( if not sync_room_map[room_id].is_dm } - if filters.spaces: + if filters.spaces is not None: raise NotImplementedError() # Filter for encrypted rooms if filters.is_encrypted is not None: + room_id_to_encryption = ( + await self._bulk_get_partial_current_state_content_for_rooms( + content_type="room_encryption", + room_ids=filtered_room_id_set, + to_token=to_token, + sync_room_map=sync_room_map, + room_id_to_stripped_state_map=room_id_to_stripped_state_map, + ) + ) + # Make a copy so we don't run into an error: `Set changed size during # iteration`, when we filter out and remove items for room_id in filtered_room_id_set.copy(): - state_at_to_token = await self.storage_controllers.state.get_state_at( - room_id, - to_token, - state_filter=StateFilter.from_types( - [(EventTypes.RoomEncryption, "")] - ), - # Partially-stated rooms should have all state events except for the - # membership events so we don't need to wait because we only care - # about retrieving the `EventTypes.RoomEncryption` state event here. - # Plus we don't want to block the whole sync waiting for this one - # room. - await_full_state=False, - ) - is_encrypted = state_at_to_token.get((EventTypes.RoomEncryption, "")) + encryption = room_id_to_encryption.get(room_id, ROOM_UNKNOWN_SENTINEL) + + # Just remove rooms if we can't determine their encryption status + if encryption is ROOM_UNKNOWN_SENTINEL: + filtered_room_id_set.remove(room_id) + continue # If we're looking for encrypted rooms, filter out rooms that are not # encrypted and vice versa + is_encrypted = encryption is not None if (filters.is_encrypted and not is_encrypted) or ( not filters.is_encrypted and is_encrypted ): @@ -1176,15 +1566,26 @@ async def filter_rooms( # provided in the list. `None` is a valid type for rooms which do not have a # room type. if filters.room_types is not None or filters.not_room_types is not None: - room_to_type = await self.store.bulk_get_room_type( - { - room_id - for room_id in filtered_room_id_set - # We only know the room types for joined rooms - if sync_room_map[room_id].membership == Membership.JOIN - } + room_id_to_type = ( + await self._bulk_get_partial_current_state_content_for_rooms( + content_type="room_type", + room_ids=filtered_room_id_set, + to_token=to_token, + sync_room_map=sync_room_map, + room_id_to_stripped_state_map=room_id_to_stripped_state_map, + ) ) - for room_id, room_type in room_to_type.items(): + + # Make a copy so we don't run into an error: `Set changed size during + # iteration`, when we filter out and remove items + for room_id in filtered_room_id_set.copy(): + room_type = room_id_to_type.get(room_id, ROOM_UNKNOWN_SENTINEL) + + # Just remove rooms if we can't determine their type + if room_type is ROOM_UNKNOWN_SENTINEL: + filtered_room_id_set.remove(room_id) + continue + if ( filters.room_types is not None and room_type not in filters.room_types @@ -1197,18 +1598,30 @@ async def filter_rooms( ): filtered_room_id_set.remove(room_id) - if filters.room_name_like: + if filters.room_name_like is not None: + # TODO: The room name is a bit more sensitive to leak than the + # create/encryption event. Maybe we should consider a better way to fetch + # historical state before implementing this. 
+ # + # room_id_to_create_content = await self._bulk_get_partial_current_state_content_for_rooms( + # content_type="room_name", + # room_ids=filtered_room_id_set, + # to_token=to_token, + # sync_room_map=sync_room_map, + # room_id_to_stripped_state_map=room_id_to_stripped_state_map, + # ) raise NotImplementedError() - if filters.tags: + if filters.tags is not None: raise NotImplementedError() - if filters.not_tags: + if filters.not_tags is not None: raise NotImplementedError() # Assemble a new sync room map but only with the `filtered_room_id_set` return {room_id: sync_room_map[room_id] for room_id in filtered_room_id_set} + @trace async def sort_rooms( self, sync_room_map: Dict[str, _RoomMembershipForUser], @@ -1283,14 +1696,17 @@ async def get_current_state_ids_at( in the room at the time of `to_token`. to_token: The point in the stream to sync up to. """ - room_state_ids: StateMap[str] + state_ids: StateMap[str] # People shouldn't see past their leave/ban event if room_membership_for_user_at_to_token.membership in ( Membership.LEAVE, Membership.BAN, ): - # TODO: `get_state_ids_at(...)` doesn't take into account the "current state" - room_state_ids = await self.storage_controllers.state.get_state_ids_at( + # TODO: `get_state_ids_at(...)` doesn't take into account the "current + # state". Maybe we need to use + # `get_forward_extremities_for_room_at_stream_ordering(...)` to "Fetch the + # current state at the time." + state_ids = await self.storage_controllers.state.get_state_ids_at( room_id, stream_position=to_token.copy_and_replace( StreamKeyType.ROOM, @@ -1309,7 +1725,7 @@ async def get_current_state_ids_at( ) # Otherwise, we can get the latest current state in the room else: - room_state_ids = await self.storage_controllers.state.get_current_state_ids( + state_ids = await self.storage_controllers.state.get_current_state_ids( room_id, state_filter, # Partially-stated rooms should have all state events except for @@ -1324,7 +1740,7 @@ async def get_current_state_ids_at( ) # TODO: Query `current_state_delta_stream` and reverse/rewind back to the `to_token` - return room_state_ids + return state_ids async def get_current_state_at( self, @@ -1344,17 +1760,17 @@ async def get_current_state_at( in the room at the time of `to_token`. to_token: The point in the stream to sync up to. """ - room_state_ids = await self.get_current_state_ids_at( + state_ids = await self.get_current_state_ids_at( room_id=room_id, room_membership_for_user_at_to_token=room_membership_for_user_at_to_token, state_filter=state_filter, to_token=to_token, ) - event_map = await self.store.get_events(list(room_state_ids.values())) + event_map = await self.store.get_events(list(state_ids.values())) state_map = {} - for key, event_id in room_state_ids.items(): + for key, event_id in state_ids.items(): event = event_map.get(event_id) if event: state_map[key] = event @@ -1363,11 +1779,11 @@ async def get_current_state_at( async def get_room_sync_data( self, - user: UserID, + sync_config: SlidingSyncConfig, room_id: str, room_sync_config: RoomSyncConfig, room_membership_for_user_at_to_token: _RoomMembershipForUser, - from_token: Optional[StreamToken], + from_token: Optional[SlidingSyncStreamToken], to_token: StreamToken, ) -> SlidingSyncResult.RoomResult: """ @@ -1385,6 +1801,41 @@ async def get_room_sync_data( from_token: The point in the stream to sync from. to_token: The point in the stream to sync up to. """ + user = sync_config.user + + # Determine whether we should limit the timeline to the token range. 
+ # + # We should return historical messages (before token range) in the + # following cases because we want clients to be able to show a basic + # screen of information: + # - Initial sync (because no `from_token` to limit us anyway) + # - When users `newly_joined` + # - For an incremental sync where we haven't sent it down this + # connection before + from_bound = None + initial = True + if from_token and not room_membership_for_user_at_to_token.newly_joined: + room_status = await self.connection_store.have_sent_room( + sync_config=sync_config, + connection_token=from_token.connection_position, + room_id=room_id, + ) + if room_status.status == HaveSentRoomFlag.LIVE: + from_bound = from_token.stream_token.room_key + initial = False + elif room_status.status == HaveSentRoomFlag.PREVIOUSLY: + assert room_status.last_token is not None + from_bound = room_status.last_token + initial = False + elif room_status.status == HaveSentRoomFlag.NEVER: + from_bound = None + initial = True + else: + assert_never(room_status.status) + + log_kv({"sliding_sync.room_status": room_status}) + + log_kv({"sliding_sync.from_bound": from_bound, "sliding_sync.initial": initial}) # Assemble the list of timeline events # @@ -1411,36 +1862,23 @@ async def get_room_sync_data( prev_batch_token = to_token # We're going to paginate backwards from the `to_token` - from_bound = to_token.room_key + to_bound = to_token.room_key # People shouldn't see past their leave/ban event if room_membership_for_user_at_to_token.membership in ( Membership.LEAVE, Membership.BAN, ): - from_bound = ( + to_bound = ( room_membership_for_user_at_to_token.event_pos.to_room_stream_token() ) - # Determine whether we should limit the timeline to the token range. - # - # We should return historical messages (before token range) in the - # following cases because we want clients to be able to show a basic - # screen of information: - # - Initial sync (because no `from_token` to limit us anyway) - # - When users `newly_joined` - # - TODO: For an incremental sync where we haven't sent it down this - # connection before - to_bound = ( - from_token.room_key - if from_token is not None - and not room_membership_for_user_at_to_token.newly_joined - else None - ) - timeline_events, new_room_key = await self.store.paginate_room_events( room_id=room_id, - from_key=from_bound, - to_key=to_bound, + # The bounds are reversed so we can paginate backwards + # (from newer to older events) starting at to_bound. + # This ensures we fill the `limit` with the newest events first, + from_key=to_bound, + to_key=from_bound, direction=Direction.BACKWARDS, # We add one so we can determine if there are enough events to saturate # the limit or not (see `limited`) @@ -1498,7 +1936,9 @@ async def get_room_sync_data( instance_name=timeline_event.internal_metadata.instance_name, stream=timeline_event.internal_metadata.stream_ordering, ) - if persisted_position.persisted_after(from_token.room_key): + if persisted_position.persisted_after( + from_token.stream_token.room_key + ): num_live += 1 else: # Since we're iterating over the timeline events in @@ -1555,12 +1995,6 @@ async def get_room_sync_data( # indicate to the client that a state reset happened. Perhaps we should indicate # this by setting `initial: True` and empty `required_state`. - # TODO: Since we can't determine whether we've already sent a room down this - # Sliding Sync connection before (we plan to add this optimization in the - # future), we're always returning the requested room state instead of - # updates. 
-        initial = True
-
         # Check whether the room has a name set
         name_state_ids = await self.get_current_state_ids_at(
             room_id=room_id,
@@ -1706,9 +2140,22 @@ async def get_room_sync_data(
                 to_token=to_token,
             )
         else:
-            # TODO: Once we can figure out if we've sent a room down this connection before,
-            # we can return updates instead of the full required state.
-            raise NotImplementedError()
+            assert from_bound is not None
+
+            # TODO: Limit the number of state events we're about to send down
+            # for the room; if it's too many, we should change this to an
+            # `initial=True`?
+            deltas = await self.store.get_current_state_deltas_for_room(
+                room_id=room_id,
+                from_token=from_bound,
+                to_token=to_token.room_key,
+            )
+            # TODO: Filter room state before fetching events
+            # TODO: Handle state resets where event_id is None
+            events = await self.store.get_events(
+                [d.event_id for d in deltas if d.event_id]
+            )
+            room_state = {(s.type, s.state_key): s for s in events.values()}
 
         required_room_state: StateMap[EventBase] = {}
         if required_state_filter != StateFilter.none():
@@ -1752,8 +2199,14 @@ async def get_room_sync_data(
         bump_stamp = room_membership_for_user_at_to_token.event_pos.stream
         # But if we found a bump event, use that instead
         if last_bump_event_result is not None:
-            _, bump_event_pos = last_bump_event_result
-            bump_stamp = bump_event_pos.stream
+            _, new_bump_event_pos = last_bump_event_result
+
+            # If we've just joined a remote room, then the last bump event may
+            # have been backfilled (and so have a negative stream ordering).
+            # These negative stream orderings can't sensibly be compared, so
+            # instead we use the membership event position.
+            if new_bump_event_pos.stream > 0:
+                bump_stamp = new_bump_event_pos.stream
 
         return SlidingSyncResult.RoomResult(
             name=room_name,
@@ -1782,16 +2235,25 @@ async def get_room_sync_data(
             highlight_count=0,
         )
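The `bump_stamp` fallback above can be shown in isolation; a small sketch with hypothetical inputs:

```python
from typing import Optional


def choose_bump_stamp(
    membership_event_stream: int, last_bump_event_stream: Optional[int]
) -> int:
    """Prefer the last bump event's stream position, falling back to the
    membership event when there is no bump event or when the bump event was
    backfilled (negative stream ordering), since negative orderings don't
    compare sensibly against live positions."""
    if last_bump_event_stream is not None and last_bump_event_stream > 0:
        return last_bump_event_stream
    return membership_event_stream


assert choose_bump_stamp(42, None) == 42  # no bump event at all
assert choose_bump_stamp(42, -7) == 42    # backfilled bump event
assert choose_bump_stamp(42, 99) == 99    # normal case
```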
""" @@ -1816,11 +2278,121 @@ async def get_extensions_response( from_token=from_token, ) + account_data_response = None + if sync_config.extensions.account_data is not None: + account_data_response = await self.get_account_data_extension_response( + sync_config=sync_config, + actual_lists=actual_lists, + actual_room_ids=actual_room_ids, + account_data_request=sync_config.extensions.account_data, + to_token=to_token, + from_token=from_token, + ) + + receipts_response = None + if sync_config.extensions.receipts is not None: + receipts_response = await self.get_receipts_extension_response( + sync_config=sync_config, + actual_lists=actual_lists, + actual_room_ids=actual_room_ids, + actual_room_response_map=actual_room_response_map, + receipts_request=sync_config.extensions.receipts, + to_token=to_token, + from_token=from_token, + ) + + typing_response = None + if sync_config.extensions.typing is not None: + typing_response = await self.get_typing_extension_response( + sync_config=sync_config, + actual_lists=actual_lists, + actual_room_ids=actual_room_ids, + actual_room_response_map=actual_room_response_map, + typing_request=sync_config.extensions.typing, + to_token=to_token, + from_token=from_token, + ) + return SlidingSyncResult.Extensions( to_device=to_device_response, e2ee=e2ee_response, + account_data=account_data_response, + receipts=receipts_response, + typing=typing_response, ) + def find_relevant_room_ids_for_extension( + self, + requested_lists: Optional[List[str]], + requested_room_ids: Optional[List[str]], + actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList], + actual_room_ids: Set[str], + ) -> Set[str]: + """ + Handle the reserved `lists`/`rooms` keys for extensions. Extensions should only + return results for rooms in the Sliding Sync response. This matches up the + requested rooms/lists with the actual lists/rooms in the Sliding Sync response. + + {"lists": []} // Do not process any lists. + {"lists": ["rooms", "dms"]} // Process only a subset of lists. + {"lists": ["*"]} // Process all lists defined in the Sliding Window API. (This is the default.) + + {"rooms": []} // Do not process any specific rooms. + {"rooms": ["!a:b", "!c:d"]} // Process only a subset of room subscriptions. + {"rooms": ["*"]} // Process all room subscriptions defined in the Room Subscription API. (This is the default.) + + Args: + requested_lists: The `lists` from the extension request. + requested_room_ids: The `rooms` from the extension request. + actual_lists: The actual lists from the Sliding Sync response. + actual_room_ids: The actual room subscriptions from the Sliding Sync request. + """ + + # We only want to include account data for rooms that are already in the sliding + # sync response AND that were requested in the account data request. 
 
+    def find_relevant_room_ids_for_extension(
+        self,
+        requested_lists: Optional[List[str]],
+        requested_room_ids: Optional[List[str]],
+        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_room_ids: Set[str],
+    ) -> Set[str]:
+        """
+        Handle the reserved `lists`/`rooms` keys for extensions. Extensions should only
+        return results for rooms in the Sliding Sync response. This matches up the
+        requested rooms/lists with the actual lists/rooms in the Sliding Sync response.
+
+        {"lists": []}                  // Do not process any lists.
+        {"lists": ["rooms", "dms"]}    // Process only a subset of lists.
+        {"lists": ["*"]}               // Process all lists defined in the Sliding Window API. (This is the default.)
+
+        {"rooms": []}                  // Do not process any specific rooms.
+        {"rooms": ["!a:b", "!c:d"]}    // Process only a subset of room subscriptions.
+        {"rooms": ["*"]}               // Process all room subscriptions defined in the Room Subscription API. (This is the default.)
+
+        Args:
+            requested_lists: The `lists` from the extension request.
+            requested_room_ids: The `rooms` from the extension request.
+            actual_lists: The actual lists from the Sliding Sync response.
+            actual_room_ids: The actual room subscriptions from the Sliding Sync request.
+        """
+
+        # We only want to include results for rooms that are already in the Sliding
+        # Sync response AND that were requested in the extension request.
+        relevant_room_ids: Set[str] = set()
+
+        # See what rooms from the room subscriptions we should get results for
+        if requested_room_ids is not None:
+            for room_id in requested_room_ids:
+                # A wildcard means we process all rooms from the room subscriptions
+                if room_id == "*":
+                    relevant_room_ids.update(actual_room_ids)
+                    break
+
+                if room_id in actual_room_ids:
+                    relevant_room_ids.add(room_id)
+
+        # See what rooms from the sliding window lists we should get results for
+        if requested_lists is not None:
+            for list_key in requested_lists:
+                # Just a type annotation because we share the variable name in
+                # multiple places
+                actual_list: Optional[SlidingSyncResult.SlidingWindowList] = None
+
+                # A wildcard means we process rooms from all lists
+                if list_key == "*":
+                    for actual_list in actual_lists.values():
+                        # We only expect a single SYNC operation for any list
+                        assert len(actual_list.ops) == 1
+                        sync_op = actual_list.ops[0]
+                        assert sync_op.op == OperationType.SYNC
+
+                        relevant_room_ids.update(sync_op.room_ids)
+
+                    break
+
+                actual_list = actual_lists.get(list_key)
+                if actual_list is not None:
+                    # We only expect a single SYNC operation for any list
+                    assert len(actual_list.ops) == 1
+                    sync_op = actual_list.ops[0]
+                    assert sync_op.op == OperationType.SYNC
+
+                    relevant_room_ids.update(sync_op.room_ids)
+
+        return relevant_room_ids
+
+    @trace
     async def get_to_device_extension_response(
         self,
         sync_config: SlidingSyncConfig,
@@ -1835,7 +2407,7 @@ async def get_to_device_extension_response(
             to_token: The point in the stream to sync up to.
         """
         user_id = sync_config.user.to_string()
-        device_id = sync_config.device_id
+        device_id = sync_config.requester.device_id
 
         # Skip if the extension is not enabled
         if not to_device_request.enabled:
@@ -1895,12 +2467,13 @@ async def get_to_device_extension_response(
             events=messages,
         )
 
+    @trace
     async def get_e2ee_extension_response(
         self,
         sync_config: SlidingSyncConfig,
         e2ee_request: SlidingSyncConfig.Extensions.E2eeExtension,
         to_token: StreamToken,
-        from_token: Optional[StreamToken],
+        from_token: Optional[SlidingSyncStreamToken],
     ) -> Optional[SlidingSyncResult.Extensions.E2eeExtension]:
         """Handle E2EE device extension (MSC3884)
 
@@ -1911,7 +2484,7 @@ async def get_e2ee_extension_response(
             from_token: The point in the stream to sync from.
""" user_id = sync_config.user.to_string() - device_id = sync_config.device_id + device_id = sync_config.requester.device_id # Skip if the extension is not enabled if not e2ee_request.enabled: @@ -1922,7 +2495,7 @@ async def get_e2ee_extension_response( # TODO: This should take into account the `from_token` and `to_token` device_list_updates = await self.device_handler.get_user_ids_changed( user_id=user_id, - from_token=from_token, + from_token=from_token.stream_token, ) device_one_time_keys_count: Mapping[str, int] = {} @@ -1944,3 +2517,484 @@ async def get_e2ee_extension_response( device_one_time_keys_count=device_one_time_keys_count, device_unused_fallback_key_types=device_unused_fallback_key_types, ) + + @trace + async def get_account_data_extension_response( + self, + sync_config: SlidingSyncConfig, + actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList], + actual_room_ids: Set[str], + account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension, + to_token: StreamToken, + from_token: Optional[SlidingSyncStreamToken], + ) -> Optional[SlidingSyncResult.Extensions.AccountDataExtension]: + """Handle Account Data extension (MSC3959) + + Args: + sync_config: Sync configuration + actual_lists: Sliding window API. A map of list key to list results in the + Sliding Sync response. + actual_room_ids: The actual room IDs in the the Sliding Sync response. + account_data_request: The account_data extension from the request + to_token: The point in the stream to sync up to. + from_token: The point in the stream to sync from. + """ + user_id = sync_config.user.to_string() + + # Skip if the extension is not enabled + if not account_data_request.enabled: + return None + + global_account_data_map: Mapping[str, JsonMapping] = {} + if from_token is not None: + # TODO: This should take into account the `from_token` and `to_token` + global_account_data_map = ( + await self.store.get_updated_global_account_data_for_user( + user_id, from_token.stream_token.account_data_key + ) + ) + + have_push_rules_changed = await self.store.have_push_rules_changed_for_user( + user_id, from_token.stream_token.push_rules_key + ) + if have_push_rules_changed: + global_account_data_map = dict(global_account_data_map) + # TODO: This should take into account the `from_token` and `to_token` + global_account_data_map[AccountDataTypes.PUSH_RULES] = ( + await self.push_rules_handler.push_rules_for_user(sync_config.user) + ) + else: + # TODO: This should take into account the `to_token` + all_global_account_data = await self.store.get_global_account_data_for_user( + user_id + ) + + global_account_data_map = dict(all_global_account_data) + # TODO: This should take into account the `to_token` + global_account_data_map[AccountDataTypes.PUSH_RULES] = ( + await self.push_rules_handler.push_rules_for_user(sync_config.user) + ) + + # Fetch room account data + account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] = {} + relevant_room_ids = self.find_relevant_room_ids_for_extension( + requested_lists=account_data_request.lists, + requested_room_ids=account_data_request.rooms, + actual_lists=actual_lists, + actual_room_ids=actual_room_ids, + ) + if len(relevant_room_ids) > 0: + if from_token is not None: + # TODO: This should take into account the `from_token` and `to_token` + account_data_by_room_map = ( + await self.store.get_updated_room_account_data_for_user( + user_id, from_token.stream_token.account_data_key + ) + ) + else: + # TODO: This should take into account the `to_token` + account_data_by_room_map = 
+
+    @trace
+    async def get_account_data_extension_response(
+        self,
+        sync_config: SlidingSyncConfig,
+        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_room_ids: Set[str],
+        account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension,
+        to_token: StreamToken,
+        from_token: Optional[SlidingSyncStreamToken],
+    ) -> Optional[SlidingSyncResult.Extensions.AccountDataExtension]:
+        """Handle Account Data extension (MSC3959)
+
+        Args:
+            sync_config: Sync configuration
+            actual_lists: Sliding window API. A map of list key to list results in the
+                Sliding Sync response.
+            actual_room_ids: The actual room IDs in the Sliding Sync response.
+            account_data_request: The account_data extension from the request
+            to_token: The point in the stream to sync up to.
+            from_token: The point in the stream to sync from.
+        """
+        user_id = sync_config.user.to_string()
+
+        # Skip if the extension is not enabled
+        if not account_data_request.enabled:
+            return None
+
+        global_account_data_map: Mapping[str, JsonMapping] = {}
+        if from_token is not None:
+            # TODO: This should take into account the `from_token` and `to_token`
+            global_account_data_map = (
+                await self.store.get_updated_global_account_data_for_user(
+                    user_id, from_token.stream_token.account_data_key
+                )
+            )
+
+            have_push_rules_changed = await self.store.have_push_rules_changed_for_user(
+                user_id, from_token.stream_token.push_rules_key
+            )
+            if have_push_rules_changed:
+                global_account_data_map = dict(global_account_data_map)
+                # TODO: This should take into account the `from_token` and `to_token`
+                global_account_data_map[AccountDataTypes.PUSH_RULES] = (
+                    await self.push_rules_handler.push_rules_for_user(sync_config.user)
+                )
+        else:
+            # TODO: This should take into account the `to_token`
+            all_global_account_data = await self.store.get_global_account_data_for_user(
+                user_id
+            )
+
+            global_account_data_map = dict(all_global_account_data)
+            # TODO: This should take into account the `to_token`
+            global_account_data_map[AccountDataTypes.PUSH_RULES] = (
+                await self.push_rules_handler.push_rules_for_user(sync_config.user)
+            )
+
+        # Fetch room account data
+        account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] = {}
+        relevant_room_ids = self.find_relevant_room_ids_for_extension(
+            requested_lists=account_data_request.lists,
+            requested_room_ids=account_data_request.rooms,
+            actual_lists=actual_lists,
+            actual_room_ids=actual_room_ids,
+        )
+        if len(relevant_room_ids) > 0:
+            if from_token is not None:
+                # TODO: This should take into account the `from_token` and `to_token`
+                account_data_by_room_map = (
+                    await self.store.get_updated_room_account_data_for_user(
+                        user_id, from_token.stream_token.account_data_key
+                    )
+                )
+            else:
+                # TODO: This should take into account the `to_token`
+                account_data_by_room_map = (
+                    await self.store.get_room_account_data_for_user(user_id)
+                )
+
+            # Filter down to the relevant rooms
+            account_data_by_room_map = {
+                room_id: account_data_map
+                for room_id, account_data_map in account_data_by_room_map.items()
+                if room_id in relevant_room_ids
+            }
+
+        return SlidingSyncResult.Extensions.AccountDataExtension(
+            global_account_data_map=global_account_data_map,
+            account_data_by_room_map=account_data_by_room_map,
+        )
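The receipts handler that follows trims `m.receipt` content for `initial: True` rooms down to events actually present in the timeline; a standalone sketch of that filtering (hypothetical helper, plain dicts):

```python
from typing import Dict, Iterable


def trim_receipt_content(
    content: Dict[str, dict], timeline_event_ids: Iterable[str]
) -> Dict[str, dict]:
    """Keep only receipt entries whose event ID is in the room's timeline."""
    relevant = set(timeline_event_ids)
    return {
        event_id: per_event
        for event_id, per_event in content.items()
        if event_id in relevant
    }


content = {
    "$e1": {"m.read": {"@u:example.org": {"ts": 1}}},
    "$e2": {"m.read": {"@v:example.org": {"ts": 2}}},
}
assert trim_receipt_content(content, ["$e2"]) == {
    "$e2": {"m.read": {"@v:example.org": {"ts": 2}}}
}
```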
+
+    async def get_receipts_extension_response(
+        self,
+        sync_config: SlidingSyncConfig,
+        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_room_ids: Set[str],
+        actual_room_response_map: Dict[str, SlidingSyncResult.RoomResult],
+        receipts_request: SlidingSyncConfig.Extensions.ReceiptsExtension,
+        to_token: StreamToken,
+        from_token: Optional[SlidingSyncStreamToken],
+    ) -> Optional[SlidingSyncResult.Extensions.ReceiptsExtension]:
+        """Handle Receipts extension (MSC3960)
+
+        Args:
+            sync_config: Sync configuration
+            actual_lists: Sliding window API. A map of list key to list results in the
+                Sliding Sync response.
+            actual_room_ids: The actual room IDs in the Sliding Sync response.
+            actual_room_response_map: A map of room ID to room results in the
+                Sliding Sync response.
+            receipts_request: The receipts extension from the request
+            to_token: The point in the stream to sync up to.
+            from_token: The point in the stream to sync from.
+        """
+        # Skip if the extension is not enabled
+        if not receipts_request.enabled:
+            return None
+
+        relevant_room_ids = self.find_relevant_room_ids_for_extension(
+            requested_lists=receipts_request.lists,
+            requested_room_ids=receipts_request.rooms,
+            actual_lists=actual_lists,
+            actual_room_ids=actual_room_ids,
+        )
+
+        room_id_to_receipt_map: Dict[str, JsonMapping] = {}
+        if len(relevant_room_ids) > 0:
+            # TODO: Take connection tracking into account so that when a room comes back
+            # into range we can send the receipts that were missed.
+            receipt_source = self.event_sources.sources.receipt
+            receipts, _ = await receipt_source.get_new_events(
+                user=sync_config.user,
+                from_key=(
+                    from_token.stream_token.receipt_key
+                    if from_token
+                    else MultiWriterStreamToken(stream=0)
+                ),
+                to_key=to_token.receipt_key,
+                # This is a dummy value and isn't used in the function
+                limit=0,
+                room_ids=relevant_room_ids,
+                is_guest=False,
+            )
+
+            for receipt in receipts:
+                # These fields should exist for every receipt
+                room_id = receipt["room_id"]
+                type = receipt["type"]
+                content = receipt["content"]
+
+                # For `initial: True` rooms, we only want to include receipts for events
+                # in the timeline.
+                room_result = actual_room_response_map.get(room_id)
+                if room_result is not None:
+                    if room_result.initial:
+                        # TODO: In the future, it would be good to fetch fewer receipts
+                        # out of the database in the first place but we would need to
+                        # add a new `event_id` index to `receipts_linearized`.
+                        relevant_event_ids = [
+                            event.event_id for event in room_result.timeline_events
+                        ]
+
+                        assert isinstance(content, dict)
+                        content = {
+                            event_id: content_value
+                            for event_id, content_value in content.items()
+                            if event_id in relevant_event_ids
+                        }
+
+                room_id_to_receipt_map[room_id] = {"type": type, "content": content}
+
+        return SlidingSyncResult.Extensions.ReceiptsExtension(
+            room_id_to_receipt_map=room_id_to_receipt_map,
+        )
+
+    async def get_typing_extension_response(
+        self,
+        sync_config: SlidingSyncConfig,
+        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_room_ids: Set[str],
+        actual_room_response_map: Dict[str, SlidingSyncResult.RoomResult],
+        typing_request: SlidingSyncConfig.Extensions.TypingExtension,
+        to_token: StreamToken,
+        from_token: Optional[SlidingSyncStreamToken],
+    ) -> Optional[SlidingSyncResult.Extensions.TypingExtension]:
+        """Handle Typing Notification extension (MSC3961)
+
+        Args:
+            sync_config: Sync configuration
+            actual_lists: Sliding window API. A map of list key to list results in the
+                Sliding Sync response.
+            actual_room_ids: The actual room IDs in the Sliding Sync response.
+            actual_room_response_map: A map of room ID to room results in the
+                Sliding Sync response.
+            typing_request: The typing extension from the request
+            to_token: The point in the stream to sync up to.
+            from_token: The point in the stream to sync from.
+        """
+        # Skip if the extension is not enabled
+        if not typing_request.enabled:
+            return None
+
+        relevant_room_ids = self.find_relevant_room_ids_for_extension(
+            requested_lists=typing_request.lists,
+            requested_room_ids=typing_request.rooms,
+            actual_lists=actual_lists,
+            actual_room_ids=actual_room_ids,
+        )
+
+        room_id_to_typing_map: Dict[str, JsonMapping] = {}
+        if len(relevant_room_ids) > 0:
+            # Note: We don't need to take connection tracking into account for typing
+            # notifications because they'll get anything that's still relevant and
+            # hasn't timed out when the room comes into range. We consider the gap
+            # where the room fell out of range as long enough for any typing
+            # notifications to have timed out (it's not worth the 30 seconds of data
+            # we may have missed).
+            typing_source = self.event_sources.sources.typing
+            typing_notifications, _ = await typing_source.get_new_events(
+                user=sync_config.user,
+                from_key=(from_token.stream_token.typing_key if from_token else 0),
+                to_key=to_token.typing_key,
+                # This is a dummy value and isn't used in the function
+                limit=0,
+                room_ids=relevant_room_ids,
+                is_guest=False,
+            )
+
+            for typing_notification in typing_notifications:
+                # These fields should exist for every typing notification
+                room_id = typing_notification["room_id"]
+                type = typing_notification["type"]
+                content = typing_notification["content"]
+
+                room_id_to_typing_map[room_id] = {"type": type, "content": content}
+
+        return SlidingSyncResult.Extensions.TypingExtension(
+            room_id_to_typing_map=room_id_to_typing_map,
+        )
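A toy walkthrough (hypothetical enum and values) of the per-room connection states defined by the classes that follow: rooms start `NEVER`, become `LIVE` once sent down, and drop to `PREVIOUSLY` when they fall out of range with unsent updates:

```python
from enum import Enum, auto


class Flag(Enum):
    NEVER = auto()
    PREVIOUSLY = auto()
    LIVE = auto()


# The only transitions the docstring below allows
ALLOWED_TRANSITIONS = {
    (Flag.NEVER, Flag.LIVE),       # first time the room is sent down
    (Flag.LIVE, Flag.PREVIOUSLY),  # room fell out of range with unsent updates
    (Flag.PREVIOUSLY, Flag.LIVE),  # room came back into range and caught up
}


def transition(current: Flag, new: Flag) -> Flag:
    if (current, new) not in ALLOWED_TRANSITIONS:
        raise ValueError(f"unexpected transition {current} -> {new}")
    return new


state = transition(Flag.NEVER, Flag.LIVE)
state = transition(state, Flag.PREVIOUSLY)
assert transition(state, Flag.LIVE) is Flag.LIVE
```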
+
+
+class HaveSentRoomFlag(Enum):
+    """Flag for whether we have sent the room down a sliding sync connection.
+
+    The valid state changes here are:
+        NEVER -> LIVE
+        LIVE -> PREVIOUSLY
+        PREVIOUSLY -> LIVE
+    """
+
+    # The room has never been sent down (or we have forgotten we have sent it
+    # down).
+    NEVER = 1
+
+    # We have previously sent the room down, but there are updates that we
+    # haven't sent down.
+    PREVIOUSLY = 2
+
+    # We have sent the room down and the client has received all updates.
+    LIVE = 3
+
+
+@attr.s(auto_attribs=True, slots=True, frozen=True)
+class HaveSentRoom:
+    """Whether we have sent the room down a sliding sync connection.
+
+    Attributes:
+        status: Whether we have or haven't sent the room down
+        last_token: If the flag is `PREVIOUSLY` then this is non-null and
+            contains the last stream token of the last updates we sent down
+            the room, i.e. we still need to send everything since then to the
+            client.
+    """
+
+    status: HaveSentRoomFlag
+    last_token: Optional[RoomStreamToken]
+
+    @staticmethod
+    def previously(last_token: RoomStreamToken) -> "HaveSentRoom":
+        """Constructor for `PREVIOUSLY` flag."""
+        return HaveSentRoom(HaveSentRoomFlag.PREVIOUSLY, last_token)
+
+
+HAVE_SENT_ROOM_NEVER = HaveSentRoom(HaveSentRoomFlag.NEVER, None)
+HAVE_SENT_ROOM_LIVE = HaveSentRoom(HaveSentRoomFlag.LIVE, None)
+ """ + + # `(user_id, conn_id)` -> `token` -> `room_id` -> `HaveSentRoom` + _connections: Dict[Tuple[str, str], Dict[int, Dict[str, HaveSentRoom]]] = ( + attr.Factory(dict) + ) + + async def is_valid_token( + self, sync_config: SlidingSyncConfig, connection_token: int + ) -> bool: + """Return whether the connection token is valid/recognized""" + if connection_token == 0: + return True + + conn_key = self._get_connection_key(sync_config) + return connection_token in self._connections.get(conn_key, {}) + + async def have_sent_room( + self, sync_config: SlidingSyncConfig, connection_token: int, room_id: str + ) -> HaveSentRoom: + """For the given user_id/conn_id/token, return whether we have + previously sent the room down + """ + + conn_key = self._get_connection_key(sync_config) + sync_statuses = self._connections.setdefault(conn_key, {}) + room_status = sync_statuses.get(connection_token, {}).get( + room_id, HAVE_SENT_ROOM_NEVER + ) + + return room_status + + async def record_rooms( + self, + sync_config: SlidingSyncConfig, + from_token: Optional[SlidingSyncStreamToken], + *, + sent_room_ids: StrCollection, + unsent_room_ids: StrCollection, + ) -> int: + """Record which rooms we have/haven't sent down in a new response + + Attributes: + sync_config + from_token: The since token from the request, if any + sent_room_ids: The set of room IDs that we have sent down as + part of this request (only needs to be ones we didn't + previously sent down). + unsent_room_ids: The set of room IDs that have had updates + since the `from_token`, but which were not included in + this request + """ + prev_connection_token = 0 + if from_token is not None: + prev_connection_token = from_token.connection_position + + # If there are no changes then this is a noop. + if not sent_room_ids and not unsent_room_ids: + return prev_connection_token + + conn_key = self._get_connection_key(sync_config) + sync_statuses = self._connections.setdefault(conn_key, {}) + + # Generate a new token, removing any existing entries in that token + # (which can happen if requests get resent). + new_store_token = prev_connection_token + 1 + sync_statuses.pop(new_store_token, None) + + # Copy over and update the room mappings. + new_room_statuses = dict(sync_statuses.get(prev_connection_token, {})) + + # Whether we have updated the `new_room_statuses`, if we don't by the + # end we can treat this as a noop. + have_updated = False + for room_id in sent_room_ids: + new_room_statuses[room_id] = HAVE_SENT_ROOM_LIVE + have_updated = True + + # Whether we add/update the entries for unsent rooms depends on the + # existing entry: + # - LIVE: We have previously sent down everything up to + # `last_room_token, so we update the entry to be `PREVIOUSLY` with + # `last_room_token`. + # - PREVIOUSLY: We have previously sent down everything up to *a* + # given token, so we don't need to update the entry. + # - NEVER: We have never previously sent down the room, and we haven't + # sent anything down this time either so we leave it as NEVER. + + # Work out the new state for unsent rooms that were `LIVE`. 
+
+    async def record_rooms(
+        self,
+        sync_config: SlidingSyncConfig,
+        from_token: Optional[SlidingSyncStreamToken],
+        *,
+        sent_room_ids: StrCollection,
+        unsent_room_ids: StrCollection,
+    ) -> int:
+        """Record which rooms we have/haven't sent down in a new response
+
+        Args:
+            sync_config
+            from_token: The since token from the request, if any
+            sent_room_ids: The set of room IDs that we have sent down as
+                part of this request (only needs to be ones we didn't
+                previously send down).
+            unsent_room_ids: The set of room IDs that have had updates
+                since the `from_token`, but which were not included in
+                this request
+        """
+        prev_connection_token = 0
+        if from_token is not None:
+            prev_connection_token = from_token.connection_position
+
+        # If there are no changes then this is a noop.
+        if not sent_room_ids and not unsent_room_ids:
+            return prev_connection_token
+
+        conn_key = self._get_connection_key(sync_config)
+        sync_statuses = self._connections.setdefault(conn_key, {})
+
+        # Generate a new token, removing any existing entries in that token
+        # (which can happen if requests get resent).
+        new_store_token = prev_connection_token + 1
+        sync_statuses.pop(new_store_token, None)
+
+        # Copy over and update the room mappings.
+        new_room_statuses = dict(sync_statuses.get(prev_connection_token, {}))
+
+        # Whether we have updated the `new_room_statuses`, if we don't by the
+        # end we can treat this as a noop.
+        have_updated = False
+        for room_id in sent_room_ids:
+            new_room_statuses[room_id] = HAVE_SENT_ROOM_LIVE
+            have_updated = True
+
+        # Whether we add/update the entries for unsent rooms depends on the
+        # existing entry:
+        #   - LIVE: We have previously sent down everything up to
+        #     `last_room_token`, so we update the entry to be `PREVIOUSLY` with
+        #     `last_room_token`.
+        #   - PREVIOUSLY: We have previously sent down everything up to *a*
+        #     given token, so we don't need to update the entry.
+        #   - NEVER: We have never previously sent down the room, and we haven't
+        #     sent anything down this time either so we leave it as NEVER.
+
+        # Work out the new state for unsent rooms that were `LIVE`.
+        if from_token:
+            new_unsent_state = HaveSentRoom.previously(from_token.stream_token.room_key)
+        else:
+            new_unsent_state = HAVE_SENT_ROOM_NEVER
+
+        for room_id in unsent_room_ids:
+            prev_state = new_room_statuses.get(room_id)
+            if prev_state is not None and prev_state.status == HaveSentRoomFlag.LIVE:
+                new_room_statuses[room_id] = new_unsent_state
+                have_updated = True
+
+        if not have_updated:
+            return prev_connection_token
+
+        sync_statuses[new_store_token] = new_room_statuses
+
+        return new_store_token
+
+    async def mark_token_seen(
+        self,
+        sync_config: SlidingSyncConfig,
+        from_token: Optional[SlidingSyncStreamToken],
+    ) -> None:
+        """We have received a request with the given token, so we can clear out
+        any other tokens associated with the connection.
+
+        If there is no from token then we have started afresh, and so we delete
+        all tokens associated with the device.
+        """
+        # Clear out any tokens for the connection that don't match the one
+        # from the request.
+        conn_key = self._get_connection_key(sync_config)
+        sync_statuses = self._connections.pop(conn_key, {})
+        if from_token is None:
+            return
+
+        sync_statuses = {
+            connection_token: room_statuses
+            for connection_token, room_statuses in sync_statuses.items()
+            if connection_token == from_token.connection_position
+        }
+        if sync_statuses:
+            self._connections[conn_key] = sync_statuses
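A toy rendering (hypothetical inputs) of the connection-key scheme used by `_get_connection_key(...)` below, where the device ID, or failing that the access token ID, namespaces the client-chosen `conn_id`:

```python
from typing import Optional, Tuple


def connection_key(
    user_id: str,
    conn_id: str,
    device_id: Optional[str] = None,
    access_token_id: Optional[int] = None,
) -> Tuple[str, str]:
    if device_id:
        return (user_id, f"D/{device_id}/{conn_id}")
    if access_token_id:
        # No device, but the access token ID is a stable identifier
        return (user_id, f"A/{access_token_id}/{conn_id}")
    raise Exception("Cannot use sliding sync with access token type")


web = connection_key("@alice:example.org", "main_sync_loop", device_id="WEB1")
android = connection_key("@alice:example.org", "main_sync_loop", device_id="PHONE1")
assert web != android  # same conn_id, different devices, distinct connections
```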
+
+    @staticmethod
+    def _get_connection_key(sync_config: SlidingSyncConfig) -> Tuple[str, str]:
+        """Return a unique identifier for this connection.
+
+        The first part is simply the user ID.
+
+        The second part is generally a combination of device ID and conn_id.
+        However, both of these are optional (e.g. puppet access tokens don't
+        have device IDs), so this handles those edge cases.
+
+        We use this over the raw `conn_id` to avoid clashes between different
+        clients that use the same `conn_id`. Imagine a user uses a web client
+        that uses `conn_id: main_sync_loop` and an Android client that also has
+        a `conn_id: main_sync_loop`.
+        """
+
+        user_id = sync_config.user.to_string()
+
+        # Only one sliding sync connection is allowed per given conn_id (empty
+        # or not).
+        conn_id = sync_config.conn_id or ""
+
+        if sync_config.requester.device_id:
+            return (user_id, f"D/{sync_config.requester.device_id}/{conn_id}")
+
+        if sync_config.requester.access_token_id:
+            # If we don't have a device, then the access token ID should be a
+            # stable ID.
+            return (user_id, f"A/{sync_config.requester.access_token_id}/{conn_id}")
+
+        # If we have neither then it's likely an AS or some weird token. Either
+        # way we can just fail here.
+        raise Exception("Cannot use sliding sync with access token type")
diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py
index 1c94f3ca4610..8f90c1706023 100644
--- a/synapse/handlers/stats.py
+++ b/synapse/handlers/stats.py
@@ -293,7 +293,9 @@ async def _handle_deltas(
                     "history_visibility"
                 )
             elif delta.event_type == EventTypes.RoomEncryption:
-                room_state["encryption"] = event_content.get("algorithm")
+                room_state["encryption"] = event_content.get(
+                    EventContentFields.ENCRYPTION_ALGORITHM
+                )
             elif delta.event_type == EventTypes.Name:
                 room_state["name"] = event_content.get("name")
             elif delta.event_type == EventTypes.Topic:
diff --git a/synapse/handlers/typing.py b/synapse/handlers/typing.py
index 4c8771833748..8d693fee304a 100644
--- a/synapse/handlers/typing.py
+++ b/synapse/handlers/typing.py
@@ -565,7 +565,12 @@ async def get_new_events(
         room_ids: Iterable[str],
         is_guest: bool,
         explicit_room_id: Optional[str] = None,
+        to_key: Optional[int] = None,
     ) -> Tuple[List[JsonMapping], int]:
+        """
+        Find typing notifications for given rooms (> `from_key` and <= `to_key`)
+        """
+
         with Measure(self.clock, "typing.get_new_events"):
             from_key = int(from_key)
             handler = self.get_typing_handler()
@@ -574,7 +579,9 @@ async def get_new_events(
             for room_id in room_ids:
                 if room_id not in handler._room_serials:
                     continue
-                if handler._room_serials[room_id] <= from_key:
+                if handler._room_serials[room_id] <= from_key or (
+                    to_key is not None and handler._room_serials[room_id] > to_key
+                ):
                     continue
 
                 events.append(self._make_event_for(room_id))
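A minimal sketch of the `(from_key, to_key]` window that the typing.py change above introduces, with made-up serial numbers:

```python
from typing import Optional


def room_in_window(room_serial: int, from_key: int, to_key: Optional[int]) -> bool:
    """A room's latest typing serial must be strictly newer than `from_key`
    and, when a `to_key` is given, no newer than `to_key`."""
    if room_serial <= from_key:
        return False
    if to_key is not None and room_serial > to_key:
        return False
    return True


assert room_in_window(5, from_key=3, to_key=None)
assert not room_in_window(5, from_key=3, to_key=4)   # newer than the window
assert not room_in_window(3, from_key=3, to_key=10)  # nothing new since from_key
```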
diff --git a/synapse/rest/client/sync.py b/synapse/rest/client/sync.py
index 93fe1d439e26..4f2c552af267 100644
--- a/synapse/rest/client/sync.py
+++ b/synapse/rest/client/sync.py
@@ -52,9 +52,9 @@
     parse_string,
 )
 from synapse.http.site import SynapseRequest
-from synapse.logging.opentracing import trace_with_opname
+from synapse.logging.opentracing import log_kv, set_tag, trace_with_opname
 from synapse.rest.admin.experimental_features import ExperimentalFeature
-from synapse.types import JsonDict, Requester, StreamToken
+from synapse.types import JsonDict, Requester, SlidingSyncStreamToken, StreamToken
 from synapse.types.rest.client import SlidingSyncBody
 from synapse.util import json_decoder
 from synapse.util.caches.lrucache import LruCache
@@ -881,7 +881,6 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
         )
 
         user = requester.user
-        device_id = requester.device_id
 
         timeout = parse_integer(request, "timeout", default=0)
         # Position in the stream
@@ -889,22 +888,41 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
 
         from_token = None
         if from_token_string is not None:
-            from_token = await StreamToken.from_string(self.store, from_token_string)
+            from_token = await SlidingSyncStreamToken.from_string(
+                self.store, from_token_string
+            )
 
         # TODO: We currently don't know whether we're going to use sticky params or
         # maybe some filters like sync v2 where they are built up once and referenced
         # by filter ID. For now, we will just prototype with always passing everything
         # in.
         body = parse_and_validate_json_object_from_request(request, SlidingSyncBody)
-        logger.info("Sliding sync request: %r", body)
+
+        # Tag and log useful data to differentiate requests.
+        set_tag("sliding_sync.conn_id", body.conn_id or "")
+        log_kv(
+            {
+                "sliding_sync.lists": {
+                    list_name: {
+                        "ranges": list_config.ranges,
+                        "timeline_limit": list_config.timeline_limit,
+                    }
+                    for list_name, list_config in (body.lists or {}).items()
+                },
+                "sliding_sync.room_subscriptions": list(
+                    (body.room_subscriptions or {}).keys()
+                ),
+            }
+        )
 
         sync_config = SlidingSyncConfig(
             user=user,
-            device_id=device_id,
+            requester=requester,
             # FIXME: Currently, we're just manually copying the fields from the
-            # `SlidingSyncBody` into the config. How can we gurantee into the future
+            # `SlidingSyncBody` into the config. How can we guarantee into the future
             # that we don't forget any? I would like something more structured like
             # `copy_attributes(from=body, to=config)`
+            conn_id=body.conn_id,
             lists=body.lists,
             room_subscriptions=body.room_subscriptions,
             extensions=body.extensions,
@@ -927,7 +945,6 @@ async def on_POST(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
 
         return 200, response_content
 
-    # TODO: Is there a better way to encode things?
     async def encode_response(
         self,
         requester: Requester,
@@ -1115,6 +1132,34 @@ async def encode_extensions(
                 extensions.e2ee.device_list_updates.left
             )
 
+        if extensions.account_data is not None:
+            serialized_extensions["account_data"] = {
+                # Same as the top-level `account_data.events` field in Sync v2.
+                "global": [
+                    {"type": account_data_type, "content": content}
+                    for account_data_type, content in extensions.account_data.global_account_data_map.items()
+                ],
+                # Same as the joined room's account_data field in Sync v2, e.g. the path
+                # `rooms.join["!foo:bar"].account_data.events`.
+                "rooms": {
+                    room_id: [
+                        {"type": account_data_type, "content": content}
+                        for account_data_type, content in event_map.items()
+                    ]
+                    for room_id, event_map in extensions.account_data.account_data_by_room_map.items()
+                },
+            }
+
+        if extensions.receipts is not None:
+            serialized_extensions["receipts"] = {
+                "rooms": extensions.receipts.room_id_to_receipt_map,
+            }
+
+        if extensions.typing is not None:
+            serialized_extensions["typing"] = {
+                "rooms": extensions.typing.room_id_to_typing_map,
+            }
+
         return serialized_extensions
diff --git a/synapse/server.py b/synapse/server.py
index 4a3f9ff9349f..46b9d83a0447 100644
--- a/synapse/server.py
+++ b/synapse/server.py
@@ -559,6 +559,7 @@ def get_jwt_handler(self) -> "JwtHandler":
     def get_sync_handler(self) -> SyncHandler:
         return SyncHandler(self)
 
+    @cache_in_self
     def get_sliding_sync_handler(self) -> SlidingSyncHandler:
         return SlidingSyncHandler(self)
 
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index 066f3d08ae44..e12ab9457674 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -127,6 +127,8 @@ def _invalidate_state_caches(
         # Purge other caches based on room state.
self._attempt_to_invalidate_cache("get_room_summary", (room_id,)) self._attempt_to_invalidate_cache("get_partial_current_state_ids", (room_id,)) + self._attempt_to_invalidate_cache("get_room_type", (room_id,)) + self._attempt_to_invalidate_cache("get_room_encryption", (room_id,)) def _invalidate_state_caches_all(self, room_id: str) -> None: """Invalidates caches that are based on the current state, but does @@ -153,6 +155,8 @@ def _invalidate_state_caches_all(self, room_id: str) -> None: "_get_rooms_for_local_user_where_membership_is_inner", None ) self._attempt_to_invalidate_cache("get_room_summary", (room_id,)) + self._attempt_to_invalidate_cache("get_room_type", (room_id,)) + self._attempt_to_invalidate_cache("get_room_encryption", (room_id,)) def _attempt_to_invalidate_cache( self, cache_name: str, key: Optional[Collection[Any]] diff --git a/synapse/storage/databases/main/cache.py b/synapse/storage/databases/main/cache.py index 26b8e1a17271..63624f3e8f35 100644 --- a/synapse/storage/databases/main/cache.py +++ b/synapse/storage/databases/main/cache.py @@ -268,13 +268,23 @@ def _process_event_stream_row(self, token: int, row: EventsStreamRow) -> None: self._curr_state_delta_stream_cache.entity_has_changed(data.room_id, token) # type: ignore[attr-defined] if data.type == EventTypes.Member: - self.get_rooms_for_user.invalidate((data.state_key,)) # type: ignore[attr-defined] + self._attempt_to_invalidate_cache( + "get_rooms_for_user", (data.state_key,) + ) + elif data.type == EventTypes.RoomEncryption: + self._attempt_to_invalidate_cache( + "get_room_encryption", (data.room_id,) + ) + elif data.type == EventTypes.Create: + self._attempt_to_invalidate_cache("get_room_type", (data.room_id,)) elif row.type == EventsStreamAllStateRow.TypeId: assert isinstance(data, EventsStreamAllStateRow) # Similar to the above, but the entire caches are invalidated. This is # unfortunate for the membership caches, but should recover quickly. 
self._curr_state_delta_stream_cache.entity_has_changed(data.room_id, token) # type: ignore[attr-defined] - self.get_rooms_for_user.invalidate_all() # type: ignore[attr-defined] + self._attempt_to_invalidate_cache("get_rooms_for_user", None) + self._attempt_to_invalidate_cache("get_room_type", (data.room_id,)) + self._attempt_to_invalidate_cache("get_room_encryption", (data.room_id,)) else: raise Exception("Unknown events stream row type %s" % (row.type,)) @@ -345,6 +355,10 @@ def _invalidate_caches_for_event( self._attempt_to_invalidate_cache( "get_forgotten_rooms_for_user", (state_key,) ) + elif etype == EventTypes.Create: + self._attempt_to_invalidate_cache("get_room_type", (room_id,)) + elif etype == EventTypes.RoomEncryption: + self._attempt_to_invalidate_cache("get_room_encryption", (room_id,)) if relates_to: self._attempt_to_invalidate_cache( @@ -405,6 +419,8 @@ def _invalidate_caches_for_room_events(self, room_id: str) -> None: self._attempt_to_invalidate_cache("get_thread_summary", None) self._attempt_to_invalidate_cache("get_thread_participated", None) self._attempt_to_invalidate_cache("get_threads", (room_id,)) + self._attempt_to_invalidate_cache("get_room_type", (room_id,)) + self._attempt_to_invalidate_cache("get_room_encryption", (room_id,)) self._attempt_to_invalidate_cache("_get_state_group_for_event", None) @@ -457,6 +473,8 @@ def _invalidate_caches_for_room(self, room_id: str) -> None: self._attempt_to_invalidate_cache("get_forgotten_rooms_for_user", None) self._attempt_to_invalidate_cache("_get_membership_from_event_id", None) self._attempt_to_invalidate_cache("get_room_version_id", (room_id,)) + self._attempt_to_invalidate_cache("get_room_type", (room_id,)) + self._attempt_to_invalidate_cache("get_room_encryption", (room_id,)) # And delete state caches. diff --git a/synapse/storage/databases/main/state.py b/synapse/storage/databases/main/state.py index 5188b2f7a4d2..62bc4600fb25 100644 --- a/synapse/storage/databases/main/state.py +++ b/synapse/storage/databases/main/state.py @@ -30,6 +30,7 @@ Iterable, List, Mapping, + MutableMapping, Optional, Set, Tuple, @@ -72,10 +73,18 @@ _T = TypeVar("_T") - MAX_STATE_DELTA_HOPS = 100 +# Freeze so it's immutable and we can use it as a cache value +@attr.s(slots=True, frozen=True, auto_attribs=True) +class Sentinel: + pass + + +ROOM_UNKNOWN_SENTINEL = Sentinel() + + @attr.s(slots=True, frozen=True, auto_attribs=True) class EventMetadata: """Returned by `get_metadata_for_events`""" @@ -300,51 +309,189 @@ async def get_create_event_for_room(self, room_id: str) -> EventBase: @cached(max_entries=10000) async def get_room_type(self, room_id: str) -> Optional[str]: - """Get the room type for a given room. The server must be joined to the - given room. - """ - - row = await self.db_pool.simple_select_one( - table="room_stats_state", - keyvalues={"room_id": room_id}, - retcols=("room_type",), - allow_none=True, - desc="get_room_type", - ) - - if row is not None: - return row[0] - - # If we haven't updated `room_stats_state` with the room yet, query the - # create event directly. - create_event = await self.get_create_event_for_room(room_id) - room_type = create_event.content.get(EventContentFields.ROOM_TYPE) - return room_type + raise NotImplementedError() @cachedList(cached_method_name="get_room_type", list_name="room_ids") async def bulk_get_room_type( self, room_ids: Set[str] - ) -> Mapping[str, Optional[str]]: - """Bulk fetch room types for the given rooms, the server must be in all - the rooms given. 
+    ) -> Mapping[str, Union[Optional[str], Sentinel]]:
         """
+        Bulk fetch room types for the given rooms (via current state).
 
-        rows = await self.db_pool.simple_select_many_batch(
-            table="room_stats_state",
-            column="room_id",
-            iterable=room_ids,
-            retcols=("room_id", "room_type"),
-            desc="bulk_get_room_type",
+        Since this function is cached, any missing values would be cached as `None`. In
+        order to distinguish between a room that has a `None` room type and a room that
+        is unknown to the server where we might want to omit the value (which would
+        make it cached as `None`), instead we use the sentinel value
+        `ROOM_UNKNOWN_SENTINEL`.
+
+        Returns:
+            A mapping from room ID to the room's type (`None` is a valid room type).
+            Rooms unknown to this server will return `ROOM_UNKNOWN_SENTINEL`.
+        """
+
+        def txn(
+            txn: LoggingTransaction,
+        ) -> MutableMapping[str, Union[Optional[str], Sentinel]]:
+            clause, args = make_in_list_sql_clause(
+                txn.database_engine, "room_id", room_ids
+            )
+
+            # We can't rely on `room_stats_state.room_type` if the server has left the
+            # room because the `room_id` will still be in the table but everything will
+            # be set to `None` but `None` is a valid room type value. We join against
+            # the `room_stats_current` table which keeps track of the
+            # `current_state_events` count (and a proxy value `local_users_in_room`
+            # which can be used to assume the server is participating in the room and
+            # has current state) to ensure that the data in `room_stats_state` is
+            # up-to-date with the current state.
+            #
+            # FIXME: Use `room_stats_current.current_state_events` instead of
+            # `room_stats_current.local_users_in_room` once
+            # https://github.com/element-hq/synapse/issues/17457 is fixed.
+            sql = f"""
+                SELECT room_id, room_type
+                FROM room_stats_state
+                INNER JOIN room_stats_current USING (room_id)
+                WHERE
+                    {clause}
+                    AND local_users_in_room > 0
+            """
+
+            txn.execute(sql, args)
+
+            room_id_to_type_map = {}
+            for row in txn:
+                room_id_to_type_map[row[0]] = row[1]
+
+            return room_id_to_type_map
+
+        results = await self.db_pool.runInteraction(
+            "bulk_get_room_type",
+            txn,
         )
 
         # If we haven't updated `room_stats_state` with the room yet, query the
         # create events directly. This should happen only rarely so we don't
         # mind if we do this in a loop.
-        results = dict(rows)
         for room_id in room_ids - results.keys():
-            create_event = await self.get_create_event_for_room(room_id)
-            room_type = create_event.content.get(EventContentFields.ROOM_TYPE)
-            results[room_id] = room_type
+            try:
+                create_event = await self.get_create_event_for_room(room_id)
+                room_type = create_event.content.get(EventContentFields.ROOM_TYPE)
+                results[room_id] = room_type
+            except NotFoundError:
+                # We use the sentinel value to distinguish between `None` which is a
+                # valid room type and a room that is unknown to the server so the value
+                # is just unset.
+                results[room_id] = ROOM_UNKNOWN_SENTINEL
 
         return results
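A consumer-side sketch (hypothetical values) of the sentinel convention shared by `bulk_get_room_type(...)` above and `bulk_get_room_encryption(...)` below, where `None` is a real answer and unknown rooms get a distinct sentinel:

```python
# `None` is a real answer ("no room type"), so unknown rooms need a
# distinct sentinel rather than `None` (hypothetical values below).
ROOM_UNKNOWN_SENTINEL = object()

results = {
    "!space:example.org": "m.space",                # a space
    "!chat:example.org": None,                      # ordinary room: valid None type
    "!mystery:example.org": ROOM_UNKNOWN_SENTINEL,  # room this server has never seen
}

known = {rid: t for rid, t in results.items() if t is not ROOM_UNKNOWN_SENTINEL}
assert "!mystery:example.org" not in known
assert known["!chat:example.org"] is None  # still distinguishable from "unknown"
```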
+
+    @cached(max_entries=10000)
+    async def get_room_encryption(self, room_id: str) -> Optional[str]:
+        raise NotImplementedError()
+
+    @cachedList(cached_method_name="get_room_encryption", list_name="room_ids")
+    async def bulk_get_room_encryption(
+        self, room_ids: Set[str]
+    ) -> Mapping[str, Union[Optional[str], Sentinel]]:
+        """
+        Bulk fetch room encryption for the given rooms (via current state).
+
+        Since this function is cached, any missing values would be cached as `None`. In
+        order to distinguish between an unencrypted room that has `None` encryption and
+        a room that is unknown to the server where we might want to omit the value
+        (which would make it cached as `None`), instead we use the sentinel value
+        `ROOM_UNKNOWN_SENTINEL`.
+
+        Returns:
+            A mapping from room ID to the room's encryption algorithm if the room is
+            encrypted, otherwise `None`. Rooms unknown to this server will return
+            `ROOM_UNKNOWN_SENTINEL`.
+        """
+
+        def txn(
+            txn: LoggingTransaction,
+        ) -> MutableMapping[str, Union[Optional[str], Sentinel]]:
+            clause, args = make_in_list_sql_clause(
+                txn.database_engine, "room_id", room_ids
+            )
+
+            # We can't rely on `room_stats_state.encryption` if the server has left the
+            # room because the `room_id` will still be in the table but everything will
+            # be set to `None` but `None` is a valid encryption value. We join against
+            # the `room_stats_current` table which keeps track of the
+            # `current_state_events` count (and a proxy value `local_users_in_room`
+            # which can be used to assume the server is participating in the room and
+            # has current state) to ensure that the data in `room_stats_state` is
+            # up-to-date with the current state.
+            #
+            # FIXME: Use `room_stats_current.current_state_events` instead of
+            # `room_stats_current.local_users_in_room` once
+            # https://github.com/element-hq/synapse/issues/17457 is fixed.
+            sql = f"""
+                SELECT room_id, encryption
+                FROM room_stats_state
+                INNER JOIN room_stats_current USING (room_id)
+                WHERE
+                    {clause}
+                    AND local_users_in_room > 0
+            """
+
+            txn.execute(sql, args)
+
+            room_id_to_encryption_map = {}
+            for row in txn:
+                room_id_to_encryption_map[row[0]] = row[1]
+
+            return room_id_to_encryption_map
+
+        results = await self.db_pool.runInteraction(
+            "bulk_get_room_encryption",
+            txn,
+        )
+
+        # If we haven't updated `room_stats_state` with the room yet, query the state
+        # directly. This should happen only rarely so we don't mind if we do this in a
+        # loop.
+        encryption_event_ids: List[str] = []
+        for room_id in room_ids - results.keys():
+            state_map = await self.get_partial_filtered_current_state_ids(
+                room_id,
+                state_filter=StateFilter.from_types(
+                    [
+                        (EventTypes.Create, ""),
+                        (EventTypes.RoomEncryption, ""),
+                    ]
+                ),
+            )
+            # We can use the create event as a canary to tell whether the server has
+            # seen the room before
+            create_event_id = state_map.get((EventTypes.Create, ""))
+            encryption_event_id = state_map.get((EventTypes.RoomEncryption, ""))
+
+            if create_event_id is None:
+                # We use the sentinel value to distinguish between `None` which is a
+                # valid encryption value and a room that is unknown to the server so
+                # the value is just unset.
+                results[room_id] = ROOM_UNKNOWN_SENTINEL
+                continue
+
+            if encryption_event_id is None:
+                results[room_id] = None
+            else:
+                encryption_event_ids.append(encryption_event_id)
+
+        encryption_event_map = await self.get_events(encryption_event_ids)
+
+        for encryption_event_id in encryption_event_ids:
+            encryption_event = encryption_event_map.get(encryption_event_id)
+            # If the current state says there is an encryption event, we should have it
+            # in the database.
+ assert encryption_event is not None + + results[encryption_event.room_id] = encryption_event.content.get( + EventContentFields.ENCRYPTION_ALGORITHM + ) return results diff --git a/synapse/storage/databases/main/state_deltas.py b/synapse/storage/databases/main/state_deltas.py index 036972ac257f..da3ebe66b88a 100644 --- a/synapse/storage/databases/main/state_deltas.py +++ b/synapse/storage/databases/main/state_deltas.py @@ -26,6 +26,8 @@ from synapse.storage._base import SQLBaseStore from synapse.storage.database import LoggingTransaction +from synapse.storage.databases.main.stream import _filter_results_by_stream +from synapse.types import RoomStreamToken from synapse.util.caches.stream_change_cache import StreamChangeCache logger = logging.getLogger(__name__) @@ -156,3 +158,38 @@ async def get_max_stream_id_in_current_state_deltas(self) -> int: "get_max_stream_id_in_current_state_deltas", self._get_max_stream_id_in_current_state_deltas_txn, ) + + async def get_current_state_deltas_for_room( + self, room_id: str, from_token: RoomStreamToken, to_token: RoomStreamToken + ) -> List[StateDelta]: + """Get the state deltas between two tokens.""" + + def get_current_state_deltas_for_room_txn( + txn: LoggingTransaction, + ) -> List[StateDelta]: + sql = """ + SELECT instance_name, stream_id, type, state_key, event_id, prev_event_id + FROM current_state_delta_stream + WHERE room_id = ? AND ? < stream_id AND stream_id <= ? + ORDER BY stream_id ASC + """ + txn.execute( + sql, (room_id, from_token.stream, to_token.get_max_stream_pos()) + ) + + return [ + StateDelta( + stream_id=row[1], + room_id=room_id, + event_type=row[2], + state_key=row[3], + event_id=row[4], + prev_event_id=row[5], + ) + for row in txn + if _filter_results_by_stream(from_token, to_token, row[0], row[1]) + ] + + return await self.db_pool.runInteraction( + "get_current_state_deltas_for_room", get_current_state_deltas_for_room_txn + ) diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py index b034361aec60..4207e73c7f9a 100644 --- a/synapse/storage/databases/main/stream.py +++ b/synapse/storage/databases/main/stream.py @@ -2104,3 +2104,13 @@ async def get_timeline_gaps( return RoomStreamToken(stream=last_position.stream - 1) return None + + def get_rooms_that_might_have_updates( + self, room_ids: StrCollection, from_token: RoomStreamToken + ) -> StrCollection: + """Filters given room IDs down to those that might have updates, i.e. + removes rooms that definitely do not have updates. + """ + return self._events_stream_cache.get_entities_changed( + room_ids, from_token.stream + ) diff --git a/synapse/types/__init__.py b/synapse/types/__init__.py index c0d30ac2a348..5259550f1c5d 100644 --- a/synapse/types/__init__.py +++ b/synapse/types/__init__.py @@ -1160,6 +1160,49 @@ def __str__(self) -> str: ) +@attr.s(slots=True, frozen=True, auto_attribs=True) +class SlidingSyncStreamToken: + """The same as a `StreamToken`, but includes an extra field at the start for + the sliding sync connection token (separated by a '/'). This is used to + store per-connection state. + + This then looks something like: + 5/s2633508_17_338_6732159_1082514_541479_274711_265584_1_379 + + Attributes: + stream_token: Token representing the position of all the standard + streams. + connection_position: Token used by sliding sync to track updates to any + per-connection state stored by Synapse. 
+    """
+
+    stream_token: StreamToken
+    connection_position: int
+
+    @staticmethod
+    @cancellable
+    async def from_string(store: "DataStore", string: str) -> "SlidingSyncStreamToken":
+        """Creates a SlidingSyncStreamToken from its textual representation."""
+        try:
+            connection_position_str, stream_token_str = string.split("/", 1)
+            connection_position = int(connection_position_str)
+            stream_token = await StreamToken.from_string(store, stream_token_str)
+
+            return SlidingSyncStreamToken(
+                stream_token=stream_token,
+                connection_position=connection_position,
+            )
+        except CancelledError:
+            raise
+        except Exception:
+            raise SynapseError(400, "Invalid stream token")
+
+    async def to_string(self, store: "DataStore") -> str:
+        """Serializes the token to a string"""
+        stream_token_str = await self.stream_token.to_string(store)
+        return f"{self.connection_position}/{stream_token_str}"
+
+
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class PersistedPosition:
     """Position of a newly persisted row with instance that persisted it."""
diff --git a/synapse/types/handlers/__init__.py b/synapse/types/handlers/__init__.py
index 4c6c42db0407..363f060bef9b 100644
--- a/synapse/types/handlers/__init__.py
+++ b/synapse/types/handlers/__init__.py
@@ -31,7 +31,15 @@
 from pydantic import Extra
 
 from synapse.events import EventBase
-from synapse.types import DeviceListUpdates, JsonDict, JsonMapping, StreamToken, UserID
+from synapse.types import (
+    DeviceListUpdates,
+    JsonDict,
+    JsonMapping,
+    Requester,
+    SlidingSyncStreamToken,
+    StreamToken,
+    UserID,
+)
 from synapse.types.rest.client import SlidingSyncBody
 
 if TYPE_CHECKING:
@@ -102,7 +110,7 @@ class SlidingSyncConfig(SlidingSyncBody):
     """
 
     user: UserID
-    device_id: Optional[str]
+    requester: Requester
 
     # Pydantic config
     class Config:
@@ -144,7 +152,7 @@ class SlidingSyncResult:
     Attributes:
         next_pos: The next position token in the sliding window to request (next_batch).
         lists: Sliding window API. A map of list key to list results.
-        rooms: Room subscription API. A map of room ID to room subscription to room results.
+        rooms: Room subscription API. A map of room ID to room results.
         extensions: Extensions API. A map of extension key to extension results.
     """
 
@@ -174,8 +182,8 @@ class RoomResult:
             absent on joined/left rooms
         prev_batch: A token that can be passed as a start parameter to the
             `/rooms/<room_id>/messages` API to retrieve earlier messages.
-        limited: True if their are more events than fit between the given position and now.
-            Sync again to get more.
+        limited: True if there are more events than `timeline_limit` looking
+            backwards from the `response.pos` to the `request.pos`.
         num_live: The number of timeline events which have just occurred and are not historical.
             The last N events are 'live' and should be treated as such. This is mostly useful to
             determine whether a given @mention event should make a noise or not.
@@ -230,6 +238,17 @@ class StrippedHero:
         notification_count: int
         highlight_count: int
 
+        def __bool__(self) -> bool:
+            return (
+                # If this is the first time the client is seeing the room, we should not filter it out
+                # under any circumstance.
+ self.initial + # We need to let the client know if there are any new events + or bool(self.required_state) + or bool(self.timeline_events) + or bool(self.stripped_state) + ) + @attr.s(slots=True, frozen=True, auto_attribs=True) class SlidingWindowList: """ @@ -323,13 +342,69 @@ def __bool__(self) -> bool: or self.device_unused_fallback_key_types ) + @attr.s(slots=True, frozen=True, auto_attribs=True) + class AccountDataExtension: + """The Account Data extension (MSC3959) + + Attributes: + global_account_data_map: Mapping from `type` to `content` of global account + data events. + account_data_by_room_map: Mapping from room_id to mapping of `type` to + `content` of room account data events. + """ + + global_account_data_map: Mapping[str, JsonMapping] + account_data_by_room_map: Mapping[str, Mapping[str, JsonMapping]] + + def __bool__(self) -> bool: + return bool( + self.global_account_data_map or self.account_data_by_room_map + ) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class ReceiptsExtension: + """The Receipts extension (MSC3960) + + Attributes: + room_id_to_receipt_map: Mapping from room_id to `m.receipt` ephemeral + event (type, content) + """ + + room_id_to_receipt_map: Mapping[str, JsonMapping] + + def __bool__(self) -> bool: + return bool(self.room_id_to_receipt_map) + + @attr.s(slots=True, frozen=True, auto_attribs=True) + class TypingExtension: + """The Typing Notification extension (MSC3961) + + Attributes: + room_id_to_typing_map: Mapping from room_id to `m.typing` ephemeral + event (type, content) + """ + + room_id_to_typing_map: Mapping[str, JsonMapping] + + def __bool__(self) -> bool: + return bool(self.room_id_to_typing_map) + to_device: Optional[ToDeviceExtension] = None e2ee: Optional[E2eeExtension] = None + account_data: Optional[AccountDataExtension] = None + receipts: Optional[ReceiptsExtension] = None + typing: Optional[TypingExtension] = None def __bool__(self) -> bool: - return bool(self.to_device or self.e2ee) - - next_pos: StreamToken + return bool( + self.to_device + or self.e2ee + or self.account_data + or self.receipts + or self.typing + ) + + next_pos: SlidingSyncStreamToken lists: Dict[str, SlidingWindowList] rooms: Dict[str, RoomResult] extensions: Extensions @@ -339,10 +414,14 @@ def __bool__(self) -> bool: to tell if the notifier needs to wait for more events when polling for events. """ - return bool(self.lists or self.rooms or self.extensions) + # We don't include `self.lists` here, as a) `lists` is always non-empty even if + # there are no changes, and b) since we're sorting rooms by `stream_ordering` of + # the latest activity, anything that would cause the order to change would end + # up in `self.rooms` and cause us to send down the change. + return bool(self.rooms or self.extensions) @staticmethod - def empty(next_pos: StreamToken) -> "SlidingSyncResult": + def empty(next_pos: SlidingSyncStreamToken) -> "SlidingSyncResult": "Return a new empty result" return SlidingSyncResult( next_pos=next_pos, diff --git a/synapse/types/rest/client/__init__.py b/synapse/types/rest/client/__init__.py index f3c45a0d6a50..93b537ab7b05 100644 --- a/synapse/types/rest/client/__init__.py +++ b/synapse/types/rest/client/__init__.py @@ -120,6 +120,9 @@ class SlidingSyncBody(RequestBodyModel): Sliding Sync API request body. Attributes: + conn_id: An optional string to identify this connection to the server. + Only one sliding sync connection is allowed per given conn_id (empty + or not). lists: Sliding window API. 
A map of list key to list information (:class:`SlidingSyncList`). Max lists: 100. The list keys should be arbitrary strings which the client is using to refer to the list. Keep this @@ -322,8 +325,64 @@ class E2eeExtension(RequestBodyModel): enabled: Optional[StrictBool] = False + class AccountDataExtension(RequestBodyModel): + """The Account Data extension (MSC3959) + + Attributes: + enabled + lists: List of list keys (from the Sliding Window API) to apply this + extension to. + rooms: List of room IDs (from the Room Subscription API) to apply this + extension to. + """ + + enabled: Optional[StrictBool] = False + # Process all lists defined in the Sliding Window API. (This is the default.) + lists: Optional[List[StrictStr]] = ["*"] + # Process all room subscriptions defined in the Room Subscription API. (This is the default.) + rooms: Optional[List[StrictStr]] = ["*"] + + class ReceiptsExtension(RequestBodyModel): + """The Receipts extension (MSC3960) + + Attributes: + enabled + lists: List of list keys (from the Sliding Window API) to apply this + extension to. + rooms: List of room IDs (from the Room Subscription API) to apply this + extension to. + """ + + enabled: Optional[StrictBool] = False + # Process all lists defined in the Sliding Window API. (This is the default.) + lists: Optional[List[StrictStr]] = ["*"] + # Process all room subscriptions defined in the Room Subscription API. (This is the default.) + rooms: Optional[List[StrictStr]] = ["*"] + + class TypingExtension(RequestBodyModel): + """The Typing Notification extension (MSC3961) + + Attributes: + enabled + lists: List of list keys (from the Sliding Window API) to apply this + extension to. + rooms: List of room IDs (from the Room Subscription API) to apply this + extension to. + """ + + enabled: Optional[StrictBool] = False + # Process all lists defined in the Sliding Window API. (This is the default.) + lists: Optional[List[StrictStr]] = ["*"] + # Process all room subscriptions defined in the Room Subscription API. (This is the default.) 
+ rooms: Optional[List[StrictStr]] = ["*"] + to_device: Optional[ToDeviceExtension] = None e2ee: Optional[E2eeExtension] = None + account_data: Optional[AccountDataExtension] = None + receipts: Optional[ReceiptsExtension] = None + typing: Optional[TypingExtension] = None + + conn_id: Optional[str] # mypy workaround via https://github.com/pydantic/pydantic/issues/156#issuecomment-1130883884 if TYPE_CHECKING: diff --git a/tests/handlers/test_e2e_keys.py b/tests/handlers/test_e2e_keys.py index 0e6352ff4b9b..8a3dfdcf75c1 100644 --- a/tests/handlers/test_e2e_keys.py +++ b/tests/handlers/test_e2e_keys.py @@ -43,9 +43,7 @@ class E2eKeysHandlerTestCase(unittest.HomeserverTestCase): def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.appservice_api = mock.AsyncMock() - return self.setup_test_homeserver( - federation_client=mock.Mock(), application_service_api=self.appservice_api - ) + return self.setup_test_homeserver(application_service_api=self.appservice_api) def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.handler = hs.get_e2e_keys_handler() @@ -1224,6 +1222,61 @@ def test_query_devices_remote_sync(self) -> None: }, ) + def test_query_devices_remote_down(self) -> None: + """Tests that querying keys for a remote user on an unreachable server returns + results in the "failures" property + """ + + remote_user_id = "@test:other" + local_user_id = "@test:test" + + # The backoff code treats time zero as special + self.reactor.advance(5) + + self.hs.get_federation_http_client().agent.request = mock.AsyncMock( # type: ignore[method-assign] + side_effect=Exception("boop") + ) + + e2e_handler = self.hs.get_e2e_keys_handler() + + query_result = self.get_success( + e2e_handler.query_devices( + { + "device_keys": {remote_user_id: []}, + }, + timeout=10, + from_user_id=local_user_id, + from_device_id="some_device_id", + ) + ) + + self.assertEqual( + query_result["failures"], + { + "other": { + "message": "Failed to send request: Exception: boop", + "status": 503, + } + }, + ) + + # Do it again: we should hit the backoff + query_result = self.get_success( + e2e_handler.query_devices( + { + "device_keys": {remote_user_id: []}, + }, + timeout=10, + from_user_id=local_user_id, + from_device_id="some_device_id", + ) + ) + + self.assertEqual( + query_result["failures"], + {"other": {"message": "Not ready for retry", "status": 503}}, + ) + @parameterized.expand( [ # The remote homeserver's response indicates that this user has 0/1/2 devices. 
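To make the new request shape concrete, here is a minimal sketch of a Sliding Sync request body that exercises the fields defined above: the per-connection `conn_id` plus the new account_data, receipts, and typing extensions. The list name `"all-rooms"` and the `conn_id` value are illustrative placeholders, not anything mandated by the patch; the extension `lists`/`rooms` fields default to `["*"]`, matching the request models above.

```python
# Hypothetical request body for the experimental MSC3575 Sliding Sync endpoint,
# mirroring `SlidingSyncBody` and its extension models defined above.
sync_body = {
    # Identifies this connection so the server can track per-connection state;
    # only one sliding sync connection is allowed per conn_id.
    "conn_id": "example-conn",
    "lists": {
        "all-rooms": {
            "ranges": [[0, 19]],  # the first twenty rooms of the sorted list
            "required_state": [["m.room.encryption", ""]],  # [event type, state key]
            "timeline_limit": 5,
        }
    },
    "extensions": {
        # Omitting `lists`/`rooms` leaves them at their default of ["*"], i.e.
        # apply the extension to every list and every room subscription.
        "account_data": {"enabled": True},
        "receipts": {"enabled": True, "rooms": ["*"]},
        "typing": {"enabled": True, "lists": ["all-rooms"]},
    },
}
```

The `pos` the server hands back for such a request is a `SlidingSyncStreamToken`, i.e. `<connection_position>/<stream_token>` as described in the types change above, so the per-connection position travels alongside the ordinary stream token on every incremental request.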
diff --git a/tests/handlers/test_sliding_sync.py b/tests/handlers/test_sliding_sync.py index a7aa9bb8afe1..96da47f3b9c5 100644 --- a/tests/handlers/test_sliding_sync.py +++ b/tests/handlers/test_sliding_sync.py @@ -19,7 +19,7 @@ # import logging from copy import deepcopy -from typing import Dict, Optional +from typing import Dict, List, Optional from unittest.mock import patch from parameterized import parameterized @@ -35,7 +35,7 @@ RoomTypes, ) from synapse.api.room_versions import RoomVersions -from synapse.events import make_event_from_dict +from synapse.events import StrippedStateEvent, make_event_from_dict from synapse.events.snapshot import EventContext from synapse.handlers.sliding_sync import ( RoomSyncConfig, @@ -3093,6 +3093,78 @@ def _create_dm_room( return room_id + _remote_invite_count: int = 0 + + def _create_remote_invite_room_for_user( + self, + invitee_user_id: str, + unsigned_invite_room_state: Optional[List[StrippedStateEvent]], + ) -> str: + """ + Create a fake invite for a remote room and persist it. + + We don't have any state for these kind of rooms and can only rely on the + stripped state included in the unsigned portion of the invite event to identify + the room. + + Args: + invitee_user_id: The person being invited + unsigned_invite_room_state: List of stripped state events to assist the + receiver in identifying the room. + + Returns: + The room ID of the remote invite room + """ + invite_room_id = f"!test_room{self._remote_invite_count}:remote_server" + + invite_event_dict = { + "room_id": invite_room_id, + "sender": "@inviter:remote_server", + "state_key": invitee_user_id, + "depth": 1, + "origin_server_ts": 1, + "type": EventTypes.Member, + "content": {"membership": Membership.INVITE}, + "auth_events": [], + "prev_events": [], + } + if unsigned_invite_room_state is not None: + serialized_stripped_state_events = [] + for stripped_event in unsigned_invite_room_state: + serialized_stripped_state_events.append( + { + "type": stripped_event.type, + "state_key": stripped_event.state_key, + "sender": stripped_event.sender, + "content": stripped_event.content, + } + ) + + invite_event_dict["unsigned"] = { + "invite_room_state": serialized_stripped_state_events + } + + invite_event = make_event_from_dict( + invite_event_dict, + room_version=RoomVersions.V10, + ) + invite_event.internal_metadata.outlier = True + invite_event.internal_metadata.out_of_band_membership = True + + self.get_success( + self.store.maybe_store_room_on_outlier_membership( + room_id=invite_room_id, room_version=invite_event.room_version + ) + ) + context = EventContext.for_outlier(self.hs.get_storage_controllers()) + persist_controller = self.hs.get_storage_controllers().persistence + assert persist_controller is not None + self.get_success(persist_controller.persist_event(invite_event, context)) + + self._remote_invite_count += 1 + + return invite_room_id + def test_filter_dm_rooms(self) -> None: """ Test `filter.is_dm` for DM rooms @@ -3157,7 +3229,455 @@ def test_filter_encrypted_rooms(self) -> None: user1_id = self.register_user("user1", "pass") user1_tok = self.login(user1_id, "pass") - # Create a normal room + # Create an unencrypted room + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create an encrypted room + encrypted_room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + self.helper.send_state( + encrypted_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user1_tok, + ) + + after_rooms_token 
= self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + from_token=None, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + self.assertEqual(truthy_filtered_room_map.keys(), {encrypted_room_id}) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_server_left_room(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` against a room that everyone has left. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + before_rooms_token = self.event_sources.get_current_token() + + # Create an unencrypted room + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + # Leave the room + self.helper.leave(room_id, user1_id, tok=user1_tok) + + # Create an encrypted room + encrypted_room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + self.helper.send_state( + encrypted_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user1_tok, + ) + # Leave the room + self.helper.leave(encrypted_room_id, user1_id, tok=user1_tok) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + # We're using a `from_token` so that the room is considered `newly_left` and + # appears in our list of relevant sync rooms + from_token=before_rooms_token, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + self.assertEqual(truthy_filtered_room_map.keys(), {encrypted_room_id}) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_server_left_room2(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` against a room that everyone has + left. + + There is still someone local who is invited to the rooms but that doesn't affect + whether the server is participating in the room (users need to be joined). 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + _user2_tok = self.login(user2_id, "pass") + + before_rooms_token = self.event_sources.get_current_token() + + # Create an unencrypted room + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + # Invite user2 + self.helper.invite(room_id, targ=user2_id, tok=user1_tok) + # User1 leaves the room + self.helper.leave(room_id, user1_id, tok=user1_tok) + + # Create an encrypted room + encrypted_room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + self.helper.send_state( + encrypted_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user1_tok, + ) + # Invite user2 + self.helper.invite(encrypted_room_id, targ=user2_id, tok=user1_tok) + # User1 leaves the room + self.helper.leave(encrypted_room_id, user1_id, tok=user1_tok) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + # We're using a `from_token` so that the room is considered `newly_left` and + # appears in our list of relevant sync rooms + from_token=before_rooms_token, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + self.assertEqual(truthy_filtered_room_map.keys(), {encrypted_room_id}) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_after_we_left(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` against a room that was encrypted + after we left the room (make sure we don't just use the current state) + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + before_rooms_token = self.event_sources.get_current_token() + + # Create an unencrypted room + room_id = self.helper.create_room_as(user2_id, tok=user2_tok) + # Leave the room + self.helper.join(room_id, user1_id, tok=user1_tok) + self.helper.leave(room_id, user1_id, tok=user1_tok) + + # Create a room that will be encrypted + encrypted_after_we_left_room_id = self.helper.create_room_as( + user2_id, tok=user2_tok + ) + # Leave the room + self.helper.join(encrypted_after_we_left_room_id, user1_id, tok=user1_tok) + self.helper.leave(encrypted_after_we_left_room_id, user1_id, tok=user1_tok) + + # Encrypt the room after we've left + self.helper.send_state( + encrypted_after_we_left_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user2_tok, + ) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + # We're using a `from_token` so that the room is considered `newly_left` and + # appears 
in our list of relevant sync rooms + from_token=before_rooms_token, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + # Even though we left the room before it was encrypted, we still see it because + # someone else on our server is still participating in the room and we "leak" + # the current state to the left user. But we consider the room encryption status + # to not be a secret given it's often set at the start of the room and it's one + # of the stripped state events that is normally handed out. + self.assertEqual( + truthy_filtered_room_map.keys(), {encrypted_after_we_left_room_id} + ) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + # Even though we left the room before it was encrypted... (see comment above) + self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_with_remote_invite_room_no_stripped_state(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` filter against a remote invite + room without any `unsigned.invite_room_state` (stripped state). + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room without any `unsigned.invite_room_state` + _remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, None + ) + + # Create an unencrypted room + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create an encrypted room + encrypted_room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + self.helper.send_state( + encrypted_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user1_tok, + ) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + from_token=None, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear because we can't figure out whether + # it is encrypted or not (no stripped state, `unsigned.invite_room_state`). + self.assertEqual(truthy_filtered_room_map.keys(), {encrypted_room_id}) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear because we can't figure out whether + # it is encrypted or not (no stripped state, `unsigned.invite_room_state`). 
+ self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_with_remote_invite_encrypted_room(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` filter against a remote invite + encrypted room with some `unsigned.invite_room_state` (stripped state). + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room with some `unsigned.invite_room_state` + # indicating that the room is encrypted. + remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, + [ + StrippedStateEvent( + type=EventTypes.Create, + state_key="", + sender="@inviter:remote_server", + content={ + EventContentFields.ROOM_CREATOR: "@inviter:remote_server", + EventContentFields.ROOM_VERSION: RoomVersions.V10.identifier, + }, + ), + StrippedStateEvent( + type=EventTypes.RoomEncryption, + state_key="", + sender="@inviter:remote_server", + content={ + EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2", + }, + ), + ], + ) + + # Create an unencrypted room + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create an encrypted room + encrypted_room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + self.helper.send_state( + encrypted_room_id, + EventTypes.RoomEncryption, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, + tok=user1_tok, + ) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + from_token=None, + to_token=after_rooms_token, + ) + + # Try with `is_encrypted=True` + truthy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=True, + ), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should appear here because it is encrypted + # according to the stripped state + self.assertEqual( + truthy_filtered_room_map.keys(), {encrypted_room_id, remote_invite_room_id} + ) + + # Try with `is_encrypted=False` + falsy_filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters( + is_encrypted=False, + ), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear here because it is encrypted + # according to the stripped state + self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + + def test_filter_encrypted_with_remote_invite_unencrypted_room(self) -> None: + """ + Test that we can apply a `filter.is_encrypted` filter against a remote invite + unencrypted room with some `unsigned.invite_room_state` (stripped state). + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room with some `unsigned.invite_room_state` + # but don't set any room encryption event. 
+ remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, + [ + StrippedStateEvent( + type=EventTypes.Create, + state_key="", + sender="@inviter:remote_server", + content={ + EventContentFields.ROOM_CREATOR: "@inviter:remote_server", + EventContentFields.ROOM_VERSION: RoomVersions.V10.identifier, + }, + ), + # No room encryption event + ], + ) + + # Create an unencrypted room room_id = self.helper.create_room_as(user1_id, tok=user1_tok) # Create an encrypted room @@ -3165,7 +3685,7 @@ def test_filter_encrypted_rooms(self) -> None: self.helper.send_state( encrypted_room_id, EventTypes.RoomEncryption, - {"algorithm": "m.megolm.v1.aes-sha2"}, + {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"}, tok=user1_tok, ) @@ -3190,6 +3710,8 @@ def test_filter_encrypted_rooms(self) -> None: ) ) + # `remote_invite_room_id` should not appear here because it is unencrypted + # according to the stripped state self.assertEqual(truthy_filtered_room_map.keys(), {encrypted_room_id}) # Try with `is_encrypted=False` @@ -3204,7 +3726,11 @@ def test_filter_encrypted_rooms(self) -> None: ) ) - self.assertEqual(falsy_filtered_room_map.keys(), {room_id}) + # `remote_invite_room_id` should appear because it is unencrypted according to + # the stripped state + self.assertEqual( + falsy_filtered_room_map.keys(), {room_id, remote_invite_room_id} + ) def test_filter_invite_rooms(self) -> None: """ @@ -3461,48 +3987,236 @@ def test_filter_not_room_types(self) -> None: self.assertEqual(filtered_room_map.keys(), {space_room_id}) - def test_filter_room_types_with_invite_remote_room(self) -> None: - """Test that we can apply a room type filter, even if we have an invite - for a remote room. + def test_filter_room_types_server_left_room(self) -> None: + """ + Test that we can apply a `filter.room_types` against a room that everyone has left. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + before_rooms_token = self.event_sources.get_current_token() + + # Create a normal room (no room type) + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + # Leave the room + self.helper.leave(room_id, user1_id, tok=user1_tok) + + # Create a space room + space_room_id = self.helper.create_room_as( + user1_id, + tok=user1_tok, + extra_content={ + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE} + }, + ) + # Leave the room + self.helper.leave(space_room_id, user1_id, tok=user1_tok) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + # We're using a `from_token` so that the room is considered `newly_left` and + # appears in our list of relevant sync rooms + from_token=before_rooms_token, + to_token=after_rooms_token, + ) + + # Try finding only normal rooms + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]), + after_rooms_token, + ) + ) + + self.assertEqual(filtered_room_map.keys(), {room_id}) + + # Try finding only spaces + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]), + after_rooms_token, + ) + ) + + self.assertEqual(filtered_room_map.keys(), {space_room_id}) - This is a regression test. + def test_filter_room_types_server_left_room2(self) -> None: """ + Test that we can apply a `filter.room_types` against a room that everyone has left. + There is still someone local who is invited to the rooms but that doesn't affect + whether the server is participating in the room (users need to be joined). + """ user1_id = self.register_user("user1", "pass") user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + _user2_tok = self.login(user2_id, "pass") - # Create a fake remote invite and persist it. 
- invite_room_id = "!some:room" - invite_event = make_event_from_dict( - { - "room_id": invite_room_id, - "sender": "@user:test.serv", - "state_key": user1_id, - "depth": 1, - "origin_server_ts": 1, - "type": EventTypes.Member, - "content": {"membership": Membership.INVITE}, - "auth_events": [], - "prev_events": [], + before_rooms_token = self.event_sources.get_current_token() + + # Create a normal room (no room type) + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + # Invite user2 + self.helper.invite(room_id, targ=user2_id, tok=user1_tok) + # User1 leaves the room + self.helper.leave(room_id, user1_id, tok=user1_tok) + + # Create a space room + space_room_id = self.helper.create_room_as( + user1_id, + tok=user1_tok, + extra_content={ + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE} }, - room_version=RoomVersions.V10, ) - invite_event.internal_metadata.outlier = True - invite_event.internal_metadata.out_of_band_membership = True + # Invite user2 + self.helper.invite(space_room_id, targ=user2_id, tok=user1_tok) + # User1 leaves the room + self.helper.leave(space_room_id, user1_id, tok=user1_tok) - self.get_success( - self.store.maybe_store_room_on_outlier_membership( - room_id=invite_room_id, room_version=invite_event.room_version + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + # We're using a `from_token` so that the room is considered `newly_left` and + # appears in our list of relevant sync rooms + from_token=before_rooms_token, + to_token=after_rooms_token, + ) + + # Try finding only normal rooms + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]), + after_rooms_token, ) ) - context = EventContext.for_outlier(self.hs.get_storage_controllers()) - persist_controller = self.hs.get_storage_controllers().persistence - assert persist_controller is not None - self.get_success(persist_controller.persist_event(invite_event, context)) + + self.assertEqual(filtered_room_map.keys(), {room_id}) + + # Try finding only spaces + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]), + after_rooms_token, + ) + ) + + self.assertEqual(filtered_room_map.keys(), {space_room_id}) + + def test_filter_room_types_with_remote_invite_room_no_stripped_state(self) -> None: + """ + Test that we can apply a `filter.room_types` filter against a remote invite + room without any `unsigned.invite_room_state` (stripped state). 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room without any `unsigned.invite_room_state` + _remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, None + ) + + # Create a normal room (no room type) + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create a space room + space_room_id = self.helper.create_room_as( + user1_id, + tok=user1_tok, + extra_content={ + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE} + }, + ) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + from_token=None, + to_token=after_rooms_token, + ) + + # Try finding only normal rooms + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear because we can't figure out what + # room type it is (no stripped state, `unsigned.invite_room_state`) + self.assertEqual(filtered_room_map.keys(), {room_id}) + + # Try finding only spaces + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear because we can't figure out what + # room type it is (no stripped state, `unsigned.invite_room_state`) + self.assertEqual(filtered_room_map.keys(), {space_room_id}) + + def test_filter_room_types_with_remote_invite_space(self) -> None: + """ + Test that we can apply a `filter.room_types` filter against a remote invite + to a space room with some `unsigned.invite_room_state` (stripped state). 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room with some `unsigned.invite_room_state` indicating + # that it is a space room + remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, + [ + StrippedStateEvent( + type=EventTypes.Create, + state_key="", + sender="@inviter:remote_server", + content={ + EventContentFields.ROOM_CREATOR: "@inviter:remote_server", + EventContentFields.ROOM_VERSION: RoomVersions.V10.identifier, + # Specify that it is a space room + EventContentFields.ROOM_TYPE: RoomTypes.SPACE, + }, + ), + ], + ) # Create a normal room (no room type) room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + # Create a space room + space_room_id = self.helper.create_room_as( + user1_id, + tok=user1_tok, + extra_content={ + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE} + }, + ) + after_rooms_token = self.event_sources.get_current_token() # Get the rooms the user should be syncing with @@ -3512,18 +4226,110 @@ def test_filter_room_types_with_invite_remote_room(self) -> None: to_token=after_rooms_token, ) + # Try finding only normal rooms filtered_room_map = self.get_success( self.sliding_sync_handler.filter_rooms( UserID.from_string(user1_id), sync_room_map, - SlidingSyncConfig.SlidingSyncList.Filters( - room_types=[None, RoomTypes.SPACE], + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should not appear here because it is a space room + # according to the stripped state + self.assertEqual(filtered_room_map.keys(), {room_id}) + + # Try finding only spaces + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should appear here because it is a space room + # according to the stripped state + self.assertEqual( + filtered_room_map.keys(), {space_room_id, remote_invite_room_id} + ) + + def test_filter_room_types_with_remote_invite_normal_room(self) -> None: + """ + Test that we can apply a `filter.room_types` filter against a remote invite + to a normal room with some `unsigned.invite_room_state` (stripped state). 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote invite room with some `unsigned.invite_room_state` + # but the create event does not specify a room type (normal room) + remote_invite_room_id = self._create_remote_invite_room_for_user( + user1_id, + [ + StrippedStateEvent( + type=EventTypes.Create, + state_key="", + sender="@inviter:remote_server", + content={ + EventContentFields.ROOM_CREATOR: "@inviter:remote_server", + EventContentFields.ROOM_VERSION: RoomVersions.V10.identifier, + # No room type means this is a normal room + }, ), + ], + ) + + # Create a normal room (no room type) + room_id = self.helper.create_room_as(user1_id, tok=user1_tok) + + # Create a space room + space_room_id = self.helper.create_room_as( + user1_id, + tok=user1_tok, + extra_content={ + "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE} + }, + ) + + after_rooms_token = self.event_sources.get_current_token() + + # Get the rooms the user should be syncing with + sync_room_map = self._get_sync_room_ids_for_user( + UserID.from_string(user1_id), + from_token=None, + to_token=after_rooms_token, + ) + + # Try finding only normal rooms + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]), + after_rooms_token, + ) + ) + + # `remote_invite_room_id` should appear here because it is a normal room + # according to the stripped state (no room type) + self.assertEqual(filtered_room_map.keys(), {room_id, remote_invite_room_id}) + + # Try finding only spaces + filtered_room_map = self.get_success( + self.sliding_sync_handler.filter_rooms( + UserID.from_string(user1_id), + sync_room_map, + SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]), after_rooms_token, ) ) - self.assertEqual(filtered_room_map.keys(), {room_id, invite_room_id}) + # `remote_invite_room_id` should not appear here because it is a normal room + # according to the stripped state (no room type) + self.assertEqual(filtered_room_map.keys(), {space_room_id}) class SortRoomsTestCase(HomeserverTestCase): diff --git a/tests/rest/client/sliding_sync/__init__.py b/tests/rest/client/sliding_sync/__init__.py new file mode 100644 index 000000000000..c4de9d53e24f --- /dev/null +++ b/tests/rest/client/sliding_sync/__init__.py @@ -0,0 +1,13 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# diff --git a/tests/rest/client/sliding_sync/test_connection_tracking.py b/tests/rest/client/sliding_sync/test_connection_tracking.py new file mode 100644 index 000000000000..4d8866b30af3 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_connection_tracking.py @@ -0,0 +1,453 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. 
+# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging + +from parameterized import parameterized + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import EventTypes +from synapse.rest.client import login, room, sync +from synapse.server import HomeServer +from synapse.types import SlidingSyncStreamToken +from synapse.types.handlers import SlidingSyncConfig +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase + +logger = logging.getLogger(__name__) + + +class SlidingSyncConnectionTrackingTestCase(SlidingSyncBase): + """ + Test connection tracking in the Sliding Sync API. + """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + + def test_rooms_required_state_incremental_sync_LIVE(self) -> None: + """Test that we only get state updates in incremental sync for rooms + we've already seen (LIVE). + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.RoomHistoryVisibility, ""], + # This one doesn't exist in the room + [EventTypes.Name, ""], + ], + "timeline_limit": 0, + } + } + } + + response_body, from_token = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + state_map[(EventTypes.RoomHistoryVisibility, "")], + }, + exact=True, + ) + + # Send a state event + self.helper.send_state( + room_id1, EventTypes.Name, body={"name": "foo"}, tok=user2_tok + ) + + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self.assertNotIn("initial", response_body["rooms"][room_id1]) + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Name, "")], + }, + exact=True, + ) + + @parameterized.expand([(False,), (True,)]) + def test_rooms_timeline_incremental_sync_PREVIOUSLY(self, limited: bool) -> None: + """ + Test getting room data where we have previously sent down the room, but + we missed sending down some timeline events previously and so its status + is considered PREVIOUSLY. + + There are two versions of this test, one where there are more messages + than the timeline limit, and one where there isn't. 
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok)
+        room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok)
+
+        self.helper.send(room_id1, "msg", tok=user1_tok)
+
+        timeline_limit = 5
+        conn_id = "conn_id"
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 0]],
+                    "required_state": [],
+                    "timeline_limit": timeline_limit,
+                }
+            },
+            "conn_id": "conn_id",
+        }
+
+        # The first room gets sent down the initial sync
+        response_body, initial_from_token = self.do_sync(sync_body, tok=user1_tok)
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id1}, response_body["rooms"]
+        )
+
+        # We now send down some events in room1 (depending on the test param).
+        expected_events = []  # The set of events in the timeline
+        if limited:
+            for _ in range(10):
+                resp = self.helper.send(room_id1, "msg1", tok=user1_tok)
+                expected_events.append(resp["event_id"])
+        else:
+            resp = self.helper.send(room_id1, "msg1", tok=user1_tok)
+            expected_events.append(resp["event_id"])
+
+        # A second message happens in the other room, so room1 won't get sent down.
+        self.helper.send(room_id2, "msg", tok=user1_tok)
+
+        # Only the second room gets sent down sync.
+        response_body, from_token = self.do_sync(
+            sync_body, since=initial_from_token, tok=user1_tok
+        )
+
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id2}, response_body["rooms"]
+        )
+
+        # FIXME: This is a hack to record that the first room wasn't sent down
+        # sync, as we don't implement that currently.
+        sliding_sync_handler = self.hs.get_sliding_sync_handler()
+        requester = self.get_success(
+            self.hs.get_auth().get_user_by_access_token(user1_tok)
+        )
+        sync_config = SlidingSyncConfig(
+            user=requester.user,
+            requester=requester,
+            conn_id=conn_id,
+        )
+
+        parsed_initial_from_token = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, initial_from_token)
+        )
+        connection_position = self.get_success(
+            sliding_sync_handler.connection_store.record_rooms(
+                sync_config,
+                parsed_initial_from_token,
+                sent_room_ids=[],
+                unsent_room_ids=[room_id1],
+            )
+        )
+
+        # FIXME: Now fix up `from_token` with the new connection position from above.
+        parsed_from_token = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, from_token)
+        )
+        parsed_from_token = SlidingSyncStreamToken(
+            stream_token=parsed_from_token.stream_token,
+            connection_position=connection_position,
+        )
+        from_token = self.get_success(parsed_from_token.to_string(self.store))
+
+        # We now send another event to room1, so we should sync all the missing events.
+        resp = self.helper.send(room_id1, "msg2", tok=user1_tok)
+        expected_events.append(resp["event_id"])
+
+        # This sync should contain the messages from room1 not yet sent down.
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id1}, response_body["rooms"]
+        )
+        self.assertNotIn("initial", response_body["rooms"][room_id1])
+
+        self.assertEqual(
+            [ev["event_id"] for ev in response_body["rooms"][room_id1]["timeline"]],
+            expected_events[-timeline_limit:],
+        )
+        self.assertEqual(response_body["rooms"][room_id1]["limited"], limited)
+        self.assertEqual(response_body["rooms"][room_id1].get("required_state"), None)
+
+    def test_rooms_required_state_incremental_sync_PREVIOUSLY(self) -> None:
+        """
+        Test getting room data where we have previously sent down the room, but
+        we missed sending down some state previously and so its status is
+        considered PREVIOUSLY.
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok)
+        room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok)
+
+        self.helper.send(room_id1, "msg", tok=user1_tok)
+
+        conn_id = "conn_id"
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 0]],
+                    "required_state": [
+                        [EventTypes.Create, ""],
+                        [EventTypes.RoomHistoryVisibility, ""],
+                        # This one doesn't exist in the room
+                        [EventTypes.Name, ""],
+                    ],
+                    "timeline_limit": 0,
+                }
+            },
+            "conn_id": "conn_id",
+        }
+
+        # The first room gets sent down the initial sync
+        response_body, initial_from_token = self.do_sync(sync_body, tok=user1_tok)
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id1}, response_body["rooms"]
+        )
+
+        # We now send down some state in room1
+        resp = self.helper.send_state(
+            room_id1, EventTypes.Name, {"name": "foo"}, tok=user1_tok
+        )
+        name_change_id = resp["event_id"]
+
+        # A second message happens in the other room, so room1 won't get sent down.
+        self.helper.send(room_id2, "msg", tok=user1_tok)
+
+        # Only the second room gets sent down sync.
+        response_body, from_token = self.do_sync(
+            sync_body, since=initial_from_token, tok=user1_tok
+        )
+
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id2}, response_body["rooms"]
+        )
+
+        # FIXME: This is a hack to record that the first room wasn't sent down
+        # sync, as we don't implement that currently.
+        sliding_sync_handler = self.hs.get_sliding_sync_handler()
+        requester = self.get_success(
+            self.hs.get_auth().get_user_by_access_token(user1_tok)
+        )
+        sync_config = SlidingSyncConfig(
+            user=requester.user,
+            requester=requester,
+            conn_id=conn_id,
+        )
+
+        parsed_initial_from_token = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, initial_from_token)
+        )
+        connection_position = self.get_success(
+            sliding_sync_handler.connection_store.record_rooms(
+                sync_config,
+                parsed_initial_from_token,
+                sent_room_ids=[],
+                unsent_room_ids=[room_id1],
+            )
+        )
+
+        # FIXME: Now fix up `from_token` with the new connection position from above.
+        parsed_from_token = self.get_success(
+            SlidingSyncStreamToken.from_string(self.store, from_token)
+        )
+        parsed_from_token = SlidingSyncStreamToken(
+            stream_token=parsed_from_token.stream_token,
+            connection_position=connection_position,
+        )
+        from_token = self.get_success(parsed_from_token.to_string(self.store))
+
+        # We now send another event to room1, so we should sync all the missing state.
+        self.helper.send(room_id1, "msg", tok=user1_tok)
+
+        # This sync should contain the state changes from room1.
+ response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + self.assertCountEqual( + response_body["rooms"].keys(), {room_id1}, response_body["rooms"] + ) + self.assertNotIn("initial", response_body["rooms"][room_id1]) + + # We should only see the name change. + self.assertEqual( + [ + ev["event_id"] + for ev in response_body["rooms"][room_id1]["required_state"] + ], + [name_change_id], + ) + + def test_rooms_required_state_incremental_sync_NEVER(self) -> None: + """ + Test getting `required_state` where we have NEVER sent down the room before + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok) + + self.helper.send(room_id1, "msg", tok=user1_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 0]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.RoomHistoryVisibility, ""], + # This one doesn't exist in the room + [EventTypes.Name, ""], + ], + "timeline_limit": 1, + } + }, + } + + # A message happens in the other room, so room1 won't get sent down. + self.helper.send(room_id2, "msg", tok=user1_tok) + + # Only the second room gets sent down sync. + response_body, from_token = self.do_sync(sync_body, tok=user1_tok) + + self.assertCountEqual( + response_body["rooms"].keys(), {room_id2}, response_body["rooms"] + ) + + # We now send another event to room1, so we should send down the full + # room. + self.helper.send(room_id1, "msg2", tok=user1_tok) + + # This sync should contain the messages from room1 not yet sent down. + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + self.assertCountEqual( + response_body["rooms"].keys(), {room_id1}, response_body["rooms"] + ) + + self.assertEqual(response_body["rooms"][room_id1]["initial"], True) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + state_map[(EventTypes.RoomHistoryVisibility, "")], + }, + exact=True, + ) + + def test_rooms_timeline_incremental_sync_NEVER(self) -> None: + """ + Test getting timeline room data where we have NEVER sent down the room + before + """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 0]], + "required_state": [], + "timeline_limit": 5, + } + }, + } + + expected_events = [] + for _ in range(4): + resp = self.helper.send(room_id1, "msg", tok=user1_tok) + expected_events.append(resp["event_id"]) + + # A message happens in the other room, so room1 won't get sent down. + self.helper.send(room_id2, "msg", tok=user1_tok) + + # Only the second room gets sent down sync. + response_body, from_token = self.do_sync(sync_body, tok=user1_tok) + + self.assertCountEqual( + response_body["rooms"].keys(), {room_id2}, response_body["rooms"] + ) + + # We now send another event to room1 so it comes down sync + resp = self.helper.send(room_id1, "msg2", tok=user1_tok) + expected_events.append(resp["event_id"]) + + # This sync should contain the messages from room1 not yet sent down. 
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        self.assertCountEqual(
+            response_body["rooms"].keys(), {room_id1}, response_body["rooms"]
+        )
+
+        self.assertEqual(
+            [ev["event_id"] for ev in response_body["rooms"][room_id1]["timeline"]],
+            expected_events,
+        )
+        self.assertEqual(response_body["rooms"][room_id1]["limited"], True)
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
diff --git a/tests/rest/client/sliding_sync/test_extension_account_data.py b/tests/rest/client/sliding_sync/test_extension_account_data.py
new file mode 100644
index 000000000000..3482a5f8878d
--- /dev/null
+++ b/tests/rest/client/sliding_sync/test_extension_account_data.py
@@ -0,0 +1,495 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+import logging
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.api.constants import AccountDataTypes
+from synapse.rest.client import login, room, sendtodevice, sync
+from synapse.server import HomeServer
+from synapse.types import StreamKeyType
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+from tests.server import TimedOutException
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncAccountDataExtensionTestCase(SlidingSyncBase):
+    """Tests for the account_data sliding sync extension"""
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        sync.register_servlets,
+        sendtodevice.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+        self.account_data_handler = hs.get_account_data_handler()
+
+    def test_no_data_initial_sync(self) -> None:
+        """
+        Test that enabling the account_data extension works during an initial sync,
+        even if there is no data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        # Make an initial Sliding Sync request with the account_data extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "account_data": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        self.assertIncludes(
+            {
+                global_event["type"]
+                for global_event in response_body["extensions"]["account_data"].get(
+                    "global"
+                )
+            },
+            # Even though we don't have any global account data set, Synapse saves some
+            # default push rules for us.
+            {AccountDataTypes.PUSH_RULES},
+            exact=True,
+        )
+        self.assertIncludes(
+            response_body["extensions"]["account_data"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_no_data_incremental_sync(self) -> None:
+        """
+        Test that enabling the account_data extension works during an incremental
+        sync, even if there is no data.
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + sync_body = { + "lists": {}, + "extensions": { + "account_data": { + "enabled": True, + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Make an incremental Sliding Sync request with the account_data extension enabled + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # There has been no account data changes since the `from_token` so we shouldn't + # see any account data here. + self.assertIncludes( + { + global_event["type"] + for global_event in response_body["extensions"]["account_data"].get( + "global" + ) + }, + set(), + exact=True, + ) + self.assertIncludes( + response_body["extensions"]["account_data"].get("rooms").keys(), + set(), + exact=True, + ) + + def test_global_account_data_initial_sync(self) -> None: + """ + On initial sync, we should return all global account data on initial sync. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Update the global account data + self.get_success( + self.account_data_handler.add_account_data_for_user( + user_id=user1_id, + account_data_type="org.matrix.foobarbaz", + content={"foo": "bar"}, + ) + ) + + # Make an initial Sliding Sync request with the account_data extension enabled + sync_body = { + "lists": {}, + "extensions": { + "account_data": { + "enabled": True, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # It should show us all of the global account data + self.assertIncludes( + { + global_event["type"] + for global_event in response_body["extensions"]["account_data"].get( + "global" + ) + }, + {AccountDataTypes.PUSH_RULES, "org.matrix.foobarbaz"}, + exact=True, + ) + self.assertIncludes( + response_body["extensions"]["account_data"].get("rooms").keys(), + set(), + exact=True, + ) + + def test_global_account_data_incremental_sync(self) -> None: + """ + On incremental sync, we should only account data that has changed since the + `from_token`. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Add some global account data + self.get_success( + self.account_data_handler.add_account_data_for_user( + user_id=user1_id, + account_data_type="org.matrix.foobarbaz", + content={"foo": "bar"}, + ) + ) + + sync_body = { + "lists": {}, + "extensions": { + "account_data": { + "enabled": True, + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Add some other global account data + self.get_success( + self.account_data_handler.add_account_data_for_user( + user_id=user1_id, + account_data_type="org.matrix.doodardaz", + content={"doo": "dar"}, + ) + ) + + # Make an incremental Sliding Sync request with the account_data extension enabled + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + self.assertIncludes( + { + global_event["type"] + for global_event in response_body["extensions"]["account_data"].get( + "global" + ) + }, + # We should only see the new global account data that happened after the `from_token` + {"org.matrix.doodardaz"}, + exact=True, + ) + self.assertIncludes( + response_body["extensions"]["account_data"].get("rooms").keys(), + set(), + exact=True, + ) + + def test_room_account_data_initial_sync(self) -> None: + """ + On initial sync, we return all account data for a given room but only for + rooms that we request and are being returned in the Sliding Sync response. 
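+
+        (Sketch of the assumed response shape: room account data comes back under
+        `extensions.account_data.rooms`, keyed by room ID, e.g.
+        `{"rooms": {"!room:id": [{"type": "...", "content": {...}}]}}`.)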
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a room and add some room account data + room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok) + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id1, + account_data_type="org.matrix.roorarraz", + content={"roo": "rar"}, + ) + ) + + # Create another room with some room account data + room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok) + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id2, + account_data_type="org.matrix.roorarraz", + content={"roo": "rar"}, + ) + ) + + # Make an initial Sliding Sync request with the account_data extension enabled + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id1: { + "required_state": [], + "timeline_limit": 0, + } + }, + "extensions": { + "account_data": { + "enabled": True, + "rooms": [room_id1, room_id2], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + self.assertIsNotNone(response_body["extensions"]["account_data"].get("global")) + # Even though we requested room2, we only expect room1 to show up because that's + # the only room in the Sliding Sync response (room2 is not one of our room + # subscriptions or in a sliding window list). + self.assertIncludes( + response_body["extensions"]["account_data"].get("rooms").keys(), + {room_id1}, + exact=True, + ) + self.assertIncludes( + { + event["type"] + for event in response_body["extensions"]["account_data"] + .get("rooms") + .get(room_id1) + }, + {"org.matrix.roorarraz"}, + exact=True, + ) + + def test_room_account_data_incremental_sync(self) -> None: + """ + On incremental sync, we return all account data for a given room but only for + rooms that we request and are being returned in the Sliding Sync response. 
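+
+        (An account data change alone is assumed not to pull a room into the
+        response; the room must already be in a sliding window list or one of
+        our `room_subscriptions`, which is what the assertions below exercise.)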
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a room and add some room account data + room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok) + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id1, + account_data_type="org.matrix.roorarraz", + content={"roo": "rar"}, + ) + ) + + # Create another room with some room account data + room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok) + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id2, + account_data_type="org.matrix.roorarraz", + content={"roo": "rar"}, + ) + ) + + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id1: { + "required_state": [], + "timeline_limit": 0, + } + }, + "extensions": { + "account_data": { + "enabled": True, + "rooms": [room_id1, room_id2], + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Add some other room account data + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id1, + account_data_type="org.matrix.roorarraz2", + content={"roo": "rar"}, + ) + ) + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id2, + account_data_type="org.matrix.roorarraz2", + content={"roo": "rar"}, + ) + ) + + # Make an incremental Sliding Sync request with the account_data extension enabled + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + self.assertIsNotNone(response_body["extensions"]["account_data"].get("global")) + # Even though we requested room2, we only expect room1 to show up because that's + # the only room in the Sliding Sync response (room2 is not one of our room + # subscriptions or in a sliding window list). + self.assertIncludes( + response_body["extensions"]["account_data"].get("rooms").keys(), + {room_id1}, + exact=True, + ) + # We should only see the new room account data that happened after the `from_token` + self.assertIncludes( + { + event["type"] + for event in response_body["extensions"]["account_data"] + .get("rooms") + .get(room_id1) + }, + {"org.matrix.roorarraz2"}, + exact=True, + ) + + def test_wait_for_new_data(self) -> None: + """ + Test to make sure that the Sliding Sync request waits for new data to arrive. 
+
+        (Only applies to incremental syncs with a `timeout` specified)
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "account_data": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make an incremental Sliding Sync request with the account_data extension enabled
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Bump the global account data to trigger new results
+        self.get_success(
+            self.account_data_handler.add_account_data_for_user(
+                user1_id,
+                "org.matrix.foobarbaz",
+                {"foo": "bar"},
+            )
+        )
+        # Should respond before the 10 second timeout
+        channel.await_result(timeout_ms=3000)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # We should see the global account data update
+        self.assertIncludes(
+            {
+                global_event["type"]
+                for global_event in channel.json_body["extensions"]["account_data"].get(
+                    "global"
+                )
+            },
+            {"org.matrix.foobarbaz"},
+            exact=True,
+        )
+        self.assertIncludes(
+            channel.json_body["extensions"]["account_data"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_wait_for_new_data_timeout(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive but
+        no data ever arrives, so we time out. We're also making sure that the default
+        data from the account_data extension doesn't trigger a false positive for new
+        data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "account_data": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Wake up `notifier.wait_for_events(...)`, which will cause us to test
+        # `SlidingSyncResult.__bool__` for new results.
+        self._bump_notifier_wait_for_events(
+            user1_id,
+            # We choose `StreamKeyType.PRESENCE` because we're testing for account data
+            # and don't want to contaminate the account data results using
+            # `StreamKeyType.ACCOUNT_DATA`.
+            wake_stream_key=StreamKeyType.PRESENCE,
+        )
+        # Block for a little bit more to ensure we don't see any new results.
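+        # (The presence poke above is assumed to wake the notifier without
+        # producing any account data changes, so the request should keep waiting.)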
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=4000)
+        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
+        # 5000 + 4000 + 1200 > 10000)
+        channel.await_result(timeout_ms=1200)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        self.assertIsNotNone(
+            channel.json_body["extensions"]["account_data"].get("global")
+        )
+        self.assertIsNotNone(
+            channel.json_body["extensions"]["account_data"].get("rooms")
+        )
diff --git a/tests/rest/client/sliding_sync/test_extension_e2ee.py b/tests/rest/client/sliding_sync/test_extension_e2ee.py
new file mode 100644
index 000000000000..320f8c788f40
--- /dev/null
+++ b/tests/rest/client/sliding_sync/test_extension_e2ee.py
@@ -0,0 +1,441 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+import logging
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.rest.client import devices, login, room, sync
+from synapse.server import HomeServer
+from synapse.types import JsonDict, StreamKeyType
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+from tests.server import TimedOutException
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncE2eeExtensionTestCase(SlidingSyncBase):
+    """Tests for the e2ee sliding sync extension"""
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        sync.register_servlets,
+        devices.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+        self.e2e_keys_handler = hs.get_e2e_keys_handler()
+
+    def test_no_data_initial_sync(self) -> None:
+        """
+        Test that enabling the e2ee extension works during an initial sync, even if
+        there is no data
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        # Make an initial Sliding Sync request with the e2ee extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Device list updates are only present for incremental syncs
+        self.assertIsNone(response_body["extensions"]["e2ee"].get("device_lists"))
+
+        # Both of these should be present even when empty
+        self.assertEqual(
+            response_body["extensions"]["e2ee"]["device_one_time_keys_count"],
+            {
+                # This is always present because of
+                # https://github.com/element-hq/element-android/issues/3725 and
+                # https://github.com/matrix-org/synapse/issues/10456
+                "signed_curve25519": 0
+            },
+        )
+        self.assertEqual(
+            response_body["extensions"]["e2ee"]["device_unused_fallback_key_types"],
+            [],
+        )
+
+    def test_no_data_incremental_sync(self) -> None:
+        """
+        Test that enabling the e2ee extension works during an incremental sync, even
+        if there is no data
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make an incremental Sliding Sync request with the e2ee extension enabled
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # Device list shows up for incremental syncs
+        self.assertEqual(
+            response_body["extensions"]["e2ee"].get("device_lists", {}).get("changed"),
+            [],
+        )
+        self.assertEqual(
+            response_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
+            [],
+        )
+
+        # Both of these should be present even when empty
+        self.assertEqual(
+            response_body["extensions"]["e2ee"]["device_one_time_keys_count"],
+            {
+                # Note that "signed_curve25519" is always returned in key count responses
+                # regardless of whether we uploaded any keys for it. This is necessary until
+                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
+                #
+                # Also related:
+                # https://github.com/element-hq/element-android/issues/3725 and
+                # https://github.com/matrix-org/synapse/issues/10456
+                "signed_curve25519": 0
+            },
+        )
+        self.assertEqual(
+            response_body["extensions"]["e2ee"]["device_unused_fallback_key_types"],
+            [],
+        )
+
+    def test_wait_for_new_data(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive.
+
+        (Only applies to incremental syncs with a `timeout` specified)
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        test_device_id = "TESTDEVICE"
+        user3_id = self.register_user("user3", "pass")
+        user3_tok = self.login(user3_id, "pass", device_id=test_device_id)
+
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+        self.helper.join(room_id, user3_id, tok=user3_tok)
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + "?timeout=10000" + f"&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Bump the device lists to trigger new results
+        # Have user3 update their device list
+        device_update_channel = self.make_request(
+            "PUT",
+            f"/devices/{test_device_id}",
+            {
+                "display_name": "New Device Name",
+            },
+            access_token=user3_tok,
+        )
+        self.assertEqual(
+            device_update_channel.code, 200, device_update_channel.json_body
+        )
+        # Should respond before the 10 second timeout
+        channel.await_result(timeout_ms=3000)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # We should see the device list update
+        self.assertEqual(
+            channel.json_body["extensions"]["e2ee"]
+            .get("device_lists", {})
+            .get("changed"),
+            [user3_id],
+        )
+        self.assertEqual(
+            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
+            [],
+        )
+
+    def test_wait_for_new_data_timeout(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive but
+        no data ever arrives, so we time out. We're also making sure that the default
+        data from the E2EE extension doesn't trigger a false positive for new data (see
+        `device_one_time_keys_count.signed_curve25519`).
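+
+        (The `signed_curve25519: 0` count is assumed to always be present in the
+        response, so the notifier must not treat it as a new result.)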
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Wake up `notifier.wait_for_events(...)`, which will cause us to test
+        # `SlidingSyncResult.__bool__` for new results.
+        self._bump_notifier_wait_for_events(
+            user1_id, wake_stream_key=StreamKeyType.ACCOUNT_DATA
+        )
+        # Block for a little bit more to ensure we don't see any new results.
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=4000)
+        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
+        # 5000 + 4000 + 1200 > 10000)
+        channel.await_result(timeout_ms=1200)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # Device lists are present for incremental syncs but empty because no device changes
+        self.assertEqual(
+            channel.json_body["extensions"]["e2ee"]
+            .get("device_lists", {})
+            .get("changed"),
+            [],
+        )
+        self.assertEqual(
+            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
+            [],
+        )
+
+        # Both of these should be present even when empty
+        self.assertEqual(
+            channel.json_body["extensions"]["e2ee"]["device_one_time_keys_count"],
+            {
+                # Note that "signed_curve25519" is always returned in key count responses
+                # regardless of whether we uploaded any keys for it. This is necessary until
+                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
+ # + # Also related: + # https://github.com/element-hq/element-android/issues/3725 and + # https://github.com/matrix-org/synapse/issues/10456 + "signed_curve25519": 0 + }, + ) + self.assertEqual( + channel.json_body["extensions"]["e2ee"]["device_unused_fallback_key_types"], + [], + ) + + def test_device_lists(self) -> None: + """ + Test that device list updates are included in the response + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + test_device_id = "TESTDEVICE" + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass", device_id=test_device_id) + + user4_id = self.register_user("user4", "pass") + user4_tok = self.login(user4_id, "pass") + + room_id = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id, user1_id, tok=user1_tok) + self.helper.join(room_id, user3_id, tok=user3_tok) + self.helper.join(room_id, user4_id, tok=user4_tok) + + sync_body = { + "lists": {}, + "extensions": { + "e2ee": { + "enabled": True, + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Have user3 update their device list + channel = self.make_request( + "PUT", + f"/devices/{test_device_id}", + { + "display_name": "New Device Name", + }, + access_token=user3_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # User4 leaves the room + self.helper.leave(room_id, user4_id, tok=user4_tok) + + # Make an incremental Sliding Sync request with the e2ee extension enabled + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # Device list updates show up + self.assertEqual( + response_body["extensions"]["e2ee"].get("device_lists", {}).get("changed"), + [user3_id], + ) + self.assertEqual( + response_body["extensions"]["e2ee"].get("device_lists", {}).get("left"), + [user4_id], + ) + + def test_device_one_time_keys_count(self) -> None: + """ + Test that `device_one_time_keys_count` are included in the response + """ + test_device_id = "TESTDEVICE" + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass", device_id=test_device_id) + + # Upload one time keys for the user/device + keys: JsonDict = { + "alg1:k1": "key1", + "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}}, + "alg2:k3": {"key": "key3"}, + } + upload_keys_response = self.get_success( + self.e2e_keys_handler.upload_keys_for_user( + user1_id, test_device_id, {"one_time_keys": keys} + ) + ) + self.assertDictEqual( + upload_keys_response, + { + "one_time_key_counts": { + "alg1": 1, + "alg2": 2, + # Note that "signed_curve25519" is always returned in key count responses + # regardless of whether we uploaded any keys for it. This is necessary until + # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed. 
+                    #
+                    # Also related:
+                    # https://github.com/element-hq/element-android/issues/3725 and
+                    # https://github.com/matrix-org/synapse/issues/10456
+                    "signed_curve25519": 0,
+                }
+            },
+        )
+
+        # Make a Sliding Sync request with the e2ee extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Check for those one time key counts
+        self.assertEqual(
+            response_body["extensions"]["e2ee"].get("device_one_time_keys_count"),
+            {
+                "alg1": 1,
+                "alg2": 2,
+                # Note that "signed_curve25519" is always returned in key count responses
+                # regardless of whether we uploaded any keys for it. This is necessary until
+                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
+                #
+                # Also related:
+                # https://github.com/element-hq/element-android/issues/3725 and
+                # https://github.com/matrix-org/synapse/issues/10456
+                "signed_curve25519": 0,
+            },
+        )
+
+    def test_device_unused_fallback_key_types(self) -> None:
+        """
+        Test that `device_unused_fallback_key_types` are included in the response
+        """
+        test_device_id = "TESTDEVICE"
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass", device_id=test_device_id)
+
+        # We shouldn't have any unused fallback keys yet
+        res = self.get_success(
+            self.store.get_e2e_unused_fallback_key_types(user1_id, test_device_id)
+        )
+        self.assertEqual(res, [])
+
+        # Upload a fallback key for the user/device
+        self.get_success(
+            self.e2e_keys_handler.upload_keys_for_user(
+                user1_id,
+                test_device_id,
+                {"fallback_keys": {"alg1:k1": "fallback_key1"}},
+            )
+        )
+        # We should now have an unused alg1 key
+        fallback_res = self.get_success(
+            self.store.get_e2e_unused_fallback_key_types(user1_id, test_device_id)
+        )
+        self.assertEqual(fallback_res, ["alg1"], fallback_res)
+
+        # Make a Sliding Sync request with the e2ee extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "e2ee": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Check for the unused fallback key types
+        self.assertListEqual(
+            response_body["extensions"]["e2ee"].get("device_unused_fallback_key_types"),
+            ["alg1"],
+        )
diff --git a/tests/rest/client/sliding_sync/test_extension_receipts.py b/tests/rest/client/sliding_sync/test_extension_receipts.py
new file mode 100644
index 000000000000..65fbac260ef4
--- /dev/null
+++ b/tests/rest/client/sliding_sync/test_extension_receipts.py
@@ -0,0 +1,679 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+import logging
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.api.constants import EduTypes, ReceiptTypes
+from synapse.rest.client import login, receipts, room, sync
+from synapse.server import HomeServer
+from synapse.types import StreamKeyType
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+from tests.server import TimedOutException
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncReceiptsExtensionTestCase(SlidingSyncBase):
+    """Tests for the receipts sliding sync extension"""
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        sync.register_servlets,
+        receipts.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+
+    def test_no_data_initial_sync(self) -> None:
+        """
+        Test that enabling the receipts extension works during an initial sync,
+        even if there is no data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        # Make an initial Sliding Sync request with the receipts extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "receipts": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        self.assertIncludes(
+            response_body["extensions"]["receipts"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_no_data_incremental_sync(self) -> None:
+        """
+        Test that enabling the receipts extension works during an incremental sync,
+        even if there is no data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "receipts": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make an incremental Sliding Sync request with the receipts extension enabled
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        self.assertIncludes(
+            response_body["extensions"]["receipts"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_receipts_initial_sync_with_timeline(self) -> None:
+        """
+        On initial sync, we only return receipts for events in a given room's timeline.
+
+        We also make sure that we only return receipts for rooms that we request and are
+        already being returned in the Sliding Sync response.
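+
+        (Receipts are assumed to come back under `extensions.receipts.rooms`,
+        keyed by room ID and shaped like an `m.receipt` ephemeral event; see the
+        assertions below.)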
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user3_id = self.register_user("user3", "pass")
+        user3_tok = self.login(user3_id, "pass")
+        user4_id = self.register_user("user4", "pass")
+        user4_tok = self.login(user4_id, "pass")
+
+        # Create a room
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+        self.helper.join(room_id1, user3_id, tok=user3_tok)
+        self.helper.join(room_id1, user4_id, tok=user4_tok)
+        room1_event_response1 = self.helper.send(
+            room_id1, body="new event1", tok=user2_tok
+        )
+        room1_event_response2 = self.helper.send(
+            room_id1, body="new event2", tok=user2_tok
+        )
+        # User1 reads the last event
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response2['event_id']}",
+            {},
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User2 reads the last event
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response2['event_id']}",
+            {},
+            access_token=user2_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User3 reads the first event
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response1['event_id']}",
+            {},
+            access_token=user3_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User4 privately reads the last event (make sure this doesn't leak to the other users)
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ_PRIVATE}/{room1_event_response2['event_id']}",
+            {},
+            access_token=user4_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # Create another room
+        room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id2, user1_id, tok=user1_tok)
+        self.helper.join(room_id2, user3_id, tok=user3_tok)
+        self.helper.join(room_id2, user4_id, tok=user4_tok)
+        room2_event_response1 = self.helper.send(
+            room_id2, body="new event2", tok=user2_tok
+        )
+        # User1 reads the last event
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id2}/receipt/{ReceiptTypes.READ}/{room2_event_response1['event_id']}",
+            {},
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User2 reads the last event
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id2}/receipt/{ReceiptTypes.READ}/{room2_event_response1['event_id']}",
+            {},
+            access_token=user2_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User4 privately reads the last event (make sure this doesn't leak to the other users)
+        channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id2}/receipt/{ReceiptTypes.READ_PRIVATE}/{room2_event_response1['event_id']}",
+            {},
+            access_token=user4_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # Make an initial Sliding Sync request with the receipts extension enabled
+        sync_body = {
+            "lists": {},
+            "room_subscriptions": {
+                room_id1: {
+                    "required_state": [],
+                    # On initial sync, we only have receipts for events in the timeline
+                    "timeline_limit": 1,
+                }
+            },
+            "extensions": {
+                "receipts": {
+                    "enabled": True,
+                    "rooms": [room_id1, room_id2],
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Only the latest event in the room is in the timeline because the `timeline_limit` is 1
+        self.assertIncludes(
+            {
+                event["event_id"]
+                for event in response_body["rooms"][room_id1].get("timeline", [])
+            },
+            {room1_event_response2["event_id"]},
+            exact=True,
+            message=str(response_body["rooms"][room_id1]),
+        )
+
+        # Even though we requested room2, we only expect room1 to show up because that's
+        # the only room in the Sliding Sync response (room2 is not one of our room
+        # subscriptions or in a sliding window list).
+        self.assertIncludes(
+            response_body["extensions"]["receipts"].get("rooms").keys(),
+            {room_id1},
+            exact=True,
+        )
+        # Sanity check that it's the correct ephemeral event type
+        self.assertEqual(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["type"],
+            EduTypes.RECEIPT,
+        )
+        # We can see user1 and user2 read receipts
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][
+                room1_event_response2["event_id"]
+            ][ReceiptTypes.READ].keys(),
+            {user1_id, user2_id},
+            exact=True,
+        )
+        # User1 did not have a private read receipt and we shouldn't leak others'
+        # private read receipts
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][
+                room1_event_response2["event_id"]
+            ]
+            .get(ReceiptTypes.READ_PRIVATE, {})
+            .keys(),
+            set(),
+            exact=True,
+        )
+
+        # We shouldn't see receipts for event1 since it wasn't in the timeline and this is an initial sync
+        self.assertIsNone(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["content"].get(
+                room1_event_response1["event_id"]
+            )
+        )
+
+    def test_receipts_incremental_sync(self) -> None:
+        """
+        On incremental sync, we return all receipts in the token range for a given room
+        but only for rooms that we request and are being returned in the Sliding Sync
+        response.
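+
+        (A receipt-only change is assumed to surface a room under
+        `extensions.receipts.rooms` without pulling it into the top-level
+        `rooms` section, as asserted below.)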
+ """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + + # Create room1 + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + room1_event_response1 = self.helper.send( + room_id1, body="new event2", tok=user2_tok + ) + # User2 reads the last event (before the `from_token`) + channel = self.make_request( + "POST", + f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response1['event_id']}", + {}, + access_token=user2_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Create room2 + room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id2, user1_id, tok=user1_tok) + room2_event_response1 = self.helper.send( + room_id2, body="new event2", tok=user2_tok + ) + # User1 reads the last event (before the `from_token`) + channel = self.make_request( + "POST", + f"/rooms/{room_id2}/receipt/{ReceiptTypes.READ}/{room2_event_response1['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Create room3 + room_id3 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id3, user1_id, tok=user1_tok) + self.helper.join(room_id3, user3_id, tok=user3_tok) + room3_event_response1 = self.helper.send( + room_id3, body="new event", tok=user2_tok + ) + + # Create room4 + room_id4 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id4, user1_id, tok=user1_tok) + self.helper.join(room_id4, user3_id, tok=user3_tok) + event_response4 = self.helper.send(room_id4, body="new event", tok=user2_tok) + # User1 reads the last event (before the `from_token`) + channel = self.make_request( + "POST", + f"/rooms/{room_id4}/receipt/{ReceiptTypes.READ}/{event_response4['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id1: { + "required_state": [], + "timeline_limit": 0, + }, + room_id3: { + "required_state": [], + "timeline_limit": 0, + }, + room_id4: { + "required_state": [], + "timeline_limit": 0, + }, + }, + "extensions": { + "receipts": { + "enabled": True, + "rooms": [room_id1, room_id2, room_id3, room_id4], + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Add some more read receipts after the `from_token` + # + # User1 reads room1 + channel = self.make_request( + "POST", + f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response1['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # User1 privately reads room2 + channel = self.make_request( + "POST", + f"/rooms/{room_id2}/receipt/{ReceiptTypes.READ_PRIVATE}/{room2_event_response1['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # User3 reads room3 + channel = self.make_request( + "POST", + f"/rooms/{room_id3}/receipt/{ReceiptTypes.READ}/{room3_event_response1['event_id']}", + {}, + access_token=user3_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # No activity for room4 after the `from_token` + + # Make an incremental Sliding Sync request with the receipts 
extension enabled
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # Even though we requested room2, we only expect rooms to show up if they are
+        # already in the Sliding Sync response. room4 doesn't show up because there is
+        # no activity after the `from_token`.
+        self.assertIncludes(
+            response_body["extensions"]["receipts"].get("rooms").keys(),
+            {room_id1, room_id3},
+            exact=True,
+        )
+
+        # Check room1:
+        #
+        # Sanity check that it's the correct ephemeral event type
+        self.assertEqual(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["type"],
+            EduTypes.RECEIPT,
+        )
+        # We only see that user1 has read something in room1 since the `from_token`
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][
+                room1_event_response1["event_id"]
+            ][ReceiptTypes.READ].keys(),
+            {user1_id},
+            exact=True,
+        )
+        # User1 did not send a private read receipt in this room and we shouldn't leak
+        # others' private read receipts
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][
+                room1_event_response1["event_id"]
+            ]
+            .get(ReceiptTypes.READ_PRIVATE, {})
+            .keys(),
+            set(),
+            exact=True,
+        )
+        # No events in the timeline since they were sent before the `from_token`
+        self.assertNotIn(room_id1, response_body["rooms"])
+
+        # Check room3:
+        #
+        # Sanity check that it's the correct ephemeral event type
+        self.assertEqual(
+            response_body["extensions"]["receipts"]["rooms"][room_id3]["type"],
+            EduTypes.RECEIPT,
+        )
+        # We only see that user3 has read something in room3 since the `from_token`
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id3]["content"][
+                room3_event_response1["event_id"]
+            ][ReceiptTypes.READ].keys(),
+            {user3_id},
+            exact=True,
+        )
+        # User1 did not send a private read receipt in this room and we shouldn't leak
+        # others' private read receipts
+        self.assertIncludes(
+            response_body["extensions"]["receipts"]["rooms"][room_id3]["content"][
+                room3_event_response1["event_id"]
+            ]
+            .get(ReceiptTypes.READ_PRIVATE, {})
+            .keys(),
+            set(),
+            exact=True,
+        )
+        # No events in the timeline since they were sent before the `from_token`
+        self.assertNotIn(room_id3, response_body["rooms"])
+
+    def test_receipts_incremental_sync_all_live_receipts(self) -> None:
+        """
+        On incremental sync, we return all receipts in the token range for a given room
+        even if they are not in the timeline.
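+
+        (That is, the extension is assumed to be driven by the receipt stream
+        token range rather than by the `timeline_limit`.)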
+ """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + # Create room1 + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id1: { + "required_state": [], + # The timeline will only include event2 + "timeline_limit": 1, + }, + }, + "extensions": { + "receipts": { + "enabled": True, + "rooms": [room_id1], + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + room1_event_response1 = self.helper.send( + room_id1, body="new event1", tok=user2_tok + ) + room1_event_response2 = self.helper.send( + room_id1, body="new event2", tok=user2_tok + ) + + # User1 reads event1 + channel = self.make_request( + "POST", + f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response1['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # User2 reads event2 + channel = self.make_request( + "POST", + f"/rooms/{room_id1}/receipt/{ReceiptTypes.READ}/{room1_event_response2['event_id']}", + {}, + access_token=user2_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Make an incremental Sliding Sync request with the receipts extension enabled + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # We should see room1 because it has receipts in the token range + self.assertIncludes( + response_body["extensions"]["receipts"].get("rooms").keys(), + {room_id1}, + exact=True, + ) + # Sanity check that it's the correct ephemeral event type + self.assertEqual( + response_body["extensions"]["receipts"]["rooms"][room_id1]["type"], + EduTypes.RECEIPT, + ) + # We should see all receipts in the token range regardless of whether the events + # are in the timeline + self.assertIncludes( + response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][ + room1_event_response1["event_id"] + ][ReceiptTypes.READ].keys(), + {user1_id}, + exact=True, + ) + self.assertIncludes( + response_body["extensions"]["receipts"]["rooms"][room_id1]["content"][ + room1_event_response2["event_id"] + ][ReceiptTypes.READ].keys(), + {user2_id}, + exact=True, + ) + # Only the latest event in the timeline because the `timeline_limit` is 1 + self.assertIncludes( + { + event["event_id"] + for event in response_body["rooms"][room_id1].get("timeline", []) + }, + {room1_event_response2["event_id"]}, + exact=True, + message=str(response_body["rooms"][room_id1]), + ) + + def test_wait_for_new_data(self) -> None: + """ + Test to make sure that the Sliding Sync request waits for new data to arrive. 
+
+        (Only applies to incremental syncs with a `timeout` specified)
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+        event_response = self.helper.send(room_id, body="new event", tok=user2_tok)
+
+        sync_body = {
+            "lists": {},
+            "room_subscriptions": {
+                room_id: {
+                    "required_state": [],
+                    "timeline_limit": 0,
+                },
+            },
+            "extensions": {
+                "receipts": {
+                    "enabled": True,
+                    "rooms": [room_id],
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make an incremental Sliding Sync request with the receipts extension enabled
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Bump the receipts to trigger new results
+        receipt_channel = self.make_request(
+            "POST",
+            f"/rooms/{room_id}/receipt/{ReceiptTypes.READ}/{event_response['event_id']}",
+            {},
+            access_token=user2_tok,
+        )
+        self.assertEqual(receipt_channel.code, 200, receipt_channel.json_body)
+        # Should respond before the 10 second timeout
+        channel.await_result(timeout_ms=3000)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # We should see the new receipt
+        self.assertIncludes(
+            channel.json_body.get("extensions", {})
+            .get("receipts", {})
+            .get("rooms", {})
+            .keys(),
+            {room_id},
+            exact=True,
+            message=str(channel.json_body),
+        )
+        self.assertIncludes(
+            channel.json_body["extensions"]["receipts"]["rooms"][room_id]["content"][
+                event_response["event_id"]
+            ][ReceiptTypes.READ].keys(),
+            {user2_id},
+            exact=True,
+        )
+        # User1 did not send a private read receipt in this room and we shouldn't leak
+        # others' private read receipts
+        self.assertIncludes(
+            channel.json_body["extensions"]["receipts"]["rooms"][room_id]["content"][
+                event_response["event_id"]
+            ]
+            .get(ReceiptTypes.READ_PRIVATE, {})
+            .keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_wait_for_new_data_timeout(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive but
+        no data ever arrives, so we time out. We're also making sure that the default
+        data from the receipts extension doesn't trigger a false positive for new data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "receipts": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Wake up `notifier.wait_for_events(...)`, which will cause us to test
+        # `SlidingSyncResult.__bool__` for new results.
+        self._bump_notifier_wait_for_events(
+            user1_id, wake_stream_key=StreamKeyType.ACCOUNT_DATA
+        )
+        # Block for a little bit more to ensure we don't see any new results.
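+        # (The account data poke above is assumed to wake the notifier without
+        # producing any receipts, so the request should keep waiting.)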
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=4000)
+        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
+        # 5000 + 4000 + 1200 > 10000)
+        channel.await_result(timeout_ms=1200)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        self.assertIncludes(
+            channel.json_body["extensions"]["receipts"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
diff --git a/tests/rest/client/sliding_sync/test_extension_to_device.py b/tests/rest/client/sliding_sync/test_extension_to_device.py
new file mode 100644
index 000000000000..f8500812ea78
--- /dev/null
+++ b/tests/rest/client/sliding_sync/test_extension_to_device.py
@@ -0,0 +1,278 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+import logging
+from typing import List
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.rest.client import login, sendtodevice, sync
+from synapse.server import HomeServer
+from synapse.types import JsonDict, StreamKeyType
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+from tests.server import TimedOutException
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncToDeviceExtensionTestCase(SlidingSyncBase):
+    """Tests for the to-device sliding sync extension"""
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        sync.register_servlets,
+        sendtodevice.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+
+    def _assert_to_device_response(
+        self, response_body: JsonDict, expected_messages: List[JsonDict]
+    ) -> str:
+        """Assert the sliding sync response was successful and has the expected
+        to-device messages.
+
+        Returns the next_batch token from the to-device section.
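+
+        (The returned `next_batch` token is assumed to be fed back via
+        `extensions.to_device.since` on the next request, as the tests below do.)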
+        """
+        extensions = response_body["extensions"]
+        to_device = extensions["to_device"]
+        self.assertIsInstance(to_device["next_batch"], str)
+        self.assertEqual(to_device["events"], expected_messages)
+
+        return to_device["next_batch"]
+
+    def test_no_data(self) -> None:
+        """Test that enabling the to-device extension works, even if there is
+        no data
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # We expect no to-device messages
+        self._assert_to_device_response(response_body, [])
+
+    def test_data_initial_sync(self) -> None:
+        """Test that we get to-device messages when we don't specify a since
+        token"""
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass", "d1")
+        user2_id = self.register_user("u2", "pass")
+        user2_tok = self.login(user2_id, "pass", "d2")
+
+        # Send the to-device message
+        test_msg = {"foo": "bar"}
+        chan = self.make_request(
+            "PUT",
+            "/_matrix/client/r0/sendToDevice/m.test/1234",
+            content={"messages": {user1_id: {"d1": test_msg}}},
+            access_token=user2_tok,
+        )
+        self.assertEqual(chan.code, 200, chan.result)
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        self._assert_to_device_response(
+            response_body,
+            [{"content": test_msg, "sender": user2_id, "type": "m.test"}],
+        )
+
+    def test_data_incremental_sync(self) -> None:
+        """Test that we get to-device messages over incremental syncs"""
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass", "d1")
+        user2_id = self.register_user("u2", "pass")
+        user2_tok = self.login(user2_id, "pass", "d2")
+
+        sync_body: JsonDict = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        # No to-device messages yet.
+        next_batch = self._assert_to_device_response(response_body, [])
+
+        test_msg = {"foo": "bar"}
+        chan = self.make_request(
+            "PUT",
+            "/_matrix/client/r0/sendToDevice/m.test/1234",
+            content={"messages": {user1_id: {"d1": test_msg}}},
+            access_token=user2_tok,
+        )
+        self.assertEqual(chan.code, 200, chan.result)
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                    "since": next_batch,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        next_batch = self._assert_to_device_response(
+            response_body,
+            [{"content": test_msg, "sender": user2_id, "type": "m.test"}],
+        )
+
+        # The next sliding sync request should not include the to-device
+        # message.
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                    "since": next_batch,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        self._assert_to_device_response(response_body, [])
+
+        # An initial sliding sync request should not include the to-device
+        # message, as it should have been deleted
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        self._assert_to_device_response(response_body, [])
+
+    def test_wait_for_new_data(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive.
+
+        (Only applies to incremental syncs with a `timeout` specified)
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass", "d1")
+        user2_id = self.register_user("u2", "pass")
+        user2_tok = self.login(user2_id, "pass", "d2")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + "?timeout=10000" + f"&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Bump the to-device messages to trigger new results
+        test_msg = {"foo": "bar"}
+        send_to_device_channel = self.make_request(
+            "PUT",
+            "/_matrix/client/r0/sendToDevice/m.test/1234",
+            content={"messages": {user1_id: {"d1": test_msg}}},
+            access_token=user2_tok,
+        )
+        self.assertEqual(
+            send_to_device_channel.code, 200, send_to_device_channel.result
+        )
+        # Should respond before the 10 second timeout
+        channel.await_result(timeout_ms=3000)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        self._assert_to_device_response(
+            channel.json_body,
+            [{"content": test_msg, "sender": user2_id, "type": "m.test"}],
+        )
+
+    def test_wait_for_new_data_timeout(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive but
+        no data ever arrives, so we time out. We're also making sure that the default
+        data from the To-Device extension doesn't trigger a false positive for new data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "to_device": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + "?timeout=10000" + f"&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Wake up `notifier.wait_for_events(...)`, which will cause us to test
+        # `SlidingSyncResult.__bool__` for new results.
+        self._bump_notifier_wait_for_events(
+            user1_id, wake_stream_key=StreamKeyType.ACCOUNT_DATA
+        )
+        # Block for a little bit more to ensure we don't see any new results.
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=4000)
+        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
+        # 5000 + 4000 + 1200 > 10000)
+        channel.await_result(timeout_ms=1200)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        self._assert_to_device_response(channel.json_body, [])
diff --git a/tests/rest/client/sliding_sync/test_extension_typing.py b/tests/rest/client/sliding_sync/test_extension_typing.py
new file mode 100644
index 000000000000..7f523e0f1065
--- /dev/null
+++ b/tests/rest/client/sliding_sync/test_extension_typing.py
@@ -0,0 +1,482 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2024 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+import logging
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.api.constants import EduTypes
+from synapse.rest.client import login, room, sync
+from synapse.server import HomeServer
+from synapse.types import StreamKeyType
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+from tests.server import TimedOutException
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncTypingExtensionTestCase(SlidingSyncBase):
+    """Tests for the typing notification sliding sync extension"""
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        sync.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+
+    def test_no_data_initial_sync(self) -> None:
+        """
+        Test that enabling the typing extension works during an initial sync,
+        even if there is no data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        # Make an initial Sliding Sync request with the typing extension enabled
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "typing": {
+                    "enabled": True,
+                }
+            },
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        self.assertIncludes(
+            response_body["extensions"]["typing"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_no_data_incremental_sync(self) -> None:
+        """
+        Test that enabling the typing extension works during an incremental sync,
+        even if there is no data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        sync_body = {
+            "lists": {},
+            "extensions": {
+                "typing": {
+                    "enabled": True,
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make an incremental Sliding Sync request with the typing extension enabled
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        self.assertIncludes(
+            response_body["extensions"]["typing"].get("rooms").keys(),
+            set(),
+            exact=True,
+        )
+
+    def test_typing_initial_sync(self) -> None:
+        """
+        On initial sync, we return all typing notifications for rooms that we request
+        and are being returned in the Sliding Sync response.
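+
+        (Typing notifications are assumed to come back under
+        `extensions.typing.rooms`, keyed by room ID and shaped like an
+        `m.typing` ephemeral event; see the assertions below.)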
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + user4_id = self.register_user("user4", "pass") + user4_tok = self.login(user4_id, "pass") + + # Create a room + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + self.helper.join(room_id1, user4_id, tok=user4_tok) + # User1 starts typing in room1 + channel = self.make_request( + "PUT", + f"/rooms/{room_id1}/typing/{user1_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # User2 starts typing in room1 + channel = self.make_request( + "PUT", + f"/rooms/{room_id1}/typing/{user2_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user2_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Create another room + room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id2, user1_id, tok=user1_tok) + self.helper.join(room_id2, user3_id, tok=user3_tok) + self.helper.join(room_id2, user4_id, tok=user4_tok) + # User1 starts typing in room2 + channel = self.make_request( + "PUT", + f"/rooms/{room_id2}/typing/{user1_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + # User2 starts typing in room2 + channel = self.make_request( + "PUT", + f"/rooms/{room_id2}/typing/{user2_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user2_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Make an initial Sliding Sync request with the typing extension enabled + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id1: { + "required_state": [], + "timeline_limit": 0, + } + }, + "extensions": { + "typing": { + "enabled": True, + "rooms": [room_id1, room_id2], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Even though we requested room2, we only expect room1 to show up because that's + # the only room in the Sliding Sync response (room2 is not one of our room + # subscriptions or in a sliding window list). + self.assertIncludes( + response_body["extensions"]["typing"].get("rooms").keys(), + {room_id1}, + exact=True, + ) + # Sanity check that it's the correct ephemeral event type + self.assertEqual( + response_body["extensions"]["typing"]["rooms"][room_id1]["type"], + EduTypes.TYPING, + ) + # We can see user1 and user2 typing + self.assertIncludes( + set( + response_body["extensions"]["typing"]["rooms"][room_id1]["content"][ + "user_ids" + ] + ), + {user1_id, user2_id}, + exact=True, + ) + + def test_typing_incremental_sync(self) -> None: + """ + On incremental sync, we return all typing notifications in the token range for a + given room but only for rooms that we request and are being returned in the + Sliding Sync response. 
+ """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + + # Create room1 + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + # User2 starts typing in room1 + channel = self.make_request( + "PUT", + f"/rooms/{room_id1}/typing/{user2_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user2_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Create room2 + room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id2, user1_id, tok=user1_tok) + # User1 starts typing in room2 (before the `from_token`) + channel = self.make_request( + "PUT", + f"/rooms/{room_id2}/typing/{user1_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Create room3 + room_id3 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id3, user1_id, tok=user1_tok) + self.helper.join(room_id3, user3_id, tok=user3_tok) + + # Create room4 + room_id4 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id4, user1_id, tok=user1_tok) + self.helper.join(room_id4, user3_id, tok=user3_tok) + # User1 starts typing in room4 (before the `from_token`) + channel = self.make_request( + "PUT", + f"/rooms/{room_id4}/typing/{user1_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + + # Advance time so all of the typing notifications timeout before we make our + # Sliding Sync requests. Even though these are sent before the `from_token`, the + # typing code only keeps track of stream position of the latest typing + # notification so "old" typing notifications that are still "alive" (haven't + # timed out) can appear in the response. 
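+        # (36s comfortably exceeds the 30s timeout (`"timeout": 30000`) used for every
+        # typing notification above, so all of them will have expired by this point.)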
+        self.reactor.advance(36)
+
+        sync_body = {
+            "lists": {},
+            "room_subscriptions": {
+                room_id1: {
+                    "required_state": [],
+                    "timeline_limit": 0,
+                },
+                room_id3: {
+                    "required_state": [],
+                    "timeline_limit": 0,
+                },
+                room_id4: {
+                    "required_state": [],
+                    "timeline_limit": 0,
+                },
+            },
+            "extensions": {
+                "typing": {
+                    "enabled": True,
+                    "rooms": [room_id1, room_id2, room_id3, room_id4],
+                }
+            },
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Add some more typing notifications after the `from_token`
+        #
+        # User1 starts typing in room1
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id1}/typing/{user1_id}",
+            b'{"typing": true, "timeout": 30000}',
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User1 starts typing in room2
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id2}/typing/{user1_id}",
+            b'{"typing": true, "timeout": 30000}',
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # User3 starts typing in room3
+        channel = self.make_request(
+            "PUT",
+            f"/rooms/{room_id3}/typing/{user3_id}",
+            b'{"typing": true, "timeout": 30000}',
+            access_token=user3_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        # No activity for room4 after the `from_token`
+
+        # Make an incremental Sliding Sync request with the typing extension enabled
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # Even though we requested room2, we only expect rooms to show up if they are
+        # already in the Sliding Sync response. room4 doesn't show up because there is
+        # no activity after the `from_token`.
+        self.assertIncludes(
+            response_body["extensions"]["typing"].get("rooms").keys(),
+            {room_id1, room_id3},
+            exact=True,
+        )
+
+        # Check room1:
+        #
+        # Sanity check that it's the correct ephemeral event type
+        self.assertEqual(
+            response_body["extensions"]["typing"]["rooms"][room_id1]["type"],
+            EduTypes.TYPING,
+        )
+        # We only see that user1 is typing in room1 since the `from_token`
+        self.assertIncludes(
+            set(
+                response_body["extensions"]["typing"]["rooms"][room_id1]["content"][
+                    "user_ids"
+                ]
+            ),
+            {user1_id},
+            exact=True,
+        )
+
+        # Check room3:
+        #
+        # Sanity check that it's the correct ephemeral event type
+        self.assertEqual(
+            response_body["extensions"]["typing"]["rooms"][room_id3]["type"],
+            EduTypes.TYPING,
+        )
+        # We only see that user3 is typing in room3 since the `from_token`
+        self.assertIncludes(
+            set(
+                response_body["extensions"]["typing"]["rooms"][room_id3]["content"][
+                    "user_ids"
+                ]
+            ),
+            {user3_id},
+            exact=True,
+        )
+
+    def test_wait_for_new_data(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive.
+ + (Only applies to incremental syncs with a `timeout` specified) + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id, user1_id, tok=user1_tok) + + sync_body = { + "lists": {}, + "room_subscriptions": { + room_id: { + "required_state": [], + "timeline_limit": 0, + }, + }, + "extensions": { + "typing": { + "enabled": True, + "rooms": [room_id], + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Make an incremental Sliding Sync request with the typing extension enabled + channel = self.make_request( + "POST", + self.sync_endpoint + f"?timeout=10000&pos={from_token}", + content=sync_body, + access_token=user1_tok, + await_result=False, + ) + # Block for 5 seconds to make sure we are `notifier.wait_for_events(...)` + with self.assertRaises(TimedOutException): + channel.await_result(timeout_ms=5000) + # Bump the typing status to trigger new results + typing_channel = self.make_request( + "PUT", + f"/rooms/{room_id}/typing/{user2_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user2_tok, + ) + self.assertEqual(typing_channel.code, 200, typing_channel.json_body) + # Should respond before the 10 second timeout + channel.await_result(timeout_ms=3000) + self.assertEqual(channel.code, 200, channel.json_body) + + # We should see the new typing notification + self.assertIncludes( + channel.json_body.get("extensions", {}) + .get("typing", {}) + .get("rooms", {}) + .keys(), + {room_id}, + exact=True, + message=str(channel.json_body), + ) + self.assertIncludes( + set( + channel.json_body["extensions"]["typing"]["rooms"][room_id]["content"][ + "user_ids" + ] + ), + {user2_id}, + exact=True, + ) + + def test_wait_for_new_data_timeout(self) -> None: + """ + Test to make sure that the Sliding Sync request waits for new data to arrive but + no data ever arrives so we timeout. We're also making sure that the default data + from the typing extension doesn't trigger a false-positive for new data. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + sync_body = { + "lists": {}, + "extensions": { + "typing": { + "enabled": True, + } + }, + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Make the Sliding Sync request + channel = self.make_request( + "POST", + self.sync_endpoint + f"?timeout=10000&pos={from_token}", + content=sync_body, + access_token=user1_tok, + await_result=False, + ) + # Block for 5 seconds to make sure we are `notifier.wait_for_events(...)` + with self.assertRaises(TimedOutException): + channel.await_result(timeout_ms=5000) + # Wake-up `notifier.wait_for_events(...)` that will cause us test + # `SlidingSyncResult.__bool__` for new results. + self._bump_notifier_wait_for_events( + user1_id, wake_stream_key=StreamKeyType.ACCOUNT_DATA + ) + # Block for a little bit more to ensure we don't see any new results. 
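+        # (The wake-up above used the account data stream, which the typing extension
+        # should ignore, so the request is expected to keep waiting.)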
+ with self.assertRaises(TimedOutException): + channel.await_result(timeout_ms=4000) + # Wait for the sync to complete (wait for the rest of the 10 second timeout, + # 5000 + 4000 + 1200 > 10000) + channel.await_result(timeout_ms=1200) + self.assertEqual(channel.code, 200, channel.json_body) + + self.assertIncludes( + channel.json_body["extensions"]["typing"].get("rooms").keys(), + set(), + exact=True, + ) diff --git a/tests/rest/client/sliding_sync/test_extensions.py b/tests/rest/client/sliding_sync/test_extensions.py new file mode 100644 index 000000000000..68f666133471 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_extensions.py @@ -0,0 +1,283 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging +from typing import Literal + +from parameterized import parameterized +from typing_extensions import assert_never + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import ReceiptTypes +from synapse.rest.client import login, receipts, room, sync +from synapse.server import HomeServer +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase + +logger = logging.getLogger(__name__) + + +class SlidingSyncExtensionsTestCase(SlidingSyncBase): + """ + Test general extensions behavior in the Sliding Sync API. Each extension has their + own suite of tests in their own file as well. + """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + receipts.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + self.account_data_handler = hs.get_account_data_handler() + + # Any extensions that use `lists`/`rooms` should be tested here + @parameterized.expand([("account_data",), ("receipts",), ("typing",)]) + def test_extensions_lists_rooms_relevant_rooms( + self, + extension_name: Literal["account_data", "receipts", "typing"], + ) -> None: + """ + With various extensions, test out requesting different variations of + `lists`/`rooms`. 
+ + Stresses `SlidingSyncHandler.find_relevant_room_ids_for_extension(...)` + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create some rooms + room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id3 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id4 = self.helper.create_room_as(user1_id, tok=user1_tok) + room_id5 = self.helper.create_room_as(user1_id, tok=user1_tok) + + room_id_to_human_name_map = { + room_id1: "room1", + room_id2: "room2", + room_id3: "room3", + room_id4: "room4", + room_id5: "room5", + } + + for room_id in room_id_to_human_name_map.keys(): + if extension_name == "account_data": + # Add some account data to each room + self.get_success( + self.account_data_handler.add_account_data_to_room( + user_id=user1_id, + room_id=room_id, + account_data_type="org.matrix.roorarraz", + content={"roo": "rar"}, + ) + ) + elif extension_name == "receipts": + event_response = self.helper.send( + room_id, body="new event", tok=user1_tok + ) + # Read last event + channel = self.make_request( + "POST", + f"/rooms/{room_id}/receipt/{ReceiptTypes.READ}/{event_response['event_id']}", + {}, + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + elif extension_name == "typing": + # Start a typing notification + channel = self.make_request( + "PUT", + f"/rooms/{room_id}/typing/{user1_id}", + b'{"typing": true, "timeout": 30000}', + access_token=user1_tok, + ) + self.assertEqual(channel.code, 200, channel.json_body) + else: + assert_never(extension_name) + + main_sync_body = { + "lists": { + # We expect this list range to include room5 and room4 + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 0, + }, + # We expect this list range to include room5, room4, room3 + "bar-list": { + "ranges": [[0, 2]], + "required_state": [], + "timeline_limit": 0, + }, + }, + "room_subscriptions": { + room_id1: { + "required_state": [], + "timeline_limit": 0, + } + }, + } + + # Mix lists and rooms + sync_body = { + **main_sync_body, + "extensions": { + extension_name: { + "enabled": True, + "lists": ["foo-list", "non-existent-list"], + "rooms": [room_id1, room_id2, "!non-existent-room"], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # room1: ✅ Requested via `rooms` and a room subscription exists + # room2: ❌ Requested via `rooms` but not in the response (from lists or room subscriptions) + # room3: ❌ Not requested + # room4: ✅ Shows up because requested via `lists` and list exists in the response + # room5: ✅ Shows up because requested via `lists` and list exists in the response + self.assertIncludes( + { + room_id_to_human_name_map[room_id] + for room_id in response_body["extensions"][extension_name] + .get("rooms") + .keys() + }, + {"room1", "room4", "room5"}, + exact=True, + ) + + # Try wildcards (this is the default) + sync_body = { + **main_sync_body, + "extensions": { + extension_name: { + "enabled": True, + # "lists": ["*"], + # "rooms": ["*"], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # room1: ✅ Shows up because of default `rooms` wildcard and is in one of the room subscriptions + # room2: ❌ Not requested + # room3: ✅ Shows up because of default `lists` wildcard and is in a list + # room4: ✅ Shows up because of default `lists` wildcard and is in a list + # room5: ✅ Shows up because of default `lists` wildcard and is in a list + 
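+        # (Omitting `lists`/`rooms` from the extension request behaves like the
+        # commented-out `["*"]` wildcards above.)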
self.assertIncludes( + { + room_id_to_human_name_map[room_id] + for room_id in response_body["extensions"][extension_name] + .get("rooms") + .keys() + }, + {"room1", "room3", "room4", "room5"}, + exact=True, + ) + + # Empty list will return nothing + sync_body = { + **main_sync_body, + "extensions": { + extension_name: { + "enabled": True, + "lists": [], + "rooms": [], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # room1: ❌ Not requested + # room2: ❌ Not requested + # room3: ❌ Not requested + # room4: ❌ Not requested + # room5: ❌ Not requested + self.assertIncludes( + { + room_id_to_human_name_map[room_id] + for room_id in response_body["extensions"][extension_name] + .get("rooms") + .keys() + }, + set(), + exact=True, + ) + + # Try wildcard and none + sync_body = { + **main_sync_body, + "extensions": { + extension_name: { + "enabled": True, + "lists": ["*"], + "rooms": [], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # room1: ❌ Not requested + # room2: ❌ Not requested + # room3: ✅ Shows up because of default `lists` wildcard and is in a list + # room4: ✅ Shows up because of default `lists` wildcard and is in a list + # room5: ✅ Shows up because of default `lists` wildcard and is in a list + self.assertIncludes( + { + room_id_to_human_name_map[room_id] + for room_id in response_body["extensions"][extension_name] + .get("rooms") + .keys() + }, + {"room3", "room4", "room5"}, + exact=True, + ) + + # Try requesting a room that is only in a list + sync_body = { + **main_sync_body, + "extensions": { + extension_name: { + "enabled": True, + "lists": [], + "rooms": [room_id5], + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # room1: ❌ Not requested + # room2: ❌ Not requested + # room3: ❌ Not requested + # room4: ❌ Not requested + # room5: ✅ Requested via `rooms` and is in a list + self.assertIncludes( + { + room_id_to_human_name_map[room_id] + for room_id in response_body["extensions"][extension_name] + .get("rooms") + .keys() + }, + {"room5"}, + exact=True, + ) diff --git a/tests/rest/client/sliding_sync/test_room_subscriptions.py b/tests/rest/client/sliding_sync/test_room_subscriptions.py new file mode 100644 index 000000000000..cc17b0b35435 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_room_subscriptions.py @@ -0,0 +1,285 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging +from http import HTTPStatus + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import EventTypes, HistoryVisibility +from synapse.rest.client import login, room, sync +from synapse.server import HomeServer +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase + +logger = logging.getLogger(__name__) + + +class SlidingSyncRoomSubscriptionsTestCase(SlidingSyncBase): + """ + Test `room_subscriptions` in the Sliding Sync API. 
+ """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + + def test_room_subscriptions_with_join_membership(self) -> None: + """ + Test `room_subscriptions` with a joined room should give us timeline and current + state events. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request with just the room subscription + sync_body = { + "room_subscriptions": { + room_id1: { + "required_state": [ + [EventTypes.Create, ""], + ], + "timeline_limit": 1, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # We should see some state + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + # We should see some events + self.assertEqual( + [ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + [ + join_response["event_id"], + ], + response_body["rooms"][room_id1]["timeline"], + ) + # No "live" events in an initial sync (no `from_token` to define the "live" + # range) + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 0, + response_body["rooms"][room_id1], + ) + # There are more events to paginate to + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + True, + response_body["rooms"][room_id1], + ) + + def test_room_subscriptions_with_leave_membership(self) -> None: + """ + Test `room_subscriptions` with a leave room should give us timeline and state + events up to the leave event. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "bar"}, + tok=user2_tok, + ) + + join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) + leave_response = self.helper.leave(room_id1, user1_id, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # Send some events after user1 leaves + self.helper.send(room_id1, "activity after leave", tok=user2_tok) + # Update state after user1 leaves + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "qux"}, + tok=user2_tok, + ) + + # Make the Sliding Sync request with just the room subscription + sync_body = { + "room_subscriptions": { + room_id1: { + "required_state": [ + ["org.matrix.foo_state", ""], + ], + "timeline_limit": 2, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # We should see the state at the time of the leave + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[("org.matrix.foo_state", "")], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + # We should see some before we left (nothing after) + self.assertEqual( + [ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + [ + join_response["event_id"], + leave_response["event_id"], + ], + response_body["rooms"][room_id1]["timeline"], + ) + # No "live" events in an initial sync (no `from_token` to define the "live" + # range) + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 0, + response_body["rooms"][room_id1], + ) + # There are more events to paginate to + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + True, + response_body["rooms"][room_id1], + ) + + def test_room_subscriptions_no_leak_private_room(self) -> None: + """ + Test `room_subscriptions` with a private room we have never been in should not + leak any data to the user. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok, is_public=False) + + # We should not be able to join the private room + self.helper.join( + room_id1, user1_id, tok=user1_tok, expect_code=HTTPStatus.FORBIDDEN + ) + + # Make the Sliding Sync request with just the room subscription + sync_body = { + "room_subscriptions": { + room_id1: { + "required_state": [ + [EventTypes.Create, ""], + ], + "timeline_limit": 1, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # We should not see the room at all (we're not in it) + self.assertIsNone(response_body["rooms"].get(room_id1), response_body["rooms"]) + + def test_room_subscriptions_world_readable(self) -> None: + """ + Test `room_subscriptions` with a room that has `world_readable` history visibility + + FIXME: We should be able to see the room timeline and state + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + # Create a room with `world_readable` history visibility + room_id1 = self.helper.create_room_as( + user2_id, + tok=user2_tok, + extra_content={ + "preset": "public_chat", + "initial_state": [ + { + "content": { + "history_visibility": HistoryVisibility.WORLD_READABLE + }, + "state_key": "", + "type": EventTypes.RoomHistoryVisibility, + } + ], + }, + ) + # Ensure we're testing with a room with `world_readable` history visibility + # which means events are visible to anyone even without membership. + history_visibility_response = self.helper.get_state( + room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok + ) + self.assertEqual( + history_visibility_response.get("history_visibility"), + HistoryVisibility.WORLD_READABLE, + ) + + # Note: We never join the room + + # Make the Sliding Sync request with just the room subscription + sync_body = { + "room_subscriptions": { + room_id1: { + "required_state": [ + [EventTypes.Create, ""], + ], + "timeline_limit": 1, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # FIXME: In the future, we should be able to see the room because it's + # `world_readable` but currently we don't support this. + self.assertIsNone(response_body["rooms"].get(room_id1), response_body["rooms"]) diff --git a/tests/rest/client/sliding_sync/test_rooms_invites.py b/tests/rest/client/sliding_sync/test_rooms_invites.py new file mode 100644 index 000000000000..f08ffaf67437 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_rooms_invites.py @@ -0,0 +1,510 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
+#
+import logging
+
+from twisted.test.proto_helpers import MemoryReactor
+
+import synapse.rest.admin
+from synapse.api.constants import EventTypes, HistoryVisibility
+from synapse.rest.client import login, room, sync
+from synapse.server import HomeServer
+from synapse.types import UserID
+from synapse.util import Clock
+
+from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase
+
+logger = logging.getLogger(__name__)
+
+
+class SlidingSyncRoomsInvitesTestCase(SlidingSyncBase):
+    """
+    Test to make sure the `rooms` response looks good for invites in the Sliding Sync API.
+
+    Invites behave a lot differently than other rooms because we don't include the
+    `timeline` (`num_live`, `limited`, `prev_batch`) or `required_state` in favor of
+    some stripped state under the `invite_state` key.
+
+    Knocks probably have the same behavior but the spec doesn't mention knocks yet.
+    """
+
+    servlets = [
+        synapse.rest.admin.register_servlets,
+        login.register_servlets,
+        room.register_servlets,
+        sync.register_servlets,
+    ]
+
+    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
+        self.store = hs.get_datastores().main
+        self.storage_controllers = hs.get_storage_controllers()
+
+    def test_rooms_invite_shared_history_initial_sync(self) -> None:
+        """
+        Test that `rooms` we are invited to have some stripped `invite_state` during an
+        initial sync.
+
+        This is an `invite` room so we should only have `stripped_state` (no `timeline`)
+        but we also shouldn't see any timeline events because the history visibility is
+        `shared` and we haven't joined the room yet.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user1 = UserID.from_string(user1_id)
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user2 = UserID.from_string(user2_id)
+
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        # Ensure we're testing with a room with `shared` history visibility, which
+        # means history is only visible once you actually join the room.
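+        # (With `shared` visibility, members can read the full history regardless of
+        # when they joined, but non-members, including invitees, see nothing.)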
+        history_visibility_response = self.helper.get_state(
+            room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok
+        )
+        self.assertEqual(
+            history_visibility_response.get("history_visibility"),
+            HistoryVisibility.SHARED,
+        )
+
+        self.helper.send(room_id1, "activity before1", tok=user2_tok)
+        self.helper.send(room_id1, "activity before2", tok=user2_tok)
+        self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok)
+        self.helper.send(room_id1, "activity after3", tok=user2_tok)
+        self.helper.send(room_id1, "activity after4", tok=user2_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 3,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # `timeline` is omitted for `invite` rooms with `stripped_state`
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("timeline"),
+            response_body["rooms"][room_id1],
+        )
+        # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway)
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("num_live"),
+            response_body["rooms"][room_id1],
+        )
+        # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway)
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("limited"),
+            response_body["rooms"][room_id1],
+        )
+        # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway)
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("prev_batch"),
+            response_body["rooms"][room_id1],
+        )
+        # `required_state` is omitted for `invite` rooms with `stripped_state`
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("required_state"),
+            response_body["rooms"][room_id1],
+        )
+        # We should have some `stripped_state` so the potential joiner can identify the
+        # room (we don't care about the order).
+        self.assertCountEqual(
+            response_body["rooms"][room_id1]["invite_state"],
+            [
+                {
+                    "content": {"creator": user2_id, "room_version": "10"},
+                    "sender": user2_id,
+                    "state_key": "",
+                    "type": "m.room.create",
+                },
+                {
+                    "content": {"join_rule": "public"},
+                    "sender": user2_id,
+                    "state_key": "",
+                    "type": "m.room.join_rules",
+                },
+                {
+                    "content": {"displayname": user2.localpart, "membership": "join"},
+                    "sender": user2_id,
+                    "state_key": user2_id,
+                    "type": "m.room.member",
+                },
+                {
+                    "content": {"displayname": user1.localpart, "membership": "invite"},
+                    "sender": user2_id,
+                    "state_key": user1_id,
+                    "type": "m.room.member",
+                },
+            ],
+            response_body["rooms"][room_id1]["invite_state"],
+        )
+
+    def test_rooms_invite_shared_history_incremental_sync(self) -> None:
+        """
+        Test that `rooms` we are invited to have some stripped `invite_state` during an
+        incremental sync.
+
+        This is an `invite` room so we should only have `stripped_state` (no `timeline`)
+        but we also shouldn't see any timeline events because the history visibility is
+        `shared` and we haven't joined the room yet.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user1 = UserID.from_string(user1_id)
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user2 = UserID.from_string(user2_id)
+
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        # Ensure we're testing with a room with `shared` history visibility, which
+        # means history is only visible once you actually join the room.
+ history_visibility_response = self.helper.get_state( + room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok + ) + self.assertEqual( + history_visibility_response.get("history_visibility"), + HistoryVisibility.SHARED, + ) + + self.helper.send(room_id1, "activity before invite1", tok=user2_tok) + self.helper.send(room_id1, "activity before invite2", tok=user2_tok) + self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + self.helper.send(room_id1, "activity after invite3", tok=user2_tok) + self.helper.send(room_id1, "activity after invite4", tok=user2_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 3, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + self.helper.send(room_id1, "activity after token5", tok=user2_tok) + self.helper.send(room_id1, "activity after toekn6", tok=user2_tok) + + # Make the Sliding Sync request + response_body, from_token = self.do_sync( + sync_body, since=from_token, tok=user1_tok + ) + + # `timeline` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("timeline"), + response_body["rooms"][room_id1], + ) + # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("num_live"), + response_body["rooms"][room_id1], + ) + # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("limited"), + response_body["rooms"][room_id1], + ) + # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("prev_batch"), + response_body["rooms"][room_id1], + ) + # `required_state` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("required_state"), + response_body["rooms"][room_id1], + ) + # We should have some `stripped_state` so the potential joiner can identify the + # room (we don't care about the order). + self.assertCountEqual( + response_body["rooms"][room_id1]["invite_state"], + [ + { + "content": {"creator": user2_id, "room_version": "10"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.create", + }, + { + "content": {"join_rule": "public"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.join_rules", + }, + { + "content": {"displayname": user2.localpart, "membership": "join"}, + "sender": user2_id, + "state_key": user2_id, + "type": "m.room.member", + }, + { + "content": {"displayname": user1.localpart, "membership": "invite"}, + "sender": user2_id, + "state_key": user1_id, + "type": "m.room.member", + }, + ], + response_body["rooms"][room_id1]["invite_state"], + ) + + def test_rooms_invite_world_readable_history_initial_sync(self) -> None: + """ + Test that `rooms` we are invited to have some stripped `invite_state` during an + initial sync. + + This is an `invite` room so we should only have `stripped_state` (no `timeline`) + but depending on the semantics we decide, we could potentially see some + historical events before/after the `from_token` because the history is + `world_readable`. Same situation for events after the `from_token` if the + history visibility was set to `invited`. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user1 = UserID.from_string(user1_id) + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user2 = UserID.from_string(user2_id) + + room_id1 = self.helper.create_room_as( + user2_id, + tok=user2_tok, + extra_content={ + "preset": "public_chat", + "initial_state": [ + { + "content": { + "history_visibility": HistoryVisibility.WORLD_READABLE + }, + "state_key": "", + "type": EventTypes.RoomHistoryVisibility, + } + ], + }, + ) + # Ensure we're testing with a room with `world_readable` history visibility + # which means events are visible to anyone even without membership. + history_visibility_response = self.helper.get_state( + room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok + ) + self.assertEqual( + history_visibility_response.get("history_visibility"), + HistoryVisibility.WORLD_READABLE, + ) + + self.helper.send(room_id1, "activity before1", tok=user2_tok) + self.helper.send(room_id1, "activity before2", tok=user2_tok) + self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + self.helper.send(room_id1, "activity after3", tok=user2_tok) + self.helper.send(room_id1, "activity after4", tok=user2_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + # Large enough to see the latest events and before the invite + "timeline_limit": 4, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # `timeline` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("timeline"), + response_body["rooms"][room_id1], + ) + # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("num_live"), + response_body["rooms"][room_id1], + ) + # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("limited"), + response_body["rooms"][room_id1], + ) + # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("prev_batch"), + response_body["rooms"][room_id1], + ) + # `required_state` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("required_state"), + response_body["rooms"][room_id1], + ) + # We should have some `stripped_state` so the potential joiner can identify the + # room (we don't care about the order). 
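+        # (Stripped state events carry only `content`, `sender`, `state_key` and
+        # `type`, which is exactly the shape asserted below.)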
+ self.assertCountEqual( + response_body["rooms"][room_id1]["invite_state"], + [ + { + "content": {"creator": user2_id, "room_version": "10"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.create", + }, + { + "content": {"join_rule": "public"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.join_rules", + }, + { + "content": {"displayname": user2.localpart, "membership": "join"}, + "sender": user2_id, + "state_key": user2_id, + "type": "m.room.member", + }, + { + "content": {"displayname": user1.localpart, "membership": "invite"}, + "sender": user2_id, + "state_key": user1_id, + "type": "m.room.member", + }, + ], + response_body["rooms"][room_id1]["invite_state"], + ) + + def test_rooms_invite_world_readable_history_incremental_sync(self) -> None: + """ + Test that `rooms` we are invited to have some stripped `invite_state` during an + incremental sync. + + This is an `invite` room so we should only have `stripped_state` (no `timeline`) + but depending on the semantics we decide, we could potentially see some + historical events before/after the `from_token` because the history is + `world_readable`. Same situation for events after the `from_token` if the + history visibility was set to `invited`. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user1 = UserID.from_string(user1_id) + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user2 = UserID.from_string(user2_id) + + room_id1 = self.helper.create_room_as( + user2_id, + tok=user2_tok, + extra_content={ + "preset": "public_chat", + "initial_state": [ + { + "content": { + "history_visibility": HistoryVisibility.WORLD_READABLE + }, + "state_key": "", + "type": EventTypes.RoomHistoryVisibility, + } + ], + }, + ) + # Ensure we're testing with a room with `world_readable` history visibility + # which means events are visible to anyone even without membership. 
+ history_visibility_response = self.helper.get_state( + room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok + ) + self.assertEqual( + history_visibility_response.get("history_visibility"), + HistoryVisibility.WORLD_READABLE, + ) + + self.helper.send(room_id1, "activity before invite1", tok=user2_tok) + self.helper.send(room_id1, "activity before invite2", tok=user2_tok) + self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + self.helper.send(room_id1, "activity after invite3", tok=user2_tok) + self.helper.send(room_id1, "activity after invite4", tok=user2_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + # Large enough to see the latest events and before the invite + "timeline_limit": 4, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + self.helper.send(room_id1, "activity after token5", tok=user2_tok) + self.helper.send(room_id1, "activity after toekn6", tok=user2_tok) + + # Make the incremental Sliding Sync request + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # `timeline` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("timeline"), + response_body["rooms"][room_id1], + ) + # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("num_live"), + response_body["rooms"][room_id1], + ) + # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("limited"), + response_body["rooms"][room_id1], + ) + # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) + self.assertIsNone( + response_body["rooms"][room_id1].get("prev_batch"), + response_body["rooms"][room_id1], + ) + # `required_state` is omitted for `invite` rooms with `stripped_state` + self.assertIsNone( + response_body["rooms"][room_id1].get("required_state"), + response_body["rooms"][room_id1], + ) + # We should have some `stripped_state` so the potential joiner can identify the + # room (we don't care about the order). + self.assertCountEqual( + response_body["rooms"][room_id1]["invite_state"], + [ + { + "content": {"creator": user2_id, "room_version": "10"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.create", + }, + { + "content": {"join_rule": "public"}, + "sender": user2_id, + "state_key": "", + "type": "m.room.join_rules", + }, + { + "content": {"displayname": user2.localpart, "membership": "join"}, + "sender": user2_id, + "state_key": user2_id, + "type": "m.room.member", + }, + { + "content": {"displayname": user1.localpart, "membership": "invite"}, + "sender": user2_id, + "state_key": user1_id, + "type": "m.room.member", + }, + ], + response_body["rooms"][room_id1]["invite_state"], + ) diff --git a/tests/rest/client/sliding_sync/test_rooms_meta.py b/tests/rest/client/sliding_sync/test_rooms_meta.py new file mode 100644 index 000000000000..04f11c05241b --- /dev/null +++ b/tests/rest/client/sliding_sync/test_rooms_meta.py @@ -0,0 +1,710 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. 
+# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import EventTypes, Membership +from synapse.api.room_versions import RoomVersions +from synapse.rest.client import login, room, sync +from synapse.server import HomeServer +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase +from tests.test_utils.event_injection import create_event + +logger = logging.getLogger(__name__) + + +class SlidingSyncRoomsMetaTestCase(SlidingSyncBase): + """ + Test rooms meta info like name, avatar, joined_count, invited_count, is_dm, + bump_stamp in the Sliding Sync API. + """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + + def test_rooms_meta_when_joined(self) -> None: + """ + Test that the `rooms` `name` and `avatar` are included in the response and + reflect the current state of the room when the user is joined to the room. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as( + user2_id, + tok=user2_tok, + extra_content={ + "name": "my super room", + }, + ) + # Set the room avatar URL + self.helper.send_state( + room_id1, + EventTypes.RoomAvatar, + {"url": "mxc://DUMMY_MEDIA_ID"}, + tok=user2_tok, + ) + + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Reflect the current state of the room + self.assertEqual( + response_body["rooms"][room_id1]["name"], + "my super room", + response_body["rooms"][room_id1], + ) + self.assertEqual( + response_body["rooms"][room_id1]["avatar"], + "mxc://DUMMY_MEDIA_ID", + response_body["rooms"][room_id1], + ) + self.assertEqual( + response_body["rooms"][room_id1]["joined_count"], + 2, + ) + self.assertEqual( + response_body["rooms"][room_id1]["invited_count"], + 0, + ) + self.assertIsNone( + response_body["rooms"][room_id1].get("is_dm"), + ) + + def test_rooms_meta_when_invited(self) -> None: + """ + Test that the `rooms` `name` and `avatar` are included in the response and + reflect the current state of the room when the user is invited to the room. 
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "name": "my super room",
+            },
+        )
+        # Set the room avatar URL
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        # User1 is invited to the room
+        self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok)
+
+        # Update the room name after user1 was invited
+        self.helper.send_state(
+            room_id1,
+            EventTypes.Name,
+            {"name": "my super duper room"},
+            tok=user2_tok,
+        )
+        # Update the room avatar URL after user1 was invited
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://UPDATED_DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # This should still reflect the current state of the room even when the user is
+        # invited.
+        self.assertEqual(
+            response_body["rooms"][room_id1]["name"],
+            "my super duper room",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["avatar"],
+            "mxc://UPDATED_DUMMY_MEDIA_ID",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["joined_count"],
+            1,
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["invited_count"],
+            1,
+        )
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("is_dm"),
+        )
+
+    def test_rooms_meta_when_banned(self) -> None:
+        """
+        Test that the `rooms` `name` and `avatar` reflect the state of the room when the
+        user was banned (do not leak current state).
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "name": "my super room",
+            },
+        )
+        # Set the room avatar URL
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+        self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok)
+
+        # Update the room name after user1 was banned
+        self.helper.send_state(
+            room_id1,
+            EventTypes.Name,
+            {"name": "my super duper room"},
+            tok=user2_tok,
+        )
+        # Update the room avatar URL after user1 was banned
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://UPDATED_DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Reflect the state of the room at the time of the ban
+        self.assertEqual(
+            response_body["rooms"][room_id1]["name"],
+            "my super room",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["avatar"],
+            "mxc://DUMMY_MEDIA_ID",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["joined_count"],
+            # FIXME: The actual number should be "1" (user2) but we currently don't
+            # support this for rooms where the user has left/been banned.
+            0,
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["invited_count"],
+            0,
+        )
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("is_dm"),
+        )
+
+    def test_rooms_meta_heroes(self) -> None:
+        """
+        Test that the `rooms` `heroes` are included in the response when the room
+        doesn't have a room name set.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user3_id = self.register_user("user3", "pass")
+        _user3_tok = self.login(user3_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "name": "my super room",
+            },
+        )
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+        # User3 is invited
+        self.helper.invite(room_id1, src=user2_id, targ=user3_id, tok=user2_tok)
+
+        room_id2 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                # No room name set so that `heroes` is populated
+                #
+                # "name": "my super room2",
+            },
+        )
+        self.helper.join(room_id2, user1_id, tok=user1_tok)
+        # User3 is invited
+        self.helper.invite(room_id2, src=user2_id, targ=user3_id, tok=user2_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Room1 has a name so we shouldn't see any `heroes` which the client would use
+        # to calculate the room name themselves.
+ self.assertEqual( + response_body["rooms"][room_id1]["name"], + "my super room", + response_body["rooms"][room_id1], + ) + self.assertIsNone(response_body["rooms"][room_id1].get("heroes")) + self.assertEqual( + response_body["rooms"][room_id1]["joined_count"], + 2, + ) + self.assertEqual( + response_body["rooms"][room_id1]["invited_count"], + 1, + ) + + # Room2 doesn't have a name so we should see `heroes` populated + self.assertIsNone(response_body["rooms"][room_id2].get("name")) + self.assertCountEqual( + [ + hero["user_id"] + for hero in response_body["rooms"][room_id2].get("heroes", []) + ], + # Heroes shouldn't include the user themselves (we shouldn't see user1) + [user2_id, user3_id], + ) + self.assertEqual( + response_body["rooms"][room_id2]["joined_count"], + 2, + ) + self.assertEqual( + response_body["rooms"][room_id2]["invited_count"], + 1, + ) + + # We didn't request any state so we shouldn't see any `required_state` + self.assertIsNone(response_body["rooms"][room_id1].get("required_state")) + self.assertIsNone(response_body["rooms"][room_id2].get("required_state")) + + def test_rooms_meta_heroes_max(self) -> None: + """ + Test that the `rooms` `heroes` only includes the first 5 users (not including + yourself). + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + user4_id = self.register_user("user4", "pass") + user4_tok = self.login(user4_id, "pass") + user5_id = self.register_user("user5", "pass") + user5_tok = self.login(user5_id, "pass") + user6_id = self.register_user("user6", "pass") + user6_tok = self.login(user6_id, "pass") + user7_id = self.register_user("user7", "pass") + user7_tok = self.login(user7_id, "pass") + + room_id1 = self.helper.create_room_as( + user2_id, + tok=user2_tok, + extra_content={ + # No room name set so that `heroes` is populated + # + # "name": "my super room", + }, + ) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + self.helper.join(room_id1, user4_id, tok=user4_tok) + self.helper.join(room_id1, user5_id, tok=user5_tok) + self.helper.join(room_id1, user6_id, tok=user6_tok) + self.helper.join(room_id1, user7_id, tok=user7_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Room2 doesn't have a name so we should see `heroes` populated + self.assertIsNone(response_body["rooms"][room_id1].get("name")) + self.assertCountEqual( + [ + hero["user_id"] + for hero in response_body["rooms"][room_id1].get("heroes", []) + ], + # Heroes should be the first 5 users in the room (excluding the user + # themselves, we shouldn't see `user1`) + [user2_id, user3_id, user4_id, user5_id, user6_id], + ) + self.assertEqual( + response_body["rooms"][room_id1]["joined_count"], + 7, + ) + self.assertEqual( + response_body["rooms"][room_id1]["invited_count"], + 0, + ) + + # We didn't request any state so we shouldn't see any `required_state` + self.assertIsNone(response_body["rooms"][room_id1].get("required_state")) + + def test_rooms_meta_heroes_when_banned(self) -> None: + """ + Test that the `rooms` `heroes` are included in the response when the room + doesn't have a room name set but doesn't leak 
information past their ban.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user3_id = self.register_user("user3", "pass")
+        _user3_tok = self.login(user3_id, "pass")
+        user4_id = self.register_user("user4", "pass")
+        user4_tok = self.login(user4_id, "pass")
+        user5_id = self.register_user("user5", "pass")
+        _user5_tok = self.login(user5_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                # No room name set so that `heroes` is populated
+                #
+                # "name": "my super room",
+            },
+        )
+        # User1 joins the room
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+        # User3 is invited
+        self.helper.invite(room_id1, src=user2_id, targ=user3_id, tok=user2_tok)
+
+        # User1 is banned from the room
+        self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok)
+
+        # User4 joins the room after user1 is banned
+        self.helper.join(room_id1, user4_id, tok=user4_tok)
+        # User5 is invited after user1 is banned
+        self.helper.invite(room_id1, src=user2_id, targ=user5_id, tok=user2_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # The room doesn't have a name so we should see `heroes` populated
+        self.assertIsNone(response_body["rooms"][room_id1].get("name"))
+        self.assertCountEqual(
+            [
+                hero["user_id"]
+                for hero in response_body["rooms"][room_id1].get("heroes", [])
+            ],
+            # Heroes shouldn't include the user themselves (we shouldn't see user1). We
+            # also shouldn't see user4 since they joined after user1 was banned.
+            #
+            # FIXME: The actual result should be `[user2_id, user3_id]` but we currently
+            # don't support this for rooms where the user has left/been banned.
+            [],
+        )
+
+        self.assertEqual(
+            response_body["rooms"][room_id1]["joined_count"],
+            # FIXME: The actual number should be "1" (user2) but we currently don't
+            # support this for rooms where the user has left/been banned.
+            0,
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["invited_count"],
+            # We shouldn't see user5 since they were invited after user1 was banned.
+            #
+            # FIXME: The actual number should be "1" (user3) but we currently don't
+            # support this for rooms where the user has left/been banned.
+            0,
+        )
+
+    def test_rooms_bump_stamp(self) -> None:
+        """
+        Test that `bump_stamp` is present and pointing to relevant events.
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id1 = self.helper.create_room_as( + user1_id, + tok=user1_tok, + ) + event_response1 = message_response = self.helper.send( + room_id1, "message in room1", tok=user1_tok + ) + event_pos1 = self.get_success( + self.store.get_position_for_event(event_response1["event_id"]) + ) + room_id2 = self.helper.create_room_as( + user1_id, + tok=user1_tok, + ) + send_response2 = self.helper.send(room_id2, "message in room2", tok=user1_tok) + event_pos2 = self.get_success( + self.store.get_position_for_event(send_response2["event_id"]) + ) + + # Send a reaction in room1 but it shouldn't affect the `bump_stamp` + # because reactions are not part of the `DEFAULT_BUMP_EVENT_TYPES` + self.helper.send_event( + room_id1, + type=EventTypes.Reaction, + content={ + "m.relates_to": { + "event_id": message_response["event_id"], + "key": "👍", + "rel_type": "m.annotation", + } + }, + tok=user1_tok, + ) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 100, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Make sure it has the foo-list we requested + self.assertListEqual( + list(response_body["lists"].keys()), + ["foo-list"], + response_body["lists"].keys(), + ) + + # Make sure the list includes the rooms in the right order + self.assertListEqual( + list(response_body["lists"]["foo-list"]["ops"]), + [ + { + "op": "SYNC", + "range": [0, 1], + # room1 sorts before room2 because it has the latest event (the + # reaction) + "room_ids": [room_id1, room_id2], + } + ], + response_body["lists"]["foo-list"], + ) + + # The `bump_stamp` for room1 should point at the latest message (not the + # reaction since it's not one of the `DEFAULT_BUMP_EVENT_TYPES`) + self.assertEqual( + response_body["rooms"][room_id1]["bump_stamp"], + event_pos1.stream, + response_body["rooms"][room_id1], + ) + + # The `bump_stamp` for room2 should point at the latest message + self.assertEqual( + response_body["rooms"][room_id2]["bump_stamp"], + event_pos2.stream, + response_body["rooms"][room_id2], + ) + + def test_rooms_bump_stamp_backfill(self) -> None: + """ + Test that `bump_stamp` ignores backfilled events, i.e. events with a + negative stream ordering. 
+ """ + + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a remote room + creator = "@user:other" + room_id = "!foo:other" + shared_kwargs = { + "room_id": room_id, + "room_version": "10", + } + + create_tuple = self.get_success( + create_event( + self.hs, + prev_event_ids=[], + type=EventTypes.Create, + state_key="", + sender=creator, + **shared_kwargs, + ) + ) + creator_tuple = self.get_success( + create_event( + self.hs, + prev_event_ids=[create_tuple[0].event_id], + auth_event_ids=[create_tuple[0].event_id], + type=EventTypes.Member, + state_key=creator, + content={"membership": Membership.JOIN}, + sender=creator, + **shared_kwargs, + ) + ) + # We add a message event as a valid "bump type" + msg_tuple = self.get_success( + create_event( + self.hs, + prev_event_ids=[creator_tuple[0].event_id], + auth_event_ids=[create_tuple[0].event_id], + type=EventTypes.Message, + content={"body": "foo", "msgtype": "m.text"}, + sender=creator, + **shared_kwargs, + ) + ) + invite_tuple = self.get_success( + create_event( + self.hs, + prev_event_ids=[msg_tuple[0].event_id], + auth_event_ids=[create_tuple[0].event_id, creator_tuple[0].event_id], + type=EventTypes.Member, + state_key=user1_id, + content={"membership": Membership.INVITE}, + sender=creator, + **shared_kwargs, + ) + ) + + remote_events_and_contexts = [ + create_tuple, + creator_tuple, + msg_tuple, + invite_tuple, + ] + + # Ensure the local HS knows the room version + self.get_success( + self.store.store_room(room_id, creator, False, RoomVersions.V10) + ) + + # Persist these events as backfilled events. + persistence = self.hs.get_storage_controllers().persistence + assert persistence is not None + + for event, context in remote_events_and_contexts: + self.get_success(persistence.persist_event(event, context, backfilled=True)) + + # Now we join the local user to the room + join_tuple = self.get_success( + create_event( + self.hs, + prev_event_ids=[invite_tuple[0].event_id], + auth_event_ids=[create_tuple[0].event_id, invite_tuple[0].event_id], + type=EventTypes.Member, + state_key=user1_id, + content={"membership": Membership.JOIN}, + sender=user1_id, + **shared_kwargs, + ) + ) + self.get_success(persistence.persist_event(*join_tuple)) + + # Doing an SS request should return a positive `bump_stamp`, even though + # the only event that matches the bump types has as negative stream + # ordering. + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 5, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + self.assertGreater(response_body["rooms"][room_id]["bump_stamp"], 0) diff --git a/tests/rest/client/sliding_sync/test_rooms_required_state.py b/tests/rest/client/sliding_sync/test_rooms_required_state.py new file mode 100644 index 000000000000..a13cad223f4f --- /dev/null +++ b/tests/rest/client/sliding_sync/test_rooms_required_state.py @@ -0,0 +1,707 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . 
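The new file below exercises how `required_state` filters are matched against room state, including the `*` wildcard and the `$ME`/`$LAZY` sentinels from MSC3575. As a simplified, self-contained sketch of those matching semantics (illustrative only; not Synapse's actual implementation):

```python
from typing import Dict, List, Set, Tuple

WILDCARD = "*"
ME = "$ME"
LAZY = "$LAZY"


def filter_required_state(
    required_state: List[Tuple[str, str]],
    current_state: Dict[Tuple[str, str], dict],
    user_id: str,
    timeline_senders: Set[str],
) -> Dict[Tuple[str, str], dict]:
    """Return the subset of `current_state` matched by `required_state`."""
    matched: Dict[Tuple[str, str], dict] = {}
    for (event_type, state_key), event in current_state.items():
        for filter_type, filter_key in required_state:
            if filter_type != WILDCARD and filter_type != event_type:
                continue
            if filter_key == WILDCARD:
                pass  # any state_key matches
            elif filter_key == ME:
                if state_key != user_id:
                    continue
            elif filter_key == LAZY:
                # Lazy-loading only pulls in membership events for users who
                # sent something in the returned timeline
                if event_type != "m.room.member" or state_key not in timeline_senders:
                    continue
            elif filter_key != state_key:
                continue
            matched[(event_type, state_key)] = event
            break
    return matched
```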
+# +import logging + +from parameterized import parameterized + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import EventTypes, Membership +from synapse.handlers.sliding_sync import StateValues +from synapse.rest.client import login, room, sync +from synapse.server import HomeServer +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase +from tests.test_utils.event_injection import mark_event_as_partial_state + +logger = logging.getLogger(__name__) + + +class SlidingSyncRoomsRequiredStateTestCase(SlidingSyncBase): + """ + Test `rooms.required_state` in the Sliding Sync API. + """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + + def test_rooms_no_required_state(self) -> None: + """ + Empty `rooms.required_state` should not return any state events in the room + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + # Empty `required_state` + "required_state": [], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # No `required_state` in response + self.assertIsNone( + response_body["rooms"][room_id1].get("required_state"), + response_body["rooms"][room_id1], + ) + + def test_rooms_required_state_initial_sync(self) -> None: + """ + Test `rooms.required_state` returns requested state events in the room during an + initial sync. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.RoomHistoryVisibility, ""], + # This one doesn't exist in the room + [EventTypes.Tombstone, ""], + ], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + state_map[(EventTypes.RoomHistoryVisibility, "")], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_incremental_sync(self) -> None: + """ + Test `rooms.required_state` returns requested state events in the room during an + incremental sync. 
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Create, ""],
+                        [EventTypes.RoomHistoryVisibility, ""],
+                        # This one doesn't exist in the room
+                        [EventTypes.Tombstone, ""],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Send a message so the room comes down sync.
+        self.helper.send(room_id1, "msg", tok=user1_tok)
+
+        # Make the incremental Sliding Sync request
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # We only return `required_state` updates, and only for rooms we've already
+        # sent down the connection before; nothing changed here, so nothing comes back.
+        self.assertIsNone(response_body["rooms"][room_id1].get("required_state"))
+        self.assertIsNone(response_body["rooms"][room_id1].get("invite_state"))
+
+    def test_rooms_incremental_sync_restart(self) -> None:
+        """
+        Test that after a restart (and so the in-memory caches are reset), we
+        correctly return an `M_UNKNOWN_POS` error.
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Create, ""],
+                        [EventTypes.RoomHistoryVisibility, ""],
+                        # This one doesn't exist in the room
+                        [EventTypes.Tombstone, ""],
+                    ],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Reset the in-memory cache
+        self.hs.get_sliding_sync_handler().connection_store._connections.clear()
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            method="POST",
+            path=self.sync_endpoint + f"?pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 400, channel.json_body)
+        self.assertEqual(
+            channel.json_body["errcode"], "M_UNKNOWN_POS", channel.json_body
+        )
+
+    def test_rooms_required_state_wildcard(self) -> None:
+        """
+        Test `rooms.required_state` returns all state events when using wildcard `["*", "*"]`.
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "bar"}, + tok=user2_tok, + ) + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="namespaced", + body={"foo": "bar"}, + tok=user2_tok, + ) + + # Make the Sliding Sync request with wildcards for the `event_type` and `state_key` + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [StateValues.WILDCARD, StateValues.WILDCARD], + ], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + # We should see all the state events in the room + state_map.values(), + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_wildcard_event_type(self) -> None: + """ + Test `rooms.required_state` returns relevant state events when using wildcard in + the event_type `["*", "foobarbaz"]`. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "bar"}, + tok=user2_tok, + ) + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key=user2_id, + body={"foo": "bar"}, + tok=user2_tok, + ) + + # Make the Sliding Sync request with wildcards for the `event_type` + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [StateValues.WILDCARD, user2_id], + ], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # We expect at-least any state event with the `user2_id` as the `state_key` + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Member, user2_id)], + state_map[("org.matrix.foo_state", user2_id)], + }, + # Ideally, this would be exact but we're currently returning all state + # events when the `event_type` is a wildcard. + exact=False, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_wildcard_state_key(self) -> None: + """ + Test `rooms.required_state` returns relevant state events when using wildcard in + the state_key `["foobarbaz","*"]`. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request with wildcards for the `state_key` + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Member, StateValues.WILDCARD], + ], + "timeline_limit": 0, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Member, user1_id)], + state_map[(EventTypes.Member, user2_id)], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_lazy_loading_room_members(self) -> None: + """ + Test `rooms.required_state` returns people relevant to the timeline when + lazy-loading room members, `["m.room.member","$LAZY"]`. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + user3_id = self.register_user("user3", "pass") + user3_tok = self.login(user3_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.join(room_id1, user3_id, tok=user3_tok) + + self.helper.send(room_id1, "1", tok=user2_tok) + self.helper.send(room_id1, "2", tok=user3_tok) + self.helper.send(room_id1, "3", tok=user2_tok) + + # Make the Sliding Sync request with lazy loading for the room members + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 3, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + # Only user2 and user3 sent events in the 3 events we see in the `timeline` + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + state_map[(EventTypes.Member, user2_id)], + state_map[(EventTypes.Member, user3_id)], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_me(self) -> None: + """ + Test `rooms.required_state` correctly handles $ME. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + self.helper.send(room_id1, "1", tok=user2_tok) + + # Also send normal state events with state keys of the users, first + # change the power levels to allow this. 
+        self.helper.send_state(
+            room_id1,
+            event_type=EventTypes.PowerLevels,
+            body={"users": {user1_id: 50, user2_id: 100}},
+            tok=user2_tok,
+        )
+        self.helper.send_state(
+            room_id1,
+            event_type="org.matrix.foo",
+            state_key=user1_id,
+            body={},
+            tok=user1_tok,
+        )
+        self.helper.send_state(
+            room_id1,
+            event_type="org.matrix.foo",
+            state_key=user2_id,
+            body={},
+            tok=user2_tok,
+        )
+
+        # Make the Sliding Sync request with a request for '$ME'.
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Create, ""],
+                        [EventTypes.Member, StateValues.ME],
+                        ["org.matrix.foo", StateValues.ME],
+                    ],
+                    "timeline_limit": 3,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        state_map = self.get_success(
+            self.storage_controllers.state.get_current_state(room_id1)
+        )
+
+        # We should only see state events where the `state_key` is user1 (i.e. `$ME`)
+        self._assertRequiredStateIncludes(
+            response_body["rooms"][room_id1]["required_state"],
+            {
+                state_map[(EventTypes.Create, "")],
+                state_map[(EventTypes.Member, user1_id)],
+                state_map[("org.matrix.foo", user1_id)],
+            },
+            exact=True,
+        )
+        self.assertIsNone(response_body["rooms"][room_id1].get("invite_state"))
+
+    @parameterized.expand([(Membership.LEAVE,), (Membership.BAN,)])
+    def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None:
+        """
+        Test `rooms.required_state` should not return state past a leave/ban event.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+        user3_id = self.register_user("user3", "pass")
+        user3_tok = self.login(user3_id, "pass")
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [
+                        [EventTypes.Create, ""],
+                        [EventTypes.Member, "*"],
+                        ["org.matrix.foo_state", ""],
+                    ],
+                    "timeline_limit": 3,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+        self.helper.join(room_id1, user3_id, tok=user3_tok)
+
+        self.helper.send_state(
+            room_id1,
+            event_type="org.matrix.foo_state",
+            state_key="",
+            body={"foo": "bar"},
+            tok=user2_tok,
+        )
+
+        if stop_membership == Membership.LEAVE:
+            # User 1 leaves
+            self.helper.leave(room_id1, user1_id, tok=user1_tok)
+        elif stop_membership == Membership.BAN:
+            # User 1 is banned
+            self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok)
+
+        state_map = self.get_success(
+            self.storage_controllers.state.get_current_state(room_id1)
+        )
+
+        # Change the state after user 1 leaves
+        self.helper.send_state(
+            room_id1,
+            event_type="org.matrix.foo_state",
+            state_key="",
+            body={"foo": "qux"},
+            tok=user2_tok,
+        )
+        self.helper.leave(room_id1, user3_id, tok=user3_tok)
+
+        # Make the incremental Sliding Sync request
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # We should only see the state as it was at the time user1 left/was banned
+        # (the `{"foo": "qux"}` update and user3's leave should not be visible)
+        self._assertRequiredStateIncludes(
+            response_body["rooms"][room_id1]["required_state"],
+            {
+                state_map[(EventTypes.Create, "")],
+                state_map[(EventTypes.Member, user1_id)],
+                state_map[(EventTypes.Member, user2_id)],
+                state_map[(EventTypes.Member, user3_id)],
+                state_map[("org.matrix.foo_state", "")],
+            },
+            exact=True,
+        )
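The assertion above pins `required_state` to a snapshot taken at the leave/ban: `state_map` was captured before the `{"foo": "qux"}` update and before user3 left, so neither change should be visible. A simplified illustration of that "state at the membership event" idea (assuming events expose a stream ordering; not how Synapse actually resolves it):

```python
from typing import Dict, List, Tuple


def state_at_membership_event(
    state_events: List[dict], leave_stream_ordering: int
) -> Dict[Tuple[str, str], dict]:
    """Replay state events in stream order, stopping at the user's leave/ban."""
    state: Dict[Tuple[str, str], dict] = {}
    for event in sorted(state_events, key=lambda e: e["stream_ordering"]):
        if event["stream_ordering"] > leave_stream_ordering:
            break  # the user shouldn't see state changes past their leave/ban
        state[(event["type"], event["state_key"])] = event
    return state
```

+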
self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_combine_superset(self) -> None: + """ + Test `rooms.required_state` is combined across lists and room subscriptions. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + self.helper.send_state( + room_id1, + event_type="org.matrix.foo_state", + state_key="", + body={"foo": "bar"}, + tok=user2_tok, + ) + self.helper.send_state( + room_id1, + event_type="org.matrix.bar_state", + state_key="", + body={"bar": "qux"}, + tok=user2_tok, + ) + + # Make the Sliding Sync request with wildcards for the `state_key` + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + [EventTypes.Member, user1_id], + ], + "timeline_limit": 0, + }, + "bar-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Member, StateValues.WILDCARD], + ["org.matrix.foo_state", ""], + ], + "timeline_limit": 0, + }, + }, + "room_subscriptions": { + room_id1: { + "required_state": [["org.matrix.bar_state", ""]], + "timeline_limit": 0, + } + }, + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + state_map = self.get_success( + self.storage_controllers.state.get_current_state(room_id1) + ) + + self._assertRequiredStateIncludes( + response_body["rooms"][room_id1]["required_state"], + { + state_map[(EventTypes.Create, "")], + state_map[(EventTypes.Member, user1_id)], + state_map[(EventTypes.Member, user2_id)], + state_map[("org.matrix.foo_state", "")], + state_map[("org.matrix.bar_state", "")], + }, + exact=True, + ) + self.assertIsNone(response_body["rooms"][room_id1].get("invite_state")) + + def test_rooms_required_state_partial_state(self) -> None: + """ + Test partially-stated room are excluded unless `rooms.required_state` is + lazy-loading room members. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok) + _join_response1 = self.helper.join(room_id1, user1_id, tok=user1_tok) + join_response2 = self.helper.join(room_id2, user1_id, tok=user1_tok) + + # Mark room2 as partial state + self.get_success( + mark_event_as_partial_state(self.hs, join_response2["event_id"], room_id2) + ) + + # Make the Sliding Sync request (NOT lazy-loading room members) + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + ], + "timeline_limit": 0, + }, + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Make sure the list includes room1 but room2 is excluded because it's still + # partially-stated + self.assertListEqual( + list(response_body["lists"]["foo-list"]["ops"]), + [ + { + "op": "SYNC", + "range": [0, 1], + "room_ids": [room_id1], + } + ], + response_body["lists"]["foo-list"], + ) + + # Make the Sliding Sync request (with lazy-loading room members) + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [ + [EventTypes.Create, ""], + # Lazy-load room members + [EventTypes.Member, StateValues.LAZY], + ], + "timeline_limit": 0, + }, + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # The list should include both rooms now because we're lazy-loading room members + self.assertListEqual( + list(response_body["lists"]["foo-list"]["ops"]), + [ + { + "op": "SYNC", + "range": [0, 1], + "room_ids": [room_id2, room_id1], + } + ], + response_body["lists"]["foo-list"], + ) diff --git a/tests/rest/client/sliding_sync/test_rooms_timeline.py b/tests/rest/client/sliding_sync/test_rooms_timeline.py new file mode 100644 index 000000000000..2e9586ca733b --- /dev/null +++ b/tests/rest/client/sliding_sync/test_rooms_timeline.py @@ -0,0 +1,575 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging +from typing import List, Optional + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.rest.client import login, room, sync +from synapse.server import HomeServer +from synapse.types import StreamToken, StrSequence +from synapse.util import Clock + +from tests.rest.client.sliding_sync.test_sliding_sync import SlidingSyncBase + +logger = logging.getLogger(__name__) + + +class SlidingSyncRoomsTimelineTestCase(SlidingSyncBase): + """ + Test `rooms.timeline` in the Sliding Sync API. 
+ """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.storage_controllers = hs.get_storage_controllers() + + def _assertListEqual( + self, + actual_items: StrSequence, + expected_items: StrSequence, + message: Optional[str] = None, + ) -> None: + """ + Like `self.assertListEqual(...)` but with an actually understandable diff message. + """ + + if actual_items == expected_items: + return + + expected_lines: List[str] = [] + for expected_item in expected_items: + is_expected_in_actual = expected_item in actual_items + expected_lines.append( + "{} {}".format(" " if is_expected_in_actual else "?", expected_item) + ) + + actual_lines: List[str] = [] + for actual_item in actual_items: + is_actual_in_expected = actual_item in expected_items + actual_lines.append( + "{} {}".format("+" if is_actual_in_expected else " ", actual_item) + ) + + newline = "\n" + expected_string = f"Expected items to be in actual ('?' = missing expected items):\n [\n{newline.join(expected_lines)}\n ]" + actual_string = f"Actual ('+' = found expected items):\n [\n{newline.join(actual_lines)}\n ]" + first_message = "Items must" + diff_message = f"{first_message}\n{expected_string}\n{actual_string}" + + self.fail(f"{diff_message}\n{message}") + + def _assertTimelineEqual( + self, + *, + room_id: str, + actual_event_ids: List[str], + expected_event_ids: List[str], + message: Optional[str] = None, + ) -> None: + """ + Like `self.assertListEqual(...)` for event IDs in a room but will give a nicer + output with context for what each event_id is (type, stream_ordering, content, + etc). + """ + if actual_event_ids == expected_event_ids: + return + + event_id_set = set(actual_event_ids + expected_event_ids) + events = self.get_success(self.store.get_events(event_id_set)) + + def event_id_to_string(event_id: str) -> str: + event = events.get(event_id) + if event: + state_key = event.get_state_key() + state_key_piece = f", {state_key}" if state_key is not None else "" + return ( + f"({event.internal_metadata.stream_ordering: >2}, {event.internal_metadata.instance_name}) " + + f"{event.event_id} ({event.type}{state_key_piece}) {event.content.get('membership', '')}{event.content.get('body', '')}" + ) + + return f"{event_id} " + + self._assertListEqual( + actual_items=[ + event_id_to_string(event_id) for event_id in actual_event_ids + ], + expected_items=[ + event_id_to_string(event_id) for event_id in expected_event_ids + ], + message=message, + ) + + def test_rooms_limited_initial_sync(self) -> None: + """ + Test that we mark `rooms` as `limited=True` when we saturate the `timeline_limit` + on initial sync. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity1", tok=user2_tok) + self.helper.send(room_id1, "activity2", tok=user2_tok) + event_response3 = self.helper.send(room_id1, "activity3", tok=user2_tok) + event_pos3 = self.get_success( + self.store.get_position_for_event(event_response3["event_id"]) + ) + event_response4 = self.helper.send(room_id1, "activity4", tok=user2_tok) + event_pos4 = self.get_success( + self.store.get_position_for_event(event_response4["event_id"]) + ) + event_response5 = self.helper.send(room_id1, "activity5", tok=user2_tok) + user1_join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 3, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # We expect to saturate the `timeline_limit` (there are more than 3 messages in the room) + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + True, + response_body["rooms"][room_id1], + ) + # Check to make sure the latest events are returned + self._assertTimelineEqual( + room_id=room_id1, + actual_event_ids=[ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + expected_event_ids=[ + event_response4["event_id"], + event_response5["event_id"], + user1_join_response["event_id"], + ], + message=str(response_body["rooms"][room_id1]["timeline"]), + ) + + # Check to make sure the `prev_batch` points at the right place + prev_batch_token = self.get_success( + StreamToken.from_string( + self.store, response_body["rooms"][room_id1]["prev_batch"] + ) + ) + prev_batch_room_stream_token_serialized = self.get_success( + prev_batch_token.room_key.to_string(self.store) + ) + # If we use the `prev_batch` token to look backwards, we should see `event3` + # next so make sure the token encompasses it + self.assertEqual( + event_pos3.persisted_after(prev_batch_token.room_key), + False, + f"`prev_batch` token {prev_batch_room_stream_token_serialized} should be >= event_pos3={self.get_success(event_pos3.to_room_stream_token().to_string(self.store))}", + ) + # If we use the `prev_batch` token to look backwards, we shouldn't see `event4` + # anymore since it was just returned in this response. + self.assertEqual( + event_pos4.persisted_after(prev_batch_token.room_key), + True, + f"`prev_batch` token {prev_batch_room_stream_token_serialized} should be < event_pos4={self.get_success(event_pos4.to_room_stream_token().to_string(self.store))}", + ) + + # With no `from_token` (initial sync), it's all historical since there is no + # "live" range + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 0, + response_body["rooms"][room_id1], + ) + + def test_rooms_not_limited_initial_sync(self) -> None: + """ + Test that we mark `rooms` as `limited=False` when there are no more events to + paginate to. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity1", tok=user2_tok) + self.helper.send(room_id1, "activity2", tok=user2_tok) + self.helper.send(room_id1, "activity3", tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Make the Sliding Sync request + timeline_limit = 100 + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": timeline_limit, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # The timeline should be `limited=False` because we have all of the events (no + # more to paginate to) + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + False, + response_body["rooms"][room_id1], + ) + expected_number_of_events = 9 + # We're just looking to make sure we got all of the events before hitting the `timeline_limit` + self.assertEqual( + len(response_body["rooms"][room_id1]["timeline"]), + expected_number_of_events, + response_body["rooms"][room_id1]["timeline"], + ) + self.assertLessEqual(expected_number_of_events, timeline_limit) + + # With no `from_token` (initial sync), it's all historical since there is no + # "live" token range. + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 0, + response_body["rooms"][room_id1], + ) + + def test_rooms_incremental_sync(self) -> None: + """ + Test `rooms` data during an incremental sync after an initial sync. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + self.helper.send(room_id1, "activity before initial sync1", tok=user2_tok) + + # Make an initial Sliding Sync request to grab a token. This is also a sanity + # check that we can go from initial to incremental sync. + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 3, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Send some events but don't send enough to saturate the `timeline_limit`. + # We want to later test that we only get the new events since the `next_pos` + event_response2 = self.helper.send(room_id1, "activity after2", tok=user2_tok) + event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) + + # Make an incremental Sliding Sync request (what we're trying to test) + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # We only expect to see the new events since the last sync which isn't enough to + # fill up the `timeline_limit`. + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + False, + f'Our `timeline_limit` was {sync_body["lists"]["foo-list"]["timeline_limit"]} ' + + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. 
' + + str(response_body["rooms"][room_id1]), + ) + # Check to make sure the latest events are returned + self._assertTimelineEqual( + room_id=room_id1, + actual_event_ids=[ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + expected_event_ids=[ + event_response2["event_id"], + event_response3["event_id"], + ], + message=str(response_body["rooms"][room_id1]["timeline"]), + ) + + # All events are "live" + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 2, + response_body["rooms"][room_id1], + ) + + def test_rooms_newly_joined_incremental_sync(self) -> None: + """ + Test that when we make an incremental sync with a `newly_joined` `rooms`, we are + able to see some historical events before the `from_token`. + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity before token1", tok=user2_tok) + event_response2 = self.helper.send( + room_id1, "activity before token2", tok=user2_tok + ) + + # The `timeline_limit` is set to 4 so we can at least see one historical event + # before the `from_token`. We should see historical events because this is a + # `newly_joined` room. + timeline_limit = 4 + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": timeline_limit, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + # Join the room after the `from_token` which will make us consider this room as + # `newly_joined`. + user1_join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) + + # Send some events but don't send enough to saturate the `timeline_limit`. + # We want to later test that we only get the new events since the `next_pos` + event_response3 = self.helper.send( + room_id1, "activity after token3", tok=user2_tok + ) + event_response4 = self.helper.send( + room_id1, "activity after token4", tok=user2_tok + ) + + # Make an incremental Sliding Sync request (what we're trying to test) + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # We should see the new events and the rest should be filled with historical + # events which will make us `limited=True` since there are more to paginate to. + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + True, + f"Our `timeline_limit` was {timeline_limit} " + + f'and {len(response_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. ' + + str(response_body["rooms"][room_id1]), + ) + # Check to make sure that the "live" and historical events are returned + self._assertTimelineEqual( + room_id=room_id1, + actual_event_ids=[ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + expected_event_ids=[ + event_response2["event_id"], + user1_join_response["event_id"], + event_response3["event_id"], + event_response4["event_id"], + ], + message=str(response_body["rooms"][room_id1]["timeline"]), + ) + + # Only events after the `from_token` are "live" (join, event3, event4) + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 3, + response_body["rooms"][room_id1], + ) + + def test_rooms_ban_initial_sync(self) -> None: + """ + Test that `rooms` we are banned from in an intial sync only allows us to see + timeline events up to the ban event. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity before1", tok=user2_tok) + self.helper.send(room_id1, "activity before2", tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) + event_response4 = self.helper.send(room_id1, "activity after4", tok=user2_tok) + user1_ban_response = self.helper.ban( + room_id1, src=user2_id, targ=user1_id, tok=user2_tok + ) + + self.helper.send(room_id1, "activity after5", tok=user2_tok) + self.helper.send(room_id1, "activity after6", tok=user2_tok) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 3, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # We should see events before the ban but not after + self._assertTimelineEqual( + room_id=room_id1, + actual_event_ids=[ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + expected_event_ids=[ + event_response3["event_id"], + event_response4["event_id"], + user1_ban_response["event_id"], + ], + message=str(response_body["rooms"][room_id1]["timeline"]), + ) + # No "live" events in an initial sync (no `from_token` to define the "live" + # range) + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 0, + response_body["rooms"][room_id1], + ) + # There are more events to paginate to + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + True, + response_body["rooms"][room_id1], + ) + + def test_rooms_ban_incremental_sync1(self) -> None: + """ + Test that `rooms` we are banned from during the next incremental sync only + allows us to see timeline events up to the ban event. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity before1", tok=user2_tok) + self.helper.send(room_id1, "activity before2", tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 4, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) + event_response4 = self.helper.send(room_id1, "activity after4", tok=user2_tok) + # The ban is within the token range (between the `from_token` and the sliding + # sync request) + user1_ban_response = self.helper.ban( + room_id1, src=user2_id, targ=user1_id, tok=user2_tok + ) + + self.helper.send(room_id1, "activity after5", tok=user2_tok) + self.helper.send(room_id1, "activity after6", tok=user2_tok) + + # Make the incremental Sliding Sync request + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # We should see events before the ban but not after + self._assertTimelineEqual( + room_id=room_id1, + actual_event_ids=[ + event["event_id"] + for event in response_body["rooms"][room_id1]["timeline"] + ], + expected_event_ids=[ + event_response3["event_id"], + event_response4["event_id"], + user1_ban_response["event_id"], + ], + message=str(response_body["rooms"][room_id1]["timeline"]), + ) + # All live events in the incremental sync + self.assertEqual( + response_body["rooms"][room_id1]["num_live"], + 3, + response_body["rooms"][room_id1], + ) + # There aren't anymore events to paginate to in this range + self.assertEqual( + response_body["rooms"][room_id1]["limited"], + False, + response_body["rooms"][room_id1], + ) + + def test_rooms_ban_incremental_sync2(self) -> None: + """ + Test that `rooms` we are banned from before the incremental sync don't return + any events in the timeline. 
+ """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + user2_id = self.register_user("user2", "pass") + user2_tok = self.login(user2_id, "pass") + + room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) + self.helper.send(room_id1, "activity before1", tok=user2_tok) + self.helper.join(room_id1, user1_id, tok=user1_tok) + + self.helper.send(room_id1, "activity after2", tok=user2_tok) + # The ban is before we get our `from_token` + self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) + + self.helper.send(room_id1, "activity after3", tok=user2_tok) + + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 1]], + "required_state": [], + "timeline_limit": 4, + } + } + } + _, from_token = self.do_sync(sync_body, tok=user1_tok) + + self.helper.send(room_id1, "activity after4", tok=user2_tok) + + # Make the incremental Sliding Sync request + response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok) + + # Nothing to see for this banned user in the room in the token range + self.assertIsNone(response_body["rooms"].get(room_id1)) diff --git a/tests/rest/client/sliding_sync/test_sliding_sync.py b/tests/rest/client/sliding_sync/test_sliding_sync.py new file mode 100644 index 000000000000..cb7638c5ba63 --- /dev/null +++ b/tests/rest/client/sliding_sync/test_sliding_sync.py @@ -0,0 +1,974 @@ +# +# This file is licensed under the Affero General Public License (AGPL) version 3. +# +# Copyright (C) 2024 New Vector, Ltd +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# See the GNU Affero General Public License for more details: +# . +# +import logging +from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple + +from typing_extensions import assert_never + +from twisted.test.proto_helpers import MemoryReactor + +import synapse.rest.admin +from synapse.api.constants import ( + AccountDataTypes, + EventContentFields, + EventTypes, + RoomTypes, +) +from synapse.events import EventBase +from synapse.rest.client import devices, login, receipts, room, sync +from synapse.server import HomeServer +from synapse.types import ( + JsonDict, + RoomStreamToken, + SlidingSyncStreamToken, + StreamKeyType, + StreamToken, +) +from synapse.util import Clock +from synapse.util.stringutils import random_string + +from tests import unittest +from tests.server import TimedOutException + +logger = logging.getLogger(__name__) + + +class SlidingSyncBase(unittest.HomeserverTestCase): + """Base class for sliding sync test cases""" + + sync_endpoint = "/_matrix/client/unstable/org.matrix.simplified_msc3575/sync" + + def default_config(self) -> JsonDict: + config = super().default_config() + # Enable sliding sync + config["experimental_features"] = {"msc3575_enabled": True} + return config + + def do_sync( + self, sync_body: JsonDict, *, since: Optional[str] = None, tok: str + ) -> Tuple[JsonDict, str]: + """Do a sliding sync request with given body. + + Asserts the request was successful. + + Attributes: + sync_body: The full request body to use + since: Optional since token + tok: Access token to use + + Returns: + A tuple of the response body and the `pos` field. 
+        """
+
+        sync_path = self.sync_endpoint
+        if since:
+            sync_path += f"?pos={since}"
+
+        channel = self.make_request(
+            method="POST",
+            path=sync_path,
+            content=sync_body,
+            access_token=tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        return channel.json_body, channel.json_body["pos"]
+
+    def _assertRequiredStateIncludes(
+        self,
+        actual_required_state: Any,
+        expected_state_events: Iterable[EventBase],
+        exact: bool = False,
+    ) -> None:
+        """
+        Wrapper around `assertIncludes` to give slightly better looking diff error
+        messages that include some context "$event_id (type, state_key)".
+
+        Args:
+            actual_required_state: The "required_state" of a room from a Sliding Sync
+                request response.
+            expected_state_events: The expected state events to be included in the
+                `actual_required_state`.
+            exact: Whether the actual state should be exactly equal to the expected
+                state (no extras).
+        """
+
+        assert isinstance(actual_required_state, list)
+        for event in actual_required_state:
+            assert isinstance(event, dict)
+
+        self.assertIncludes(
+            {
+                f'{event["event_id"]} ("{event["type"]}", "{event["state_key"]}")'
+                for event in actual_required_state
+            },
+            {
+                f'{event.event_id} ("{event.type}", "{event.state_key}")'
+                for event in expected_state_events
+            },
+            exact=exact,
+            # Message to help understand the diff in context
+            message=str(actual_required_state),
+        )
+
+    def _bump_notifier_wait_for_events(
+        self,
+        user_id: str,
+        wake_stream_key: Literal[
+            StreamKeyType.ACCOUNT_DATA,
+            StreamKeyType.PRESENCE,
+        ],
+    ) -> None:
+        """
+        Wake up a `notifier.wait_for_events(user_id)` call without affecting the Sliding
+        Sync results.
+
+        Args:
+            user_id: The user ID to wake up the notifier for
+            wake_stream_key: The stream key to wake up. This will create an actual new
+                entity in that stream so it's best to choose one that won't affect the
+                Sliding Sync results you're testing for. In other words, if you're
+                testing account data, choose `StreamKeyType.PRESENCE` instead. We
+                support two possible stream keys because you're probably testing one or
+                the other so one is always a "safe" option.
+        """
+        # We're expecting some new activity from this point onwards
+        from_token = self.hs.get_event_sources().get_current_token()
+
+        triggered_notifier_wait_for_events = False
+
+        async def _on_new_activity(
+            before_token: StreamToken, after_token: StreamToken
+        ) -> bool:
+            nonlocal triggered_notifier_wait_for_events
+            triggered_notifier_wait_for_events = True
+            return True
+
+        notifier = self.hs.get_notifier()
+
+        # Listen for some new activity for the user. We're just trying to confirm that
+        # our bump below actually does what we think it does (triggers new activity for
+        # the user).
+        result_awaitable = notifier.wait_for_events(
+            user_id,
+            1000,
+            _on_new_activity,
+            from_token=from_token,
+        )
+
+        # Update the account data or presence so that `notifier.wait_for_events(...)`
+        # wakes up. We chose these two options because they're least likely to show up
+        # in the Sliding Sync response so it won't affect whether we have results; see
+        # the standalone sketch below, after which the real branches follow.
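A stripped-down sketch of that wake-up pattern (plain `asyncio`, nothing Synapse-specific): a long-poll waiter is woken by activity on an unrelated stream and can then re-evaluate whether it has anything to return.

```python
import asyncio


async def demo() -> None:
    new_activity = asyncio.Event()
    woken = False

    async def wait_for_events(timeout: float) -> bool:
        # Long-poll helper: block until some stream reports activity (or we
        # time out), then let the caller re-evaluate its results.
        nonlocal woken
        try:
            await asyncio.wait_for(new_activity.wait(), timeout)
        except asyncio.TimeoutError:
            return False
        woken = True
        return True

    waiter = asyncio.create_task(wait_for_events(timeout=10.0))

    # Bump an unrelated stream: this wakes the waiter (so the sync code
    # re-evaluates its response) without adding anything it would return.
    await asyncio.sleep(0.1)
    new_activity.set()

    assert await waiter
    assert woken


asyncio.run(demo())
```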
+ if wake_stream_key == StreamKeyType.ACCOUNT_DATA: + self.get_success( + self.hs.get_account_data_handler().add_account_data_for_user( + user_id, + "org.matrix.foobarbaz", + {"foo": "bar"}, + ) + ) + elif wake_stream_key == StreamKeyType.PRESENCE: + sending_user_id = self.register_user( + "user_bump_notifier_wait_for_events_" + random_string(10), "pass" + ) + sending_user_tok = self.login(sending_user_id, "pass") + test_msg = {"foo": "bar"} + chan = self.make_request( + "PUT", + "/_matrix/client/r0/sendToDevice/m.test/1234", + content={"messages": {user_id: {"d1": test_msg}}}, + access_token=sending_user_tok, + ) + self.assertEqual(chan.code, 200, chan.result) + else: + assert_never(wake_stream_key) + + # Wait for our notifier result + self.get_success(result_awaitable) + + if not triggered_notifier_wait_for_events: + raise AssertionError( + "Expected `notifier.wait_for_events(...)` to be triggered" + ) + + +class SlidingSyncTestCase(SlidingSyncBase): + """ + Tests regarding MSC3575 Sliding Sync `/sync` endpoint. + + Please put tests in more specific test files if applicable. This test class is meant + for generic behavior of the endpoint. + """ + + servlets = [ + synapse.rest.admin.register_servlets, + login.register_servlets, + room.register_servlets, + sync.register_servlets, + devices.register_servlets, + receipts.register_servlets, + ] + + def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: + self.store = hs.get_datastores().main + self.event_sources = hs.get_event_sources() + self.storage_controllers = hs.get_storage_controllers() + self.account_data_handler = hs.get_account_data_handler() + + def _add_new_dm_to_global_account_data( + self, source_user_id: str, target_user_id: str, target_room_id: str + ) -> None: + """ + Helper to handle inserting a new DM for the source user into global account data + (handles all of the list merging). + + Args: + source_user_id: The user ID of the DM mapping we're going to update + target_user_id: User ID of the person the DM is with + target_room_id: Room ID of the DM + """ + + # Get the current DM map + existing_dm_map = self.get_success( + self.store.get_global_account_data_by_type_for_user( + source_user_id, AccountDataTypes.DIRECT + ) + ) + # Scrutinize the account data since it has no concrete type. We're just copying + # everything into a known type. It should be a mapping from user ID to a list of + # room IDs. Ignore anything else. + new_dm_map: Dict[str, List[str]] = {} + if isinstance(existing_dm_map, dict): + for user_id, room_ids in existing_dm_map.items(): + if isinstance(user_id, str) and isinstance(room_ids, list): + for room_id in room_ids: + if isinstance(room_id, str): + new_dm_map[user_id] = new_dm_map.get(user_id, []) + [ + room_id + ] + + # Add the new DM to the map + new_dm_map[target_user_id] = new_dm_map.get(target_user_id, []) + [ + target_room_id + ] + # Save the DM map to global account data + self.get_success( + self.store.add_account_data_for_user( + source_user_id, + AccountDataTypes.DIRECT, + new_dm_map, + ) + ) + + def _create_dm_room( + self, + inviter_user_id: str, + inviter_tok: str, + invitee_user_id: str, + invitee_tok: str, + should_join_room: bool = True, + ) -> str: + """ + Helper to create a DM room as the "inviter" and invite the "invitee" user to the + room. The "invitee" user also will join the room. The `m.direct` account data + will be set for both users. 
+ """ + + # Create a room and send an invite the other user + room_id = self.helper.create_room_as( + inviter_user_id, + is_public=False, + tok=inviter_tok, + ) + self.helper.invite( + room_id, + src=inviter_user_id, + targ=invitee_user_id, + tok=inviter_tok, + extra_data={"is_direct": True}, + ) + if should_join_room: + # Person that was invited joins the room + self.helper.join(room_id, invitee_user_id, tok=invitee_tok) + + # Mimic the client setting the room as a direct message in the global account + # data for both users. + self._add_new_dm_to_global_account_data( + invitee_user_id, inviter_user_id, room_id + ) + self._add_new_dm_to_global_account_data( + inviter_user_id, invitee_user_id, room_id + ) + + return room_id + + def test_sync_list(self) -> None: + """ + Test that room IDs show up in the Sliding Sync `lists` + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + room_id = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 99]], + "required_state": [], + "timeline_limit": 1, + } + } + } + response_body, _ = self.do_sync(sync_body, tok=user1_tok) + + # Make sure it has the foo-list we requested + self.assertListEqual( + list(response_body["lists"].keys()), + ["foo-list"], + response_body["lists"].keys(), + ) + + # Make sure the list includes the room we are joined to + self.assertListEqual( + list(response_body["lists"]["foo-list"]["ops"]), + [ + { + "op": "SYNC", + "range": [0, 99], + "room_ids": [room_id], + } + ], + response_body["lists"]["foo-list"], + ) + + def test_wait_for_sync_token(self) -> None: + """ + Test that worker will wait until it catches up to the given token + """ + user1_id = self.register_user("user1", "pass") + user1_tok = self.login(user1_id, "pass") + + # Create a future token that will cause us to wait. Since we never send a new + # event to reach that future stream_ordering, the worker will wait until the + # full timeout. + stream_id_gen = self.store.get_events_stream_id_generator() + stream_id = self.get_success(stream_id_gen.get_next().__aenter__()) + current_token = self.event_sources.get_current_token() + future_position_token = current_token.copy_and_replace( + StreamKeyType.ROOM, + RoomStreamToken(stream=stream_id), + ) + + future_position_token_serialized = self.get_success( + SlidingSyncStreamToken(future_position_token, 0).to_string(self.store) + ) + + # Make the Sliding Sync request + sync_body = { + "lists": { + "foo-list": { + "ranges": [[0, 99]], + "required_state": [], + "timeline_limit": 1, + } + } + } + channel = self.make_request( + "POST", + self.sync_endpoint + f"?pos={future_position_token_serialized}", + content=sync_body, + access_token=user1_tok, + await_result=False, + ) + # Block for 10 seconds to make `notifier.wait_for_stream_token(from_token)` + # timeout + with self.assertRaises(TimedOutException): + channel.await_result(timeout_ms=9900) + channel.await_result(timeout_ms=200) + self.assertEqual(channel.code, 200, channel.json_body) + + # We expect the next `pos` in the result to be the same as what we requested + # with because we weren't able to find anything new yet. + self.assertEqual(channel.json_body["pos"], future_position_token_serialized) + + def test_wait_for_new_data(self) -> None: + """ + Test to make sure that the Sliding Sync request waits for new data to arrive. 
+
+    def test_wait_for_new_data(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive.
+
+        (Only applies to incremental syncs with a `timeout` specified)
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 0]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Bump the room with new events to trigger new results
+        event_response1 = self.helper.send(
+            room_id, "new activity in room", tok=user1_tok
+        )
+        # Should respond before the 10 second timeout
+        channel.await_result(timeout_ms=3000)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # Check to make sure the new event is returned
+        self.assertEqual(
+            [
+                event["event_id"]
+                for event in channel.json_body["rooms"][room_id]["timeline"]
+            ],
+            [
+                event_response1["event_id"],
+            ],
+            channel.json_body["rooms"][room_id]["timeline"],
+        )
+
+    def test_wait_for_new_data_timeout(self) -> None:
+        """
+        Test to make sure that the Sliding Sync request waits for new data to arrive but
+        no data ever arrives, so we time out. We're also making sure that the default
+        data doesn't trigger a false-positive for new data.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 0]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?timeout=10000&pos={from_token}",
+            content=sync_body,
+            access_token=user1_tok,
+            await_result=False,
+        )
+        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=5000)
+        # Wake up `notifier.wait_for_events(...)`, which will cause us to test
+        # `SlidingSyncResult.__bool__` for new results.
+        self._bump_notifier_wait_for_events(
+            user1_id, wake_stream_key=StreamKeyType.ACCOUNT_DATA
+        )
+        # Block for a little bit more to ensure we don't see any new results.
+        with self.assertRaises(TimedOutException):
+            channel.await_result(timeout_ms=4000)
+        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
+        # 5000 + 4000 + 1200 > 10000)
+        channel.await_result(timeout_ms=1200)
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # There should be no room sent down.
+        self.assertFalse(channel.json_body["rooms"])
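The timeout test above asserts the classic "wake, re-check, go back to sleep" notifier pattern: a wake-up that produces no sliding sync results must not end the long-poll early. A minimal asyncio sketch of that behavior, using only hypothetical names (none of these exist in Synapse), might look like:

```python
import asyncio


async def wait_for_events(has_results, wakeups: "asyncio.Queue[None]", timeout: float) -> bool:
    """Wake on activity, re-check for results, otherwise keep waiting out the timeout."""
    loop = asyncio.get_running_loop()
    deadline = loop.time() + timeout
    while True:
        remaining = deadline - loop.time()
        if remaining <= 0:
            return False  # timed out with no results (empty response)
        try:
            await asyncio.wait_for(wakeups.get(), remaining)
        except asyncio.TimeoutError:
            return False
        if has_results():  # conceptually like `SlidingSyncResult.__bool__`
            return True  # real data: respond before the timeout


async def main() -> None:
    q: "asyncio.Queue[None]" = asyncio.Queue()
    waiter = asyncio.create_task(wait_for_events(lambda: False, q, 0.2))
    await q.put(None)  # a wake-up that yields no sliding sync results
    print(await waiter)  # False: woke up, found nothing, waited out the timeout


asyncio.run(main())
```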
+
+    def test_filter_list(self) -> None:
+        """
+        Test that filters apply to `lists`
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        # Create a DM room
+        joined_dm_room_id = self._create_dm_room(
+            inviter_user_id=user1_id,
+            inviter_tok=user1_tok,
+            invitee_user_id=user2_id,
+            invitee_tok=user2_tok,
+            should_join_room=True,
+        )
+        invited_dm_room_id = self._create_dm_room(
+            inviter_user_id=user1_id,
+            inviter_tok=user1_tok,
+            invitee_user_id=user2_id,
+            invitee_tok=user2_tok,
+            should_join_room=False,
+        )
+
+        # Create a normal room
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+
+        # Create a room that user1 is invited to
+        invite_room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.invite(invite_room_id, src=user2_id, targ=user1_id, tok=user2_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                # Absence of filters does not imply "False" values
+                "all": {
+                    "ranges": [[0, 99]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                    "filters": {},
+                },
+                # Test single truthy filter
+                "dms": {
+                    "ranges": [[0, 99]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                    "filters": {"is_dm": True},
+                },
+                # Test single falsy filter
+                "non-dms": {
+                    "ranges": [[0, 99]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                    "filters": {"is_dm": False},
+                },
+                # Test how multiple filters should stack (AND'd together)
+                "room-invites": {
+                    "ranges": [[0, 99]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                    "filters": {"is_dm": False, "is_invite": True},
+                },
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make sure it has the lists we requested
+        self.assertListEqual(
+            list(response_body["lists"].keys()),
+            ["all", "dms", "non-dms", "room-invites"],
+            response_body["lists"].keys(),
+        )
+
+        # Make sure the lists have the correct rooms
+        self.assertListEqual(
+            list(response_body["lists"]["all"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [
+                        invite_room_id,
+                        room_id,
+                        invited_dm_room_id,
+                        joined_dm_room_id,
+                    ],
+                }
+            ],
+            list(response_body["lists"]["all"]),
+        )
+        self.assertListEqual(
+            list(response_body["lists"]["dms"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [invited_dm_room_id, joined_dm_room_id],
+                }
+            ],
+            list(response_body["lists"]["dms"]),
+        )
+        self.assertListEqual(
+            list(response_body["lists"]["non-dms"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [invite_room_id, room_id],
+                }
+            ],
+            list(response_body["lists"]["non-dms"]),
+        )
+        self.assertListEqual(
+            list(response_body["lists"]["room-invites"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [invite_room_id],
+                }
+            ],
+            list(response_body["lists"]["room-invites"]),
+        )
+
+        # Ensure DMs are correctly marked
+        self.assertDictEqual(
+            {
+                room_id: room.get("is_dm")
+                for room_id, room in response_body["rooms"].items()
+            },
+            {
+                invite_room_id: None,
+                room_id: None,
+                invited_dm_room_id: True,
+                joined_dm_room_id: True,
+            },
+        )
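The AND-composition that the `room-invites` list relies on (every present filter must hold, an absent filter constrains nothing) can be expressed as a tiny standalone predicate. `RoomSummary` and the lambda table here are illustrative stand-ins, not Synapse types:

```python
from dataclasses import dataclass


@dataclass
class RoomSummary:  # hypothetical stand-in for a room's filterable facts
    is_dm: bool
    is_invite: bool


def matches(room: RoomSummary, filters: dict) -> bool:
    """Every filter present in the request must hold; `filters: {}` matches everything."""
    checks = {
        "is_dm": lambda r: r.is_dm,
        "is_invite": lambda r: r.is_invite,
    }
    return all(checks[name](room) == wanted for name, wanted in filters.items())


room = RoomSummary(is_dm=False, is_invite=True)
assert matches(room, {})                                   # "all" list: no filters
assert not matches(room, {"is_dm": True})                  # "dms" list
assert matches(room, {"is_dm": False, "is_invite": True})  # "room-invites" list
```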
+
+    def test_filter_regardless_of_membership_server_left_room(self) -> None:
+        """
+        Test that filters apply to rooms regardless of membership. We're also
+        compounding the problem by having all of the local users leave the room,
+        causing our server to leave the room.
+
+        We want to make sure that if someone is filtering rooms and then leaves, they
+        still get that final update down sync telling them that they left.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        # Create a normal room
+        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
+        self.helper.join(room_id, user1_id, tok=user1_tok)
+
+        # Create an encrypted space room
+        space_room_id = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "creation_content": {EventContentFields.ROOM_TYPE: RoomTypes.SPACE}
+            },
+        )
+        self.helper.send_state(
+            space_room_id,
+            EventTypes.RoomEncryption,
+            {EventContentFields.ENCRYPTION_ALGORITHM: "m.megolm.v1.aes-sha2"},
+            tok=user2_tok,
+        )
+        self.helper.join(space_room_id, user1_id, tok=user1_tok)
+
+        # Make an initial Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint,
+            {
+                "lists": {
+                    "all-list": {
+                        "ranges": [[0, 99]],
+                        "required_state": [],
+                        "timeline_limit": 0,
+                        "filters": {},
+                    },
+                    "foo-list": {
+                        "ranges": [[0, 99]],
+                        "required_state": [],
+                        "timeline_limit": 1,
+                        "filters": {
+                            "is_encrypted": True,
+                            "room_types": [RoomTypes.SPACE],
+                        },
+                    },
+                }
+            },
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+        from_token = channel.json_body["pos"]
+
+        # Make sure the response has the lists we requested
+        self.assertListEqual(
+            list(channel.json_body["lists"].keys()),
+            ["all-list", "foo-list"],
+            channel.json_body["lists"].keys(),
+        )
+
+        # Make sure the lists have the correct rooms
+        self.assertListEqual(
+            list(channel.json_body["lists"]["all-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [space_room_id, room_id],
+                }
+            ],
+        )
+        self.assertListEqual(
+            list(channel.json_body["lists"]["foo-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [space_room_id],
+                }
+            ],
+        )
+
+        # Everyone leaves the encrypted space room
+        self.helper.leave(space_room_id, user1_id, tok=user1_tok)
+        self.helper.leave(space_room_id, user2_id, tok=user2_tok)
+
+        # Make an incremental Sliding Sync request
+        channel = self.make_request(
+            "POST",
+            self.sync_endpoint + f"?pos={from_token}",
+            {
+                "lists": {
+                    "all-list": {
+                        "ranges": [[0, 99]],
+                        "required_state": [],
+                        "timeline_limit": 0,
+                        "filters": {},
+                    },
+                    "foo-list": {
+                        "ranges": [[0, 99]],
+                        "required_state": [],
+                        "timeline_limit": 1,
+                        "filters": {
+                            "is_encrypted": True,
+                            "room_types": [RoomTypes.SPACE],
+                        },
+                    },
+                }
+            },
+            access_token=user1_tok,
+        )
+        self.assertEqual(channel.code, 200, channel.json_body)
+
+        # Make sure the lists have the correct rooms even though we `newly_left`
+        self.assertListEqual(
+            list(channel.json_body["lists"]["all-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [space_room_id, room_id],
+                }
+            ],
+        )
+        self.assertListEqual(
+            list(channel.json_body["lists"]["foo-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [space_room_id],
+                }
+            ],
+        )
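The `newly_left` behavior asserted above boils down to comparing the user's memberships at the two ends of the token range. A hedged sketch of that idea, with a hypothetical `memberships_at` lookup (not a Synapse API), is:

```python
from typing import Callable, Dict, Set

# Hedged sketch: deciding which left rooms still deserve one final update.
def newly_left_rooms(
    memberships_at: Callable[[str], Dict[str, str]],  # token -> {room_id: membership}
    from_token: str,
    to_token: str,
) -> Set[str]:
    before = memberships_at(from_token)
    after = memberships_at(to_token)
    return {
        room_id
        for room_id, membership in after.items()
        if membership == "leave" and before.get(room_id) in ("join", "invite")
    }


snapshots = {
    "t1": {"!space:test": "join", "!room:test": "join"},
    "t2": {"!space:test": "leave", "!room:test": "join"},
}
print(newly_left_rooms(snapshots.__getitem__, "t1", "t2"))  # {'!space:test'}
```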
+
+    def test_sort_list(self) -> None:
+        """
+        Test that the `lists` are sorted by `stream_ordering`
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+        room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+        room_id3 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+
+        # Activity that will order the rooms
+        self.helper.send(room_id3, "activity in room3", tok=user1_tok)
+        self.helper.send(room_id1, "activity in room1", tok=user1_tok)
+        self.helper.send(room_id2, "activity in room2", tok=user1_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 99]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make sure it has the foo-list we requested
+        self.assertListEqual(
+            list(response_body["lists"].keys()),
+            ["foo-list"],
+            response_body["lists"].keys(),
+        )
+
+        # Make sure the list is sorted in the way we expect
+        self.assertListEqual(
+            list(response_body["lists"]["foo-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 99],
+                    "room_ids": [room_id2, room_id1, room_id3],
+                }
+            ],
+            response_body["lists"]["foo-list"],
+        )
+
+    def test_sliced_windows(self) -> None:
+        """
+        Test that the `lists` `ranges` are sliced correctly. Both sides of each range
+        are inclusive.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        _room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+        room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+        room_id3 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True)
+
+        # Make the Sliding Sync request for a single room
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 0]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make sure it has the foo-list we requested
+        self.assertListEqual(
+            list(response_body["lists"].keys()),
+            ["foo-list"],
+            response_body["lists"].keys(),
+        )
+        # Make sure the list is sorted in the way we expect
+        self.assertListEqual(
+            list(response_body["lists"]["foo-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 0],
+                    "room_ids": [room_id3],
+                }
+            ],
+            response_body["lists"]["foo-list"],
+        )
+
+        # Make the Sliding Sync request for the first two rooms
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make sure it has the foo-list we requested
+        self.assertListEqual(
+            list(response_body["lists"].keys()),
+            ["foo-list"],
+            response_body["lists"].keys(),
+        )
+        # Make sure the list is sorted in the way we expect
+        self.assertListEqual(
+            list(response_body["lists"]["foo-list"]["ops"]),
+            [
+                {
+                    "op": "SYNC",
+                    "range": [0, 1],
+                    "room_ids": [room_id3, room_id2],
+                }
+            ],
+            response_body["lists"]["foo-list"],
+        )
+
+    def test_rooms_with_no_updates_do_not_come_down_incremental_sync(self) -> None:
+        """
+        Test that rooms with no updates are not returned in subsequent incremental
+        syncs.
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+
+        _, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Make the incremental Sliding Sync request
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # Nothing has happened in the room, so the room should not come down
+        # /sync.
+        self.assertIsNone(response_body["rooms"].get(room_id1))
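As `test_sliced_windows` states, both ends of a sliding sync range are inclusive, which is easy to get wrong when translating to Python slices. A one-line illustrative helper (not part of Synapse) makes the convention explicit:

```python
def slice_window(ordered_room_ids: list, range_: list) -> list:
    """Apply an MSC3575-style range, where both bounds are inclusive."""
    start, end = range_
    return ordered_room_ids[start : end + 1]


rooms = ["!room3:test", "!room2:test", "!room1:test"]  # newest activity first
assert slice_window(rooms, [0, 0]) == ["!room3:test"]
assert slice_window(rooms, [0, 1]) == ["!room3:test", "!room2:test"]
```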
+
+    def test_empty_initial_room_comes_down_sync(self) -> None:
+        """
+        Test that rooms come down /sync even with empty required state and
+        timeline limit in initial sync.
+        """
+
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok)
+
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    "timeline_limit": 0,
+                }
+            }
+        }
+
+        # Make the Sliding Sync request
+        response_body, _ = self.do_sync(sync_body, tok=user1_tok)
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
diff --git a/tests/rest/client/test_sync.py b/tests/rest/client/test_sync.py
index 2628869de6e2..63df31ec7597 100644
--- a/tests/rest/client/test_sync.py
+++ b/tests/rest/client/test_sync.py
@@ -20,8 +20,7 @@
 #
 import json
 import logging
-from http import HTTPStatus
-from typing import Any, Dict, Iterable, List
+from typing import List
 
 from parameterized import parameterized, parameterized_class
 
@@ -29,37 +28,21 @@
 import synapse.rest.admin
 from synapse.api.constants import (
-    AccountDataTypes,
     EventContentFields,
     EventTypes,
-    HistoryVisibility,
-    Membership,
     ReceiptTypes,
     RelationTypes,
 )
-from synapse.events import EventBase
-from synapse.handlers.sliding_sync import StateValues
-from synapse.rest.client import (
-    devices,
-    knock,
-    login,
-    read_marker,
-    receipts,
-    room,
-    sendtodevice,
-    sync,
-)
+from synapse.rest.client import devices, knock, login, read_marker, receipts, room, sync
 from synapse.server import HomeServer
-from synapse.types import JsonDict, RoomStreamToken, StreamKeyType, StreamToken, UserID
+from synapse.types import JsonDict
 from synapse.util import Clock
 
 from tests import unittest
 from tests.federation.transport.test_knocking import (
     KnockingStrippedStateEventHelperMixin,
 )
-from tests.server import FakeChannel, TimedOutException
-from tests.test_utils.event_injection import mark_event_as_partial_state
-from tests.unittest import skip_unless
+from tests.server import TimedOutException
 
 logger = logging.getLogger(__name__)
 
@@ -1223,4084 +1206,3 @@ def test_incremental_sync(self) -> None:
         self.assertNotIn(self.excluded_room_id, channel.json_body["rooms"]["join"])
         self.assertIn(self.included_room_id, channel.json_body["rooms"]["join"])
-
-
-class SlidingSyncTestCase(unittest.HomeserverTestCase):
-    """
-    Tests regarding MSC3575 Sliding Sync `/sync` endpoint.
- """ - - servlets = [ - synapse.rest.admin.register_servlets, - login.register_servlets, - room.register_servlets, - sync.register_servlets, - devices.register_servlets, - ] - - def default_config(self) -> JsonDict: - config = super().default_config() - # Enable sliding sync - config["experimental_features"] = {"msc3575_enabled": True} - return config - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.store = hs.get_datastores().main - self.sync_endpoint = ( - "/_matrix/client/unstable/org.matrix.simplified_msc3575/sync" - ) - self.store = hs.get_datastores().main - self.event_sources = hs.get_event_sources() - self.storage_controllers = hs.get_storage_controllers() - self.account_data_handler = hs.get_account_data_handler() - self.notifier = hs.get_notifier() - - def _assertRequiredStateIncludes( - self, - actual_required_state: Any, - expected_state_events: Iterable[EventBase], - exact: bool = False, - ) -> None: - """ - Wrapper around `assertIncludes` to give slightly better looking diff error - messages that include some context "$event_id (type, state_key)". - - Args: - actual_required_state: The "required_state" of a room from a Sliding Sync - request response. - expected_state_events: The expected state events to be included in the - `actual_required_state`. - exact: Whether the actual state should be exactly equal to the expected - state (no extras). - """ - - assert isinstance(actual_required_state, list) - for event in actual_required_state: - assert isinstance(event, dict) - - self.assertIncludes( - { - f'{event["event_id"]} ("{event["type"]}", "{event["state_key"]}")' - for event in actual_required_state - }, - { - f'{event.event_id} ("{event.type}", "{event.state_key}")' - for event in expected_state_events - }, - exact=exact, - # Message to help understand the diff in context - message=str(actual_required_state), - ) - - def _add_new_dm_to_global_account_data( - self, source_user_id: str, target_user_id: str, target_room_id: str - ) -> None: - """ - Helper to handle inserting a new DM for the source user into global account data - (handles all of the list merging). - - Args: - source_user_id: The user ID of the DM mapping we're going to update - target_user_id: User ID of the person the DM is with - target_room_id: Room ID of the DM - """ - - # Get the current DM map - existing_dm_map = self.get_success( - self.store.get_global_account_data_by_type_for_user( - source_user_id, AccountDataTypes.DIRECT - ) - ) - # Scrutinize the account data since it has no concrete type. We're just copying - # everything into a known type. It should be a mapping from user ID to a list of - # room IDs. Ignore anything else. 
- new_dm_map: Dict[str, List[str]] = {} - if isinstance(existing_dm_map, dict): - for user_id, room_ids in existing_dm_map.items(): - if isinstance(user_id, str) and isinstance(room_ids, list): - for room_id in room_ids: - if isinstance(room_id, str): - new_dm_map[user_id] = new_dm_map.get(user_id, []) + [ - room_id - ] - - # Add the new DM to the map - new_dm_map[target_user_id] = new_dm_map.get(target_user_id, []) + [ - target_room_id - ] - # Save the DM map to global account data - self.get_success( - self.store.add_account_data_for_user( - source_user_id, - AccountDataTypes.DIRECT, - new_dm_map, - ) - ) - - def _create_dm_room( - self, - inviter_user_id: str, - inviter_tok: str, - invitee_user_id: str, - invitee_tok: str, - should_join_room: bool = True, - ) -> str: - """ - Helper to create a DM room as the "inviter" and invite the "invitee" user to the - room. The "invitee" user also will join the room. The `m.direct` account data - will be set for both users. - """ - - # Create a room and send an invite the other user - room_id = self.helper.create_room_as( - inviter_user_id, - is_public=False, - tok=inviter_tok, - ) - self.helper.invite( - room_id, - src=inviter_user_id, - targ=invitee_user_id, - tok=inviter_tok, - extra_data={"is_direct": True}, - ) - if should_join_room: - # Person that was invited joins the room - self.helper.join(room_id, invitee_user_id, tok=invitee_tok) - - # Mimic the client setting the room as a direct message in the global account - # data for both users. - self._add_new_dm_to_global_account_data( - invitee_user_id, inviter_user_id, room_id - ) - self._add_new_dm_to_global_account_data( - inviter_user_id, invitee_user_id, room_id - ) - - return room_id - - def _bump_notifier_wait_for_events(self, user_id: str) -> None: - """ - Wake-up a `notifier.wait_for_events(user_id)` call without affecting the Sliding - Sync results. - """ - # We're expecting some new activity from this point onwards - from_token = self.event_sources.get_current_token() - - triggered_notifier_wait_for_events = False - - async def _on_new_acivity( - before_token: StreamToken, after_token: StreamToken - ) -> bool: - nonlocal triggered_notifier_wait_for_events - triggered_notifier_wait_for_events = True - return True - - # Listen for some new activity for the user. We're just trying to confirm that - # our bump below actually does what we think it does (triggers new activity for - # the user). - result_awaitable = self.notifier.wait_for_events( - user_id, - 1000, - _on_new_acivity, - from_token=from_token, - ) - - # Update the account data so that `notifier.wait_for_events(...)` wakes up. - # We're bumping account data because it won't show up in the Sliding Sync - # response so it won't affect whether we have results. 
- self.get_success( - self.account_data_handler.add_account_data_for_user( - user_id, - "org.matrix.foobarbaz", - {"foo": "bar"}, - ) - ) - - # Wait for our notifier result - self.get_success(result_awaitable) - - if not triggered_notifier_wait_for_events: - raise AssertionError( - "Expected `notifier.wait_for_events(...)` to be triggered" - ) - - def test_sync_list(self) -> None: - """ - Test that room IDs show up in the Sliding Sync `lists` - """ - alice_user_id = self.register_user("alice", "correcthorse") - alice_access_token = self.login(alice_user_id, "correcthorse") - - room_id = self.helper.create_room_as( - alice_user_id, tok=alice_access_token, is_public=True - ) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 99]], - "required_state": [ - ["m.room.join_rules", ""], - ["m.room.history_visibility", ""], - ["m.space.child", "*"], - ], - "timeline_limit": 1, - } - } - }, - access_token=alice_access_token, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["foo-list"], - channel.json_body["lists"].keys(), - ) - - # Make sure the list includes the room we are joined to - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [room_id], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - def test_wait_for_sync_token(self) -> None: - """ - Test that worker will wait until it catches up to the given token - """ - alice_user_id = self.register_user("alice", "correcthorse") - alice_access_token = self.login(alice_user_id, "correcthorse") - - # Create a future token that will cause us to wait. Since we never send a new - # event to reach that future stream_ordering, the worker will wait until the - # full timeout. - stream_id_gen = self.store.get_events_stream_id_generator() - stream_id = self.get_success(stream_id_gen.get_next().__aenter__()) - current_token = self.event_sources.get_current_token() - future_position_token = current_token.copy_and_replace( - StreamKeyType.ROOM, - RoomStreamToken(stream=stream_id), - ) - - future_position_token_serialized = self.get_success( - future_position_token.to_string(self.store) - ) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint + f"?pos={future_position_token_serialized}", - { - "lists": { - "foo-list": { - "ranges": [[0, 99]], - "required_state": [ - ["m.room.join_rules", ""], - ["m.room.history_visibility", ""], - ["m.space.child", "*"], - ], - "timeline_limit": 1, - } - } - }, - access_token=alice_access_token, - await_result=False, - ) - # Block for 10 seconds to make `notifier.wait_for_stream_token(from_token)` - # timeout - with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=9900) - channel.await_result(timeout_ms=200) - self.assertEqual(channel.code, 200, channel.json_body) - - # We expect the next `pos` in the result to be the same as what we requested - # with because we weren't able to find anything new yet. - self.assertEqual(channel.json_body["pos"], future_position_token_serialized) - - def test_wait_for_new_data(self) -> None: - """ - Test to make sure that the Sliding Sync request waits for new data to arrive. 
- - (Only applies to incremental syncs with a `timeout` specified) - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id, user1_id, tok=user1_tok) - - from_token = self.event_sources.get_current_token() - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + "?timeout=10000" - + f"&pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 0]], - "required_state": [], - "timeline_limit": 1, - } - } - }, - access_token=user1_tok, - await_result=False, - ) - # Block for 5 seconds to make sure we are `notifier.wait_for_events(...)` - with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=5000) - # Bump the room with new events to trigger new results - event_response1 = self.helper.send( - room_id, "new activity in room", tok=user1_tok - ) - # Should respond before the 10 second timeout - channel.await_result(timeout_ms=3000) - self.assertEqual(channel.code, 200, channel.json_body) - - # Check to make sure the new event is returned - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id]["timeline"] - ], - [ - event_response1["event_id"], - ], - channel.json_body["rooms"][room_id]["timeline"], - ) - - # TODO: Once we remove `ops`, we should be able to add a `RoomResult.__bool__` to - # check if there are any updates since the `from_token`. - @skip_unless( - False, - "Once we remove ops from the Sliding Sync response, this test should pass", - ) - def test_wait_for_new_data_timeout(self) -> None: - """ - Test to make sure that the Sliding Sync request waits for new data to arrive but - no data ever arrives so we timeout. We're also making sure that the default data - doesn't trigger a false-positive for new data. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id, user1_id, tok=user1_tok) - - from_token = self.event_sources.get_current_token() - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + "?timeout=10000" - + f"&pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 0]], - "required_state": [], - "timeline_limit": 1, - } - } - }, - access_token=user1_tok, - await_result=False, - ) - # Block for 5 seconds to make sure we are `notifier.wait_for_events(...)` - with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=5000) - # Wake-up `notifier.wait_for_events(...)` that will cause us test - # `SlidingSyncResult.__bool__` for new results. - self._bump_notifier_wait_for_events(user1_id) - # Block for a little bit more to ensure we don't see any new results. 
- with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=4000) - # Wait for the sync to complete (wait for the rest of the 10 second timeout, - # 5000 + 4000 + 1200 > 10000) - channel.await_result(timeout_ms=1200) - self.assertEqual(channel.code, 200, channel.json_body) - - # We still see rooms because that's how Sliding Sync lists work but we reached - # the timeout before seeing them - self.assertEqual( - [event["event_id"] for event in channel.json_body["rooms"].keys()], - [room_id], - ) - - def test_filter_list(self) -> None: - """ - Test that filters apply to `lists` - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - # Create a DM room - joined_dm_room_id = self._create_dm_room( - inviter_user_id=user1_id, - inviter_tok=user1_tok, - invitee_user_id=user2_id, - invitee_tok=user2_tok, - should_join_room=True, - ) - invited_dm_room_id = self._create_dm_room( - inviter_user_id=user1_id, - inviter_tok=user1_tok, - invitee_user_id=user2_id, - invitee_tok=user2_tok, - should_join_room=False, - ) - - # Create a normal room - room_id = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id, user1_id, tok=user1_tok) - - # Create a room that user1 is invited to - invite_room_id = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.invite(invite_room_id, src=user2_id, targ=user1_id, tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - # Absense of filters does not imply "False" values - "all": { - "ranges": [[0, 99]], - "required_state": [], - "timeline_limit": 1, - "filters": {}, - }, - # Test single truthy filter - "dms": { - "ranges": [[0, 99]], - "required_state": [], - "timeline_limit": 1, - "filters": {"is_dm": True}, - }, - # Test single falsy filter - "non-dms": { - "ranges": [[0, 99]], - "required_state": [], - "timeline_limit": 1, - "filters": {"is_dm": False}, - }, - # Test how multiple filters should stack (AND'd together) - "room-invites": { - "ranges": [[0, 99]], - "required_state": [], - "timeline_limit": 1, - "filters": {"is_dm": False, "is_invite": True}, - }, - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["all", "dms", "non-dms", "room-invites"], - channel.json_body["lists"].keys(), - ) - - # Make sure the lists have the correct rooms - self.assertListEqual( - list(channel.json_body["lists"]["all"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [ - invite_room_id, - room_id, - invited_dm_room_id, - joined_dm_room_id, - ], - } - ], - list(channel.json_body["lists"]["all"]), - ) - self.assertListEqual( - list(channel.json_body["lists"]["dms"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [invited_dm_room_id, joined_dm_room_id], - } - ], - list(channel.json_body["lists"]["dms"]), - ) - self.assertListEqual( - list(channel.json_body["lists"]["non-dms"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [invite_room_id, room_id], - } - ], - list(channel.json_body["lists"]["non-dms"]), - ) - self.assertListEqual( - list(channel.json_body["lists"]["room-invites"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [invite_room_id], - } - ], - 
list(channel.json_body["lists"]["room-invites"]), - ) - - # Ensure DM's are correctly marked - self.assertDictEqual( - { - room_id: room.get("is_dm") - for room_id, room in channel.json_body["rooms"].items() - }, - { - invite_room_id: None, - room_id: None, - invited_dm_room_id: True, - joined_dm_room_id: True, - }, - ) - - def test_sort_list(self) -> None: - """ - Test that the `lists` are sorted by `stream_ordering` - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - - room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - room_id3 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - - # Activity that will order the rooms - self.helper.send(room_id3, "activity in room3", tok=user1_tok) - self.helper.send(room_id1, "activity in room1", tok=user1_tok) - self.helper.send(room_id2, "activity in room2", tok=user1_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 99]], - "required_state": [ - ["m.room.join_rules", ""], - ["m.room.history_visibility", ""], - ["m.space.child", "*"], - ], - "timeline_limit": 1, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["foo-list"], - channel.json_body["lists"].keys(), - ) - - # Make sure the list is sorted in the way we expect - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 99], - "room_ids": [room_id2, room_id1, room_id3], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - def test_sliced_windows(self) -> None: - """ - Test that the `lists` `ranges` are sliced correctly. Both sides of each range - are inclusive. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - - _room_id1 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - room_id2 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - room_id3 = self.helper.create_room_as(user1_id, tok=user1_tok, is_public=True) - - # Make the Sliding Sync request for a single room - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 0]], - "required_state": [ - ["m.room.join_rules", ""], - ["m.room.history_visibility", ""], - ["m.space.child", "*"], - ], - "timeline_limit": 1, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["foo-list"], - channel.json_body["lists"].keys(), - ) - # Make sure the list is sorted in the way we expect - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 0], - "room_ids": [room_id3], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - # Make the Sliding Sync request for the first two rooms - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - ["m.room.join_rules", ""], - ["m.room.history_visibility", ""], - ["m.space.child", "*"], - ], - "timeline_limit": 1, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["foo-list"], - channel.json_body["lists"].keys(), - ) - # Make sure the list is sorted in the way we expect - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 1], - "room_ids": [room_id3, room_id2], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - def test_rooms_meta_when_joined(self) -> None: - """ - Test that the `rooms` `name` and `avatar` are included in the response and - reflect the current state of the room when the user is joined to the room. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "name": "my super room", - }, - ) - # Set the room avatar URL - self.helper.send_state( - room_id1, - EventTypes.RoomAvatar, - {"url": "mxc://DUMMY_MEDIA_ID"}, - tok=user2_tok, - ) - - self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Reflect the current state of the room - self.assertEqual( - channel.json_body["rooms"][room_id1]["name"], - "my super room", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["avatar"], - "mxc://DUMMY_MEDIA_ID", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - 2, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - 0, - ) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("is_dm"), - ) - - def test_rooms_meta_when_invited(self) -> None: - """ - Test that the `rooms` `name` and `avatar` are included in the response and - reflect the current state of the room when the user is invited to the room. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "name": "my super room", - }, - ) - # Set the room avatar URL - self.helper.send_state( - room_id1, - EventTypes.RoomAvatar, - {"url": "mxc://DUMMY_MEDIA_ID"}, - tok=user2_tok, - ) - - # User1 is invited to the room - self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - - # Update the room name after user1 has left - self.helper.send_state( - room_id1, - EventTypes.Name, - {"name": "my super duper room"}, - tok=user2_tok, - ) - # Update the room avatar URL after user1 has left - self.helper.send_state( - room_id1, - EventTypes.RoomAvatar, - {"url": "mxc://UPDATED_DUMMY_MEDIA_ID"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # This should still reflect the current state of the room even when the user is - # invited. 
- self.assertEqual( - channel.json_body["rooms"][room_id1]["name"], - "my super duper room", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["avatar"], - "mxc://UPDATED_DUMMY_MEDIA_ID", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - 1, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - 1, - ) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("is_dm"), - ) - - def test_rooms_meta_when_banned(self) -> None: - """ - Test that the `rooms` `name` and `avatar` reflect the state of the room when the - user was banned (do not leak current state). - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "name": "my super room", - }, - ) - # Set the room avatar URL - self.helper.send_state( - room_id1, - EventTypes.RoomAvatar, - {"url": "mxc://DUMMY_MEDIA_ID"}, - tok=user2_tok, - ) - - self.helper.join(room_id1, user1_id, tok=user1_tok) - self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - - # Update the room name after user1 has left - self.helper.send_state( - room_id1, - EventTypes.Name, - {"name": "my super duper room"}, - tok=user2_tok, - ) - # Update the room avatar URL after user1 has left - self.helper.send_state( - room_id1, - EventTypes.RoomAvatar, - {"url": "mxc://UPDATED_DUMMY_MEDIA_ID"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Reflect the state of the room at the time of leaving - self.assertEqual( - channel.json_body["rooms"][room_id1]["name"], - "my super room", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["avatar"], - "mxc://DUMMY_MEDIA_ID", - channel.json_body["rooms"][room_id1], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - # FIXME: The actual number should be "1" (user2) but we currently don't - # support this for rooms where the user has left/been banned. - 0, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - 0, - ) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("is_dm"), - ) - - def test_rooms_meta_heroes(self) -> None: - """ - Test that the `rooms` `heroes` are included in the response when the room - doesn't have a room name set. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user3_id = self.register_user("user3", "pass") - _user3_tok = self.login(user3_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "name": "my super room", - }, - ) - self.helper.join(room_id1, user1_id, tok=user1_tok) - # User3 is invited - self.helper.invite(room_id1, src=user2_id, targ=user3_id, tok=user2_tok) - - room_id2 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - # No room name set so that `heroes` is populated - # - # "name": "my super room2", - }, - ) - self.helper.join(room_id2, user1_id, tok=user1_tok) - # User3 is invited - self.helper.invite(room_id2, src=user2_id, targ=user3_id, tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Room1 has a name so we shouldn't see any `heroes` which the client would use - # the calculate the room name themselves. - self.assertEqual( - channel.json_body["rooms"][room_id1]["name"], - "my super room", - channel.json_body["rooms"][room_id1], - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("heroes")) - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - 2, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - 1, - ) - - # Room2 doesn't have a name so we should see `heroes` populated - self.assertIsNone(channel.json_body["rooms"][room_id2].get("name")) - self.assertCountEqual( - [ - hero["user_id"] - for hero in channel.json_body["rooms"][room_id2].get("heroes", []) - ], - # Heroes shouldn't include the user themselves (we shouldn't see user1) - [user2_id, user3_id], - ) - self.assertEqual( - channel.json_body["rooms"][room_id2]["joined_count"], - 2, - ) - self.assertEqual( - channel.json_body["rooms"][room_id2]["invited_count"], - 1, - ) - - # We didn't request any state so we shouldn't see any `required_state` - self.assertIsNone(channel.json_body["rooms"][room_id1].get("required_state")) - self.assertIsNone(channel.json_body["rooms"][room_id2].get("required_state")) - - def test_rooms_meta_heroes_max(self) -> None: - """ - Test that the `rooms` `heroes` only includes the first 5 users (not including - yourself). 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user3_id = self.register_user("user3", "pass") - user3_tok = self.login(user3_id, "pass") - user4_id = self.register_user("user4", "pass") - user4_tok = self.login(user4_id, "pass") - user5_id = self.register_user("user5", "pass") - user5_tok = self.login(user5_id, "pass") - user6_id = self.register_user("user6", "pass") - user6_tok = self.login(user6_id, "pass") - user7_id = self.register_user("user7", "pass") - user7_tok = self.login(user7_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - # No room name set so that `heroes` is populated - # - # "name": "my super room", - }, - ) - self.helper.join(room_id1, user1_id, tok=user1_tok) - self.helper.join(room_id1, user3_id, tok=user3_tok) - self.helper.join(room_id1, user4_id, tok=user4_tok) - self.helper.join(room_id1, user5_id, tok=user5_tok) - self.helper.join(room_id1, user6_id, tok=user6_tok) - self.helper.join(room_id1, user7_id, tok=user7_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Room2 doesn't have a name so we should see `heroes` populated - self.assertIsNone(channel.json_body["rooms"][room_id1].get("name")) - self.assertCountEqual( - [ - hero["user_id"] - for hero in channel.json_body["rooms"][room_id1].get("heroes", []) - ], - # Heroes should be the first 5 users in the room (excluding the user - # themselves, we shouldn't see `user1`) - [user2_id, user3_id, user4_id, user5_id, user6_id], - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - 7, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - 0, - ) - - # We didn't request any state so we shouldn't see any `required_state` - self.assertIsNone(channel.json_body["rooms"][room_id1].get("required_state")) - - def test_rooms_meta_heroes_when_banned(self) -> None: - """ - Test that the `rooms` `heroes` are included in the response when the room - doesn't have a room name set but doesn't leak information past their ban. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user3_id = self.register_user("user3", "pass") - _user3_tok = self.login(user3_id, "pass") - user4_id = self.register_user("user4", "pass") - user4_tok = self.login(user4_id, "pass") - user5_id = self.register_user("user5", "pass") - _user5_tok = self.login(user5_id, "pass") - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - # No room name set so that `heroes` is populated - # - # "name": "my super room", - }, - ) - # User1 joins the room - self.helper.join(room_id1, user1_id, tok=user1_tok) - # User3 is invited - self.helper.invite(room_id1, src=user2_id, targ=user3_id, tok=user2_tok) - - # User1 is banned from the room - self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - - # User4 joins the room after user1 is banned - self.helper.join(room_id1, user4_id, tok=user4_tok) - # User5 is invited after user1 is banned - self.helper.invite(room_id1, src=user2_id, targ=user5_id, tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Room2 doesn't have a name so we should see `heroes` populated - self.assertIsNone(channel.json_body["rooms"][room_id1].get("name")) - self.assertCountEqual( - [ - hero["user_id"] - for hero in channel.json_body["rooms"][room_id1].get("heroes", []) - ], - # Heroes shouldn't include the user themselves (we shouldn't see user1). We - # also shouldn't see user4 since they joined after user1 was banned. - # - # FIXME: The actual result should be `[user2_id, user3_id]` but we currently - # don't support this for rooms where the user has left/been banned. - [], - ) - - self.assertEqual( - channel.json_body["rooms"][room_id1]["joined_count"], - # FIXME: The actual number should be "1" (user2) but we currently don't - # support this for rooms where the user has left/been banned. - 0, - ) - self.assertEqual( - channel.json_body["rooms"][room_id1]["invited_count"], - # We shouldn't see user5 since they were invited after user1 was banned. - # - # FIXME: The actual number should be "1" (user3) but we currently don't - # support this for rooms where the user has left/been banned. - 0, - ) - - def test_rooms_limited_initial_sync(self) -> None: - """ - Test that we mark `rooms` as `limited=True` when we saturate the `timeline_limit` - on initial sync. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity1", tok=user2_tok) - self.helper.send(room_id1, "activity2", tok=user2_tok) - event_response3 = self.helper.send(room_id1, "activity3", tok=user2_tok) - event_pos3 = self.get_success( - self.store.get_position_for_event(event_response3["event_id"]) - ) - event_response4 = self.helper.send(room_id1, "activity4", tok=user2_tok) - event_pos4 = self.get_success( - self.store.get_position_for_event(event_response4["event_id"]) - ) - event_response5 = self.helper.send(room_id1, "activity5", tok=user2_tok) - user1_join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We expect to saturate the `timeline_limit` (there are more than 3 messages in the room) - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - True, - channel.json_body["rooms"][room_id1], - ) - # Check to make sure the latest events are returned - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - event_response4["event_id"], - event_response5["event_id"], - user1_join_response["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - - # Check to make sure the `prev_batch` points at the right place - prev_batch_token = self.get_success( - StreamToken.from_string( - self.store, channel.json_body["rooms"][room_id1]["prev_batch"] - ) - ) - prev_batch_room_stream_token_serialized = self.get_success( - prev_batch_token.room_key.to_string(self.store) - ) - # If we use the `prev_batch` token to look backwards, we should see `event3` - # next so make sure the token encompasses it - self.assertEqual( - event_pos3.persisted_after(prev_batch_token.room_key), - False, - f"`prev_batch` token {prev_batch_room_stream_token_serialized} should be >= event_pos3={self.get_success(event_pos3.to_room_stream_token().to_string(self.store))}", - ) - # If we use the `prev_batch` token to look backwards, we shouldn't see `event4` - # anymore since it was just returned in this response. - self.assertEqual( - event_pos4.persisted_after(prev_batch_token.room_key), - True, - f"`prev_batch` token {prev_batch_room_stream_token_serialized} should be < event_pos4={self.get_success(event_pos4.to_room_stream_token().to_string(self.store))}", - ) - - # With no `from_token` (initial sync), it's all historical since there is no - # "live" range - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_not_limited_initial_sync(self) -> None: - """ - Test that we mark `rooms` as `limited=False` when there are no more events to - paginate to. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity1", tok=user2_tok) - self.helper.send(room_id1, "activity2", tok=user2_tok) - self.helper.send(room_id1, "activity3", tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request - timeline_limit = 100 - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": timeline_limit, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # The timeline should be `limited=False` because we have all of the events (no - # more to paginate to) - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - False, - channel.json_body["rooms"][room_id1], - ) - expected_number_of_events = 9 - # We're just looking to make sure we got all of the events before hitting the `timeline_limit` - self.assertEqual( - len(channel.json_body["rooms"][room_id1]["timeline"]), - expected_number_of_events, - channel.json_body["rooms"][room_id1]["timeline"], - ) - self.assertLessEqual(expected_number_of_events, timeline_limit) - - # With no `from_token` (initial sync), it's all historical since there is no - # "live" token range. - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_incremental_sync(self) -> None: - """ - Test `rooms` data during an incremental sync after an initial sync. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - self.helper.send(room_id1, "activity before initial sync1", tok=user2_tok) - - # Make an initial Sliding Sync request to grab a token. This is also a sanity - # check that we can go from initial to incremental sync. - sync_params = { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 3, - } - } - } - channel = self.make_request( - "POST", - self.sync_endpoint, - sync_params, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - next_pos = channel.json_body["pos"] - - # Send some events but don't send enough to saturate the `timeline_limit`. - # We want to later test that we only get the new events since the `next_pos` - event_response2 = self.helper.send(room_id1, "activity after2", tok=user2_tok) - event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) - - # Make an incremental Sliding Sync request (what we're trying to test) - channel = self.make_request( - "POST", - self.sync_endpoint + f"?pos={next_pos}", - sync_params, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We only expect to see the new events since the last sync which isn't enough to - # fill up the `timeline_limit`. 
- self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - False, - f'Our `timeline_limit` was {sync_params["lists"]["foo-list"]["timeline_limit"]} ' - + f'and {len(channel.json_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. ' - + str(channel.json_body["rooms"][room_id1]), - ) - # Check to make sure the latest events are returned - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - event_response2["event_id"], - event_response3["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - - # All events are "live" - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 2, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_bump_stamp(self) -> None: - """ - Test that `bump_stamp` is present and pointing to relevant events. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - - room_id1 = self.helper.create_room_as( - user1_id, - tok=user1_tok, - ) - event_response1 = message_response = self.helper.send( - room_id1, "message in room1", tok=user1_tok - ) - event_pos1 = self.get_success( - self.store.get_position_for_event(event_response1["event_id"]) - ) - room_id2 = self.helper.create_room_as( - user1_id, - tok=user1_tok, - ) - send_response2 = self.helper.send(room_id2, "message in room2", tok=user1_tok) - event_pos2 = self.get_success( - self.store.get_position_for_event(send_response2["event_id"]) - ) - - # Send a reaction in room1 but it shouldn't affect the `bump_stamp` - # because reactions are not part of the `DEFAULT_BUMP_EVENT_TYPES` - self.helper.send_event( - room_id1, - type=EventTypes.Reaction, - content={ - "m.relates_to": { - "event_id": message_response["event_id"], - "key": "👍", - "rel_type": "m.annotation", - } - }, - tok=user1_tok, - ) - - # Make the Sliding Sync request - timeline_limit = 100 - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": timeline_limit, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure it has the foo-list we requested - self.assertListEqual( - list(channel.json_body["lists"].keys()), - ["foo-list"], - channel.json_body["lists"].keys(), - ) - - # Make sure the list includes the rooms in the right order - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 1], - # room1 sorts before room2 because it has the latest event (the - # reaction) - "room_ids": [room_id1, room_id2], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - # The `bump_stamp` for room1 should point at the latest message (not the - # reaction since it's not one of the `DEFAULT_BUMP_EVENT_TYPES`) - self.assertEqual( - channel.json_body["rooms"][room_id1]["bump_stamp"], - event_pos1.stream, - channel.json_body["rooms"][room_id1], - ) - - # The `bump_stamp` for room2 should point at the latest message - self.assertEqual( - channel.json_body["rooms"][room_id2]["bump_stamp"], - event_pos2.stream, - channel.json_body["rooms"][room_id2], - ) - - def test_rooms_newly_joined_incremental_sync(self) -> None: - """ - Test that when we make an incremental sync with a `newly_joined` `rooms`, we are - able to see some historical events before the `from_token`. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity before token1", tok=user2_tok) - event_response2 = self.helper.send( - room_id1, "activity before token2", tok=user2_tok - ) - - from_token = self.event_sources.get_current_token() - - # Join the room after the `from_token` which will make us consider this room as - # `newly_joined`. - user1_join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Send some events but don't send enough to saturate the `timeline_limit`. - # We want to later test that we only get the new events since the `next_pos` - event_response3 = self.helper.send( - room_id1, "activity after token3", tok=user2_tok - ) - event_response4 = self.helper.send( - room_id1, "activity after token4", tok=user2_tok - ) - - # The `timeline_limit` is set to 4 so we can at least see one historical event - # before the `from_token`. We should see historical events because this is a - # `newly_joined` room. - timeline_limit = 4 - # Make an incremental Sliding Sync request (what we're trying to test) - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": timeline_limit, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should see the new events and the rest should be filled with historical - # events which will make us `limited=True` since there are more to paginate to. - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - True, - f"Our `timeline_limit` was {timeline_limit} " - + f'and {len(channel.json_body["rooms"][room_id1]["timeline"])} events were returned in the timeline. ' - + str(channel.json_body["rooms"][room_id1]), - ) - # Check to make sure that the "live" and historical events are returned - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - event_response2["event_id"], - user1_join_response["event_id"], - event_response3["event_id"], - event_response4["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - - # Only events after the `from_token` are "live" (join, event3, event4) - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 3, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_invite_shared_history_initial_sync(self) -> None: - """ - Test that `rooms` we are invited to have some stripped `invite_state` during an - initial sync. - - This is an `invite` room so we should only have `stripped_state` (no `timeline`) - but we also shouldn't see any timeline events because the history visiblity is - `shared` and we haven't joined the room yet. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user1 = UserID.from_string(user1_id) - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user2 = UserID.from_string(user2_id) - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - # Ensure we're testing with a room with `shared` history visibility which means - # history visible until you actually join the room. 
- history_visibility_response = self.helper.get_state( - room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok - ) - self.assertEqual( - history_visibility_response.get("history_visibility"), - HistoryVisibility.SHARED, - ) - - self.helper.send(room_id1, "activity before1", tok=user2_tok) - self.helper.send(room_id1, "activity before2", tok=user2_tok) - self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - self.helper.send(room_id1, "activity after3", tok=user2_tok) - self.helper.send(room_id1, "activity after4", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # `timeline` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("timeline"), - channel.json_body["rooms"][room_id1], - ) - # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("num_live"), - channel.json_body["rooms"][room_id1], - ) - # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("limited"), - channel.json_body["rooms"][room_id1], - ) - # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("prev_batch"), - channel.json_body["rooms"][room_id1], - ) - # `required_state` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("required_state"), - channel.json_body["rooms"][room_id1], - ) - # We should have some `stripped_state` so the potential joiner can identify the - # room (we don't care about the order). - self.assertCountEqual( - channel.json_body["rooms"][room_id1]["invite_state"], - [ - { - "content": {"creator": user2_id, "room_version": "10"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.create", - }, - { - "content": {"join_rule": "public"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.join_rules", - }, - { - "content": {"displayname": user2.localpart, "membership": "join"}, - "sender": user2_id, - "state_key": user2_id, - "type": "m.room.member", - }, - { - "content": {"displayname": user1.localpart, "membership": "invite"}, - "sender": user2_id, - "state_key": user1_id, - "type": "m.room.member", - }, - ], - channel.json_body["rooms"][room_id1]["invite_state"], - ) - - def test_rooms_invite_shared_history_incremental_sync(self) -> None: - """ - Test that `rooms` we are invited to have some stripped `invite_state` during an - incremental sync. - - This is an `invite` room so we should only have `stripped_state` (no `timeline`) - but we also shouldn't see any timeline events because the history visibility is - `shared` and we haven't joined the room yet.
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user1 = UserID.from_string(user1_id) - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user2 = UserID.from_string(user2_id) - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - # Ensure we're testing with a room with `shared` history visibility which means - # history visible until you actually join the room. - history_visibility_response = self.helper.get_state( - room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok - ) - self.assertEqual( - history_visibility_response.get("history_visibility"), - HistoryVisibility.SHARED, - ) - - self.helper.send(room_id1, "activity before invite1", tok=user2_tok) - self.helper.send(room_id1, "activity before invite2", tok=user2_tok) - self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - self.helper.send(room_id1, "activity after invite3", tok=user2_tok) - self.helper.send(room_id1, "activity after invite4", tok=user2_tok) - - from_token = self.event_sources.get_current_token() - - self.helper.send(room_id1, "activity after token5", tok=user2_tok) - self.helper.send(room_id1, "activity after toekn6", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # `timeline` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("timeline"), - channel.json_body["rooms"][room_id1], - ) - # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("num_live"), - channel.json_body["rooms"][room_id1], - ) - # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("limited"), - channel.json_body["rooms"][room_id1], - ) - # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("prev_batch"), - channel.json_body["rooms"][room_id1], - ) - # `required_state` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("required_state"), - channel.json_body["rooms"][room_id1], - ) - # We should have some `stripped_state` so the potential joiner can identify the - # room (we don't care about the order). 
- self.assertCountEqual( - channel.json_body["rooms"][room_id1]["invite_state"], - [ - { - "content": {"creator": user2_id, "room_version": "10"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.create", - }, - { - "content": {"join_rule": "public"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.join_rules", - }, - { - "content": {"displayname": user2.localpart, "membership": "join"}, - "sender": user2_id, - "state_key": user2_id, - "type": "m.room.member", - }, - { - "content": {"displayname": user1.localpart, "membership": "invite"}, - "sender": user2_id, - "state_key": user1_id, - "type": "m.room.member", - }, - ], - channel.json_body["rooms"][room_id1]["invite_state"], - ) - - def test_rooms_invite_world_readable_history_initial_sync(self) -> None: - """ - Test that `rooms` we are invited to have some stripped `invite_state` during an - initial sync. - - This is an `invite` room so we should only have `stripped_state` (no `timeline`) - but depending on the semantics we decide, we could potentially see some - historical events before/after the `from_token` because the history is - `world_readable`. Same situation for events after the `from_token` if the - history visibility was set to `invited`. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user1 = UserID.from_string(user1_id) - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user2 = UserID.from_string(user2_id) - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "preset": "public_chat", - "initial_state": [ - { - "content": { - "history_visibility": HistoryVisibility.WORLD_READABLE - }, - "state_key": "", - "type": EventTypes.RoomHistoryVisibility, - } - ], - }, - ) - # Ensure we're testing with a room with `world_readable` history visibility - # which means events are visible to anyone even without membership. 
- history_visibility_response = self.helper.get_state( - room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok - ) - self.assertEqual( - history_visibility_response.get("history_visibility"), - HistoryVisibility.WORLD_READABLE, - ) - - self.helper.send(room_id1, "activity before1", tok=user2_tok) - self.helper.send(room_id1, "activity before2", tok=user2_tok) - self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - self.helper.send(room_id1, "activity after3", tok=user2_tok) - self.helper.send(room_id1, "activity after4", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - # Large enough to see the latest events and before the invite - "timeline_limit": 4, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # `timeline` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("timeline"), - channel.json_body["rooms"][room_id1], - ) - # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("num_live"), - channel.json_body["rooms"][room_id1], - ) - # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("limited"), - channel.json_body["rooms"][room_id1], - ) - # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("prev_batch"), - channel.json_body["rooms"][room_id1], - ) - # `required_state` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("required_state"), - channel.json_body["rooms"][room_id1], - ) - # We should have some `stripped_state` so the potential joiner can identify the - # room (we don't care about the order). - self.assertCountEqual( - channel.json_body["rooms"][room_id1]["invite_state"], - [ - { - "content": {"creator": user2_id, "room_version": "10"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.create", - }, - { - "content": {"join_rule": "public"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.join_rules", - }, - { - "content": {"displayname": user2.localpart, "membership": "join"}, - "sender": user2_id, - "state_key": user2_id, - "type": "m.room.member", - }, - { - "content": {"displayname": user1.localpart, "membership": "invite"}, - "sender": user2_id, - "state_key": user1_id, - "type": "m.room.member", - }, - ], - channel.json_body["rooms"][room_id1]["invite_state"], - ) - - def test_rooms_invite_world_readable_history_incremental_sync(self) -> None: - """ - Test that `rooms` we are invited to have some stripped `invite_state` during an - incremental sync. - - This is an `invite` room so we should only have `stripped_state` (no `timeline`) - but depending on the semantics we decide, we could potentially see some - historical events before/after the `from_token` because the history is - `world_readable`. Same situation for events after the `from_token` if the - history visibility was set to `invited`. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user1 = UserID.from_string(user1_id) - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user2 = UserID.from_string(user2_id) - - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "preset": "public_chat", - "initial_state": [ - { - "content": { - "history_visibility": HistoryVisibility.WORLD_READABLE - }, - "state_key": "", - "type": EventTypes.RoomHistoryVisibility, - } - ], - }, - ) - # Ensure we're testing with a room with `world_readable` history visibility - # which means events are visible to anyone even without membership. - history_visibility_response = self.helper.get_state( - room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok - ) - self.assertEqual( - history_visibility_response.get("history_visibility"), - HistoryVisibility.WORLD_READABLE, - ) - - self.helper.send(room_id1, "activity before invite1", tok=user2_tok) - self.helper.send(room_id1, "activity before invite2", tok=user2_tok) - self.helper.invite(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - self.helper.send(room_id1, "activity after invite3", tok=user2_tok) - self.helper.send(room_id1, "activity after invite4", tok=user2_tok) - - from_token = self.event_sources.get_current_token() - - self.helper.send(room_id1, "activity after token5", tok=user2_tok) - self.helper.send(room_id1, "activity after toekn6", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - # Large enough to see the latest events and before the invite - "timeline_limit": 4, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # `timeline` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("timeline"), - channel.json_body["rooms"][room_id1], - ) - # `num_live` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("num_live"), - channel.json_body["rooms"][room_id1], - ) - # `limited` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("limited"), - channel.json_body["rooms"][room_id1], - ) - # `prev_batch` is omitted for `invite` rooms with `stripped_state` (no timeline anyway) - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("prev_batch"), - channel.json_body["rooms"][room_id1], - ) - # `required_state` is omitted for `invite` rooms with `stripped_state` - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("required_state"), - channel.json_body["rooms"][room_id1], - ) - # We should have some `stripped_state` so the potential joiner can identify the - # room (we don't care about the order). 
- self.assertCountEqual( - channel.json_body["rooms"][room_id1]["invite_state"], - [ - { - "content": {"creator": user2_id, "room_version": "10"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.create", - }, - { - "content": {"join_rule": "public"}, - "sender": user2_id, - "state_key": "", - "type": "m.room.join_rules", - }, - { - "content": {"displayname": user2.localpart, "membership": "join"}, - "sender": user2_id, - "state_key": user2_id, - "type": "m.room.member", - }, - { - "content": {"displayname": user1.localpart, "membership": "invite"}, - "sender": user2_id, - "state_key": user1_id, - "type": "m.room.member", - }, - ], - channel.json_body["rooms"][room_id1]["invite_state"], - ) - - def test_rooms_ban_initial_sync(self) -> None: - """ - Test that `rooms` we are banned from in an initial sync only allow us to see - timeline events up to the ban event. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity before1", tok=user2_tok) - self.helper.send(room_id1, "activity before2", tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) - event_response4 = self.helper.send(room_id1, "activity after4", tok=user2_tok) - user1_ban_response = self.helper.ban( - room_id1, src=user2_id, targ=user1_id, tok=user2_tok - ) - - self.helper.send(room_id1, "activity after5", tok=user2_tok) - self.helper.send(room_id1, "activity after6", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should see events before the ban but not after - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - event_response3["event_id"], - event_response4["event_id"], - user1_ban_response["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - # No "live" events in an initial sync (no `from_token` to define the "live" - # range) - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - # There are more events to paginate to - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - True, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_ban_incremental_sync1(self) -> None: - """ - Test that `rooms` we are banned from during the next incremental sync only - allow us to see timeline events up to the ban event.
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity before1", tok=user2_tok) - self.helper.send(room_id1, "activity before2", tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - from_token = self.event_sources.get_current_token() - - event_response3 = self.helper.send(room_id1, "activity after3", tok=user2_tok) - event_response4 = self.helper.send(room_id1, "activity after4", tok=user2_tok) - # The ban is within the token range (between the `from_token` and the sliding - # sync request) - user1_ban_response = self.helper.ban( - room_id1, src=user2_id, targ=user1_id, tok=user2_tok - ) - - self.helper.send(room_id1, "activity after5", tok=user2_tok) - self.helper.send(room_id1, "activity after6", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 4, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should see events before the ban but not after - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - event_response3["event_id"], - event_response4["event_id"], - user1_ban_response["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - # All live events in the incremental sync - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 3, - channel.json_body["rooms"][room_id1], - ) - # There aren't anymore events to paginate to in this range - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - False, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_ban_incremental_sync2(self) -> None: - """ - Test that `rooms` we are banned from before the incremental sync don't return - any events in the timeline. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send(room_id1, "activity before1", tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - self.helper.send(room_id1, "activity after2", tok=user2_tok) - # The ban is before we get our `from_token` - self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - - self.helper.send(room_id1, "activity after3", tok=user2_tok) - - from_token = self.event_sources.get_current_token() - - self.helper.send(room_id1, "activity after4", tok=user2_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [], - "timeline_limit": 4, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Nothing to see for this banned user in the room in the token range - self.assertIsNone(channel.json_body["rooms"][room_id1].get("timeline")) - # No events returned in the timeline so nothing is "live" - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - # There aren't anymore events to paginate to in this range - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - False, - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_no_required_state(self) -> None: - """ - Empty `rooms.required_state` should not return any state events in the room - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - # Empty `required_state` - "required_state": [], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # No `required_state` in response - self.assertIsNone( - channel.json_body["rooms"][room_id1].get("required_state"), - channel.json_body["rooms"][room_id1], - ) - - def test_rooms_required_state_initial_sync(self) -> None: - """ - Test `rooms.required_state` returns requested state events in the room during an - initial sync. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.RoomHistoryVisibility, ""], - # This one doesn't exist in the room - [EventTypes.Tombstone, ""], - ], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.RoomHistoryVisibility, "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_incremental_sync(self) -> None: - """ - Test `rooms.required_state` returns requested state events in the room during an - incremental sync. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - after_room_token = self.event_sources.get_current_token() - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(after_room_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.RoomHistoryVisibility, ""], - # This one doesn't exist in the room - [EventTypes.Tombstone, ""], - ], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # The returned state doesn't change from initial to incremental sync. In the - # future, we will only return updates but only if we've sent the room down the - # connection before. - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.RoomHistoryVisibility, "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_wildcard(self) -> None: - """ - Test `rooms.required_state` returns all state events when using wildcard `["*", "*"]`. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "bar"}, - tok=user2_tok, - ) - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="namespaced", - body={"foo": "bar"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request with wildcards for the `event_type` and `state_key` - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [StateValues.WILDCARD, StateValues.WILDCARD], - ], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - # We should see all the state events in the room - state_map.values(), - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_wildcard_event_type(self) -> None: - """ - Test `rooms.required_state` returns relevant state events when using wildcard in - the event_type `["*", "foobarbaz"]`. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "bar"}, - tok=user2_tok, - ) - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key=user2_id, - body={"foo": "bar"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request with wildcards for the `event_type` - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [StateValues.WILDCARD, user2_id], - ], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # We expect at-least any state event with the `user2_id` as the `state_key` - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Member, user2_id)], - state_map[("org.matrix.foo_state", user2_id)], - }, - # Ideally, this would be exact but we're currently returning all state - # events when the `event_type` is a wildcard. - exact=False, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_wildcard_state_key(self) -> None: - """ - Test `rooms.required_state` returns relevant state events when using wildcard in - the state_key `["foobarbaz","*"]`. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request with wildcards for the `state_key` - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Member, StateValues.WILDCARD], - ], - "timeline_limit": 0, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Member, user1_id)], - state_map[(EventTypes.Member, user2_id)], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_lazy_loading_room_members(self) -> None: - """ - Test `rooms.required_state` returns people relevant to the timeline when - lazy-loading room members, `["m.room.member","$LAZY"]`. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user3_id = self.register_user("user3", "pass") - user3_tok = self.login(user3_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - self.helper.join(room_id1, user3_id, tok=user3_tok) - - self.helper.send(room_id1, "1", tok=user2_tok) - self.helper.send(room_id1, "2", tok=user3_tok) - self.helper.send(room_id1, "3", tok=user2_tok) - - # Make the Sliding Sync request with lazy loading for the room members - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.Member, StateValues.LAZY], - ], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # Only user2 and user3 sent events in the 3 events we see in the `timeline` - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.Member, user2_id)], - state_map[(EventTypes.Member, user3_id)], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_me(self) -> None: - """ - Test `rooms.required_state` correctly handles $ME. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - self.helper.send(room_id1, "1", tok=user2_tok) - - # Also send normal state events with state keys of the users, first - # change the power levels to allow this. 
- self.helper.send_state( - room_id1, - event_type=EventTypes.PowerLevels, - body={"users": {user1_id: 50, user2_id: 100}}, - tok=user2_tok, - ) - self.helper.send_state( - room_id1, - event_type="org.matrix.foo", - state_key=user1_id, - body={}, - tok=user1_tok, - ) - self.helper.send_state( - room_id1, - event_type="org.matrix.foo", - state_key=user2_id, - body={}, - tok=user2_tok, - ) - - # Make the Sliding Sync request with a request for '$ME'. - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.Member, StateValues.ME], - ["org.matrix.foo", StateValues.ME], - ], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # We should only see the create event plus the state keyed off of user1 (because of `$ME`) - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.Member, user1_id)], - state_map[("org.matrix.foo", user1_id)], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - @parameterized.expand([(Membership.LEAVE,), (Membership.BAN,)]) - def test_rooms_required_state_leave_ban(self, stop_membership: str) -> None: - """ - Test `rooms.required_state` should not return state past a leave/ban event. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - user3_id = self.register_user("user3", "pass") - user3_tok = self.login(user3_id, "pass") - - from_token = self.event_sources.get_current_token() - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - self.helper.join(room_id1, user3_id, tok=user3_tok) - - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "bar"}, - tok=user2_tok, - ) - - if stop_membership == Membership.LEAVE: - # User 1 leaves - self.helper.leave(room_id1, user1_id, tok=user1_tok) - elif stop_membership == Membership.BAN: - # User 1 is banned - self.helper.ban(room_id1, src=user2_id, targ=user1_id, tok=user2_tok) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # Change the state after user 1 leaves/is banned - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "qux"}, - tok=user2_tok, - ) - self.helper.leave(room_id1, user3_id, tok=user3_tok) - - # Make an incremental Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + f"?pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.Member, "*"], - ["org.matrix.foo_state", ""], - ], - "timeline_limit": 3, - } - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should only see the state at the time of the leave/ban (not the changes made afterwards) - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.Member, user1_id)],
- state_map[(EventTypes.Member, user2_id)], - state_map[(EventTypes.Member, user3_id)], - state_map[("org.matrix.foo_state", "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_combine_superset(self) -> None: - """ - Test `rooms.required_state` is combined across lists and room subscriptions. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.join(room_id1, user1_id, tok=user1_tok) - - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "bar"}, - tok=user2_tok, - ) - self.helper.send_state( - room_id1, - event_type="org.matrix.bar_state", - state_key="", - body={"bar": "qux"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request with multiple lists and a room subscription - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - [EventTypes.Member, user1_id], - ], - "timeline_limit": 0, - }, - "bar-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Member, StateValues.WILDCARD], - ["org.matrix.foo_state", ""], - ], - "timeline_limit": 0, - }, - }, - "room_subscriptions": { - room_id1: { - "required_state": [["org.matrix.bar_state", ""]], - "timeline_limit": 0, - } - }, - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - state_map[(EventTypes.Member, user1_id)], - state_map[(EventTypes.Member, user2_id)], - state_map[("org.matrix.foo_state", "")], - state_map[("org.matrix.bar_state", "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - def test_rooms_required_state_partial_state(self) -> None: - """ - Test that partially-stated rooms are excluded unless `rooms.required_state` is - lazy-loading room members.
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - room_id2 = self.helper.create_room_as(user2_id, tok=user2_tok) - _join_response1 = self.helper.join(room_id1, user1_id, tok=user1_tok) - join_response2 = self.helper.join(room_id2, user1_id, tok=user1_tok) - - # Mark room2 as partial state - self.get_success( - mark_event_as_partial_state(self.hs, join_response2["event_id"], room_id2) - ) - - # Make the Sliding Sync request (NOT lazy-loading room members) - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - ], - "timeline_limit": 0, - }, - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # Make sure the list includes room1 but room2 is excluded because it's still - # partially-stated - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 1], - "room_ids": [room_id1], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - # Make the Sliding Sync request (with lazy-loading room members) - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": { - "foo-list": { - "ranges": [[0, 1]], - "required_state": [ - [EventTypes.Create, ""], - # Lazy-load room members - [EventTypes.Member, StateValues.LAZY], - ], - "timeline_limit": 0, - }, - } - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # The list should include both rooms now because we're lazy-loading room members - self.assertListEqual( - list(channel.json_body["lists"]["foo-list"]["ops"]), - [ - { - "op": "SYNC", - "range": [0, 1], - "room_ids": [room_id2, room_id1], - } - ], - channel.json_body["lists"]["foo-list"], - ) - - def test_room_subscriptions_with_join_membership(self) -> None: - """ - Test `room_subscriptions` with a joined room should give us timeline and current - state events. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) - - # Make the Sliding Sync request with just the room subscription - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "room_subscriptions": { - room_id1: { - "required_state": [ - [EventTypes.Create, ""], - ], - "timeline_limit": 1, - } - }, - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # We should see some state - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[(EventTypes.Create, "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - # We should see some events - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - join_response["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - # No "live" events in an initial sync (no `from_token` to define the "live" - # range) - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - # There are more events to paginate to - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - True, - channel.json_body["rooms"][room_id1], - ) - - def test_room_subscriptions_with_leave_membership(self) -> None: - """ - Test `room_subscriptions` with a leave room should give us timeline and state - events up to the leave event. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok) - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "bar"}, - tok=user2_tok, - ) - - join_response = self.helper.join(room_id1, user1_id, tok=user1_tok) - leave_response = self.helper.leave(room_id1, user1_id, tok=user1_tok) - - state_map = self.get_success( - self.storage_controllers.state.get_current_state(room_id1) - ) - - # Send some events after user1 leaves - self.helper.send(room_id1, "activity after leave", tok=user2_tok) - # Update state after user1 leaves - self.helper.send_state( - room_id1, - event_type="org.matrix.foo_state", - state_key="", - body={"foo": "qux"}, - tok=user2_tok, - ) - - # Make the Sliding Sync request with just the room subscription - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "room_subscriptions": { - room_id1: { - "required_state": [ - ["org.matrix.foo_state", ""], - ], - "timeline_limit": 2, - } - }, - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should see the state at the time of the leave - self._assertRequiredStateIncludes( - channel.json_body["rooms"][room_id1]["required_state"], - { - state_map[("org.matrix.foo_state", "")], - }, - exact=True, - ) - self.assertIsNone(channel.json_body["rooms"][room_id1].get("invite_state")) - - # We should see some before we left (nothing after) - self.assertEqual( - [ - event["event_id"] - for event in channel.json_body["rooms"][room_id1]["timeline"] - ], - [ - join_response["event_id"], - leave_response["event_id"], - ], - channel.json_body["rooms"][room_id1]["timeline"], - ) - # No "live" events in an initial sync (no `from_token` to define the "live" - # range) - self.assertEqual( - channel.json_body["rooms"][room_id1]["num_live"], - 0, - channel.json_body["rooms"][room_id1], - ) - # There are more events to paginate to - self.assertEqual( - channel.json_body["rooms"][room_id1]["limited"], - True, - channel.json_body["rooms"][room_id1], - ) - - def test_room_subscriptions_no_leak_private_room(self) -> None: - """ - Test `room_subscriptions` with a private room we have never been in should not - leak any data to the user. 
- """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - room_id1 = self.helper.create_room_as(user2_id, tok=user2_tok, is_public=False) - - # We should not be able to join the private room - self.helper.join( - room_id1, user1_id, tok=user1_tok, expect_code=HTTPStatus.FORBIDDEN - ) - - # Make the Sliding Sync request with just the room subscription - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "room_subscriptions": { - room_id1: { - "required_state": [ - [EventTypes.Create, ""], - ], - "timeline_limit": 1, - } - }, - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # We should not see the room at all (we're not in it) - self.assertIsNone( - channel.json_body["rooms"].get(room_id1), channel.json_body["rooms"] - ) - - def test_room_subscriptions_world_readable(self) -> None: - """ - Test `room_subscriptions` with a room that has `world_readable` history visibility - - FIXME: We should be able to see the room timeline and state - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - user2_id = self.register_user("user2", "pass") - user2_tok = self.login(user2_id, "pass") - - # Create a room with `world_readable` history visibility - room_id1 = self.helper.create_room_as( - user2_id, - tok=user2_tok, - extra_content={ - "preset": "public_chat", - "initial_state": [ - { - "content": { - "history_visibility": HistoryVisibility.WORLD_READABLE - }, - "state_key": "", - "type": EventTypes.RoomHistoryVisibility, - } - ], - }, - ) - # Ensure we're testing with a room with `world_readable` history visibility - # which means events are visible to anyone even without membership. - history_visibility_response = self.helper.get_state( - room_id1, EventTypes.RoomHistoryVisibility, tok=user2_tok - ) - self.assertEqual( - history_visibility_response.get("history_visibility"), - HistoryVisibility.WORLD_READABLE, - ) - - # Note: We never join the room - - # Make the Sliding Sync request with just the room subscription - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "room_subscriptions": { - room_id1: { - "required_state": [ - [EventTypes.Create, ""], - ], - "timeline_limit": 1, - } - }, - }, - access_token=user1_tok, - ) - self.assertEqual(channel.code, 200, channel.json_body) - - # FIXME: In the future, we should be able to see the room because it's - # `world_readable` but currently we don't support this. 
- self.assertIsNone( - channel.json_body["rooms"].get(room_id1), channel.json_body["rooms"] - ) - - -class SlidingSyncToDeviceExtensionTestCase(unittest.HomeserverTestCase): - """Tests for the to-device sliding sync extension""" - - servlets = [ - synapse.rest.admin.register_servlets, - login.register_servlets, - sync.register_servlets, - sendtodevice.register_servlets, - ] - - def default_config(self) -> JsonDict: - config = super().default_config() - # Enable sliding sync - config["experimental_features"] = {"msc3575_enabled": True} - return config - - def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: - self.store = hs.get_datastores().main - self.event_sources = hs.get_event_sources() - self.account_data_handler = hs.get_account_data_handler() - self.notifier = hs.get_notifier() - self.sync_endpoint = ( - "/_matrix/client/unstable/org.matrix.simplified_msc3575/sync" - ) - - def _bump_notifier_wait_for_events(self, user_id: str) -> None: - """ - Wake up a `notifier.wait_for_events(user_id)` call without affecting the Sliding - Sync results. - """ - # We're expecting some new activity from this point onwards - from_token = self.event_sources.get_current_token() - - triggered_notifier_wait_for_events = False - - async def _on_new_activity( - before_token: StreamToken, after_token: StreamToken - ) -> bool: - nonlocal triggered_notifier_wait_for_events - triggered_notifier_wait_for_events = True - return True - - # Listen for some new activity for the user. We're just trying to confirm that - # our bump below actually does what we think it does (triggers new activity for - # the user). - result_awaitable = self.notifier.wait_for_events( - user_id, - 1000, - _on_new_activity, - from_token=from_token, - ) - - # Update the account data so that `notifier.wait_for_events(...)` wakes up. - # We're bumping account data because it won't show up in the Sliding Sync - # response so it won't affect whether we have results. - self.get_success( - self.account_data_handler.add_account_data_for_user( - user_id, - "org.matrix.foobarbaz", - {"foo": "bar"}, - ) - ) - - # Wait for our notifier result - self.get_success(result_awaitable) - - if not triggered_notifier_wait_for_events: - raise AssertionError( - "Expected `notifier.wait_for_events(...)` to be triggered" - ) - - def _assert_to_device_response( - self, channel: FakeChannel, expected_messages: List[JsonDict] - ) -> str: - """Assert the sliding sync response was successful and has the expected - to-device messages. - - Returns the next_batch token from the to-device section.
- """ - self.assertEqual(channel.code, 200, channel.json_body) - extensions = channel.json_body["extensions"] - to_device = extensions["to_device"] - self.assertIsInstance(to_device["next_batch"], str) - self.assertEqual(to_device["events"], expected_messages) - - return to_device["next_batch"] - - def test_no_data(self) -> None: - """Test that enabling to-device extension works, even if there is - no-data - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - ) - - # We expect no to-device messages - self._assert_to_device_response(channel, []) - - def test_data_initial_sync(self) -> None: - """Test that we get to-device messages when we don't specify a since - token""" - - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass", "d1") - user2_id = self.register_user("u2", "pass") - user2_tok = self.login(user2_id, "pass", "d2") - - # Send the to-device message - test_msg = {"foo": "bar"} - chan = self.make_request( - "PUT", - "/_matrix/client/r0/sendToDevice/m.test/1234", - content={"messages": {user1_id: {"d1": test_msg}}}, - access_token=user2_tok, - ) - self.assertEqual(chan.code, 200, chan.result) - - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - ) - self._assert_to_device_response( - channel, - [{"content": test_msg, "sender": user2_id, "type": "m.test"}], - ) - - def test_data_incremental_sync(self) -> None: - """Test that we get to-device messages over incremental syncs""" - - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass", "d1") - user2_id = self.register_user("u2", "pass") - user2_tok = self.login(user2_id, "pass", "d2") - - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - ) - # No to-device messages yet. - next_batch = self._assert_to_device_response(channel, []) - - test_msg = {"foo": "bar"} - chan = self.make_request( - "PUT", - "/_matrix/client/r0/sendToDevice/m.test/1234", - content={"messages": {user1_id: {"d1": test_msg}}}, - access_token=user2_tok, - ) - self.assertEqual(chan.code, 200, chan.result) - - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - "since": next_batch, - } - }, - }, - access_token=user1_tok, - ) - next_batch = self._assert_to_device_response( - channel, - [{"content": test_msg, "sender": user2_id, "type": "m.test"}], - ) - - # The next sliding sync request should not include the to-device - # message. 
- channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - "since": next_batch, - } - }, - }, - access_token=user1_tok, - ) - self._assert_to_device_response(channel, []) - - # An initial sliding sync request should not include the to-device - # message, as it should have been deleted - channel = self.make_request( - "POST", - self.sync_endpoint, - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - ) - self._assert_to_device_response(channel, []) - - def test_wait_for_new_data(self) -> None: - """ - Test to make sure that the Sliding Sync request waits for new data to arrive. - - (Only applies to incremental syncs with a `timeout` specified) - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass", "d1") - user2_id = self.register_user("u2", "pass") - user2_tok = self.login(user2_id, "pass", "d2") - - from_token = self.event_sources.get_current_token() - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + "?timeout=10000" - + f"&pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - await_result=False, - ) - # Block for 5 seconds to make sure we are waiting in `notifier.wait_for_events(...)` - with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=5000) - # Bump the to-device messages to trigger new results - test_msg = {"foo": "bar"} - send_to_device_channel = self.make_request( - "PUT", - "/_matrix/client/r0/sendToDevice/m.test/1234", - content={"messages": {user1_id: {"d1": test_msg}}}, - access_token=user2_tok, - ) - self.assertEqual( - send_to_device_channel.code, 200, send_to_device_channel.result - ) - # Should respond before the 10 second timeout - channel.await_result(timeout_ms=3000) - self.assertEqual(channel.code, 200, channel.json_body) - - self._assert_to_device_response( - channel, - [{"content": test_msg, "sender": user2_id, "type": "m.test"}], - ) - - def test_wait_for_new_data_timeout(self) -> None: - """ - Test to make sure that the Sliding Sync request waits for new data to arrive but - no data ever arrives so we time out. We're also making sure that the default data - from the To-Device extension doesn't trigger a false positive for new data. - """ - user1_id = self.register_user("user1", "pass") - user1_tok = self.login(user1_id, "pass") - - from_token = self.event_sources.get_current_token() - - # Make the Sliding Sync request - channel = self.make_request( - "POST", - self.sync_endpoint - + "?timeout=10000" - + f"&pos={self.get_success(from_token.to_string(self.store))}", - { - "lists": {}, - "extensions": { - "to_device": { - "enabled": True, - } - }, - }, - access_token=user1_tok, - await_result=False, - ) - # Block for 5 seconds to make sure we are waiting in `notifier.wait_for_events(...)` - with self.assertRaises(TimedOutException): - channel.await_result(timeout_ms=5000) - # Wake up `notifier.wait_for_events(...)`, which will cause us to test - # `SlidingSyncResult.__bool__` for new results. - self._bump_notifier_wait_for_events(user1_id) - # Block for a little bit more to ensure we don't see any new results.
-        with self.assertRaises(TimedOutException):
-            channel.await_result(timeout_ms=4000)
-        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
-        # 5000 + 4000 + 1200 > 10000)
-        channel.await_result(timeout_ms=1200)
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        self._assert_to_device_response(channel, [])
-
-
-class SlidingSyncE2eeExtensionTestCase(unittest.HomeserverTestCase):
-    """Tests for the e2ee sliding sync extension"""
-
-    servlets = [
-        synapse.rest.admin.register_servlets,
-        login.register_servlets,
-        room.register_servlets,
-        sync.register_servlets,
-        devices.register_servlets,
-    ]
-
-    def default_config(self) -> JsonDict:
-        config = super().default_config()
-        # Enable sliding sync
-        config["experimental_features"] = {"msc3575_enabled": True}
-        return config
-
-    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-        self.store = hs.get_datastores().main
-        self.event_sources = hs.get_event_sources()
-        self.e2e_keys_handler = hs.get_e2e_keys_handler()
-        self.account_data_handler = hs.get_account_data_handler()
-        self.notifier = hs.get_notifier()
-        self.sync_endpoint = (
-            "/_matrix/client/unstable/org.matrix.simplified_msc3575/sync"
-        )
-
-    def _bump_notifier_wait_for_events(self, user_id: str) -> None:
-        """
-        Wake up a `notifier.wait_for_events(user_id)` call without affecting the
-        Sliding Sync results.
-        """
-        # We're expecting some new activity from this point onwards
-        from_token = self.event_sources.get_current_token()
-
-        triggered_notifier_wait_for_events = False
-
-        async def _on_new_activity(
-            before_token: StreamToken, after_token: StreamToken
-        ) -> bool:
-            nonlocal triggered_notifier_wait_for_events
-            triggered_notifier_wait_for_events = True
-            return True
-
-        # Listen for some new activity for the user. We're just trying to confirm that
-        # our bump below actually does what we think it does (triggers new activity for
-        # the user).
-        result_awaitable = self.notifier.wait_for_events(
-            user_id,
-            1000,
-            _on_new_activity,
-            from_token=from_token,
-        )
-
-        # Update the account data so that `notifier.wait_for_events(...)` wakes up.
-        # We're bumping account data because it won't show up in the Sliding Sync
-        # response, so it won't affect whether we have results.
-        self.get_success(
-            self.account_data_handler.add_account_data_for_user(
-                user_id,
-                "org.matrix.foobarbaz",
-                {"foo": "bar"},
-            )
-        )
-
-        # Wait for our notifier result
-        self.get_success(result_awaitable)
-
-        if not triggered_notifier_wait_for_events:
-            raise AssertionError(
-                "Expected `notifier.wait_for_events(...)` to be triggered"
-            )
-
-    def test_no_data_initial_sync(self) -> None:
-        """
-        Test that enabling the e2ee extension works during an initial sync, even if
-        there is no data
-        """
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass")
-
-        # Make an initial Sliding Sync request with the e2ee extension enabled
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint,
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Device list updates are only present for incremental syncs
-        self.assertIsNone(channel.json_body["extensions"]["e2ee"].get("device_lists"))
-
-        # Both of these should be present even when empty
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_one_time_keys_count"],
-            {
-                # This is always present because of
-                # https://github.com/element-hq/element-android/issues/3725 and
-                # https://github.com/matrix-org/synapse/issues/10456
-                "signed_curve25519": 0
-            },
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_unused_fallback_key_types"],
-            [],
-        )
-
-    def test_no_data_incremental_sync(self) -> None:
-        """
-        Test that enabling the e2ee extension works during an incremental sync, even
-        if there is no data
-        """
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass")
-
-        from_token = self.event_sources.get_current_token()
-
-        # Make an incremental Sliding Sync request with the e2ee extension enabled
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint
-            + f"?pos={self.get_success(from_token.to_string(self.store))}",
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Device list shows up for incremental syncs
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]
-            .get("device_lists", {})
-            .get("changed"),
-            [],
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
-            [],
-        )
-
-        # Both of these should be present even when empty
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_one_time_keys_count"],
-            {
-                # Note that "signed_curve25519" is always returned in key count responses
-                # regardless of whether we uploaded any keys for it. This is necessary until
-                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
-                #
-                # Also related:
-                # https://github.com/element-hq/element-android/issues/3725 and
-                # https://github.com/matrix-org/synapse/issues/10456
-                "signed_curve25519": 0
-            },
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_unused_fallback_key_types"],
-            [],
-        )
-
-    def test_wait_for_new_data(self) -> None:
-        """
-        Test to make sure that the Sliding Sync request waits for new data to arrive.
-
-        (Only applies to incremental syncs with a `timeout` specified)
-        """
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass")
-        user2_id = self.register_user("user2", "pass")
-        user2_tok = self.login(user2_id, "pass")
-        test_device_id = "TESTDEVICE"
-        user3_id = self.register_user("user3", "pass")
-        user3_tok = self.login(user3_id, "pass", device_id=test_device_id)
-
-        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
-        self.helper.join(room_id, user1_id, tok=user1_tok)
-        self.helper.join(room_id, user3_id, tok=user3_tok)
-
-        from_token = self.event_sources.get_current_token()
-
-        # Make the Sliding Sync request
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint
-            + "?timeout=10000"
-            + f"&pos={self.get_success(from_token.to_string(self.store))}",
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-            await_result=False,
-        )
-        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
-        with self.assertRaises(TimedOutException):
-            channel.await_result(timeout_ms=5000)
-        # Bump the device lists to trigger new results
-        # Have user3 update their device list
-        device_update_channel = self.make_request(
-            "PUT",
-            f"/devices/{test_device_id}",
-            {
-                "display_name": "New Device Name",
-            },
-            access_token=user3_tok,
-        )
-        self.assertEqual(
-            device_update_channel.code, 200, device_update_channel.json_body
-        )
-        # Should respond before the 10 second timeout
-        channel.await_result(timeout_ms=3000)
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # We should see the device list update
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]
-            .get("device_lists", {})
-            .get("changed"),
-            [user3_id],
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
-            [],
-        )
-
-    def test_wait_for_new_data_timeout(self) -> None:
-        """
-        Test to make sure that the Sliding Sync request waits for new data to arrive,
-        but no data ever arrives, so we time out. We're also making sure that the
-        default data from the E2EE extension doesn't trigger a false positive for new
-        data (see `device_one_time_keys_count.signed_curve25519`).
-        """
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass")
-
-        from_token = self.event_sources.get_current_token()
-
-        # Make the Sliding Sync request
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint
-            + "?timeout=10000"
-            + f"&pos={self.get_success(from_token.to_string(self.store))}",
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-            await_result=False,
-        )
-        # Block for 5 seconds to make sure we are in `notifier.wait_for_events(...)`
-        with self.assertRaises(TimedOutException):
-            channel.await_result(timeout_ms=5000)
-        # Wake up `notifier.wait_for_events(...)`, which will cause us to check
-        # `SlidingSyncResult.__bool__` for new results.
-        self._bump_notifier_wait_for_events(user1_id)
-        # Block for a little bit more to ensure we don't see any new results.
-        with self.assertRaises(TimedOutException):
-            channel.await_result(timeout_ms=4000)
-        # Wait for the sync to complete (wait for the rest of the 10 second timeout,
-        # 5000 + 4000 + 1200 > 10000)
-        channel.await_result(timeout_ms=1200)
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Device lists are present for incremental syncs but empty because there were no device changes
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]
-            .get("device_lists", {})
-            .get("changed"),
-            [],
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
-            [],
-        )
-
-        # Both of these should be present even when empty
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_one_time_keys_count"],
-            {
-                # Note that "signed_curve25519" is always returned in key count responses
-                # regardless of whether we uploaded any keys for it. This is necessary until
-                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
-                #
-                # Also related:
-                # https://github.com/element-hq/element-android/issues/3725 and
-                # https://github.com/matrix-org/synapse/issues/10456
-                "signed_curve25519": 0
-            },
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]["device_unused_fallback_key_types"],
-            [],
-        )
-
-    def test_device_lists(self) -> None:
-        """
-        Test that device list updates are included in the response
-        """
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass")
-        user2_id = self.register_user("user2", "pass")
-        user2_tok = self.login(user2_id, "pass")
-
-        test_device_id = "TESTDEVICE"
-        user3_id = self.register_user("user3", "pass")
-        user3_tok = self.login(user3_id, "pass", device_id=test_device_id)
-
-        user4_id = self.register_user("user4", "pass")
-        user4_tok = self.login(user4_id, "pass")
-
-        room_id = self.helper.create_room_as(user2_id, tok=user2_tok)
-        self.helper.join(room_id, user1_id, tok=user1_tok)
-        self.helper.join(room_id, user3_id, tok=user3_tok)
-        self.helper.join(room_id, user4_id, tok=user4_tok)
-
-        from_token = self.event_sources.get_current_token()
-
-        # Have user3 update their device list
-        channel = self.make_request(
-            "PUT",
-            f"/devices/{test_device_id}",
-            {
-                "display_name": "New Device Name",
-            },
-            access_token=user3_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # User4 leaves the room
-        self.helper.leave(room_id, user4_id, tok=user4_tok)
-
-        # Make an incremental Sliding Sync request with the e2ee extension enabled
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint
-            + f"?pos={self.get_success(from_token.to_string(self.store))}",
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Device list updates show up
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"]
-            .get("device_lists", {})
-            .get("changed"),
-            [user3_id],
-        )
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"].get("device_lists", {}).get("left"),
-            [user4_id],
-        )
-
-    def test_device_one_time_keys_count(self) -> None:
-        """
-        Test that `device_one_time_keys_count` is included in the response
-        """
-        test_device_id = "TESTDEVICE"
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass", device_id=test_device_id)
-
-        # Upload one-time keys for the user/device
-        keys: JsonDict = {
-            "alg1:k1": "key1",
-            "alg2:k2": {"key": "key2", "signatures": {"k1": "sig1"}},
-            "alg2:k3": {"key": "key3"},
-        }
-
-        upload_keys_response = self.get_success(
-            self.e2e_keys_handler.upload_keys_for_user(
-                user1_id, test_device_id, {"one_time_keys": keys}
-            )
-        )
-        self.assertDictEqual(
-            upload_keys_response,
-            {
-                "one_time_key_counts": {
-                    "alg1": 1,
-                    "alg2": 2,
-                    # Note that "signed_curve25519" is always returned in key count responses
-                    # regardless of whether we uploaded any keys for it. This is necessary until
-                    # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
-                    #
-                    # Also related:
-                    # https://github.com/element-hq/element-android/issues/3725 and
-                    # https://github.com/matrix-org/synapse/issues/10456
-                    "signed_curve25519": 0,
-                }
-            },
-        )
-
-        # Make a Sliding Sync request with the e2ee extension enabled
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint,
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Check for those one time key counts
-        self.assertEqual(
-            channel.json_body["extensions"]["e2ee"].get("device_one_time_keys_count"),
-            {
-                "alg1": 1,
-                "alg2": 2,
-                # Note that "signed_curve25519" is always returned in key count responses
-                # regardless of whether we uploaded any keys for it. This is necessary until
-                # https://github.com/matrix-org/matrix-doc/issues/3298 is fixed.
-                #
-                # Also related:
-                # https://github.com/element-hq/element-android/issues/3725 and
-                # https://github.com/matrix-org/synapse/issues/10456
-                "signed_curve25519": 0,
-            },
-        )
-
-    def test_device_unused_fallback_key_types(self) -> None:
-        """
-        Test that `device_unused_fallback_key_types` are included in the response
-        """
-        test_device_id = "TESTDEVICE"
-        user1_id = self.register_user("user1", "pass")
-        user1_tok = self.login(user1_id, "pass", device_id=test_device_id)
-
-        # We shouldn't have any unused fallback keys yet
-        res = self.get_success(
-            self.store.get_e2e_unused_fallback_key_types(user1_id, test_device_id)
-        )
-        self.assertEqual(res, [])
-
-        # Upload a fallback key for the user/device
-        self.get_success(
-            self.e2e_keys_handler.upload_keys_for_user(
-                user1_id,
-                test_device_id,
-                {"fallback_keys": {"alg1:k1": "fallback_key1"}},
-            )
-        )
-        # We should now have an unused alg1 key
-        fallback_res = self.get_success(
-            self.store.get_e2e_unused_fallback_key_types(user1_id, test_device_id)
-        )
-        self.assertEqual(fallback_res, ["alg1"], fallback_res)
-
-        # Make a Sliding Sync request with the e2ee extension enabled
-        channel = self.make_request(
-            "POST",
-            self.sync_endpoint,
-            {
-                "lists": {},
-                "extensions": {
-                    "e2ee": {
-                        "enabled": True,
-                    }
-                },
-            },
-            access_token=user1_tok,
-        )
-        self.assertEqual(channel.code, 200, channel.json_body)
-
-        # Check for the unused fallback key types
-        self.assertListEqual(
-            channel.json_body["extensions"]["e2ee"].get(
-                "device_unused_fallback_key_types"
-            ),
-            ["alg1"],
-        )
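For reference, the `e2ee` extension output that the removed tests assert against has the shape below. This is a minimal sketch built only from the assertions above; the user IDs and the `alg1` algorithm name are illustrative test values, not part of any fixed API.

```python
# Sketch of the Sliding Sync `extensions.e2ee` response body (illustrative):
e2ee_extension_response = {
    # Omitted on initial syncs; present (possibly with empty lists) on
    # incremental syncs.
    "device_lists": {
        "changed": ["@user3:test"],  # users whose device lists changed
        "left": ["@user4:test"],  # users who left all rooms shared with us
    },
    # "signed_curve25519" is always reported, even when no keys were uploaded
    # (see https://github.com/matrix-org/matrix-doc/issues/3298).
    "device_one_time_keys_count": {"signed_curve25519": 0},
    # Algorithms with an uploaded-but-unused fallback key, e.g. ["alg1"].
    "device_unused_fallback_key_types": [],
}
```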