# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
2018-07-19 21:49:44 +02:00
|
|
|
import logging

from frozendict import frozendict

from twisted.test.proto_helpers import MemoryReactor

from synapse.api.constants import EventTypes, Membership
from synapse.api.room_versions import RoomVersions
from synapse.events import EventBase
from synapse.server import HomeServer
from synapse.types import JsonDict, RoomID, StateMap, UserID
from synapse.types.state import StateFilter
from synapse.util import Clock

from tests.unittest import HomeserverTestCase
|
2018-07-19 21:19:32 +02:00
|
|
|
|
2018-07-19 21:49:44 +02:00
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2018-07-19 21:19:32 +02:00
|
|
|
|
2021-04-06 13:21:02 +02:00
|
|
|
class StateStoreTestCase(HomeserverTestCase):
    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        """Set up the fixtures shared by every test: stores, handlers,
        two local test users, and a registered room.
        """
        controllers = hs.get_storage_controllers()

        self.store = hs.get_datastores().main
        self.storage = controllers
        self.state_datastore = controllers.state.stores.state
        self.event_builder_factory = hs.get_event_builder_factory()
        self.event_creation_handler = hs.get_event_creation_handler()

        # Two users on the local test server, plus a fixed room ID.
        self.u_alice = UserID.from_string("@alice:test")
        self.u_bob = UserID.from_string("@bob:test")
        self.room = RoomID.from_string("!abc123:test")

        # Register the room in the main store so events can be injected into it.
        self.get_success(
            self.store.store_room(
                self.room.to_string(),
                room_creator_user_id="@creator:text",
                is_public=True,
                room_version=RoomVersions.V1,
            )
        )
|
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
def inject_state_event(
|
|
|
|
self, room: RoomID, sender: UserID, typ: str, state_key: str, content: JsonDict
|
|
|
|
) -> EventBase:
|
2019-04-01 11:24:38 +02:00
|
|
|
builder = self.event_builder_factory.for_room_version(
|
2019-01-24 10:28:16 +01:00
|
|
|
RoomVersions.V1,
|
2018-08-10 15:54:09 +02:00
|
|
|
{
|
|
|
|
"type": typ,
|
|
|
|
"sender": sender.to_string(),
|
|
|
|
"state_key": state_key,
|
|
|
|
"room_id": room.to_string(),
|
|
|
|
"content": content,
|
2019-05-10 07:12:11 +02:00
|
|
|
},
|
2018-08-10 15:54:09 +02:00
|
|
|
)
|
2018-07-19 21:19:32 +02:00
|
|
|
|
2023-02-09 22:05:02 +01:00
|
|
|
event, unpersisted_context = self.get_success(
|
2020-07-22 18:29:15 +02:00
|
|
|
self.event_creation_handler.create_new_client_event(builder)
|
2018-07-19 21:19:32 +02:00
|
|
|
)
|
|
|
|
|
2023-02-09 22:05:02 +01:00
|
|
|
context = self.get_success(unpersisted_context.persist(event))
|
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
assert self.storage.persistence is not None
|
2021-04-06 13:21:02 +02:00
|
|
|
self.get_success(self.storage.persistence.persist_event(event, context))
|
2018-07-19 21:19:32 +02:00
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return event
|
2018-07-19 21:19:32 +02:00
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
def assertStateMapEqual(
|
|
|
|
self, s1: StateMap[EventBase], s2: StateMap[EventBase]
|
|
|
|
) -> None:
|
2018-07-19 21:49:44 +02:00
|
|
|
for t in s1:
|
|
|
|
# just compare event IDs for simplicity
|
|
|
|
self.assertEqual(s1[t].event_id, s2[t].event_id)
|
|
|
|
self.assertEqual(len(s1), len(s2))
|
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
def test_get_state_groups_ids(self) -> None:
|
2021-04-06 13:21:02 +02:00
|
|
|
e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
|
|
|
|
e2 = self.inject_state_event(
|
2019-06-20 11:32:02 +02:00
|
|
|
self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
|
2018-09-27 12:22:25 +02:00
|
|
|
)
|
|
|
|
|
2021-04-06 13:21:02 +02:00
|
|
|
state_group_map = self.get_success(
|
2022-12-09 18:36:32 +01:00
|
|
|
self.storage.state.get_state_groups_ids(
|
|
|
|
self.room.to_string(), [e2.event_id]
|
|
|
|
)
|
2019-05-10 07:12:11 +02:00
|
|
|
)
|
2018-09-27 12:22:25 +02:00
|
|
|
self.assertEqual(len(state_group_map), 1)
|
|
|
|
state_map = list(state_group_map.values())[0]
|
|
|
|
self.assertDictEqual(
|
|
|
|
state_map,
|
2019-06-20 11:32:02 +02:00
|
|
|
{(EventTypes.Create, ""): e1.event_id, (EventTypes.Name, ""): e2.event_id},
|
2018-09-27 12:22:25 +02:00
|
|
|
)
|
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
def test_get_state_groups(self) -> None:
|
2021-04-06 13:21:02 +02:00
|
|
|
e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
|
|
|
|
e2 = self.inject_state_event(
|
2019-06-20 11:32:02 +02:00
|
|
|
self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
|
2018-09-27 12:22:25 +02:00
|
|
|
)
|
|
|
|
|
2021-04-06 13:21:02 +02:00
|
|
|
state_group_map = self.get_success(
|
2022-12-09 18:36:32 +01:00
|
|
|
self.storage.state.get_state_groups(self.room.to_string(), [e2.event_id])
|
2019-10-23 18:25:54 +02:00
|
|
|
)
|
2018-09-27 12:22:25 +02:00
|
|
|
self.assertEqual(len(state_group_map), 1)
|
|
|
|
state_list = list(state_group_map.values())[0]
|
|
|
|
|
2019-05-10 07:12:11 +02:00
|
|
|
self.assertEqual({ev.event_id for ev in state_list}, {e1.event_id, e2.event_id})
|
2018-09-27 12:22:25 +02:00
|
|
|
|
2022-12-09 18:36:32 +01:00
|
|
|
    def test_get_state_for_event(self) -> None:
        """Test state lookups for an event, with and without state filters.

        Exercises ``get_state_for_event`` with various ``StateFilter``s, then
        drives ``_get_state_for_group_using_cache`` directly against both the
        non-member and member state-group caches — first with fully populated
        caches, then after deliberately making the non-member cache partial.
        """
        # this defaults to a linear DAG as each new injection defaults to whatever
        # forward extremities are currently in the DB for this room.
        e1 = self.inject_state_event(self.room, self.u_alice, EventTypes.Create, "", {})
        e2 = self.inject_state_event(
            self.room, self.u_alice, EventTypes.Name, "", {"name": "test room"}
        )
        e3 = self.inject_state_event(
            self.room,
            self.u_alice,
            EventTypes.Member,
            self.u_alice.to_string(),
            {"membership": Membership.JOIN},
        )
        e4 = self.inject_state_event(
            self.room,
            self.u_bob,
            EventTypes.Member,
            self.u_bob.to_string(),
            {"membership": Membership.JOIN},
        )
        e5 = self.inject_state_event(
            self.room,
            self.u_bob,
            EventTypes.Member,
            self.u_bob.to_string(),
            {"membership": Membership.LEAVE},
        )

        # check we get the full state as of the final event
        state = self.get_success(self.storage.state.get_state_for_event(e5.event_id))

        # e4 does not appear in the expected state below (superseded by e5);
        # this is just a sanity check that it was created.
        self.assertIsNotNone(e4)

        self.assertStateMapEqual(
            {
                (e1.type, e1.state_key): e1,
                (e2.type, e2.state_key): e2,
                (e3.type, e3.state_key): e3,
                # e4 is overwritten by e5
                (e5.type, e5.state_key): e5,
            },
            state,
        )

        # check we can filter to the m.room.name event (with a '' state key)
        state = self.get_success(
            self.storage.state.get_state_for_event(
                e5.event_id, StateFilter.from_types([(EventTypes.Name, "")])
            )
        )

        self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)

        # check we can filter to the m.room.name event (with a wildcard None state key)
        state = self.get_success(
            self.storage.state.get_state_for_event(
                e5.event_id, StateFilter.from_types([(EventTypes.Name, None)])
            )
        )

        self.assertStateMapEqual({(e2.type, e2.state_key): e2}, state)

        # check we can grab the m.room.member events (with a wildcard None state key)
        state = self.get_success(
            self.storage.state.get_state_for_event(
                e5.event_id, StateFilter.from_types([(EventTypes.Member, None)])
            )
        )

        self.assertStateMapEqual(
            {(e3.type, e3.state_key): e3, (e5.type, e5.state_key): e5}, state
        )

        # check we can grab a specific room member without filtering out the
        # other event types
        state = self.get_success(
            self.storage.state.get_state_for_event(
                e5.event_id,
                state_filter=StateFilter(
                    types=frozendict(
                        {EventTypes.Member: frozenset({self.u_alice.to_string()})}
                    ),
                    include_others=True,
                ),
            )
        )

        self.assertStateMapEqual(
            {
                (e1.type, e1.state_key): e1,
                (e2.type, e2.state_key): e2,
                (e3.type, e3.state_key): e3,
            },
            state,
        )

        # check that we can grab everything except members
        state = self.get_success(
            self.storage.state.get_state_for_event(
                e5.event_id,
                state_filter=StateFilter(
                    types=frozendict({EventTypes.Member: frozenset()}),
                    include_others=True,
                ),
            )
        )

        self.assertStateMapEqual(
            {(e1.type, e1.state_key): e1, (e2.type, e2.state_key): e2}, state
        )

        #######################################################
        # _get_state_for_group_using_cache tests against a full cache
        #######################################################

        room_id = self.room.to_string()
        group_ids = self.get_success(
            self.storage.state.get_state_groups_ids(room_id, [e5.event_id])
        )
        group = list(group_ids.keys())[0]

        # test _get_state_for_group_using_cache correctly filters out members
        # with types=[]
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset()}), include_others=True
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual(
            {
                (e1.type, e1.state_key): e1.event_id,
                (e2.type, e2.state_key): e2.event_id,
            },
            state_dict,
        )

        # the members cache holds no non-member state, so the same filter
        # against it yields nothing
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset()}), include_others=True
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual({}, state_dict)

        # test _get_state_for_group_using_cache correctly filters in members
        # with wildcard types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: None}), include_others=True
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual(
            {
                (e1.type, e1.state_key): e1.event_id,
                (e2.type, e2.state_key): e2.event_id,
            },
            state_dict,
        )

        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: None}), include_others=True
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual(
            {
                (e3.type, e3.state_key): e3.event_id,
                # e4 is overwritten by e5
                (e5.type, e5.state_key): e5.event_id,
            },
            state_dict,
        )

        # test _get_state_for_group_using_cache correctly filters in members
        # with specific types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=True,
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual(
            {
                (e1.type, e1.state_key): e1.event_id,
                (e2.type, e2.state_key): e2.event_id,
            },
            state_dict,
        )

        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=True,
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)

        # test _get_state_for_group_using_cache correctly filters in members
        # with specific types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=False,
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)

        #######################################################
        # deliberately remove e2 (room name) from the _state_group_cache

        cache_entry = self.state_datastore._state_group_cache.get(group)
        state_dict_ids = cache_entry.value

        # sanity check: the cache entry starts off complete
        self.assertEqual(cache_entry.full, True)
        self.assertEqual(cache_entry.known_absent, set())
        self.assertDictEqual(
            state_dict_ids,
            {
                (e1.type, e1.state_key): e1.event_id,
                (e2.type, e2.state_key): e2.event_id,
            },
        )

        state_dict_ids.pop((e2.type, e2.state_key))
        self.state_datastore._state_group_cache.invalidate(group)
        self.state_datastore._state_group_cache.update(
            sequence=self.state_datastore._state_group_cache.sequence,
            key=group,
            value=state_dict_ids,
            # list fetched keys so it knows it's partial
            fetched_keys=((e1.type, e1.state_key),),
        )

        cache_entry = self.state_datastore._state_group_cache.get(group)
        state_dict_ids = cache_entry.value

        # the entry is now partial (not "full") and reports no cached values
        self.assertEqual(cache_entry.full, False)
        self.assertEqual(cache_entry.known_absent, set())
        self.assertDictEqual(state_dict_ids, {})

        ############################################
        # test that things work with a partial cache

        # test _get_state_for_group_using_cache correctly filters out members
        # with types=[]
        # NOTE(review): room_id is assigned here (and again below) but unused
        # by the calls that follow — looks like a leftover; confirm.
        room_id = self.room.to_string()
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset()}), include_others=True
            ),
        )

        self.assertEqual(is_all, False)
        self.assertDictEqual({}, state_dict)

        room_id = self.room.to_string()
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset()}), include_others=True
            ),
        )

        # the members cache was untouched, so it can still answer completely
        self.assertEqual(is_all, True)
        self.assertDictEqual({}, state_dict)

        # test _get_state_for_group_using_cache correctly filters in members
        # wildcard types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: None}), include_others=True
            ),
        )

        self.assertEqual(is_all, False)
        self.assertDictEqual({}, state_dict)

        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: None}), include_others=True
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual(
            {
                (e3.type, e3.state_key): e3.event_id,
                (e5.type, e5.state_key): e5.event_id,
            },
            state_dict,
        )

        # test _get_state_for_group_using_cache correctly filters in members
        # with specific types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=True,
            ),
        )

        self.assertEqual(is_all, False)
        self.assertDictEqual({}, state_dict)

        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=True,
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)

        # test _get_state_for_group_using_cache correctly filters in members
        # with specific types
        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=False,
            ),
        )

        self.assertEqual(is_all, False)
        self.assertDictEqual({}, state_dict)

        state_dict, is_all = self.state_datastore._get_state_for_group_using_cache(
            self.state_datastore._state_group_members_cache,
            group,
            state_filter=StateFilter(
                types=frozendict({EventTypes.Member: frozenset({e5.state_key})}),
                include_others=False,
            ),
        )

        self.assertEqual(is_all, True)
        self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict)
|
2023-02-24 22:15:29 +01:00
|
|
|
|
|
|
|
def test_batched_state_group_storing(self) -> None:
|
|
|
|
creation_event = self.inject_state_event(
|
|
|
|
self.room, self.u_alice, EventTypes.Create, "", {}
|
|
|
|
)
|
|
|
|
state_to_event = self.get_success(
|
|
|
|
self.storage.state.get_state_groups(
|
|
|
|
self.room.to_string(), [creation_event.event_id]
|
|
|
|
)
|
|
|
|
)
|
|
|
|
current_state_group = list(state_to_event.keys())[0]
|
|
|
|
|
|
|
|
# create some unpersisted events and event contexts to store against room
|
|
|
|
events_and_context = []
|
|
|
|
builder = self.event_builder_factory.for_room_version(
|
|
|
|
RoomVersions.V1,
|
|
|
|
{
|
|
|
|
"type": EventTypes.Name,
|
|
|
|
"sender": self.u_alice.to_string(),
|
|
|
|
"state_key": "",
|
|
|
|
"room_id": self.room.to_string(),
|
|
|
|
"content": {"name": "first rename of room"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
event1, unpersisted_context1 = self.get_success(
|
|
|
|
self.event_creation_handler.create_new_client_event(builder)
|
|
|
|
)
|
|
|
|
events_and_context.append((event1, unpersisted_context1))
|
|
|
|
|
|
|
|
builder2 = self.event_builder_factory.for_room_version(
|
|
|
|
RoomVersions.V1,
|
|
|
|
{
|
|
|
|
"type": EventTypes.JoinRules,
|
|
|
|
"sender": self.u_alice.to_string(),
|
|
|
|
"state_key": "",
|
|
|
|
"room_id": self.room.to_string(),
|
|
|
|
"content": {"join_rule": "private"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
event2, unpersisted_context2 = self.get_success(
|
|
|
|
self.event_creation_handler.create_new_client_event(builder2)
|
|
|
|
)
|
|
|
|
events_and_context.append((event2, unpersisted_context2))
|
|
|
|
|
|
|
|
builder3 = self.event_builder_factory.for_room_version(
|
|
|
|
RoomVersions.V1,
|
|
|
|
{
|
|
|
|
"type": EventTypes.Message,
|
|
|
|
"sender": self.u_alice.to_string(),
|
|
|
|
"room_id": self.room.to_string(),
|
|
|
|
"content": {"body": "hello from event 3", "msgtype": "m.text"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
event3, unpersisted_context3 = self.get_success(
|
|
|
|
self.event_creation_handler.create_new_client_event(builder3)
|
|
|
|
)
|
|
|
|
events_and_context.append((event3, unpersisted_context3))
|
|
|
|
|
|
|
|
builder4 = self.event_builder_factory.for_room_version(
|
|
|
|
RoomVersions.V1,
|
|
|
|
{
|
|
|
|
"type": EventTypes.JoinRules,
|
|
|
|
"sender": self.u_alice.to_string(),
|
|
|
|
"state_key": "",
|
|
|
|
"room_id": self.room.to_string(),
|
|
|
|
"content": {"join_rule": "public"},
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
event4, unpersisted_context4 = self.get_success(
|
|
|
|
self.event_creation_handler.create_new_client_event(builder4)
|
|
|
|
)
|
|
|
|
events_and_context.append((event4, unpersisted_context4))
|
|
|
|
|
|
|
|
processed_events_and_context = self.get_success(
|
|
|
|
self.hs.get_datastores().state.store_state_deltas_for_batched(
|
|
|
|
events_and_context, self.room.to_string(), current_state_group
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
# check that only state events are in state_groups, and all state events are in state_groups
|
|
|
|
res = self.get_success(
|
|
|
|
self.store.db_pool.simple_select_list(
|
|
|
|
table="state_groups",
|
|
|
|
keyvalues=None,
|
|
|
|
retcols=("event_id",),
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
events = []
|
|
|
|
for result in res:
|
|
|
|
self.assertNotIn(event3.event_id, result)
|
|
|
|
events.append(result.get("event_id"))
|
|
|
|
|
|
|
|
for event, _ in processed_events_and_context:
|
|
|
|
if event.is_state():
|
|
|
|
self.assertIn(event.event_id, events)
|
|
|
|
|
|
|
|
# check that each unique state has state group in state_groups_state and that the
|
|
|
|
# type/state key is correct, and check that each state event's state group
|
|
|
|
# has an entry and prev event in state_group_edges
|
|
|
|
for event, context in processed_events_and_context:
|
|
|
|
if event.is_state():
|
|
|
|
state = self.get_success(
|
|
|
|
self.store.db_pool.simple_select_list(
|
|
|
|
table="state_groups_state",
|
|
|
|
keyvalues={"state_group": context.state_group_after_event},
|
|
|
|
retcols=("type", "state_key"),
|
|
|
|
)
|
|
|
|
)
|
|
|
|
self.assertEqual(event.type, state[0].get("type"))
|
|
|
|
self.assertEqual(event.state_key, state[0].get("state_key"))
|
|
|
|
|
|
|
|
groups = self.get_success(
|
|
|
|
self.store.db_pool.simple_select_list(
|
|
|
|
table="state_group_edges",
|
|
|
|
keyvalues={"state_group": str(context.state_group_after_event)},
|
|
|
|
retcols=("*",),
|
|
|
|
)
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
context.state_group_before_event, groups[0].get("prev_state_group")
|
|
|
|
)
|