# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import Awaitable, Dict, Iterable, List, Optional, Set, Tuple, TypeVar

import attr

from synapse.api.constants import EventTypes
from synapse.events import EventBase
from synapse.types import MutableStateMap, StateMap

logger = logging.getLogger(__name__)

# Used for generic functions below
T = TypeVar("T")


@attr.s(slots=True)
class StateFilter:
    """A filter used when querying for state.

    Attributes:
        types: Map from type to set of state keys (or None). This specifies
            which state_keys for the given type to fetch from the DB. If None
            then all events with that type are fetched. If the set is empty
            then no events with that type are fetched.
        include_others: Whether to fetch events with types that do not
            appear in `types`.
    """

    types = attr.ib(type=Dict[str, Optional[Set[str]]])
    include_others = attr.ib(default=False, type=bool)
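
    # An illustrative example (values invented for this comment, not taken from
    # any calling code): a filter such as
    #
    #   StateFilter(
    #       types={"m.room.create": {""}, "m.room.member": None},
    #       include_others=False,
    #   )
    #
    # fetches the ("m.room.create", "") event plus every m.room.member event and
    # nothing else, while StateFilter(types={}, include_others=True) fetches all
    # state.
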
    def __attrs_post_init__(self):
        # If `include_others` is set we canonicalise the filter by removing
        # wildcards from the types dictionary
        if self.include_others:
            self.types = {k: v for k, v in self.types.items() if v is not None}

    @staticmethod
    def all() -> "StateFilter":
        """Creates a filter that fetches everything.

        Returns:
            The new state filter.
        """
        return StateFilter(types={}, include_others=True)

    @staticmethod
    def none() -> "StateFilter":
        """Creates a filter that fetches nothing.

        Returns:
            The new state filter.
        """
        return StateFilter(types={}, include_others=False)

    @staticmethod
    def from_types(types: Iterable[Tuple[str, Optional[str]]]) -> "StateFilter":
        """Creates a filter that only fetches the given types

        Args:
            types: A list of type and state keys to fetch. A state_key of None
                fetches everything for that type

        Returns:
            The new state filter.
        """
        type_dict = {}  # type: Dict[str, Optional[Set[str]]]
        for typ, s in types:
            if typ in type_dict:
                if type_dict[typ] is None:
                    continue

            if s is None:
                type_dict[typ] = None
                continue

            type_dict.setdefault(typ, set()).add(s)  # type: ignore

        return StateFilter(types=type_dict)

    @staticmethod
    def from_lazy_load_member_list(members: Iterable[str]) -> "StateFilter":
        """Creates a filter that returns all non-member events, plus the member
        events for the given users

        Args:
            members: Set of user IDs

        Returns:
            The new state filter
        """
        return StateFilter(types={EventTypes.Member: set(members)}, include_others=True)

    def return_expanded(self) -> "StateFilter":
        """Creates a new StateFilter where type wild cards have been removed
        (except for memberships). The returned filter is a superset of the
        current one, i.e. anything that passes the current filter will pass
        the returned filter.

        This helps the caching as the DictionaryCache knows if it has *all* the
        state, but does not know if it has all of the keys of a particular type,
        which makes wildcard lookups expensive unless we have a complete cache.
        Hence, if we are doing a wildcard lookup, populate the cache fully so
        that we can do an efficient lookup next time.

        Note that since we have two caches, one for membership events and one for
        other events, we can be a bit more clever than simply returning
        `StateFilter.all()` if `has_wildcards()` is True.

        We return a StateFilter where:
            1. the list of membership events to return is the same
            2. if there is a wildcard that matches non-member events we
               return all non-member events

        Returns:
            The new state filter.
        """
        if self.is_full():
            # If we're going to return everything then there's nothing to do
            return self

        if not self.has_wildcards():
            # If there are no wild cards, there's nothing to do
            return self

        if EventTypes.Member in self.types:
            get_all_members = self.types[EventTypes.Member] is None
        else:
            get_all_members = self.include_others

        has_non_member_wildcard = self.include_others or any(
            state_keys is None
            for t, state_keys in self.types.items()
            if t != EventTypes.Member
        )

        if not has_non_member_wildcard:
            # If there are no non-member wild cards we can just return ourselves
            return self

        if get_all_members:
            # We want to return everything.
            return StateFilter.all()
        else:
            # We want to return all non-members, but only particular
            # memberships
            return StateFilter(
                types={EventTypes.Member: self.types[EventTypes.Member]},
                include_others=True,
            )

    def make_sql_filter_clause(self) -> Tuple[str, List[str]]:
        """Converts the filter to an SQL clause.

        For example:

            f = StateFilter.from_types([("m.room.create", "")])
            clause, args = f.make_sql_filter_clause()
            clause == "(type = ? AND state_key = ?)"
            args == ['m.room.create', '']

        Returns:
            The SQL string (may be empty) and arguments. An empty SQL string is
            returned when the filter matches everything (i.e. is "full").
        """
        where_clause = ""
        where_args = []  # type: List[str]

        if self.is_full():
            return where_clause, where_args

        if not self.include_others and not self.types:
            # i.e. this is an empty filter, so we need to return a clause that
            # will match nothing
            return "1 = 2", []

        # First we build up a list of clauses for each type/state_key combo
        clauses = []
        for etype, state_keys in self.types.items():
            if state_keys is None:
                clauses.append("(type = ?)")
                where_args.append(etype)
                continue

            for state_key in state_keys:
                clauses.append("(type = ? AND state_key = ?)")
                where_args.extend((etype, state_key))

        # This will match anything that appears in `self.types`
        where_clause = " OR ".join(clauses)

        # If we want to include stuff that's not in the types dict then we add
        # an `OR type NOT IN (...)` clause to the end.
        if self.include_others:
            if where_clause:
                where_clause += " OR "

            where_clause += "type NOT IN (%s)" % (",".join(["?"] * len(self.types)),)
            where_args.extend(self.types)

        return where_clause, where_args

    def max_entries_returned(self) -> Optional[int]:
        """Returns the maximum number of entries this filter will return if
        known, otherwise returns None.

        For example a simple state filter asking for `("m.room.create", "")`
        will return 1, whereas the default state filter will return None.

        This is used to bail out early if the right number of entries have been
        fetched.
        """
        if self.has_wildcards():
            return None

        return len(self.concrete_types())

    def filter_state(self, state_dict: StateMap[T]) -> StateMap[T]:
        """Returns the state filtered by this StateFilter

        Args:
            state_dict: The state map to filter

        Returns:
            The filtered state map
        """
        if self.is_full():
            return dict(state_dict)

        filtered_state = {}
        for k, v in state_dict.items():
            typ, state_key = k
            if typ in self.types:
                state_keys = self.types[typ]
                if state_keys is None or state_key in state_keys:
                    filtered_state[k] = v
            elif self.include_others:
                filtered_state[k] = v

        return filtered_state

    def is_full(self) -> bool:
        """Whether this filter fetches everything or not

        Returns:
            True if the filter fetches everything.
        """
        return self.include_others and not self.types

    def has_wildcards(self) -> bool:
        """Whether the filter includes wildcards or is attempting to fetch
        specific state.

        Returns:
            True if the filter includes wildcards.
        """

        return self.include_others or any(
            state_keys is None for state_keys in self.types.values()
        )

    def concrete_types(self) -> List[Tuple[str, str]]:
        """Returns a list of concrete type/state_keys (i.e. not None) that
        will be fetched. This will be a complete list if `has_wildcards`
        returns False, but otherwise will be a subset (or even empty).

        Returns:
            A list of type/state_keys tuples.
        """
        return [
            (t, s)
            for t, state_keys in self.types.items()
            if state_keys is not None
            for s in state_keys
        ]

    def get_member_split(self) -> Tuple["StateFilter", "StateFilter"]:
        """Return the filter split into two: one which assumes it's exclusively
        matching against member state, and one which assumes it's matching
        against non member state.

        This is useful due to the returned filters giving correct results for
        `is_full()`, `has_wildcards()`, etc, when operating against maps that
        either exclusively contain member events or only contain non-member
        events. (Which is the case when dealing with the member vs non-member
        state caches).

        Returns:
            The member and non member filters
        """
        if EventTypes.Member in self.types:
            state_keys = self.types[EventTypes.Member]
            if state_keys is None:
                member_filter = StateFilter.all()
            else:
                member_filter = StateFilter({EventTypes.Member: state_keys})
        elif self.include_others:
            member_filter = StateFilter.all()
        else:
            member_filter = StateFilter.none()

        non_member_filter = StateFilter(
            types={k: v for k, v in self.types.items() if k != EventTypes.Member},
            include_others=self.include_others,
        )

        return member_filter, non_member_filter


class StateGroupStorage:
    """High level interface to fetching state for events."""

    def __init__(self, hs, stores):
        self.stores = stores

    async def get_state_group_delta(self, state_group: int):
        """Given a state group try to return a previous group and a delta between
        the old and the new.

        Args:
            state_group: The state group used to retrieve state deltas.

        Returns:
            Tuple[Optional[int], Optional[StateMap[str]]]:
                (prev_group, delta_ids)
        """

        return await self.stores.state.get_state_group_delta(state_group)

    async def get_state_groups_ids(
        self, _room_id: str, event_ids: Iterable[str]
    ) -> Dict[int, MutableStateMap[str]]:
        """Get the event IDs of all the state for the state groups for the given events

        Args:
            _room_id: id of the room for these events
            event_ids: ids of the events

        Returns:
            dict of state_group_id -> (dict of (type, state_key) -> event id)
        """
        if not event_ids:
            return {}

        event_to_groups = await self.stores.main._get_state_group_for_events(event_ids)

        groups = set(event_to_groups.values())
        group_to_state = await self.stores.state._get_state_for_groups(groups)

        return group_to_state

    async def get_state_ids_for_group(self, state_group: int) -> StateMap[str]:
        """Get the event IDs of all the state in the given state group

        Args:
            state_group: A state group for which we want to get the state IDs.

        Returns:
            A map of (type, state_key) -> event_id
        """
        group_to_state = await self._get_state_for_groups((state_group,))

        return group_to_state[state_group]

    async def get_state_groups(
        self, room_id: str, event_ids: Iterable[str]
    ) -> Dict[int, List[EventBase]]:
        """ Get the state groups for the given list of event_ids

        Args:
            room_id: ID of the room for these events.
            event_ids: The event IDs to retrieve state for.

        Returns:
            dict of state_group_id -> list of state events.
        """
        if not event_ids:
            return {}

        group_to_ids = await self.get_state_groups_ids(room_id, event_ids)

        state_event_map = await self.stores.main.get_events(
            [
                ev_id
                for group_ids in group_to_ids.values()
                for ev_id in group_ids.values()
            ],
            get_prev_content=False,
        )

        return {
            group: [
                state_event_map[v]
                for v in event_id_map.values()
                if v in state_event_map
            ]
            for group, event_id_map in group_to_ids.items()
        }

    def _get_state_groups_from_groups(
        self, groups: List[int], state_filter: StateFilter
    ) -> Awaitable[Dict[int, StateMap[str]]]:
        """Returns the state groups for a given set of groups, filtering on
        types of state events.

        Args:
            groups: list of state group IDs to query
            state_filter: The state filter used to fetch state
                from the database.

        Returns:
            Dict of state group to state map.
        """

        return self.stores.state._get_state_groups_from_groups(groups, state_filter)

    async def get_state_for_events(
        self, event_ids: List[str], state_filter: StateFilter = StateFilter.all()
    ):
        """Given a list of event_ids and a state filter, return the state dict for
        each event.

        Args:
            event_ids: The events to fetch the state of.
            state_filter: The state filter used to fetch state.

        Returns:
            A dict of (event_id) -> (type, state_key) -> state_event
        """
        event_to_groups = await self.stores.main._get_state_group_for_events(event_ids)

        groups = set(event_to_groups.values())
        group_to_state = await self.stores.state._get_state_for_groups(
            groups, state_filter
        )

        state_event_map = await self.stores.main.get_events(
            [ev_id for sd in group_to_state.values() for ev_id in sd.values()],
            get_prev_content=False,
        )

        event_to_state = {
            event_id: {
                k: state_event_map[v]
                for k, v in group_to_state[group].items()
                if v in state_event_map
            }
            for event_id, group in event_to_groups.items()
        }

        return {event: event_to_state[event] for event in event_ids}

    async def get_state_ids_for_events(
        self, event_ids: List[str], state_filter: StateFilter = StateFilter.all()
    ):
        """
        Get the state dicts corresponding to a list of events, containing the event_ids
        of the state events (as opposed to the events themselves)

        Args:
            event_ids: events whose state should be returned
            state_filter: The state filter used to fetch state from the database.

        Returns:
            A dict from event_id -> (type, state_key) -> event_id
        """
        event_to_groups = await self.stores.main._get_state_group_for_events(event_ids)

        groups = set(event_to_groups.values())
        group_to_state = await self.stores.state._get_state_for_groups(
            groups, state_filter
        )

        event_to_state = {
            event_id: group_to_state[group]
            for event_id, group in event_to_groups.items()
        }

        return {event: event_to_state[event] for event in event_ids}

    async def get_state_for_event(
        self, event_id: str, state_filter: StateFilter = StateFilter.all()
    ):
        """
        Get the state dict corresponding to a particular event

        Args:
            event_id: event whose state should be returned
            state_filter: The state filter used to fetch state from the database.

        Returns:
            A dict from (type, state_key) -> state_event
        """
        state_map = await self.get_state_for_events([event_id], state_filter)
        return state_map[event_id]

    async def get_state_ids_for_event(
        self, event_id: str, state_filter: StateFilter = StateFilter.all()
    ):
        """
        Get the state dict corresponding to a particular event

        Args:
            event_id: event whose state should be returned
            state_filter: The state filter used to fetch state from the database.

        Returns:
            A dict from (type, state_key) -> state_event_id
        """
        state_map = await self.get_state_ids_for_events([event_id], state_filter)
        return state_map[event_id]

    def _get_state_for_groups(
        self, groups: Iterable[int], state_filter: StateFilter = StateFilter.all()
    ) -> Awaitable[Dict[int, MutableStateMap[str]]]:
        """Gets the state at each of a list of state groups, optionally
        filtering by type/state_key

        Args:
            groups: list of state groups for which we want to get the state.
            state_filter: The state filter used to fetch state
                from the database.

        Returns:
            Dict of state group to state map.
        """
        return self.stores.state._get_state_for_groups(groups, state_filter)

    async def store_state_group(
        self,
        event_id: str,
        room_id: str,
        prev_group: Optional[int],
        delta_ids: Optional[dict],
        current_state_ids: dict,
    ) -> int:
        """Store a new set of state, returning a newly assigned state group.

        Args:
            event_id: The event ID for which the state was calculated.
            room_id: ID of the room for which the state was calculated.
            prev_group: A previous state group for the room, optional.
            delta_ids: The delta between state at `prev_group` and
                `current_state_ids`, if `prev_group` was given. Same format as
                `current_state_ids`.
            current_state_ids: The state to store. Map of (type, state_key)
                to event_id.

        Returns:
            The state group ID
        """
        return await self.stores.state.store_state_group(
            event_id, room_id, prev_group, delta_ids, current_state_ids
        )