2014-10-15 11:04:55 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2014-10-15 11:04:55 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-08-11 18:59:32 +02:00
|
|
|
from ._base import SQLBaseStore
|
2016-03-23 12:37:58 +01:00
|
|
|
from synapse.util.caches.descriptors import cached, cachedList
|
2016-03-22 19:22:52 +01:00
|
|
|
from synapse.util.caches import intern_string
|
2014-10-15 11:04:55 +02:00
|
|
|
|
2015-03-20 14:52:56 +01:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2015-01-06 12:18:12 +01:00
|
|
|
import logging
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-10-15 11:04:55 +02:00
|
|
|
|
|
|
|
class StateStore(SQLBaseStore):
|
2014-11-12 15:33:34 +01:00
|
|
|
""" Keeps track of the state at a given event.
|
|
|
|
|
|
|
|
This is done by the concept of `state groups`. Every event is assigned
|
|
|
|
a state group (identified by an arbitrary string), which references a
|
|
|
|
collection of state events. The current state of an event is then the
|
|
|
|
collection of state events referenced by the event's state group.
|
|
|
|
|
|
|
|
Hence, every change in the current state causes a new state group to be
|
|
|
|
generated. However, if no change happens (e.g., if we get a message event
|
|
|
|
with only one parent it inherits the state group from its parent.)
|
|
|
|
|
|
|
|
There are three tables:
|
|
|
|
* `state_groups`: Stores group name, first event within the group and
|
|
|
|
room id.
|
|
|
|
* `event_to_state_groups`: Maps events to state groups.
|
|
|
|
* `state_groups_state`: Maps state group to state events.
|
|
|
|
"""
|
2014-10-15 11:04:55 +02:00
|
|
|
|
2015-05-13 12:13:31 +02:00
|
|
|
@defer.inlineCallbacks
|
2015-08-05 16:06:51 +02:00
|
|
|
def get_state_groups(self, room_id, event_ids):
|
2014-11-12 15:33:34 +01:00
|
|
|
""" Get the state groups for the given list of event_ids
|
|
|
|
|
|
|
|
The return value is a dict mapping group names to lists of events.
|
|
|
|
"""
|
2015-08-07 19:15:30 +02:00
|
|
|
if not event_ids:
|
|
|
|
defer.returnValue({})
|
2014-11-12 15:33:34 +01:00
|
|
|
|
2015-08-07 19:15:30 +02:00
|
|
|
event_to_groups = yield self._get_state_group_for_events(
|
2015-10-12 16:06:14 +02:00
|
|
|
event_ids,
|
2015-08-07 19:15:30 +02:00
|
|
|
)
|
2014-11-12 15:33:34 +01:00
|
|
|
|
2015-08-07 19:15:30 +02:00
|
|
|
groups = set(event_to_groups.values())
|
|
|
|
group_to_state = yield self._get_state_for_groups(groups)
|
2015-05-13 12:29:03 +02:00
|
|
|
|
2015-08-05 16:06:51 +02:00
|
|
|
defer.returnValue({
|
|
|
|
group: state_map.values()
|
2015-08-07 19:15:30 +02:00
|
|
|
for group, state_map in group_to_state.items()
|
2015-08-05 16:06:51 +02:00
|
|
|
})
|
2015-06-03 15:45:55 +02:00
|
|
|
|
2015-06-25 18:18:19 +02:00
|
|
|
    def _store_mult_state_groups_txn(self, txn, events_and_contexts):
        """Persist state groups for a batch of events, within a transaction.

        For each (event, context) pair this either reuses the state group
        already recorded on the context, or creates a new state group (using
        `context.new_state_group_id`) and writes its full state into
        `state_groups` / `state_groups_state`.  Finally every event is mapped
        to its group in `event_to_state_groups`.
        """
        # event_id -> state group id, for the final bulk insert below.
        state_groups = {}
        for event, context in events_and_contexts:
            # Outliers are skipped: no state is stored for them.
            if event.internal_metadata.is_outlier():
                continue

            # No state available for this event; nothing to persist.
            if context.current_state is None:
                continue

            # The context already references an existing state group, so we
            # only need to record the event -> group mapping.
            if context.state_group is not None:
                state_groups[event.event_id] = context.state_group
                continue

            # Copy so the update below does not mutate the context's dict.
            state_events = dict(context.current_state)

            # A state event is part of its own post-event state.
            if event.is_state():
                state_events[(event.type, event.state_key)] = event

            state_group = context.new_state_group_id

            self._simple_insert_txn(
                txn,
                table="state_groups",
                values={
                    "id": state_group,
                    "room_id": event.room_id,
                    "event_id": event.event_id,
                },
            )

            # Write every (type, state_key) -> event_id row of the new group.
            self._simple_insert_many_txn(
                txn,
                table="state_groups_state",
                values=[
                    {
                        "state_group": state_group,
                        "room_id": state.room_id,
                        "type": state.type,
                        "state_key": state.state_key,
                        "event_id": state.event_id,
                    }
                    for state in state_events.values()
                ],
            )
            state_groups[event.event_id] = state_group

        # Map each event to its state group in one bulk insert.
        self._simple_insert_many_txn(
            txn,
            table="event_to_state_groups",
            values=[
                {
                    "state_group": state_group_id,
                    "event_id": event_id,
                }
                for event_id, state_group_id in state_groups.items()
            ],
        )
|
2015-03-19 16:59:48 +01:00
|
|
|
|
2015-03-20 14:52:56 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_current_state(self, room_id, event_type=None, state_key=""):
|
2015-05-21 12:13:19 +02:00
|
|
|
if event_type and state_key is not None:
|
|
|
|
result = yield self.get_current_state_for_key(
|
|
|
|
room_id, event_type, state_key
|
|
|
|
)
|
|
|
|
defer.returnValue(result)
|
|
|
|
|
2015-04-30 18:12:52 +02:00
|
|
|
def f(txn):
|
|
|
|
sql = (
|
2015-04-30 19:49:26 +02:00
|
|
|
"SELECT event_id FROM current_state_events"
|
|
|
|
" WHERE room_id = ? "
|
2015-04-30 18:12:52 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
if event_type and state_key is not None:
|
2015-04-30 19:49:26 +02:00
|
|
|
sql += " AND type = ? AND state_key = ? "
|
2015-04-30 18:12:52 +02:00
|
|
|
args = (room_id, event_type, state_key)
|
|
|
|
elif event_type:
|
2015-04-30 19:49:26 +02:00
|
|
|
sql += " AND type = ?"
|
2015-04-30 18:12:52 +02:00
|
|
|
args = (room_id, event_type)
|
|
|
|
else:
|
|
|
|
args = (room_id, )
|
2015-03-20 14:52:56 +01:00
|
|
|
|
2015-04-30 18:12:52 +02:00
|
|
|
txn.execute(sql, args)
|
2015-05-15 16:20:05 +02:00
|
|
|
results = txn.fetchall()
|
2015-03-20 14:52:56 +01:00
|
|
|
|
2015-05-15 16:20:05 +02:00
|
|
|
return [r[0] for r in results]
|
2015-03-20 14:52:56 +01:00
|
|
|
|
2015-05-15 16:20:05 +02:00
|
|
|
event_ids = yield self.runInteraction("get_current_state", f)
|
2015-05-15 16:33:01 +02:00
|
|
|
events = yield self._get_events(event_ids, get_prev_content=False)
|
2015-03-20 14:52:56 +01:00
|
|
|
defer.returnValue(events)
|
|
|
|
|
2016-03-23 12:37:58 +01:00
|
|
|
@defer.inlineCallbacks
|
2015-05-21 12:13:19 +02:00
|
|
|
def get_current_state_for_key(self, room_id, event_type, state_key):
|
2016-03-23 17:13:05 +01:00
|
|
|
event_ids = yield self._get_current_state_for_key(room_id, event_type, state_key)
|
2016-03-23 12:37:58 +01:00
|
|
|
events = yield self._get_events(event_ids, get_prev_content=False)
|
|
|
|
defer.returnValue(events)
|
|
|
|
|
|
|
|
@cached(num_args=3)
|
|
|
|
def _get_current_state_for_key(self, room_id, event_type, state_key):
|
2015-05-21 12:13:19 +02:00
|
|
|
def f(txn):
|
|
|
|
sql = (
|
|
|
|
"SELECT event_id FROM current_state_events"
|
|
|
|
" WHERE room_id = ? AND type = ? AND state_key = ?"
|
|
|
|
)
|
|
|
|
|
|
|
|
args = (room_id, event_type, state_key)
|
|
|
|
txn.execute(sql, args)
|
|
|
|
results = txn.fetchall()
|
|
|
|
return [r[0] for r in results]
|
2016-03-23 12:37:58 +01:00
|
|
|
return self.runInteraction("get_current_state_for_key", f)
|
2015-05-21 12:13:19 +02:00
|
|
|
|
2016-04-19 18:22:03 +02:00
|
|
|
    @cached(num_args=2, lru=True, max_entries=1000)
    def _get_state_group_from_group(self, group, types):
        # Cache stub: never invoked directly.  It exists so the @cachedList
        # wrapper on _get_state_groups_from_groups has a per-entry cache to
        # read from and populate.
        raise NotImplementedError()
|
|
|
|
|
|
|
|
@cachedList(cached_method_name="_get_state_group_from_group",
|
|
|
|
list_name="groups", num_args=2, inlineCallbacks=True)
|
2016-02-10 14:24:42 +01:00
|
|
|
def _get_state_groups_from_groups(self, groups, types):
|
2016-03-22 12:59:31 +01:00
|
|
|
"""Returns dictionary state_group -> (dict of (type, state_key) -> event id)
|
2015-08-10 16:01:06 +02:00
|
|
|
"""
|
2016-02-10 14:24:42 +01:00
|
|
|
def f(txn, groups):
|
|
|
|
if types is not None:
|
|
|
|
where_clause = "AND (%s)" % (
|
|
|
|
" OR ".join(["(type = ? AND state_key = ?)"] * len(types)),
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
where_clause = ""
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2016-02-10 14:24:42 +01:00
|
|
|
sql = (
|
2016-03-22 12:59:31 +01:00
|
|
|
"SELECT state_group, event_id, type, state_key"
|
|
|
|
" FROM state_groups_state WHERE"
|
2016-02-10 14:24:42 +01:00
|
|
|
" state_group IN (%s) %s" % (
|
|
|
|
",".join("?" for _ in groups),
|
|
|
|
where_clause,
|
|
|
|
)
|
|
|
|
)
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2016-02-10 14:24:42 +01:00
|
|
|
args = list(groups)
|
|
|
|
if types is not None:
|
|
|
|
args.extend([i for typ in types for i in typ])
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2016-02-10 14:24:42 +01:00
|
|
|
txn.execute(sql, args)
|
|
|
|
rows = self.cursor_to_dict(txn)
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2016-04-19 18:22:03 +02:00
|
|
|
results = {group: {} for group in groups}
|
2016-02-10 14:24:42 +01:00
|
|
|
for row in rows:
|
2016-03-23 17:13:05 +01:00
|
|
|
key = (row["type"], row["state_key"])
|
2016-04-19 18:22:03 +02:00
|
|
|
results[row["state_group"]][key] = row["event_id"]
|
2015-08-07 19:15:30 +02:00
|
|
|
return results
|
|
|
|
|
2016-04-19 18:22:03 +02:00
|
|
|
results = {}
|
|
|
|
|
2016-02-10 14:24:42 +01:00
|
|
|
chunks = [groups[i:i + 100] for i in xrange(0, len(groups), 100)]
|
|
|
|
for chunk in chunks:
|
2016-04-19 18:22:03 +02:00
|
|
|
res = yield self.runInteraction(
|
2016-02-10 14:24:42 +01:00
|
|
|
"_get_state_groups_from_groups",
|
|
|
|
f, chunk
|
|
|
|
)
|
2016-04-19 18:22:03 +02:00
|
|
|
results.update(res)
|
|
|
|
|
|
|
|
defer.returnValue(results)
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2015-08-04 10:32:23 +02:00
|
|
|
@defer.inlineCallbacks
|
2015-10-12 16:06:14 +02:00
|
|
|
def get_state_for_events(self, event_ids, types):
|
2015-08-04 12:08:07 +02:00
|
|
|
"""Given a list of event_ids and type tuples, return a list of state
|
|
|
|
dicts for each event. The state dicts will only have the type/state_keys
|
|
|
|
that are in the `types` list.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
event_ids (list)
|
|
|
|
types (list): List of (type, state_key) tuples which are used to
|
|
|
|
filter the state fetched. `state_key` may be None, which matches
|
|
|
|
any `state_key`
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
deferred: A list of dicts corresponding to the event_ids given.
|
|
|
|
The dicts are mappings from (type, state_key) -> state_events
|
|
|
|
"""
|
2015-08-07 19:15:30 +02:00
|
|
|
event_to_groups = yield self._get_state_group_for_events(
|
2015-10-12 16:06:14 +02:00
|
|
|
event_ids,
|
2015-08-07 19:15:30 +02:00
|
|
|
)
|
2015-08-04 10:32:23 +02:00
|
|
|
|
2015-08-07 19:15:30 +02:00
|
|
|
groups = set(event_to_groups.values())
|
2015-08-13 18:11:30 +02:00
|
|
|
group_to_state = yield self._get_state_for_groups(groups, types)
|
2015-07-02 17:20:10 +02:00
|
|
|
|
2015-08-04 10:32:23 +02:00
|
|
|
event_to_state = {
|
2015-08-05 16:06:51 +02:00
|
|
|
event_id: group_to_state[group]
|
2015-08-07 19:15:30 +02:00
|
|
|
for event_id, group in event_to_groups.items()
|
2015-07-02 17:20:10 +02:00
|
|
|
}
|
|
|
|
|
2015-08-13 18:11:30 +02:00
|
|
|
defer.returnValue({event: event_to_state[event] for event in event_ids})
|
2015-07-02 17:20:10 +02:00
|
|
|
|
2015-11-10 19:27:23 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_state_for_event(self, event_id, types=None):
|
|
|
|
"""
|
|
|
|
Get the state dict corresponding to a particular event
|
|
|
|
|
2016-04-01 17:08:59 +02:00
|
|
|
Args:
|
|
|
|
event_id(str): event whose state should be returned
|
|
|
|
types(list[(str, str)]|None): List of (type, state_key) tuples
|
|
|
|
which are used to filter the state fetched. May be None, which
|
|
|
|
matches any key
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
A deferred dict from (type, state_key) -> state_event
|
2015-11-10 19:27:23 +01:00
|
|
|
"""
|
|
|
|
state_map = yield self.get_state_for_events([event_id], types)
|
|
|
|
defer.returnValue(state_map[event_id])
|
|
|
|
|
2015-08-18 12:00:38 +02:00
|
|
|
@cached(num_args=2, lru=True, max_entries=10000)
|
2015-08-05 16:06:51 +02:00
|
|
|
def _get_state_group_for_event(self, room_id, event_id):
|
|
|
|
return self._simple_select_one_onecol(
|
|
|
|
table="event_to_state_groups",
|
|
|
|
keyvalues={
|
|
|
|
"event_id": event_id,
|
|
|
|
},
|
|
|
|
retcol="state_group",
|
|
|
|
allow_none=True,
|
|
|
|
desc="_get_state_group_for_event",
|
|
|
|
)
|
|
|
|
|
2016-04-06 14:08:05 +02:00
|
|
|
@cachedList(cached_method_name="_get_state_group_for_event",
|
|
|
|
list_name="event_ids", num_args=1, inlineCallbacks=True)
|
2015-10-12 16:06:14 +02:00
|
|
|
def _get_state_group_for_events(self, event_ids):
|
2015-08-10 16:01:06 +02:00
|
|
|
"""Returns mapping event_id -> state_group
|
|
|
|
"""
|
2016-02-10 13:57:50 +01:00
|
|
|
rows = yield self._simple_select_many_batch(
|
|
|
|
table="event_to_state_groups",
|
|
|
|
column="event_id",
|
|
|
|
iterable=event_ids,
|
|
|
|
keyvalues={},
|
|
|
|
retcols=("event_id", "state_group",),
|
|
|
|
desc="_get_state_group_for_events",
|
|
|
|
)
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2016-03-23 17:29:46 +01:00
|
|
|
defer.returnValue({row["event_id"]: row["state_group"] for row in rows})
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2015-08-12 18:06:21 +02:00
|
|
|
    def _get_some_state_from_cache(self, group, types):
        """Checks if group is in cache. See `_get_state_for_groups`

        Returns 3-tuple (`state_dict`, `missing_types`, `got_all`).
        `missing_types` is the list of types that aren't in the cache for that
        group. `got_all` is a bool indicating if we successfully retrieved all
        requests state from the cache, if False we need to query the DB for the
        missing state.

        Args:
            group: The state group to lookup
            types (list): List of 2-tuples of the form (`type`, `state_key`),
                where a `state_key` of `None` matches all state_keys for the
                `type`.
        """
        is_all, state_dict_ids = self._state_group_cache.get(group)

        # type -> None (wildcard: all state_keys for the type) or the set of
        # explicitly requested state_keys for that type.
        type_to_key = {}
        missing_types = set()
        for typ, state_key in types:
            if state_key is None:
                # Wildcard request: the cache cannot prove it holds every
                # state_key for a type, so it is always treated as missing.
                type_to_key[typ] = None
                missing_types.add((typ, state_key))
            else:
                # Only grow the key set if the type has not already been
                # widened to a wildcard (None).
                if type_to_key.get(typ, object()) is not None:
                    type_to_key.setdefault(typ, set()).add(state_key)

                if (typ, state_key) not in state_dict_ids:
                    missing_types.add((typ, state_key))

        # Distinguishes "type not requested at all" from "wildcard requested".
        sentinel = object()

        def include(typ, state_key):
            # True iff the cached (typ, state_key) entry was asked for.
            valid_state_keys = type_to_key.get(typ, sentinel)
            if valid_state_keys is sentinel:
                return False
            if valid_state_keys is None:
                return True
            if state_key in valid_state_keys:
                return True
            return False

        got_all = not (missing_types or types is None)

        # Filter the cached entries down to just the requested keys.
        return {
            k: v for k, v in state_dict_ids.items()
            if include(k[0], k[1])
        }, missing_types, got_all
|
2015-08-07 19:15:30 +02:00
|
|
|
|
2015-08-12 18:06:21 +02:00
|
|
|
def _get_all_state_from_cache(self, group):
|
|
|
|
"""Checks if group is in cache. See `_get_state_for_groups`
|
|
|
|
|
|
|
|
Returns 2-tuple (`state_dict`, `got_all`). `got_all` is a bool
|
|
|
|
indicating if we successfully retrieved all requests state from the
|
|
|
|
cache, if False we need to query the DB for the missing state.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
group: The state group to lookup
|
|
|
|
"""
|
2016-03-22 12:59:31 +01:00
|
|
|
is_all, state_dict_ids = self._state_group_cache.get(group)
|
|
|
|
|
|
|
|
return state_dict_ids, is_all
|
2015-08-12 18:06:21 +02:00
|
|
|
|
2015-08-07 19:15:30 +02:00
|
|
|
    @defer.inlineCallbacks
    def _get_state_for_groups(self, groups, types=None):
        """Given list of groups returns dict of group -> list of state events
        with matching types. `types` is a list of `(type, state_key)`, where
        a `state_key` of None matches all state_keys. If `types` is None then
        all events are returned.
        """
        if types:
            # frozenset so the types collection is hashable for caching.
            types = frozenset(types)
        results = {}
        missing_groups = []
        if types is not None:
            # Filtered lookup: the cache may only partially cover the
            # requested types for each group.
            for group in set(groups):
                state_dict_ids, missing_types, got_all = self._get_some_state_from_cache(
                    group, types
                )
                results[group] = state_dict_ids

                if not got_all:
                    missing_groups.append(group)
        else:
            # Unfiltered lookup: the complete state of each group is needed.
            for group in set(groups):
                state_dict_ids, got_all = self._get_all_state_from_cache(
                    group
                )

                results[group] = state_dict_ids

                if not got_all:
                    missing_groups.append(group)

        if missing_groups:
            # Okay, so we have some missing_types, lets fetch them.
            # NOTE(review): the sequence number is captured before the DB
            # fetch, presumably so the cache can reject this update if the
            # entry is invalidated in the meantime — confirm against the
            # cache implementation.
            cache_seq_num = self._state_group_cache.sequence

            group_to_state_dict = yield self._get_state_groups_from_groups(
                missing_groups, types
            )

            # Now we want to update the cache with all the things we fetched
            # from the database.
            for group, group_state_dict in group_to_state_dict.items():
                if types:
                    # We deliberately put key -> None mappings into the cache to
                    # cache absence of the key, on the assumption that if we've
                    # explicitly asked for some types then we will probably ask
                    # for them again.
                    state_dict = {
                        (intern_string(etype), intern_string(state_key)): None
                        for (etype, state_key) in types
                    }
                    state_dict.update(results[group])
                    results[group] = state_dict
                else:
                    state_dict = results[group]

                state_dict.update(group_state_dict)

                self._state_group_cache.update(
                    cache_seq_num,
                    key=group,
                    value=state_dict,
                    full=(types is None),
                )

        # Resolve every collected event id to its event object.
        state_events = yield self._get_events(
            [ev_id for sd in results.values() for ev_id in sd.values()],
            get_prev_content=False
        )

        state_events = {e.event_id: e for e in state_events}

        # Remove all the entries with None values. The None values were just
        # used for bookkeeping in the cache.
        for group, state_dict in results.items():
            results[group] = {
                key: state_events[event_id]
                for key, event_id in state_dict.items()
                if event_id and event_id in state_events
            }

        defer.returnValue(results)
|
2016-03-30 16:58:20 +02:00
|
|
|
|
|
|
|
def get_all_new_state_groups(self, last_id, current_id, limit):
|
|
|
|
def get_all_new_state_groups_txn(txn):
|
|
|
|
sql = (
|
|
|
|
"SELECT id, room_id, event_id FROM state_groups"
|
|
|
|
" WHERE ? < id AND id <= ? ORDER BY id LIMIT ?"
|
|
|
|
)
|
|
|
|
txn.execute(sql, (last_id, current_id, limit))
|
|
|
|
groups = txn.fetchall()
|
|
|
|
|
|
|
|
if not groups:
|
|
|
|
return ([], [])
|
|
|
|
|
|
|
|
lower_bound = groups[0][0]
|
|
|
|
upper_bound = groups[-1][0]
|
|
|
|
sql = (
|
|
|
|
"SELECT state_group, type, state_key, event_id"
|
|
|
|
" FROM state_groups_state"
|
|
|
|
" WHERE ? <= state_group AND state_group <= ?"
|
|
|
|
)
|
|
|
|
|
|
|
|
txn.execute(sql, (lower_bound, upper_bound))
|
|
|
|
state_group_state = txn.fetchall()
|
|
|
|
return (groups, state_group_state)
|
|
|
|
return self.runInteraction(
|
|
|
|
"get_all_new_state_groups", get_all_new_state_groups_txn
|
|
|
|
)
|
|
|
|
|
|
|
|
    def get_state_stream_token(self):
        # Current maximum state group id, as reported by the id generator;
        # usable as a stream position for state group changes.
        return self._state_groups_id_gen.get_current_token()
|