Change the way we cache events

pull/836/head
Erik Johnston 2016-06-03 17:55:32 +01:00
parent f6be734be9
commit 10ea3f46ba
1 changed file with 41 additions and 39 deletions

View File

@ -139,6 +139,9 @@ class _EventPeristenceQueue(object):
pass pass
_EventCacheEntry = namedtuple("_EventCacheEntry", ("event", "redacted_event"))
class EventsStore(SQLBaseStore): class EventsStore(SQLBaseStore):
EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts" EVENT_ORIGIN_SERVER_TS_NAME = "event_origin_server_ts"
@ -741,7 +744,6 @@ class EventsStore(SQLBaseStore):
event_map = self._get_events_from_cache( event_map = self._get_events_from_cache(
event_ids, event_ids,
check_redacted=check_redacted, check_redacted=check_redacted,
get_prev_content=get_prev_content,
allow_rejected=allow_rejected, allow_rejected=allow_rejected,
) )
@ -751,40 +753,49 @@ class EventsStore(SQLBaseStore):
missing_events = yield self._enqueue_events( missing_events = yield self._enqueue_events(
missing_events_ids, missing_events_ids,
check_redacted=check_redacted, check_redacted=check_redacted,
get_prev_content=get_prev_content,
allow_rejected=allow_rejected, allow_rejected=allow_rejected,
) )
event_map.update(missing_events) event_map.update(missing_events)
defer.returnValue([ events = [
event_map[e_id] for e_id in event_id_list event_map[e_id] for e_id in event_id_list
if e_id in event_map and event_map[e_id] if e_id in event_map and event_map[e_id]
]) ]
if get_prev_content:
for event in events:
if "replaces_state" in event.unsigned:
prev = yield self.get_event(
event.unsigned["replaces_state"],
get_prev_content=False,
allow_none=True,
)
if prev:
event.unsigned = dict(event.unsigned)
event.unsigned["prev_content"] = prev.content
event.unsigned["prev_sender"] = prev.sender
defer.returnValue(events)
def _invalidate_get_event_cache(self, event_id): def _invalidate_get_event_cache(self, event_id):
for check_redacted in (False, True): self._get_event_cache.invalidate((event_id,))
for get_prev_content in (False, True):
self._get_event_cache.invalidate(
(event_id, check_redacted, get_prev_content)
)
def _get_events_from_cache(self, events, check_redacted, get_prev_content, def _get_events_from_cache(self, events, check_redacted, allow_rejected):
allow_rejected):
event_map = {} event_map = {}
for event_id in events: for event_id in events:
try: ret = self._get_event_cache.get((event_id,), None)
ret = self._get_event_cache.get( if not ret:
(event_id, check_redacted, get_prev_content,) continue
)
if allow_rejected or not ret.rejected_reason: if allow_rejected or not ret.event.rejected_reason:
event_map[event_id] = ret if check_redacted and ret.redacted_event:
event_map[event_id] = ret.redacted_event
else: else:
event_map[event_id] = None event_map[event_id] = ret.event
except KeyError: else:
pass event_map[event_id] = None
return event_map return event_map
@ -855,8 +866,7 @@ class EventsStore(SQLBaseStore):
reactor.callFromThread(fire, event_list) reactor.callFromThread(fire, event_list)
@defer.inlineCallbacks @defer.inlineCallbacks
def _enqueue_events(self, events, check_redacted=True, def _enqueue_events(self, events, check_redacted=True, allow_rejected=False):
get_prev_content=False, allow_rejected=False):
"""Fetches events from the database using the _event_fetch_list. This """Fetches events from the database using the _event_fetch_list. This
allows batch and bulk fetching of events - it allows us to fetch events allows batch and bulk fetching of events - it allows us to fetch events
without having to create a new transaction for each request for events. without having to create a new transaction for each request for events.
@ -895,7 +905,6 @@ class EventsStore(SQLBaseStore):
preserve_fn(self._get_event_from_row)( preserve_fn(self._get_event_from_row)(
row["internal_metadata"], row["json"], row["redacts"], row["internal_metadata"], row["json"], row["redacts"],
check_redacted=check_redacted, check_redacted=check_redacted,
get_prev_content=get_prev_content,
rejected_reason=row["rejects"], rejected_reason=row["rejects"],
) )
for row in rows for row in rows
@ -936,8 +945,7 @@ class EventsStore(SQLBaseStore):
@defer.inlineCallbacks @defer.inlineCallbacks
def _get_event_from_row(self, internal_metadata, js, redacted, def _get_event_from_row(self, internal_metadata, js, redacted,
check_redacted=True, get_prev_content=False, check_redacted=True, rejected_reason=None):
rejected_reason=None):
d = json.loads(js) d = json.loads(js)
internal_metadata = json.loads(internal_metadata) internal_metadata = json.loads(internal_metadata)
@ -949,14 +957,17 @@ class EventsStore(SQLBaseStore):
desc="_get_event_from_row", desc="_get_event_from_row",
) )
ev = FrozenEvent( original_ev = FrozenEvent(
d, d,
internal_metadata_dict=internal_metadata, internal_metadata_dict=internal_metadata,
rejected_reason=rejected_reason, rejected_reason=rejected_reason,
) )
ev = original_ev
redacted_event = None
if check_redacted and redacted: if check_redacted and redacted:
ev = prune_event(ev) ev = prune_event(ev)
redacted_event = ev
redaction_id = yield self._simple_select_one_onecol( redaction_id = yield self._simple_select_one_onecol(
table="redactions", table="redactions",
@ -979,19 +990,10 @@ class EventsStore(SQLBaseStore):
# will serialise this field correctly # will serialise this field correctly
ev.unsigned["redacted_because"] = because ev.unsigned["redacted_because"] = because
if get_prev_content and "replaces_state" in ev.unsigned: self._get_event_cache.prefill((ev.event_id,), _EventCacheEntry(
prev = yield self.get_event( event=original_ev,
ev.unsigned["replaces_state"], redacted_event=redacted_event,
get_prev_content=False, ))
allow_none=True,
)
if prev:
ev.unsigned["prev_content"] = prev.content
ev.unsigned["prev_sender"] = prev.sender
self._get_event_cache.prefill(
(ev.event_id, check_redacted, get_prev_content), ev
)
defer.returnValue(ev) defer.returnValue(ev)