From f346048a6e9ac798b742d939a38e0cfe71475f38 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 10:34:10 +0100 Subject: [PATCH 01/22] Handle exceptions thrown in handling remote device list updates --- synapse/handlers/device.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index c22f65ce5d..72915b85d8 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -17,6 +17,7 @@ from synapse.api.constants import EventTypes from synapse.util import stringutils from synapse.util.async import Linearizer from synapse.util.caches.expiringcache import ExpiringCache +from synapse.util.retryutils import NotRetryingDestination from synapse.util.metrics import measure_func from synapse.types import get_domain_from_id, RoomStreamToken from twisted.internet import defer @@ -430,7 +431,21 @@ class DeviceListEduUpdater(object): if resync: # Fetch all devices for the user. origin = get_domain_from_id(user_id) - result = yield self.federation.query_user_devices(origin, user_id) + try: + result = yield self.federation.query_user_devices(origin, user_id) + except NotRetryingDestination: + logger.warn( + "Failed to handle device list update for %s," + " we're not retrying the remote", + user_id, + ) + return + except Exception: + logger.exception( + "Failed to handle device list update for %s", user_id + ) + return + stream_id = result["stream_id"] devices = result["devices"] yield self.store.update_remote_device_list_cache( From db7d0c31272954f01d89b2dbd653d54db0cbb040 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 10:34:53 +0100 Subject: [PATCH 02/22] Always mark remotes as up if we receive a signed request from them --- synapse/federation/transport/server.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index c840da834c..828dcd01a7 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -79,6 +79,7 @@ class Authenticator(object): def __init__(self, hs): self.keyring = hs.get_keyring() self.server_name = hs.hostname + self.store = hs.get_datastore() # A method just so we can pass 'self' as the authenticator to the Servlets @defer.inlineCallbacks @@ -138,6 +139,12 @@ class Authenticator(object): logger.info("Request from %s", origin) request.authenticated_entity = origin + # If we get a valid signed request from the other side, its probably + # alive + retry_timings = yield self.store.get_destination_retry_timings(origin) + if retry_timings and retry_timings["retry_last_ts"]: + self.store.set_destination_retry_timings(origin, 0, 0) + defer.returnValue(origin) From 7dd3bf5e246144806c83b5ed2127733906908331 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 5 May 2017 10:49:19 +0100 Subject: [PATCH 03/22] Rewrite SimpleHttpClient.request to include timeouts Fixes #2191 --- synapse/http/client.py | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 9cf797043a..68bd06abd9 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -75,36 +75,42 @@ class SimpleHttpClient(object): if hs.config.user_agent_suffix: self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix,) + @defer.inlineCallbacks def request(self, method, uri, *args, **kwargs): # A small wrapper around self.agent.request() so we can easily attach 
# counters to it outgoing_requests_counter.inc(method) - d = preserve_context_over_fn( - self.agent.request, - method, uri, *args, **kwargs - ) + + + def send_request(): + request_deferred = self.agent.request( + method, uri, *args, **kwargs + ) + + return self.clock.time_bound_deferred( + request_deferred, + time_out=60, + ) logger.info("Sending request %s %s", method, uri) - def _cb(response): + try: + with logcontext.PreserveLoggingContext(): + response = yield send_request() + incoming_responses_counter.inc(method, response.code) logger.info( "Received response to %s %s: %s", method, uri, response.code ) return response - - def _eb(failure): + except Exception as e: incoming_responses_counter.inc(method, "ERR") logger.info( "Error sending request to %s %s: %s %s", - method, uri, failure.type, failure.getErrorMessage() + method, uri, type(e).__name__, e.message ) - return failure - - d.addCallbacks(_cb, _eb) - - return d + raise e @defer.inlineCallbacks def post_urlencoded_get_json(self, uri, args={}): From c2ddd773bc23dfc9fa2f20d39547bf6a4bd7d0f1 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 5 May 2017 10:52:46 +0100 Subject: [PATCH 04/22] Include the clock --- synapse/http/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/http/client.py b/synapse/http/client.py index 68bd06abd9..58f12cf8ee 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -72,6 +72,7 @@ class SimpleHttpClient(object): contextFactory=hs.get_http_client_context_factory() ) self.user_agent = hs.version_string + self.clock = hs.get_clock() if hs.config.user_agent_suffix: self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix,) From b843631d7157f0beb64db62a86c691369aa49b14 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 10:59:32 +0100 Subject: [PATCH 05/22] Add comment and TODO --- synapse/handlers/device.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 72915b85d8..187af03fb1 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -426,6 +426,8 @@ class DeviceListEduUpdater(object): # This can happen since we batch updates return + # Given a list of updates we check if we need to resync. This + # happens if we've missed updates. 
resync = yield self._need_to_do_resync(user_id, pending_updates) if resync: @@ -434,6 +436,8 @@ class DeviceListEduUpdater(object): try: result = yield self.federation.query_user_devices(origin, user_id) except NotRetryingDestination: + # TODO: Remember that we are now out of sync and try again + # later logger.warn( "Failed to handle device list update for %s," " we're not retrying the remote", @@ -441,6 +445,8 @@ class DeviceListEduUpdater(object): ) return except Exception: + # TODO: Remember that we are now out of sync and try again + # later logger.exception( "Failed to handle device list update for %s", user_id ) From d0debb21162df5e6975d8b19e0365961d5f75ddb Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Fri, 5 May 2017 11:00:21 +0100 Subject: [PATCH 06/22] Remember how twisted works --- synapse/http/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/http/client.py b/synapse/http/client.py index 58f12cf8ee..9eba046bbf 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -19,6 +19,7 @@ from synapse.api.errors import ( CodeMessageException, MatrixCodeMessageException, SynapseError, Codes, ) from synapse.util.logcontext import preserve_context_over_fn +from synapse.util import logcontext import synapse.metrics from synapse.http.endpoint import SpiderEndpoint @@ -82,7 +83,6 @@ class SimpleHttpClient(object): # counters to it outgoing_requests_counter.inc(method) - def send_request(): request_deferred = self.agent.request( method, uri, *args, **kwargs @@ -104,7 +104,7 @@ class SimpleHttpClient(object): "Received response to %s %s: %s", method, uri, response.code ) - return response + defer.returnValue(response) except Exception as e: incoming_responses_counter.inc(method, "ERR") logger.info( From 7b222fc56e9854d1f8f084d996d9ca694e91dd6c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 11:14:09 +0100 Subject: [PATCH 07/22] Remove redundant reset of destination timers --- synapse/handlers/federation.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 2af9849ed0..52d97dfbf3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -380,13 +380,6 @@ class FederationHandler(BaseHandler): affected=event.event_id, ) - # if we're receiving valid events from an origin, - # it's probably a good idea to mark it as not in retry-state - # for sending (although this is a bit of a leap) - retry_timings = yield self.store.get_destination_retry_timings(origin) - if retry_timings and retry_timings["retry_last_ts"]: - self.store.set_destination_retry_timings(origin, 0, 0) - room = yield self.store.get_room(event.room_id) if not room: From 310b1ccdc1e80164811d4b1287c0a504d0a33c77 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 13:41:19 +0100 Subject: [PATCH 08/22] Use preserve_fn and add logs --- synapse/federation/transport/server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 828dcd01a7..3d676e7d8b 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -24,6 +24,7 @@ from synapse.http.servlet import ( ) from synapse.util.ratelimitutils import FederationRateLimiter from synapse.util.versionstring import get_version_string +from synapse.util.logcontext import preserve_fn from synapse.types import ThirdPartyInstanceID import functools @@ -143,7 +144,8 @@ class 
Authenticator(object): # alive retry_timings = yield self.store.get_destination_retry_timings(origin) if retry_timings and retry_timings["retry_last_ts"]: - self.store.set_destination_retry_timings(origin, 0, 0) + logger.info("Marking origin %r as up", origin) + preserve_fn(self.store.set_destination_retry_timings)(origin, 0, 0) defer.returnValue(origin) From 653d90c1a529ff553d506ac806ab0403f34955ad Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 5 May 2017 14:01:17 +0100 Subject: [PATCH 09/22] Comment --- synapse/handlers/device.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 187af03fb1..982cda3edf 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -443,6 +443,12 @@ class DeviceListEduUpdater(object): " we're not retrying the remote", user_id, ) + # We abort on exceptions rather than accepting the update + # as otherwise synapse will 'forget' that its device list + # is out of date. If we bail then we will retry the resync + # next time we get a device list update for this user_id. + # This makes it more likely that the device lists will + # eventually become consistent. return except Exception: # TODO: Remember that we are now out of sync and try again From 9ac98197bb88d3ec85fe586b2f483a86e19f6206 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 11:07:54 +0100 Subject: [PATCH 10/22] Bump version and changelog --- CHANGES.rst | 49 +++++++++++++++++++++++++++++++++++++++++++++ synapse/__init__.py | 2 +- 2 files changed, 50 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 6c85241eaf..183586e8dd 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,52 @@ +Changes in synapse v0.21.0-rc1 (2017-05-08) +=========================================== + +Features: + +* Add username availability checker API (PR #2183) +* Add read marker API (PR #2120) + + +Changes: + +* Enable guest access for the 3pl/3pid APIs (PR #1986) +* Add setting to support TURN for guests (PR #2011) +* Various performance improvements (PR #2075, #2076, #2080, #2083, #2108, + #2158, #2176, #2185) +* Make synctl a bit more user friendly (PR #2078, #2127) Thanks @APwhitehat! +* Replace HTTP replication with TCP replication (PR #2082, #2097, #2098, + #2099, #2103, #2014, #2016, #2115, #2116, #2117) +* Support authenticated SMTP (PR #2102) Thanks @DanielDent! +* Add a counter metric for successfully-sent transactions (PR #2121) +* Propagate errors sensibly from proxied IS requests (PR #2147) +* Add more granular event send metrics (PR #2178) + + + +Bug fixes: + +* Fix nuke-room script to work with current schema (PR #1927) Thanks + @zuckschwerdt! +* Fix db port script to not assume postgres tables are in the public schema + (PR #2024) Thanks @jerrykan! +* Fix getting latest device IP for user with no devices (PR #2118) +* Fix rejection of invites to unreachable servers (PR #2145) +* Fix code for reporting old verify keys in synapse (PR #2156) +* Fix invite state to always include all events (PR #2163) +* Fix bug where synapse would always fetch state for any missing event (PR #2170) +* Fix a leak with timed out HTTP connections (PR #2180) +* Fix bug where we didn't time out HTTP requests to ASes (PR #2192) + + +Docs: + +* Clarify doc for SQLite to PostgreSQL port (PR #1961) Thanks @benhylau! +* Fix typo in synctl help (PR #2107) Thanks @HarHarLinks! +* ``web_client_location`` documentation fix (PR #2131) Thanks @matthewjwolff! 
+* Update README.rst with FreeBSD changes (PR #2132) Thanks @feld! +* Clarify setting up metrics (PR #2149) Thanks @encks! + + Changes in synapse v0.20.0 (2017-04-11) ======================================= diff --git a/synapse/__init__.py b/synapse/__init__.py index 2e5f4e0ead..500739b9a0 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.20.0" +__version__ = "0.21.0-rc1" From 78f306a6f70552672f8c70171b2d9d79f20f8f8d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 13:07:41 +0100 Subject: [PATCH 11/22] Revert "Speed up filtering of a single event in push" This reverts commit 421fdf74609439edaaffce117436e6a6df147841. --- synapse/push/bulk_push_rule_evaluator.py | 27 +++++++++++++++++------- synapse/storage/account_data.py | 13 ------------ synapse/storage/push_rule.py | 5 ++--- synapse/visibility.py | 19 +++++++++++++++++ 4 files changed, 40 insertions(+), 24 deletions(-) diff --git a/synapse/push/bulk_push_rule_evaluator.py b/synapse/push/bulk_push_rule_evaluator.py index cb13874ccf..f943ff640f 100644 --- a/synapse/push/bulk_push_rule_evaluator.py +++ b/synapse/push/bulk_push_rule_evaluator.py @@ -20,6 +20,7 @@ from twisted.internet import defer from .push_rule_evaluator import PushRuleEvaluatorForEvent from synapse.api.constants import EventTypes +from synapse.visibility import filter_events_for_clients_context logger = logging.getLogger(__name__) @@ -66,6 +67,17 @@ class BulkPushRuleEvaluator: def action_for_event_by_user(self, event, context): actions_by_user = {} + # None of these users can be peeking since this list of users comes + # from the set of users in the room, so we know for sure they're all + # actually in the room. + user_tuples = [ + (u, False) for u in self.rules_by_user.keys() + ] + + filtered_by_user = yield filter_events_for_clients_context( + self.store, user_tuples, [event], {event.event_id: context} + ) + room_members = yield self.store.get_joined_users_from_context( event, context ) @@ -75,14 +87,6 @@ class BulkPushRuleEvaluator: condition_cache = {} for uid, rules in self.rules_by_user.items(): - if event.sender == uid: - continue - - if not event.is_state(): - is_ignored = yield self.store.is_ignored_by(event.sender, uid) - if is_ignored: - continue - display_name = None profile_info = room_members.get(uid) if profile_info: @@ -94,6 +98,13 @@ class BulkPushRuleEvaluator: if event.type == EventTypes.Member and event.state_key == uid: display_name = event.content.get("displayname", None) + filtered = filtered_by_user[uid] + if len(filtered) == 0: + continue + + if filtered[0].sender == uid: + continue + for rule in rules: if 'enabled' in rule and not rule['enabled']: continue diff --git a/synapse/storage/account_data.py b/synapse/storage/account_data.py index ff14e54c11..aa84ffc2b0 100644 --- a/synapse/storage/account_data.py +++ b/synapse/storage/account_data.py @@ -308,16 +308,3 @@ class AccountDataStore(SQLBaseStore): " WHERE stream_id < ?" 
) txn.execute(update_max_id_sql, (next_id, next_id)) - - @cachedInlineCallbacks(num_args=2, cache_context=True, max_entries=5000) - def is_ignored_by(self, ignored_user_id, ignorer_user_id, cache_context): - ignored_account_data = yield self.get_global_account_data_by_type_for_user( - "m.ignored_user_list", ignorer_user_id, - on_invalidate=cache_context.invalidate, - ) - if not ignored_account_data: - defer.returnValue(False) - - defer.returnValue( - ignored_user_id in ignored_account_data.get("ignored_users", {}) - ) diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 353a135c4e..cbec255966 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -188,7 +188,7 @@ class PushRuleStore(SQLBaseStore): user_ids, on_invalidate=cache_context.invalidate, ) - rules_by_user = {k: v for k, v in rules_by_user.iteritems() if v is not None} + rules_by_user = {k: v for k, v in rules_by_user.items() if v is not None} defer.returnValue(rules_by_user) @@ -398,8 +398,7 @@ class PushRuleStore(SQLBaseStore): with self._push_rules_stream_id_gen.get_next() as ids: stream_id, event_stream_ordering = ids yield self.runInteraction( - "delete_push_rule", delete_push_rule_txn, stream_id, - event_stream_ordering, + "delete_push_rule", delete_push_rule_txn, stream_id, event_stream_ordering ) @defer.inlineCallbacks diff --git a/synapse/visibility.py b/synapse/visibility.py index 5590b866ed..c4dd9ae2c7 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -188,6 +188,25 @@ def filter_events_for_clients(store, user_tuples, events, event_id_to_state): }) +@defer.inlineCallbacks +def filter_events_for_clients_context(store, user_tuples, events, event_id_to_context): + user_ids = set(u[0] for u in user_tuples) + event_id_to_state = {} + for event_id, context in event_id_to_context.items(): + state = yield store.get_events([ + e_id + for key, e_id in context.current_state_ids.iteritems() + if key == (EventTypes.RoomHistoryVisibility, "") + or (key[0] == EventTypes.Member and key[1] in user_ids) + ]) + event_id_to_state[event_id] = state + + res = yield filter_events_for_clients( + store, user_tuples, events, event_id_to_state + ) + defer.returnValue(res) + + @defer.inlineCallbacks def filter_events_for_client(store, user_id, events, is_peeking=False): """ From fe7c1b969c5b0f51b6fe86e78e96350224fd0fb1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 13:07:43 +0100 Subject: [PATCH 12/22] Revert "We don't care about forgotten rooms" This reverts commit ad8b316939d59230526e60660caf9094cff62c8f. 
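For context on the previous revert: the is_ignored_by helper it removes was used to skip push-rule evaluation for users who had ignored an event's sender, by consulting their m.ignored_user_list account data. A minimal standalone sketch of that check, reusing the get_global_account_data_by_type_for_user store method shown in the diff but leaving out the caching and cache-context plumbing, might look like this:

    from twisted.internet import defer

    @defer.inlineCallbacks
    def is_ignored_by(store, ignored_user_id, ignorer_user_id):
        # m.ignored_user_list is per-user account data; its content carries an
        # "ignored_users" dict keyed by the user IDs being ignored.
        data = yield store.get_global_account_data_by_type_for_user(
            "m.ignored_user_list", ignorer_user_id,
        )
        if not data:
            defer.returnValue(False)
        defer.returnValue(ignored_user_id in data.get("ignored_users", {}))

The revert trades this per-user lookup for the restored filter_events_for_clients_context path shown earlier in the patch.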
--- synapse/storage/push_rule.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index cbec255966..5467ba51c7 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -184,6 +184,18 @@ class PushRuleStore(SQLBaseStore): if uid in local_users_in_room: user_ids.add(uid) + forgotten = yield self.who_forgot_in_room( + event.room_id, on_invalidate=cache_context.invalidate, + ) + + for row in forgotten: + user_id = row["user_id"] + event_id = row["event_id"] + + mem_id = current_state_ids.get((EventTypes.Member, user_id), None) + if event_id == mem_id: + user_ids.discard(user_id) + rules_by_user = yield self.bulk_get_push_rules( user_ids, on_invalidate=cache_context.invalidate, ) From e0f20e9425f5fa0aecf0b8bf5b58ce72c2363d8b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 13:07:43 +0100 Subject: [PATCH 13/22] Revert "Remove unused import" This reverts commit ab37bef83bebd7cdaeb7cfd98553d18883d09103. --- synapse/storage/push_rule.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/storage/push_rule.py b/synapse/storage/push_rule.py index 5467ba51c7..0a819d32c5 100644 --- a/synapse/storage/push_rule.py +++ b/synapse/storage/push_rule.py @@ -16,6 +16,7 @@ from ._base import SQLBaseStore from synapse.util.caches.descriptors import cachedInlineCallbacks, cachedList from synapse.push.baserules import list_with_base_rules +from synapse.api.constants import EventTypes from twisted.internet import defer import logging From 771c8a83c7f1f53aa03074d8fe69217037fbe188 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 13:23:46 +0100 Subject: [PATCH 14/22] Bump version and changelog --- CHANGES.rst | 13 +++++++++++++ synapse/__init__.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGES.rst b/CHANGES.rst index 183586e8dd..babeaa0ded 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,16 @@ +Changes in synapse v0.21.0-rc2 (2017-05-08) +=========================================== + +Changes: + +* Always mark remotes as up if we receive a signed request from them (PR #2190) + + +Bug fixes: + +* Fix bug where users got pushed for rooms they had muted (PR #2200) + + Changes in synapse v0.21.0-rc1 (2017-05-08) =========================================== diff --git a/synapse/__init__.py b/synapse/__init__.py index 500739b9a0..d4ad23fa3d 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -16,4 +16,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.21.0-rc1" +__version__ = "0.21.0-rc2" From dcabef952c0c75ec756a364fc225a72eea391e1b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 15:09:19 +0100 Subject: [PATCH 15/22] Increase client_ip cache size --- synapse/storage/client_ips.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/storage/client_ips.py b/synapse/storage/client_ips.py index b01f0046e9..747d2df622 100644 --- a/synapse/storage/client_ips.py +++ b/synapse/storage/client_ips.py @@ -33,6 +33,7 @@ class ClientIpStore(background_updates.BackgroundUpdateStore): self.client_ip_last_seen = Cache( name="client_ip_last_seen", keylen=4, + max_entries=5000, ) super(ClientIpStore, self).__init__(hs) From 738ccf61c01df04e1aef521ea7d1ae2844784214 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 15:32:18 +0100 Subject: [PATCH 16/22] Cache check to see if device exists --- synapse/storage/devices.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index c8d5f5ba8b..fc87c92182 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -18,7 +18,7 @@ import ujson as json from twisted.internet import defer from synapse.api.errors import StoreError -from ._base import SQLBaseStore +from ._base import SQLBaseStore, Cache from synapse.util.caches.descriptors import cached, cachedList, cachedInlineCallbacks @@ -29,6 +29,12 @@ class DeviceStore(SQLBaseStore): def __init__(self, hs): super(DeviceStore, self).__init__(hs) + self.device_id_exists_cache = Cache( + name="device_id_exists", + keylen=2, + max_entries=10000, + ) + self._clock.looping_call( self._prune_old_outbound_device_pokes, 60 * 60 * 1000 ) @@ -54,6 +60,10 @@ class DeviceStore(SQLBaseStore): defer.Deferred: boolean whether the device was inserted or an existing device existed with that ID. """ + key = (user_id, device_id) + if self.device_id_exists_cache.get(key, None): + defer.returnValue(False) + try: inserted = yield self._simple_insert( "devices", @@ -65,6 +75,7 @@ class DeviceStore(SQLBaseStore): desc="store_device", or_ignore=True, ) + self.device_id_exists_cache.prefill(key, True) defer.returnValue(inserted) except Exception as e: logger.error("store_device with device_id=%s(%r) user_id=%s(%r)" From fc6d4974a60a0d47492f5c5c8dff45abbf9abe03 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 15:33:57 +0100 Subject: [PATCH 17/22] Comment --- synapse/storage/devices.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index fc87c92182..6727861eb5 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -29,6 +29,8 @@ class DeviceStore(SQLBaseStore): def __init__(self, hs): super(DeviceStore, self).__init__(hs) + # Map of (user_id, device_id) -> bool. If there is an entry that implies + # the device exists. 
self.device_id_exists_cache = Cache( name="device_id_exists", keylen=2, From 8571f864d2fc20986341b7e9d6e18c3e061e48e0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 15:34:27 +0100 Subject: [PATCH 18/22] Cache one time key counts --- synapse/storage/end_to_end_keys.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/synapse/storage/end_to_end_keys.py b/synapse/storage/end_to_end_keys.py index 7cbc1470fd..c96dae352d 100644 --- a/synapse/storage/end_to_end_keys.py +++ b/synapse/storage/end_to_end_keys.py @@ -15,6 +15,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError +from synapse.util.caches.descriptors import cached from canonicaljson import encode_canonical_json import ujson as json @@ -177,10 +178,14 @@ class EndToEndKeyStore(SQLBaseStore): for algorithm, key_id, json_bytes in new_keys ], ) + txn.call_after( + self.count_e2e_one_time_keys.invalidate, (user_id, device_id,) + ) yield self.runInteraction( "add_e2e_one_time_keys_insert", _add_e2e_one_time_keys ) + @cached(max_entries=10000) def count_e2e_one_time_keys(self, user_id, device_id): """ Count the number of one time keys the server has for a device Returns: @@ -225,6 +230,9 @@ class EndToEndKeyStore(SQLBaseStore): ) for user_id, device_id, algorithm, key_id in delete: txn.execute(sql, (user_id, device_id, algorithm, key_id)) + txn.call_after( + self.count_e2e_one_time_keys.invalidate, (user_id, device_id,) + ) return result return self.runInteraction( "claim_e2e_one_time_keys", _claim_e2e_one_time_keys @@ -242,3 +250,4 @@ class EndToEndKeyStore(SQLBaseStore): keyvalues={"user_id": user_id, "device_id": device_id}, desc="delete_e2e_one_time_keys_by_device" ) + self.count_e2e_one_time_keys.invalidate((user_id, device_id,)) From 94e6ad71f5445e014f3c9f6c260ab664635c7b59 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 15:55:59 +0100 Subject: [PATCH 19/22] Invalidate cache on device deletion --- synapse/storage/devices.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 6727861eb5..75c30abc28 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -115,12 +115,14 @@ class DeviceStore(SQLBaseStore): Returns: defer.Deferred """ - return self._simple_delete_one( + self._simple_delete_one( table="devices", keyvalues={"user_id": user_id, "device_id": device_id}, desc="delete_device", ) + self.device_id_exists_cache.invalidate((user_id, device_id)) + def delete_devices(self, user_id, device_ids): """Deletes several devices. @@ -130,13 +132,15 @@ class DeviceStore(SQLBaseStore): Returns: defer.Deferred """ - return self._simple_delete_many( + self._simple_delete_many( table="devices", column="device_id", iterable=device_ids, keyvalues={"user_id": user_id}, desc="delete_devices", ) + for device_id in device_ids: + self.device_id_exists_cache.invalidate((user_id, device_id)) def update_device(self, user_id, device_id, new_display_name=None): """Update a device. From ffad4fe35be3baba5b2fffaa4e9b31f3008d09af Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 16:06:17 +0100 Subject: [PATCH 20/22] Don't update event cache hit ratio from get_joined_users Otherwise the hit ration of plain get_events gets completely skewed by calls to get_joined_users* functions. 
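The diff that follows threads an update_metrics flag through Cache.get so that the speculative cache probes made by the get_joined_users* code in roommember.py stop distorting the hit-rate counters. As a rough standalone illustration of the idea (a toy dict-backed cache, not Synapse's real Cache class):

    class ToyMetricsCache(object):
        # Toy cache that counts hits and misses, but lets callers opt out of
        # the accounting when they are only probing speculatively.
        def __init__(self):
            self._data = {}
            self.hits = 0
            self.misses = 0

        def get(self, key, default=None, update_metrics=True):
            if key in self._data:
                if update_metrics:
                    self.hits += 1
                return self._data[key]
            if update_metrics:
                self.misses += 1
            return default

    cache = ToyMetricsCache()
    cache.get(("$event:example.org",), update_metrics=False)  # probe only
    assert (cache.hits, cache.misses) == (0, 0)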
--- synapse/storage/events.py | 13 +++++++++++-- synapse/storage/roommember.py | 4 ++++ synapse/util/caches/descriptors.py | 9 ++++++--- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index 98707d40ee..d944984d61 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1343,11 +1343,20 @@ class EventsStore(SQLBaseStore): def _invalidate_get_event_cache(self, event_id): self._get_event_cache.invalidate((event_id,)) - def _get_events_from_cache(self, events, allow_rejected): + def _get_events_from_cache(self, events, allow_rejected, update_metrics=True): + """ + Args: + events (list(str)): list of event_ids to fetch + allow_rejected (bool): Whether to return events that were rejected + update_metrics (bool): Whether to update the cache hit ratio metrics + """ event_map = {} for event_id in events: - ret = self._get_event_cache.get((event_id,), None) + ret = self._get_event_cache.get( + (event_id,), None, + update_metrics=update_metrics, + ) if not ret: continue diff --git a/synapse/storage/roommember.py b/synapse/storage/roommember.py index ad3c9b06d9..2fa20bd87c 100644 --- a/synapse/storage/roommember.py +++ b/synapse/storage/roommember.py @@ -421,9 +421,13 @@ class RoomMemberStore(SQLBaseStore): # We check if we have any of the member event ids in the event cache # before we ask the DB + # We don't update the event cache hit ratio as it completely throws off + # the hit ratio counts. After all, we don't populate the cache if we + # miss it here event_map = self._get_events_from_cache( member_event_ids, allow_rejected=False, + update_metrics=False, ) missing_member_event_ids = [] diff --git a/synapse/util/caches/descriptors.py b/synapse/util/caches/descriptors.py index aa182eeac7..48dcbafeef 100644 --- a/synapse/util/caches/descriptors.py +++ b/synapse/util/caches/descriptors.py @@ -96,7 +96,7 @@ class Cache(object): "Cache objects can only be accessed from the main thread" ) - def get(self, key, default=_CacheSentinel, callback=None): + def get(self, key, default=_CacheSentinel, callback=None, update_metrics=True): """Looks the key up in the caches. Args: key(tuple|str): The key to get from the cache(s) default: What is returned if key is not in the caches. 
If not specified then function throws KeyError instead callback(fn): Gets called when the entry in the cache is invalidated + update_metrics (bool): whether to update the cache hit rate metrics Returns: Either a Deferred or the raw result """ callbacks = [callback] if callback else [] val = self._pending_deferred_cache.get(key, _CacheSentinel) if val is not _CacheSentinel: if val.sequence == self.sequence: val.callbacks.update(callbacks) - self.metrics.inc_hits() + if update_metrics: + self.metrics.inc_hits() return val.deferred val = self.cache.get(key, _CacheSentinel, callbacks=callbacks) @@ -121,7 +123,8 @@ class Cache(object): self.metrics.inc_hits() return val - self.metrics.inc_misses() + if update_metrics: + self.metrics.inc_misses() if default is _CacheSentinel: raise KeyError() From 6a12998a83791137db0b7988646cfc4bff572427 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 16:10:51 +0100 Subject: [PATCH 21/22] Add missing yields --- synapse/storage/devices.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/synapse/storage/devices.py b/synapse/storage/devices.py index 75c30abc28..d9936c88bb 100644 --- a/synapse/storage/devices.py +++ b/synapse/storage/devices.py @@ -106,6 +106,7 @@ class DeviceStore(SQLBaseStore): desc="get_device", ) + @defer.inlineCallbacks def delete_device(self, user_id, device_id): """Delete a device. @@ -115,7 +116,7 @@ class DeviceStore(SQLBaseStore): Returns: defer.Deferred """ - self._simple_delete_one( + yield self._simple_delete_one( table="devices", keyvalues={"user_id": user_id, "device_id": device_id}, desc="delete_device", ) self.device_id_exists_cache.invalidate((user_id, device_id)) + @defer.inlineCallbacks def delete_devices(self, user_id, device_ids): """Deletes several devices. @@ -132,7 +134,7 @@ class DeviceStore(SQLBaseStore): Returns: defer.Deferred """ - self._simple_delete_many( + yield self._simple_delete_many( table="devices", column="device_id", iterable=device_ids, keyvalues={"user_id": user_id}, desc="delete_devices", ) From 093f7e47ccf318181c262c79bb60ffd3b83edaee Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 8 May 2017 16:13:51 +0100 Subject: [PATCH 22/22] Expand docstring a bit --- synapse/storage/events.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/synapse/storage/events.py b/synapse/storage/events.py index d944984d61..2ab44ceaa7 100644 --- a/synapse/storage/events.py +++ b/synapse/storage/events.py @@ -1344,11 +1344,17 @@ class EventsStore(SQLBaseStore): self._get_event_cache.invalidate((event_id,)) def _get_events_from_cache(self, events, allow_rejected, update_metrics=True): - """ + """Fetch events from the caches + Args: events (list(str)): list of event_ids to fetch allow_rejected (bool): Whether to return events that were rejected update_metrics (bool): Whether to update the cache hit ratio metrics + + Returns: + dict of event_id -> _EventCacheEntry for each event_id in cache. If + allow_rejected is `False` then there will still be an entry but it + will be `None` """ event_map = {}
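Taken together, PATCH 20 and PATCH 22 mean a caller can probe the event cache without skewing the hit-ratio metrics and can tell cache misses apart from rejected events. A hedged usage sketch modelled on the roommember.py hunk in PATCH 20 follows; the helper name and the database fallback step are illustrative, not real Synapse APIs:

    def get_member_events_preferring_cache(store, member_event_ids):
        # Probe the event cache without touching the hit/miss counters
        # (update_metrics=False, PATCH 20). With allow_rejected=False,
        # rejected events come back as None entries per the docstring added
        # in PATCH 22, so treat those and outright misses the same way.
        event_map = store._get_events_from_cache(
            member_event_ids, allow_rejected=False, update_metrics=False,
        )
        missing = [e_id for e_id in member_event_ids if not event_map.get(e_id)]
        # A real caller would now load `missing` from the database and merge
        # the results into event_map; that step is elided here.
        return event_map, missing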