From 941f59101b51e9225dbdc38b22110a01de194242 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Feb 2015 16:56:01 +0000 Subject: [PATCH 01/33] Don't fail an entire request if one of the returned events fails a signature check. If an event does fail a signature check, look in the local database and request it from the originator. --- synapse/federation/federation_client.py | 107 ++++++++++++++++++------ synapse/storage/__init__.py | 21 +++-- 2 files changed, 94 insertions(+), 34 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index e1539bd0e0..b809e935a0 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -224,17 +224,17 @@ class FederationClient(object): for p in result.get("auth_chain", []) ] - for i, pdu in enumerate(pdus): - pdus[i] = yield self._check_sigs_and_hash(pdu) + signed_pdus = yield self._check_sigs_and_hash_and_fetch( + pdus, outlier=True + ) - # FIXME: We should handle signature failures more gracefully. + signed_auth = yield self._check_sigs_and_hash_and_fetch( + auth_chain, outlier=True + ) - for i, pdu in enumerate(auth_chain): - auth_chain[i] = yield self._check_sigs_and_hash(pdu) + signed_auth.sort(key=lambda e: e.depth) - # FIXME: We should handle signature failures more gracefully. - - defer.returnValue((pdus, auth_chain)) + defer.returnValue((signed_pdus, signed_auth)) @defer.inlineCallbacks @log_function @@ -248,14 +248,13 @@ class FederationClient(object): for p in res["auth_chain"] ] - for i, pdu in enumerate(auth_chain): - auth_chain[i] = yield self._check_sigs_and_hash(pdu) + signed_auth = yield self._check_sigs_and_hash_and_fetch( + auth_chain, outlier=True + ) - # FIXME: We should handle signature failures more gracefully. + signed_auth.sort(key=lambda e: e.depth) - auth_chain.sort(key=lambda e: e.depth) - - defer.returnValue(auth_chain) + defer.returnValue(signed_auth) @defer.inlineCallbacks def make_join(self, destination, room_id, user_id): @@ -291,21 +290,19 @@ class FederationClient(object): for p in content.get("auth_chain", []) ] - for i, pdu in enumerate(state): - state[i] = yield self._check_sigs_and_hash(pdu) + signed_state = yield self._check_sigs_and_hash_and_fetch( + state, outlier=True + ) - # FIXME: We should handle signature failures more gracefully. - - for i, pdu in enumerate(auth_chain): - auth_chain[i] = yield self._check_sigs_and_hash(pdu) - - # FIXME: We should handle signature failures more gracefully. + signed_auth = yield self._check_sigs_and_hash_and_fetch( + auth_chain, outlier=True + ) auth_chain.sort(key=lambda e: e.depth) defer.returnValue({ - "state": state, - "auth_chain": auth_chain, + "state": signed_state, + "auth_chain": signed_auth, }) @defer.inlineCallbacks @@ -353,12 +350,18 @@ class FederationClient(object): ) auth_chain = [ - (yield self._check_sigs_and_hash(self.event_from_pdu_json(e))) + self.event_from_pdu_json(e) for e in content["auth_chain"] ] + signed_auth = yield self._check_sigs_and_hash_and_fetch( + auth_chain, outlier=True + ) + + signed_auth.sort(key=lambda e: e.depth) + ret = { - "auth_chain": auth_chain, + "auth_chain": signed_auth, "rejects": content.get("rejects", []), "missing": content.get("missing", []), } @@ -374,6 +377,58 @@ class FederationClient(object): return event + @defer.inlineCallbacks + def _check_sigs_and_hash_and_fetch(self, pdus, outlier=False): + """Takes a list of PDUs and checks the signatures and hashs of each + one. 
If a PDU fails its signature check then we check if we have it in + the database and if not then request if from the originating server of + that PDU. + + If a PDU fails its content hash check then it is redacted. + + The given list of PDUs are not modified, instead the function returns + a new list. + + Args: + pdu (list) + outlier (bool) + + Returns: + Deferred : A list of PDUs that have valid signatures and hashes. + """ + signed_pdus = [] + for pdu in pdus: + try: + new_pdu = yield self._check_sigs_and_hash(pdu) + signed_pdus.append(new_pdu) + except SynapseError: + # FIXME: We should handle signature failures more gracefully. + + # Check local db. + new_pdu = yield self.store.get_event( + pdu.event_id, + allow_rejected=True + ) + if new_pdu: + signed_pdus.append(new_pdu) + continue + + # Check pdu.origin + new_pdu = yield self.get_pdu( + destinations=[pdu.origin], + event_id=pdu.event_id, + outlier=outlier, + ) + + if new_pdu: + signed_pdus.append(new_pdu) + continue + + logger.warn("Failed to find copy of %s with valid signature") + + defer.returnValue(signed_pdus) + + @defer.inlineCallbacks def _check_sigs_and_hash(self, pdu): """Throws a SynapseError if the PDU does not have the correct diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 7c54b1b9d3..b4a7a3f068 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -128,16 +128,21 @@ class DataStore(RoomMemberStore, RoomStore, pass @defer.inlineCallbacks - def get_event(self, event_id, allow_none=False): - events = yield self._get_events([event_id]) + def get_event(self, event_id, check_redacted=True, + get_prev_content=False, allow_rejected=False, + allow_none=False): + event = yield self.runInteraction( + "get_event", self._get_event_txn, + event_id, + check_redacted=check_redacted, + get_prev_content=get_prev_content, + allow_rejected=allow_rejected, + ) - if not events: - if allow_none: - defer.returnValue(None) - else: - raise RuntimeError("Could not find event %s" % (event_id,)) + if not event and not allow_none: + raise RuntimeError("Could not find event %s" % (event_id,)) - defer.returnValue(events[0]) + defer.returnValue(event) @log_function def _persist_event_txn(self, txn, event, context, backfilled, From 40c6fe1b81e4d92cba797b0c966fd774e2a60a28 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 2 Feb 2015 17:06:37 +0000 Subject: [PATCH 02/33] Don't bother requesting PDUs with bad signatures from the same server --- synapse/federation/federation_client.py | 31 +++++++++++++------------ 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index b809e935a0..f87e84db73 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -225,11 +225,11 @@ class FederationClient(object): ] signed_pdus = yield self._check_sigs_and_hash_and_fetch( - pdus, outlier=True + destination, pdus, outlier=True ) signed_auth = yield self._check_sigs_and_hash_and_fetch( - auth_chain, outlier=True + destination, auth_chain, outlier=True ) signed_auth.sort(key=lambda e: e.depth) @@ -249,7 +249,7 @@ class FederationClient(object): ] signed_auth = yield self._check_sigs_and_hash_and_fetch( - auth_chain, outlier=True + destination, auth_chain, outlier=True ) signed_auth.sort(key=lambda e: e.depth) @@ -291,11 +291,11 @@ class FederationClient(object): ] signed_state = yield self._check_sigs_and_hash_and_fetch( - state, outlier=True + destination, state, outlier=True ) 
signed_auth = yield self._check_sigs_and_hash_and_fetch( - auth_chain, outlier=True + destination, auth_chain, outlier=True ) auth_chain.sort(key=lambda e: e.depth) @@ -355,7 +355,7 @@ class FederationClient(object): ] signed_auth = yield self._check_sigs_and_hash_and_fetch( - auth_chain, outlier=True + destination, auth_chain, outlier=True ) signed_auth.sort(key=lambda e: e.depth) @@ -378,7 +378,7 @@ class FederationClient(object): return event @defer.inlineCallbacks - def _check_sigs_and_hash_and_fetch(self, pdus, outlier=False): + def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False): """Takes a list of PDUs and checks the signatures and hashs of each one. If a PDU fails its signature check then we check if we have it in the database and if not then request if from the originating server of @@ -414,15 +414,16 @@ class FederationClient(object): continue # Check pdu.origin - new_pdu = yield self.get_pdu( - destinations=[pdu.origin], - event_id=pdu.event_id, - outlier=outlier, - ) + if pdu.origin != origin: + new_pdu = yield self.get_pdu( + destinations=[pdu.origin], + event_id=pdu.event_id, + outlier=outlier, + ) - if new_pdu: - signed_pdus.append(new_pdu) - continue + if new_pdu: + signed_pdus.append(new_pdu) + continue logger.warn("Failed to find copy of %s with valid signature") From e7ca813dd476c83497d4130ad8efa9424d86e921 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 10:38:14 +0000 Subject: [PATCH 03/33] Try to ensure we don't persist an event we have already persisted. In persist_event check if we already have the event, if so then update instead of replacing so that we don't cause a bump of the stream_ordering. --- synapse/handlers/federation.py | 42 ++++++++++++++++++++----------- synapse/storage/__init__.py | 40 ++++++++++++++++++++++++++--- tests/handlers/test_federation.py | 5 +++- 3 files changed, 68 insertions(+), 19 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 8bf5a4cc11..c384789c2f 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -112,6 +112,14 @@ class FederationHandler(BaseHandler): logger.debug("Event: %s", event) + event_ids = set() + if state: + event_ids += {e.event_id for e in state} + if auth_chain: + event_ids += {e.event_id for e in auth_chain} + + seen_ids = (yield self.store.have_events(event_ids)).keys() + # FIXME (erikj): Awful hack to make the case where we are not currently # in the room work current_state = None @@ -124,20 +132,26 @@ class FederationHandler(BaseHandler): current_state = state if state and auth_chain is not None: - for e in state: - e.internal_metadata.outlier = True - try: - auth_ids = [e_id for e_id, _ in e.auth_events] - auth = { - (e.type, e.state_key): e for e in auth_chain - if e.event_id in auth_ids - } - yield self._handle_new_event(origin, e, auth_events=auth) - except: - logger.exception( - "Failed to handle state event %s", - e.event_id, - ) + for list_of_pdus in [auth_chain, state]: + for e in list_of_pdus: + if e.event_id in seen_ids: + continue + + e.internal_metadata.outlier = True + try: + auth_ids = [e_id for e_id, _ in e.auth_events] + auth = { + (e.type, e.state_key): e for e in auth_chain + if e.event_id in auth_ids + } + yield self._handle_new_event( + origin, e, auth_events=auth + ) + except: + logger.exception( + "Failed to handle state event %s", + e.event_id, + ) try: yield self._handle_new_event( diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index b4a7a3f068..93aefe0c48 
100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -161,6 +161,39 @@ class DataStore(RoomMemberStore, RoomStore, outlier = event.internal_metadata.is_outlier() + have_persisted = self._simple_select_one_onecol_txn( + txn, + table="event_json", + keyvalues={"event_id": event.event_id}, + retcol="event_id", + allow_none=True, + ) + + metadata_json = encode_canonical_json( + event.internal_metadata.get_dict() + ) + + if have_persisted: + if not outlier: + sql = ( + "UPDATE event_json SET internal_metadata = ?" + " WHERE event_id = ?" + ) + txn.execute( + sql, + (metadata_json.decode("UTF-8"), event.event_id,) + ) + + sql = ( + "UPDATE events SET outlier = 0" + " WHERE event_id = ?" + ) + txn.execute( + sql, + (event.event_id,) + ) + return + event_dict = { k: v for k, v in event.get_dict().items() @@ -170,10 +203,6 @@ class DataStore(RoomMemberStore, RoomStore, ] } - metadata_json = encode_canonical_json( - event.internal_metadata.get_dict() - ) - self._simple_insert_txn( txn, table="event_json", @@ -482,6 +511,9 @@ class DataStore(RoomMemberStore, RoomStore, the rejected reason string if we rejected the event, else maps to None. """ + if not event_ids: + return defer.succeed({}) + def f(txn): sql = ( "SELECT e.event_id, reason FROM events as e " diff --git a/tests/handlers/test_federation.py b/tests/handlers/test_federation.py index 44dbce6bea..4270481139 100644 --- a/tests/handlers/test_federation.py +++ b/tests/handlers/test_federation.py @@ -91,7 +91,10 @@ class FederationTestCase(unittest.TestCase): self.datastore.persist_event.return_value = defer.succeed(None) self.datastore.get_room.return_value = defer.succeed(True) self.auth.check_host_in_room.return_value = defer.succeed(True) - self.datastore.have_events.return_value = defer.succeed({}) + + def have_events(event_ids): + return defer.succeed({}) + self.datastore.have_events.side_effect = have_events def annotate(ev, old_state=None): context = Mock() From 51969f9e5f4774e4d0ddcc2e8ebcf96803cbf295 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 10:40:14 +0000 Subject: [PATCH 04/33] Return rejected events if asked for it over federation. --- synapse/handlers/federation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c384789c2f..0161fbe49c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -632,6 +632,7 @@ class FederationHandler(BaseHandler): event = yield self.store.get_event( event_id, allow_none=True, + allow_rejected=True, ) if event: From 0f48e22ef66ff8a34d4af13c25e20a461c8a8390 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 10:43:29 +0000 Subject: [PATCH 05/33] PEP8 --- synapse/federation/federation_client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index f87e84db73..9ceb66e6f4 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -429,7 +429,6 @@ class FederationClient(object): defer.returnValue(signed_pdus) - @defer.inlineCallbacks def _check_sigs_and_hash(self, pdu): """Throws a SynapseError if the PDU does not have the correct From 4ff2273b300c0a40fcf054a613aac24c51af22f1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 11:23:26 +0000 Subject: [PATCH 06/33] Add FIXME note. 
--- synapse/handlers/federation.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0161fbe49c..322c1b407d 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -716,6 +716,8 @@ class FederationHandler(BaseHandler): context.rejected = RejectedReason.AUTH_ERROR + # FIXME: Don't store as rejected with AUTH_ERROR if we haven't + # seen all the auth events. yield self.store.persist_event( event, context=context, From 06c34bfbaee553b851e0f02d8e17d5bff7360376 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 11:23:44 +0000 Subject: [PATCH 07/33] Give exception better message --- synapse/handlers/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 322c1b407d..9d7557f578 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1000,7 +1000,7 @@ class FederationHandler(BaseHandler): if reason is None: # FIXME: ERRR?! logger.warn("Could not find reason for %s", e.event_id) - raise RuntimeError("") + raise RuntimeError("Could not find reason for %s" % e.event_id) reason_map[e.event_id] = reason From fed29251d70b050ea5799708b6b13be9617d1f6d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 13:23:58 +0000 Subject: [PATCH 08/33] Spelling --- synapse/handlers/federation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 9d7557f578..b9b2e25d1f 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -767,7 +767,7 @@ class FederationHandler(BaseHandler): ) ) - logger.debug("on_query_auth reutrning: %s", ret) + logger.debug("on_query_auth returning: %s", ret) defer.returnValue(ret) From 77a076bd25fc355c49251bcf489c0511c0f0f0af Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 13:35:17 +0000 Subject: [PATCH 09/33] Set combinations is | and not + --- synapse/handlers/federation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b9b2e25d1f..653ab0dbfa 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -114,9 +114,9 @@ class FederationHandler(BaseHandler): event_ids = set() if state: - event_ids += {e.event_id for e in state} + event_ids |= {e.event_id for e in state} if auth_chain: - event_ids += {e.event_id for e in auth_chain} + event_ids |= {e.event_id for e in auth_chain} seen_ids = (yield self.store.have_events(event_ids)).keys() From 6efd4d1649a539ba4bf1c884ebb90a48c2d1c8df Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 13:57:54 +0000 Subject: [PATCH 10/33] Don't completely die if get auth_chain or querying auth_chain requests fail --- synapse/handlers/federation.py | 135 ++++++++++++++++++--------------- 1 file changed, 72 insertions(+), 63 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 653ab0dbfa..6727155c38 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -787,41 +787,45 @@ class FederationHandler(BaseHandler): if missing_auth: logger.debug("Missing auth: %s", missing_auth) # If we don't have all the auth events, we need to get them. 
- remote_auth_chain = yield self.replication_layer.get_event_auth( - origin, event.room_id, event.event_id - ) + try: + remote_auth_chain = yield self.replication_layer.get_event_auth( + origin, event.room_id, event.event_id + ) - seen_remotes = yield self.store.have_events( - [e.event_id for e in remote_auth_chain] - ) + seen_remotes = yield self.store.have_events( + [e.event_id for e in remote_auth_chain] + ) - for e in remote_auth_chain: - if e.event_id in seen_remotes.keys(): - continue + for e in remote_auth_chain: + if e.event_id in seen_remotes.keys(): + continue - if e.event_id == event.event_id: - continue + if e.event_id == event.event_id: + continue - try: - auth_ids = [e_id for e_id, _ in e.auth_events] - auth = { - (e.type, e.state_key): e for e in remote_auth_chain - if e.event_id in auth_ids - } - e.internal_metadata.outlier = True + try: + auth_ids = [e_id for e_id, _ in e.auth_events] + auth = { + (e.type, e.state_key): e for e in remote_auth_chain + if e.event_id in auth_ids + } + e.internal_metadata.outlier = True - logger.debug( - "do_auth %s missing_auth: %s", - event.event_id, e.event_id - ) - yield self._handle_new_event( - origin, e, auth_events=auth - ) + logger.debug( + "do_auth %s missing_auth: %s", + event.event_id, e.event_id + ) + yield self._handle_new_event( + origin, e, auth_events=auth + ) - if e.event_id in event_auth_events: - auth_events[(e.type, e.state_key)] = e - except AuthError: - pass + if e.event_id in event_auth_events: + auth_events[(e.type, e.state_key)] = e + except AuthError: + pass + except: + # FIXME: + logger.exception("Failed to get auth chain") # FIXME: Assumes we have and stored all the state for all the # prev_events @@ -836,47 +840,52 @@ class FederationHandler(BaseHandler): auth_ids = self.auth.compute_auth_events(event, context) local_auth_chain = yield self.store.get_auth_chain(auth_ids) - # 2. Get remote difference. - result = yield self.replication_layer.query_auth( - origin, - event.room_id, - event.event_id, - local_auth_chain, - ) + try: + # 2. Get remote difference. + result = yield self.replication_layer.query_auth( + origin, + event.room_id, + event.event_id, + local_auth_chain, + ) - seen_remotes = yield self.store.have_events( - [e.event_id for e in result["auth_chain"]] - ) + seen_remotes = yield self.store.have_events( + [e.event_id for e in result["auth_chain"]] + ) - # 3. Process any remote auth chain events we haven't seen. - for ev in result["auth_chain"]: - if ev.event_id in seen_remotes.keys(): - continue + # 3. Process any remote auth chain events we haven't seen. 
+ for ev in result["auth_chain"]: + if ev.event_id in seen_remotes.keys(): + continue - if ev.event_id == event.event_id: - continue + if ev.event_id == event.event_id: + continue - try: - auth_ids = [e_id for e_id, _ in ev.auth_events] - auth = { - (e.type, e.state_key): e for e in result["auth_chain"] - if e.event_id in auth_ids - } - ev.internal_metadata.outlier = True + try: + auth_ids = [e_id for e_id, _ in ev.auth_events] + auth = { + (e.type, e.state_key): e for e in result["auth_chain"] + if e.event_id in auth_ids + } + ev.internal_metadata.outlier = True - logger.debug( - "do_auth %s different_auth: %s", - event.event_id, e.event_id - ) + logger.debug( + "do_auth %s different_auth: %s", + event.event_id, e.event_id + ) - yield self._handle_new_event( - origin, ev, auth_events=auth - ) + yield self._handle_new_event( + origin, ev, auth_events=auth + ) - if ev.event_id in event_auth_events: - auth_events[(ev.type, ev.state_key)] = ev - except AuthError: - pass + if ev.event_id in event_auth_events: + auth_events[(ev.type, ev.state_key)] = ev + except AuthError: + pass + + except: + # FIXME: + logger.exception("Failed to query auth chain") # 4. Look at rejects and their proofs. # TODO. From 0dd3aea319c13e66eb1d75b5b8a196032ee332b7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 14:58:30 +0000 Subject: [PATCH 11/33] Keep around the old (buggy) version of the prune_event function so that we can use it to check signatures for events on old servers --- synapse/api/auth.py | 2 - synapse/events/utils.py | 79 ++++++++++++++++++++ synapse/federation/federation_client.py | 96 +------------------------ synapse/federation/federation_server.py | 52 +++----------- 4 files changed, 92 insertions(+), 137 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 37e31d2b6f..3471afd7e7 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -102,8 +102,6 @@ class Auth(object): def check_host_in_room(self, room_id, host): curr_state = yield self.state.get_current_state(room_id) - logger.debug("Got curr_state %s", curr_state) - for event in curr_state: if event.type == EventTypes.Member: try: diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 1aa952150e..65a9f70982 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -94,6 +94,85 @@ def prune_event(event): ) +def old_prune_event(event): + """This is an old and buggy version of the prune event function. The + difference between this and the new version is that when including dicts + in the content they were included as frozen_dicts rather than dicts. This + caused the JSON encoder to encode as a list of the keys rather than the + dict. + """ + event_type = event.type + + allowed_keys = [ + "event_id", + "sender", + "room_id", + "hashes", + "signatures", + "content", + "type", + "state_key", + "depth", + "prev_events", + "prev_state", + "auth_events", + "origin", + "origin_server_ts", + "membership", + ] + + event_dict = event.get_dict() + + new_content = {} + + def add_fields(*fields): + for field in fields: + if field in event.content: + # This is the line that is buggy: event.content may return + # a frozen_dict which the json encoders encode as lists rather + # than dicts. 
+ new_content[field] = event.content[field] + + if event_type == EventTypes.Member: + add_fields("membership") + elif event_type == EventTypes.Create: + add_fields("creator") + elif event_type == EventTypes.JoinRules: + add_fields("join_rule") + elif event_type == EventTypes.PowerLevels: + add_fields( + "users", + "users_default", + "events", + "events_default", + "events_default", + "state_default", + "ban", + "kick", + "redact", + ) + elif event_type == EventTypes.Aliases: + add_fields("aliases") + + allowed_fields = { + k: v + for k, v in event_dict.items() + if k in allowed_keys + } + + allowed_fields["content"] = new_content + + allowed_fields["unsigned"] = {} + + if "age_ts" in event.unsigned: + allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"] + + return type(event)( + allowed_fields, + internal_metadata_dict=event.internal_metadata.get_dict() + ) + + def format_event_raw(d): return d diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 9ceb66e6f4..5fac629709 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -16,17 +16,11 @@ from twisted.internet import defer +from .federation_base import FederationBase from .units import Edu from synapse.util.logutils import log_function from synapse.events import FrozenEvent -from synapse.events.utils import prune_event - -from syutil.jsonutil import encode_canonical_json - -from synapse.crypto.event_signing import check_event_content_hash - -from synapse.api.errors import SynapseError import logging @@ -34,7 +28,7 @@ import logging logger = logging.getLogger(__name__) -class FederationClient(object): +class FederationClient(FederationBase): @log_function def send_pdu(self, pdu, destinations): """Informs the replication layer about a new PDU generated within the @@ -376,89 +370,3 @@ class FederationClient(object): event.internal_metadata.outlier = outlier return event - - @defer.inlineCallbacks - def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False): - """Takes a list of PDUs and checks the signatures and hashs of each - one. If a PDU fails its signature check then we check if we have it in - the database and if not then request if from the originating server of - that PDU. - - If a PDU fails its content hash check then it is redacted. - - The given list of PDUs are not modified, instead the function returns - a new list. - - Args: - pdu (list) - outlier (bool) - - Returns: - Deferred : A list of PDUs that have valid signatures and hashes. - """ - signed_pdus = [] - for pdu in pdus: - try: - new_pdu = yield self._check_sigs_and_hash(pdu) - signed_pdus.append(new_pdu) - except SynapseError: - # FIXME: We should handle signature failures more gracefully. - - # Check local db. - new_pdu = yield self.store.get_event( - pdu.event_id, - allow_rejected=True - ) - if new_pdu: - signed_pdus.append(new_pdu) - continue - - # Check pdu.origin - if pdu.origin != origin: - new_pdu = yield self.get_pdu( - destinations=[pdu.origin], - event_id=pdu.event_id, - outlier=outlier, - ) - - if new_pdu: - signed_pdus.append(new_pdu) - continue - - logger.warn("Failed to find copy of %s with valid signature") - - defer.returnValue(signed_pdus) - - @defer.inlineCallbacks - def _check_sigs_and_hash(self, pdu): - """Throws a SynapseError if the PDU does not have the correct - signatures. - - Returns: - FrozenEvent: Either the given event or it redacted if it failed the - content hash check. - """ - # Check signatures are correct. 
- redacted_event = prune_event(pdu) - redacted_pdu_json = redacted_event.get_pdu_json() - - try: - yield self.keyring.verify_json_for_server( - pdu.origin, redacted_pdu_json - ) - except SynapseError: - logger.warn( - "Signature check failed for %s redacted to %s", - encode_canonical_json(pdu.get_pdu_json()), - encode_canonical_json(redacted_pdu_json), - ) - raise - - if not check_event_content_hash(pdu): - logger.warn( - "Event content has been tampered, redacting %s, %s", - pdu.event_id, encode_canonical_json(pdu.get_dict()) - ) - defer.returnValue(redacted_event) - - defer.returnValue(pdu) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 5fbd8b19de..97dca3f84c 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -16,6 +16,7 @@ from twisted.internet import defer +from .federation_base import FederationBase from .units import Transaction, Edu from synapse.util.logutils import log_function @@ -35,7 +36,7 @@ import logging logger = logging.getLogger(__name__) -class FederationServer(object): +class FederationServer(FederationBase): def set_handler(self, handler): """Sets the handler that the replication layer will use to communicate receipt of new PDUs from other home servers. The required methods are @@ -251,17 +252,20 @@ class FederationServer(object): Deferred: Results in `dict` with the same format as `content` """ auth_chain = [ - (yield self._check_sigs_and_hash(self.event_from_pdu_json(e))) + self.event_from_pdu_json(e) for e in content["auth_chain"] ] - missing = [ - (yield self._check_sigs_and_hash(self.event_from_pdu_json(e))) - for e in content.get("missing", []) - ] + signed_auth = yield self._check_sigs_and_hash_and_fetch( + origin, auth_chain, outlier=True + ) ret = yield self.handler.on_query_auth( - origin, event_id, auth_chain, content.get("rejects", []), missing + origin, + event_id, + signed_auth, + content.get("rejects", []), + content.get("missing", []), ) time_now = self._clock.time_msec() @@ -426,37 +430,3 @@ class FederationServer(object): event.internal_metadata.outlier = outlier return event - - @defer.inlineCallbacks - def _check_sigs_and_hash(self, pdu): - """Throws a SynapseError if the PDU does not have the correct - signatures. - - Returns: - FrozenEvent: Either the given event or it redacted if it failed the - content hash check. - """ - # Check signatures are correct. 
- redacted_event = prune_event(pdu) - redacted_pdu_json = redacted_event.get_pdu_json() - - try: - yield self.keyring.verify_json_for_server( - pdu.origin, redacted_pdu_json - ) - except SynapseError: - logger.warn( - "Signature check failed for %s redacted to %s", - encode_canonical_json(pdu.get_pdu_json()), - encode_canonical_json(redacted_pdu_json), - ) - raise - - if not check_event_content_hash(pdu): - logger.warn( - "Event content has been tampered, redacting %s, %s", - pdu.event_id, encode_canonical_json(pdu.get_dict()) - ) - defer.returnValue(redacted_event) - - defer.returnValue(pdu) From 7b810e136ef2040fe55a778d4a4a790cdbd0f84c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 15:00:42 +0000 Subject: [PATCH 12/33] Add new FederationBase --- synapse/federation/federation_base.py | 126 ++++++++++++++++++++++++++ 1 file changed, 126 insertions(+) create mode 100644 synapse/federation/federation_base.py diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py new file mode 100644 index 0000000000..d26a2396ac --- /dev/null +++ b/synapse/federation/federation_base.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from twisted.internet import defer + +from synapse.events.utils import prune_event, old_prune_event + +from syutil.jsonutil import encode_canonical_json + +from synapse.crypto.event_signing import check_event_content_hash + +from synapse.api.errors import SynapseError + +import logging + + +logger = logging.getLogger(__name__) + + +class FederationBase(object): + @defer.inlineCallbacks + def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False): + """Takes a list of PDUs and checks the signatures and hashs of each + one. If a PDU fails its signature check then we check if we have it in + the database and if not then request if from the originating server of + that PDU. + + If a PDU fails its content hash check then it is redacted. + + The given list of PDUs are not modified, instead the function returns + a new list. + + Args: + pdu (list) + outlier (bool) + + Returns: + Deferred : A list of PDUs that have valid signatures and hashes. + """ + signed_pdus = [] + for pdu in pdus: + try: + new_pdu = yield self._check_sigs_and_hash(pdu) + signed_pdus.append(new_pdu) + except SynapseError: + # FIXME: We should handle signature failures more gracefully. + + # Check local db. 
+ new_pdu = yield self.store.get_event( + pdu.event_id, + allow_rejected=True + ) + if new_pdu: + signed_pdus.append(new_pdu) + continue + + # Check pdu.origin + if pdu.origin != origin: + new_pdu = yield self.get_pdu( + destinations=[pdu.origin], + event_id=pdu.event_id, + outlier=outlier, + ) + + if new_pdu: + signed_pdus.append(new_pdu) + continue + + logger.warn("Failed to find copy of %s with valid signature") + + defer.returnValue(signed_pdus) + + @defer.inlineCallbacks + def _check_sigs_and_hash(self, pdu): + """Throws a SynapseError if the PDU does not have the correct + signatures. + + Returns: + FrozenEvent: Either the given event or it redacted if it failed the + content hash check. + """ + # Check signatures are correct. + redacted_event = prune_event(pdu) + redacted_pdu_json = redacted_event.get_pdu_json() + + old_redacted = old_prune_event(pdu) + old_redacted_pdu_json = old_redacted.get_pdu_json() + + try: + try: + yield self.keyring.verify_json_for_server( + pdu.origin, old_redacted_pdu_json + ) + except SynapseError: + yield self.keyring.verify_json_for_server( + pdu.origin, redacted_pdu_json + ) + except SynapseError: + logger.warn( + "Signature check failed for %s redacted to %s", + encode_canonical_json(pdu.get_pdu_json()), + encode_canonical_json(redacted_pdu_json), + ) + raise + + if not check_event_content_hash(pdu): + logger.warn( + "Event content has been tampered, redacting %s, %s", + pdu.event_id, encode_canonical_json(pdu.get_dict()) + ) + defer.returnValue(redacted_event) + + defer.returnValue(pdu) \ No newline at end of file From 8dae5c81085458a3a0b3dc92278e8c317d5de204 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 15:01:12 +0000 Subject: [PATCH 13/33] Remove unused imports --- synapse/federation/federation_server.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 97dca3f84c..4742ca9390 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -22,11 +22,6 @@ from .units import Transaction, Edu from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext from synapse.events import FrozenEvent -from synapse.events.utils import prune_event - -from syutil.jsonutil import encode_canonical_json - -from synapse.crypto.event_signing import check_event_content_hash from synapse.api.errors import FederationError, SynapseError From 9bace3a36751deed141225ccabd5bebecebc25f3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 15:32:17 +0000 Subject: [PATCH 14/33] Actually, the old prune_event function was non-deterministic, so no point keeping it around :( --- synapse/events/utils.py | 79 --------------------------- synapse/federation/federation_base.py | 16 ++---- 2 files changed, 4 insertions(+), 91 deletions(-) diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 65a9f70982..1aa952150e 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -94,85 +94,6 @@ def prune_event(event): ) -def old_prune_event(event): - """This is an old and buggy version of the prune event function. The - difference between this and the new version is that when including dicts - in the content they were included as frozen_dicts rather than dicts. This - caused the JSON encoder to encode as a list of the keys rather than the - dict. 
- """ - event_type = event.type - - allowed_keys = [ - "event_id", - "sender", - "room_id", - "hashes", - "signatures", - "content", - "type", - "state_key", - "depth", - "prev_events", - "prev_state", - "auth_events", - "origin", - "origin_server_ts", - "membership", - ] - - event_dict = event.get_dict() - - new_content = {} - - def add_fields(*fields): - for field in fields: - if field in event.content: - # This is the line that is buggy: event.content may return - # a frozen_dict which the json encoders encode as lists rather - # than dicts. - new_content[field] = event.content[field] - - if event_type == EventTypes.Member: - add_fields("membership") - elif event_type == EventTypes.Create: - add_fields("creator") - elif event_type == EventTypes.JoinRules: - add_fields("join_rule") - elif event_type == EventTypes.PowerLevels: - add_fields( - "users", - "users_default", - "events", - "events_default", - "events_default", - "state_default", - "ban", - "kick", - "redact", - ) - elif event_type == EventTypes.Aliases: - add_fields("aliases") - - allowed_fields = { - k: v - for k, v in event_dict.items() - if k in allowed_keys - } - - allowed_fields["content"] = new_content - - allowed_fields["unsigned"] = {} - - if "age_ts" in event.unsigned: - allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"] - - return type(event)( - allowed_fields, - internal_metadata_dict=event.internal_metadata.get_dict() - ) - - def format_event_raw(d): return d diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index d26a2396ac..27c5918e0f 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -16,7 +16,7 @@ from twisted.internet import defer -from synapse.events.utils import prune_event, old_prune_event +from synapse.events.utils import prune_event from syutil.jsonutil import encode_canonical_json @@ -96,18 +96,10 @@ class FederationBase(object): redacted_event = prune_event(pdu) redacted_pdu_json = redacted_event.get_pdu_json() - old_redacted = old_prune_event(pdu) - old_redacted_pdu_json = old_redacted.get_pdu_json() - try: - try: - yield self.keyring.verify_json_for_server( - pdu.origin, old_redacted_pdu_json - ) - except SynapseError: - yield self.keyring.verify_json_for_server( - pdu.origin, redacted_pdu_json - ) + yield self.keyring.verify_json_for_server( + pdu.origin, redacted_pdu_json + ) except SynapseError: logger.warn( "Signature check failed for %s redacted to %s", From 9a71add1c077130bf2cf3998ab0dd2226ba0c75b Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Feb 2015 16:06:31 +0000 Subject: [PATCH 15/33] Use set_tweak instead of set_sound --- synapse/push/__init__.py | 4 ++-- synapse/push/baserules.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 28e5dae81d..00f3513c23 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -400,8 +400,8 @@ def _tweaks_for_actions(actions): for a in actions: if not isinstance(a, dict): continue - if 'set_sound' in a: - tweaks['sound'] = a['set_sound'] + if 'set_tweak' in a and 'value' in a: + tweaks[a['set_tweak']] = a['value'] return tweaks diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 382de118e0..376d1d4d33 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -38,7 +38,8 @@ def make_base_rules(user_name): 'actions': [ 'notify', { - 'set_sound': 'default' + 'set_tweak': 'sound', + 'value': 'default' } ] } From 
7dd1c5c542d58464085b11ababe746c6a60515ec Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 16:12:04 +0000 Subject: [PATCH 16/33] Neaten the handling of state and auth_chain up a bit --- synapse/handlers/federation.py | 59 ++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 28 deletions(-) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 6727155c38..86953bf8c8 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -30,6 +30,7 @@ from synapse.types import UserID from twisted.internet import defer +import itertools import logging @@ -112,14 +113,6 @@ class FederationHandler(BaseHandler): logger.debug("Event: %s", event) - event_ids = set() - if state: - event_ids |= {e.event_id for e in state} - if auth_chain: - event_ids |= {e.event_id for e in auth_chain} - - seen_ids = (yield self.store.have_events(event_ids)).keys() - # FIXME (erikj): Awful hack to make the case where we are not currently # in the room work current_state = None @@ -131,27 +124,37 @@ class FederationHandler(BaseHandler): logger.debug("Got event for room we're not in.") current_state = state - if state and auth_chain is not None: - for list_of_pdus in [auth_chain, state]: - for e in list_of_pdus: - if e.event_id in seen_ids: - continue + event_ids = set() + if state: + event_ids |= {e.event_id for e in state} + if auth_chain: + event_ids |= {e.event_id for e in auth_chain} - e.internal_metadata.outlier = True - try: - auth_ids = [e_id for e_id, _ in e.auth_events] - auth = { - (e.type, e.state_key): e for e in auth_chain - if e.event_id in auth_ids - } - yield self._handle_new_event( - origin, e, auth_events=auth - ) - except: - logger.exception( - "Failed to handle state event %s", - e.event_id, - ) + seen_ids = (yield self.store.have_events(event_ids)).keys() + + if state and auth_chain is not None: + # If we have any state or auth_chain given to us by the replication + # layer, then we should handle them (if we haven't before.) 
+ for e in itertools.chain(auth_chain, state): + if e.event_id in seen_ids: + continue + + e.internal_metadata.outlier = True + try: + auth_ids = [e_id for e_id, _ in e.auth_events] + auth = { + (e.type, e.state_key): e for e in auth_chain + if e.event_id in auth_ids + } + yield self._handle_new_event( + origin, e, auth_events=auth + ) + seen_ids.add(e.event_id) + except: + logger.exception( + "Failed to handle state event %s", + e.event_id, + ) try: yield self._handle_new_event( From 3c39f42a0526186f85e69988504fff6adbf41d91 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 16:14:19 +0000 Subject: [PATCH 17/33] New line --- synapse/federation/federation_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index 27c5918e0f..a990aec4fd 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -115,4 +115,4 @@ class FederationBase(object): ) defer.returnValue(redacted_event) - defer.returnValue(pdu) \ No newline at end of file + defer.returnValue(pdu) From dc7bb70f22edf8ef0631c961f2c77a82de7c76d5 Mon Sep 17 00:00:00 2001 From: David Baker Date: Tue, 3 Feb 2015 16:51:07 +0000 Subject: [PATCH 18/33] s/instance_handle/profile_tag/ --- synapse/push/__init__.py | 8 ++++---- synapse/push/httppusher.py | 4 ++-- synapse/push/pusherpool.py | 12 ++++++------ synapse/rest/client/v1/push_rule.py | 28 ++++++++++++++-------------- synapse/rest/client/v1/pusher.py | 4 ++-- synapse/storage/pusher.py | 14 +++++++------- synapse/storage/schema/delta/v12.sql | 2 +- synapse/storage/schema/pusher.sql | 2 +- 8 files changed, 37 insertions(+), 37 deletions(-) diff --git a/synapse/push/__init__.py b/synapse/push/__init__.py index 00f3513c23..8c6f0a6571 100644 --- a/synapse/push/__init__.py +++ b/synapse/push/__init__.py @@ -37,14 +37,14 @@ class Pusher(object): INEQUALITY_EXPR = re.compile("^([=<>]*)([0-9]*)$") - def __init__(self, _hs, instance_handle, user_name, app_id, + def __init__(self, _hs, profile_tag, user_name, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, data, last_token, last_success, failing_since): self.hs = _hs self.evStreamHandler = self.hs.get_handlers().event_stream_handler self.store = self.hs.get_datastore() self.clock = self.hs.get_clock() - self.instance_handle = instance_handle + self.profile_tag = profile_tag self.user_name = user_name self.app_id = app_id self.app_display_name = app_display_name @@ -147,9 +147,9 @@ class Pusher(object): return False return fnmatch.fnmatch(val.upper(), pat.upper()) elif condition['kind'] == 'device': - if 'instance_handle' not in condition: + if 'profile_tag' not in condition: return True - return condition['instance_handle'] == self.instance_handle + return condition['profile_tag'] == self.profile_tag elif condition['kind'] == 'contains_display_name': # This is special because display names can be different # between rooms and so you can't really hard code it in a rule. 
diff --git a/synapse/push/httppusher.py b/synapse/push/httppusher.py index 7c6953c989..5788db4eba 100644 --- a/synapse/push/httppusher.py +++ b/synapse/push/httppusher.py @@ -24,12 +24,12 @@ logger = logging.getLogger(__name__) class HttpPusher(Pusher): - def __init__(self, _hs, instance_handle, user_name, app_id, + def __init__(self, _hs, profile_tag, user_name, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, data, last_token, last_success, failing_since): super(HttpPusher, self).__init__( _hs, - instance_handle, + profile_tag, user_name, app_id, app_display_name, diff --git a/synapse/push/pusherpool.py b/synapse/push/pusherpool.py index 4892c21e7b..5a525befd7 100644 --- a/synapse/push/pusherpool.py +++ b/synapse/push/pusherpool.py @@ -55,7 +55,7 @@ class PusherPool: self._start_pushers(pushers) @defer.inlineCallbacks - def add_pusher(self, user_name, instance_handle, kind, app_id, + def add_pusher(self, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data): # we try to create the pusher just to validate the config: it # will then get pulled out of the database, @@ -64,7 +64,7 @@ class PusherPool: self._create_pusher({ "user_name": user_name, "kind": kind, - "instance_handle": instance_handle, + "profile_tag": profile_tag, "app_id": app_id, "app_display_name": app_display_name, "device_display_name": device_display_name, @@ -77,18 +77,18 @@ class PusherPool: "failing_since": None }) yield self._add_pusher_to_store( - user_name, instance_handle, kind, app_id, + user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data ) @defer.inlineCallbacks - def _add_pusher_to_store(self, user_name, instance_handle, kind, app_id, + def _add_pusher_to_store(self, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, lang, data): yield self.store.add_pusher( user_name=user_name, - instance_handle=instance_handle, + profile_tag=profile_tag, kind=kind, app_id=app_id, app_display_name=app_display_name, @@ -104,7 +104,7 @@ class PusherPool: if pusherdict['kind'] == 'http': return HttpPusher( self.hs, - instance_handle=pusherdict['instance_handle'], + profile_tag=pusherdict['profile_tag'], user_name=pusherdict['user_name'], app_id=pusherdict['app_id'], app_display_name=pusherdict['app_display_name'], diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index faa7919fbb..348adb9c0d 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -112,7 +112,7 @@ class PushRuleRestServlet(ClientV1RestServlet): if device: conditions.append({ 'kind': 'device', - 'instance_handle': device + 'profile_tag': device }) if 'actions' not in req_obj: @@ -195,7 +195,7 @@ class PushRuleRestServlet(ClientV1RestServlet): for r in rules: conditions = json.loads(r['conditions']) - ih = _instance_handle_from_conditions(conditions) + ih = _profile_tag_from_conditions(conditions) if ih == spec['device'] and r['priority_class'] == priority_class: yield self.hs.get_datastore().delete_push_rule( user.to_string(), spec['rule_id'] @@ -239,19 +239,19 @@ class PushRuleRestServlet(ClientV1RestServlet): if r['priority_class'] > PushRuleRestServlet.PRIORITY_CLASS_MAP['override']: # per-device rule - instance_handle = _instance_handle_from_conditions(r["conditions"]) + profile_tag = _profile_tag_from_conditions(r["conditions"]) r = _strip_device_condition(r) - if not instance_handle: + if not profile_tag: continue - if instance_handle not in 
rules['device']: - rules['device'][instance_handle] = {} - rules['device'][instance_handle] = ( + if profile_tag not in rules['device']: + rules['device'][profile_tag] = {} + rules['device'][profile_tag] = ( _add_empty_priority_class_arrays( - rules['device'][instance_handle] + rules['device'][profile_tag] ) ) - rulearray = rules['device'][instance_handle][template_name] + rulearray = rules['device'][profile_tag][template_name] else: rulearray = rules['global'][template_name] @@ -282,13 +282,13 @@ class PushRuleRestServlet(ClientV1RestServlet): if path[0] == '': defer.returnValue((200, rules['device'])) - instance_handle = path[0] + profile_tag = path[0] path = path[1:] - if instance_handle not in rules['device']: + if profile_tag not in rules['device']: ret = {} ret = _add_empty_priority_class_arrays(ret) defer.returnValue((200, ret)) - ruleset = rules['device'][instance_handle] + ruleset = rules['device'][profile_tag] result = _filter_ruleset_with_path(ruleset, path) defer.returnValue((200, result)) else: @@ -304,14 +304,14 @@ def _add_empty_priority_class_arrays(d): return d -def _instance_handle_from_conditions(conditions): +def _profile_tag_from_conditions(conditions): """ Given a list of conditions, return the instance handle of the device rule if there is one """ for c in conditions: if c['kind'] == 'device': - return c['instance_handle'] + return c['profile_tag'] return None diff --git a/synapse/rest/client/v1/pusher.py b/synapse/rest/client/v1/pusher.py index 353a4a6589..e10d2576d2 100644 --- a/synapse/rest/client/v1/pusher.py +++ b/synapse/rest/client/v1/pusher.py @@ -41,7 +41,7 @@ class PusherRestServlet(ClientV1RestServlet): ) defer.returnValue((200, {})) - reqd = ['instance_handle', 'kind', 'app_id', 'app_display_name', + reqd = ['profile_tag', 'kind', 'app_id', 'app_display_name', 'device_display_name', 'pushkey', 'lang', 'data'] missing = [] for i in reqd: @@ -54,7 +54,7 @@ class PusherRestServlet(ClientV1RestServlet): try: yield pusher_pool.add_pusher( user_name=user.to_string(), - instance_handle=content['instance_handle'], + profile_tag=content['profile_tag'], kind=content['kind'], app_id=content['app_id'], app_display_name=content['app_display_name'], diff --git a/synapse/storage/pusher.py b/synapse/storage/pusher.py index f253c9e2c3..e2a662a6c7 100644 --- a/synapse/storage/pusher.py +++ b/synapse/storage/pusher.py @@ -29,7 +29,7 @@ class PusherStore(SQLBaseStore): @defer.inlineCallbacks def get_pushers_by_app_id_and_pushkey(self, app_id_and_pushkey): sql = ( - "SELECT id, user_name, kind, instance_handle, app_id," + "SELECT id, user_name, kind, profile_tag, app_id," "app_display_name, device_display_name, pushkey, ts, data, " "last_token, last_success, failing_since " "FROM pushers " @@ -45,7 +45,7 @@ class PusherStore(SQLBaseStore): "id": r[0], "user_name": r[1], "kind": r[2], - "instance_handle": r[3], + "profile_tag": r[3], "app_id": r[4], "app_display_name": r[5], "device_display_name": r[6], @@ -64,7 +64,7 @@ class PusherStore(SQLBaseStore): @defer.inlineCallbacks def get_all_pushers(self): sql = ( - "SELECT id, user_name, kind, instance_handle, app_id," + "SELECT id, user_name, kind, profile_tag, app_id," "app_display_name, device_display_name, pushkey, ts, data, " "last_token, last_success, failing_since " "FROM pushers" @@ -77,7 +77,7 @@ class PusherStore(SQLBaseStore): "id": r[0], "user_name": r[1], "kind": r[2], - "instance_handle": r[3], + "profile_tag": r[3], "app_id": r[4], "app_display_name": r[5], "device_display_name": r[6], @@ -94,7 +94,7 @@ class 
PusherStore(SQLBaseStore): defer.returnValue(ret) @defer.inlineCallbacks - def add_pusher(self, user_name, instance_handle, kind, app_id, + def add_pusher(self, user_name, profile_tag, kind, app_id, app_display_name, device_display_name, pushkey, pushkey_ts, lang, data): try: @@ -107,7 +107,7 @@ class PusherStore(SQLBaseStore): dict( user_name=user_name, kind=kind, - instance_handle=instance_handle, + profile_tag=profile_tag, app_display_name=app_display_name, device_display_name=device_display_name, ts=pushkey_ts, @@ -158,7 +158,7 @@ class PushersTable(Table): "id", "user_name", "kind", - "instance_handle", + "profile_tag", "app_id", "app_display_name", "device_display_name", diff --git a/synapse/storage/schema/delta/v12.sql b/synapse/storage/schema/delta/v12.sql index a6867cba62..16c2258ca4 100644 --- a/synapse/storage/schema/delta/v12.sql +++ b/synapse/storage/schema/delta/v12.sql @@ -24,7 +24,7 @@ CREATE TABLE IF NOT EXISTS rejections( CREATE TABLE IF NOT EXISTS pushers ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT NOT NULL, - instance_handle varchar(32) NOT NULL, + profile_tag varchar(32) NOT NULL, kind varchar(8) NOT NULL, app_id varchar(64) NOT NULL, app_display_name varchar(64) NOT NULL, diff --git a/synapse/storage/schema/pusher.sql b/synapse/storage/schema/pusher.sql index 8c4dfd5c1b..3735b11547 100644 --- a/synapse/storage/schema/pusher.sql +++ b/synapse/storage/schema/pusher.sql @@ -16,7 +16,7 @@ CREATE TABLE IF NOT EXISTS pushers ( id INTEGER PRIMARY KEY AUTOINCREMENT, user_name TEXT NOT NULL, - instance_handle varchar(32) NOT NULL, + profile_tag varchar(32) NOT NULL, kind varchar(8) NOT NULL, app_id varchar(64) NOT NULL, app_display_name varchar(64) NOT NULL, From 02be8da5e11d9abcfc962f962bbc4e9940b69199 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 3 Feb 2015 17:34:07 +0000 Subject: [PATCH 19/33] Add doc to get_event --- synapse/storage/__init__.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 93aefe0c48..93ab26fcd1 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -131,6 +131,21 @@ class DataStore(RoomMemberStore, RoomStore, def get_event(self, event_id, check_redacted=True, get_prev_content=False, allow_rejected=False, allow_none=False): + """Get an event from the database by event_id. + + Args: + event_id (str): The event_id of the event to fetch + check_redacted (bool): If True, check if event has been redacted + and redact it. + get_prev_content (bool): If True and event is a state event, + include the previous states content in the unsigned field. + allow_rejected (bool): If True return rejected events. + allow_none (bool): If True, return None if no event found, if + False throw an exception. + + Returns: + Deferred : A FrozenEvent. + """ event = yield self.runInteraction( "get_event", self._get_event_txn, event_id, From c0462dbf1533f285f632dcb0a74c0ef0c3e2475b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 10:16:51 +0000 Subject: [PATCH 20/33] Rearrange persist_event so that do all the queries that need to be done before returning early if we have already persisted that event. 
--- synapse/events/__init__.py | 2 +- synapse/handlers/federation.py | 2 + synapse/storage/__init__.py | 145 +++++++++++++++++---------------- 3 files changed, 77 insertions(+), 72 deletions(-) diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index bf07951027..8f0c6e959f 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -77,7 +77,7 @@ class EventBase(object): return self.content["membership"] def is_state(self): - return hasattr(self, "state_key") + return hasattr(self, "state_key") and self.state_key is not None def get_dict(self): d = dict(self._event_dict) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 86953bf8c8..0876589e31 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -515,6 +515,8 @@ class FederationHandler(BaseHandler): "Failed to get destination from event %s", s.event_id ) + destinations.remove(origin) + logger.debug( "on_send_join_request: Sending event: %s, signatures: %s", event.event_id, diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 93ab26fcd1..30ce378900 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -163,19 +163,70 @@ class DataStore(RoomMemberStore, RoomStore, def _persist_event_txn(self, txn, event, context, backfilled, stream_ordering=None, is_new_state=True, current_state=None): - if event.type == EventTypes.Member: - self._store_room_member_txn(txn, event) - elif event.type == EventTypes.Feedback: - self._store_feedback_txn(txn, event) - elif event.type == EventTypes.Name: - self._store_room_name_txn(txn, event) - elif event.type == EventTypes.Topic: - self._store_room_topic_txn(txn, event) - elif event.type == EventTypes.Redaction: - self._store_redaction(txn, event) + + # We purposefully do this first since if we include a `current_state` + # key, we *want* to update the `current_state_events` table + if current_state: + txn.execute( + "DELETE FROM current_state_events WHERE room_id = ?", + (event.room_id,) + ) + + for s in current_state: + self._simple_insert_txn( + txn, + "current_state_events", + { + "event_id": s.event_id, + "room_id": s.room_id, + "type": s.type, + "state_key": s.state_key, + }, + or_replace=True, + ) + + if event.is_state() and is_new_state: + if not backfilled and not context.rejected: + self._simple_insert_txn( + txn, + table="state_forward_extremities", + values={ + "event_id": event.event_id, + "room_id": event.room_id, + "type": event.type, + "state_key": event.state_key, + }, + or_replace=True, + ) + + for prev_state_id, _ in event.prev_state: + self._simple_delete_txn( + txn, + table="state_forward_extremities", + keyvalues={ + "event_id": prev_state_id, + } + ) outlier = event.internal_metadata.is_outlier() + if not outlier: + self._store_state_groups_txn(txn, event, context) + + self._update_min_depth_for_room_txn( + txn, + event.room_id, + event.depth + ) + + self._handle_prev_events( + txn, + outlier=outlier, + event_id=event.event_id, + prev_events=event.prev_events, + room_id=event.room_id, + ) + have_persisted = self._simple_select_one_onecol_txn( txn, table="event_json", @@ -209,6 +260,17 @@ class DataStore(RoomMemberStore, RoomStore, ) return + if event.type == EventTypes.Member: + self._store_room_member_txn(txn, event) + elif event.type == EventTypes.Feedback: + self._store_feedback_txn(txn, event) + elif event.type == EventTypes.Name: + self._store_room_name_txn(txn, event) + elif event.type == EventTypes.Topic: + self._store_room_topic_txn(txn, 
event) + elif event.type == EventTypes.Redaction: + self._store_redaction(txn, event) + event_dict = { k: v for k, v in event.get_dict().items() @@ -273,41 +335,10 @@ class DataStore(RoomMemberStore, RoomStore, ) raise _RollbackButIsFineException("_persist_event") - self._handle_prev_events( - txn, - outlier=outlier, - event_id=event.event_id, - prev_events=event.prev_events, - room_id=event.room_id, - ) - - if not outlier: - self._store_state_groups_txn(txn, event, context) - if context.rejected: self._store_rejections_txn(txn, event.event_id, context.rejected) - if current_state: - txn.execute( - "DELETE FROM current_state_events WHERE room_id = ?", - (event.room_id,) - ) - - for s in current_state: - self._simple_insert_txn( - txn, - "current_state_events", - { - "event_id": s.event_id, - "room_id": s.room_id, - "type": s.type, - "state_key": s.state_key, - }, - or_replace=True, - ) - - is_state = hasattr(event, "state_key") and event.state_key is not None - if is_state: + if event.is_state(): vals = { "event_id": event.event_id, "room_id": event.room_id, @@ -315,6 +346,7 @@ class DataStore(RoomMemberStore, RoomStore, "state_key": event.state_key, } + # TODO: How does this work with backfilling? if hasattr(event, "replaces_state"): vals["prev_state"] = event.replaces_state @@ -351,28 +383,6 @@ class DataStore(RoomMemberStore, RoomStore, or_ignore=True, ) - if not backfilled and not context.rejected: - self._simple_insert_txn( - txn, - table="state_forward_extremities", - values={ - "event_id": event.event_id, - "room_id": event.room_id, - "type": event.type, - "state_key": event.state_key, - }, - or_replace=True, - ) - - for prev_state_id, _ in event.prev_state: - self._simple_delete_txn( - txn, - table="state_forward_extremities", - keyvalues={ - "event_id": prev_state_id, - } - ) - for hash_alg, hash_base64 in event.hashes.items(): hash_bytes = decode_base64(hash_base64) self._store_event_content_hash_txn( @@ -403,13 +413,6 @@ class DataStore(RoomMemberStore, RoomStore, txn, event.event_id, ref_alg, ref_hash_bytes ) - if not outlier: - self._update_min_depth_for_room_txn( - txn, - event.room_id, - event.depth - ) - def _store_redaction(self, txn, event): txn.execute( "INSERT OR IGNORE INTO redactions " From f275ba49bbcb86e111ed0d66dd99da617220ae79 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 10:36:28 +0000 Subject: [PATCH 21/33] Fix state resolution to remember join_rules is a type of auth event. 
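Because join rules gate membership, conflicted join_rules events need to be resolved before the conflicted membership events, with each pass feeding its winners back into the auth events used by the next. The sketch below compresses that ordering into one loop; resolve stands in for StateHandler._resolve_auth_events and the event/dict shapes are assumptions, not the real synapse.state code:

    def resolve_conflicts(conflicted_state, auth_events, resolve):
        """Resolve conflicted state keys in auth-sensitive order.

        conflicted_state: {(type, state_key): [candidate events]}
        auth_events:      {(type, state_key): event}, updated as we go
        resolve:          callable(events, auth_events) -> winning event
        """
        resolved = {}
        for auth_type in ("m.room.power_levels", "m.room.join_rules", "m.room.member"):
            for key, events in conflicted_state.items():
                if key[0] == auth_type and key not in resolved:
                    resolved[key] = resolve(events, auth_events)
            # Each pass feeds its winners into the auth events seen by the
            # next, so join rules influence how memberships are resolved.
            auth_events.update(resolved)
        return resolved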
--- synapse/state.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/synapse/state.py b/synapse/state.py index 8a056ee955..6a6fb8aea0 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -37,7 +37,10 @@ def _get_state_key_from_event(event): KeyStateTuple = namedtuple("KeyStateTuple", ("context", "type", "state_key")) -AuthEventTypes = (EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,) +AuthEventTypes = ( + EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels, + EventTypes.JoinRules, +) class StateHandler(object): @@ -258,6 +261,15 @@ class StateHandler(object): auth_events.update(resolved_state) + for key, events in conflicted_state.items(): + if key[0] == EventTypes.JoinRules: + resolved_state[key] = self._resolve_auth_events( + events, + auth_events + ) + + auth_events.update(resolved_state) + for key, events in conflicted_state.items(): if key[0] == EventTypes.Member: resolved_state[key] = self._resolve_auth_events( From 03d415a6a23300e36b5e6c35080ac4dd8ab06815 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 10:40:59 +0000 Subject: [PATCH 22/33] Brief comment on why we do some things on every call to persist_event and not others --- synapse/storage/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 30ce378900..a63c59a8a2 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -239,6 +239,12 @@ class DataStore(RoomMemberStore, RoomStore, event.internal_metadata.get_dict() ) + # If we have already persisted this event, we don't need to do any + # more processing. + # The processing above must be done on every call to persist event, + # since they might not have happened on previous calls. For example, + # if we are persisting an event that we had persisted as an outlier, + # but is no longer one. 
if have_persisted: if not outlier: sql = ( From ff78eded015b7596e883623bf826aa579662e766 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 13:55:10 +0000 Subject: [PATCH 23/33] Retry make_join --- synapse/federation/federation_client.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 5fac629709..d6b8c43916 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -251,16 +251,21 @@ class FederationClient(FederationBase): defer.returnValue(signed_auth) @defer.inlineCallbacks - def make_join(self, destination, room_id, user_id): - ret = yield self.transport_layer.make_join( - destination, room_id, user_id - ) + def make_join(self, destinations, room_id, user_id): + for destination in destinations: + try: + ret = yield self.transport_layer.make_join( + destination, room_id, user_id + ) - pdu_dict = ret["event"] + pdu_dict = ret["event"] - logger.debug("Got response to make_join: %s", pdu_dict) + logger.debug("Got response to make_join: %s", pdu_dict) - defer.returnValue(self.event_from_pdu_json(pdu_dict)) + defer.returnValue(self.event_from_pdu_json(pdu_dict)) + break + except Exception as e: + logger.warn("Failed to make_join via %s", destination) @defer.inlineCallbacks def send_join(self, destination, pdu): From 650e32d45580ddd13826364291bda6760c014df9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 14:06:42 +0000 Subject: [PATCH 24/33] Change context.auth_events to what the auth_events would be bases on context.current_state, rather than based on the auth_events from the event. --- synapse/api/auth.py | 12 ++++++------ synapse/handlers/federation.py | 4 +++- synapse/state.py | 8 ++++++-- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 3471afd7e7..7105ee21dc 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -358,7 +358,7 @@ class Auth(object): def add_auth_events(self, builder, context): yield run_on_reactor() - auth_ids = self.compute_auth_events(builder, context) + auth_ids = self.compute_auth_events(builder, context.current_state) auth_events_entries = yield self.store.add_event_hashes( auth_ids @@ -372,26 +372,26 @@ class Auth(object): if v.event_id in auth_ids } - def compute_auth_events(self, event, context): + def compute_auth_events(self, event, current_state): if event.type == EventTypes.Create: return [] auth_ids = [] key = (EventTypes.PowerLevels, "", ) - power_level_event = context.current_state.get(key) + power_level_event = current_state.get(key) if power_level_event: auth_ids.append(power_level_event.event_id) key = (EventTypes.JoinRules, "", ) - join_rule_event = context.current_state.get(key) + join_rule_event = current_state.get(key) key = (EventTypes.Member, event.user_id, ) - member_event = context.current_state.get(key) + member_event = current_state.get(key) key = (EventTypes.Create, "", ) - create_event = context.current_state.get(key) + create_event = current_state.get(key) if create_event: auth_ids.append(create_event.event_id) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0876589e31..2e2c23ef63 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -842,7 +842,9 @@ class FederationHandler(BaseHandler): logger.debug("Different auth: %s", different_auth) # 1. Get what we think is the auth chain. 
- auth_ids = self.auth.compute_auth_events(event, context) + auth_ids = self.auth.compute_auth_events( + event, context.current_state + ) local_auth_chain = yield self.store.get_auth_chain(auth_ids) try: diff --git a/synapse/state.py b/synapse/state.py index 6a6fb8aea0..695a5e7ac4 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -103,7 +103,9 @@ class StateHandler(object): context.state_group = None if hasattr(event, "auth_events") and event.auth_events: - auth_ids = zip(*event.auth_events)[0] + auth_ids = self.hs.get_auth().compute_auth_events( + event, context.current_state + ) context.auth_events = { k: v for k, v in context.current_state.items() @@ -149,7 +151,9 @@ class StateHandler(object): event.unsigned["replaces_state"] = replaces.event_id if hasattr(event, "auth_events") and event.auth_events: - auth_ids = zip(*event.auth_events)[0] + auth_ids = self.hs.get_auth().compute_auth_events( + event, context.current_state + ) context.auth_events = { k: v for k, v in context.current_state.items() From 95e2d2d36d6dfb7205c40fa8e59ef350f8096395 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 15:02:23 +0000 Subject: [PATCH 25/33] When returning lists of servers from alias lookups, put the current server first in the list --- synapse/handlers/directory.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 58e9a91562..7b60921040 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -113,7 +113,16 @@ class DirectoryHandler(BaseHandler): ) extra_servers = yield self.store.get_joined_hosts_for_room(room_id) - servers = list(set(extra_servers) | set(servers)) + servers = set(extra_servers) | set(servers) + + # If this server is in the list of servers, return it first. 
+ if self.server_name in servers: + servers = ( + [self.server_name] + + [s for s in servers if s != self.server_name] + ) + else: + servers = list(servers) defer.returnValue({ "room_id": room_id, From 2e77ba637a75adcf0681e04c281f4fea74ebec6b Mon Sep 17 00:00:00 2001 From: David Baker Date: Wed, 4 Feb 2015 16:24:15 +0000 Subject: [PATCH 26/33] More s/instance_handle/profile_tag/ --- synapse/rest/client/v1/push_rule.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/rest/client/v1/push_rule.py b/synapse/rest/client/v1/push_rule.py index 348adb9c0d..5582f33c8e 100644 --- a/synapse/rest/client/v1/push_rule.py +++ b/synapse/rest/client/v1/push_rule.py @@ -73,7 +73,7 @@ class PushRuleRestServlet(ClientV1RestServlet): 'rule_id': rule_id } if device: - spec['device'] = device + spec['profile_tag'] = device return spec def rule_tuple_from_request_object(self, rule_template, rule_id, req_obj, device=None): @@ -188,15 +188,15 @@ class PushRuleRestServlet(ClientV1RestServlet): user, _ = yield self.auth.get_user_by_req(request) - if 'device' in spec: + if 'profile_tag' in spec: rules = yield self.hs.get_datastore().get_push_rules_for_user_name( user.to_string() ) for r in rules: conditions = json.loads(r['conditions']) - ih = _profile_tag_from_conditions(conditions) - if ih == spec['device'] and r['priority_class'] == priority_class: + pt = _profile_tag_from_conditions(conditions) + if pt == spec['profile_tag'] and r['priority_class'] == priority_class: yield self.hs.get_datastore().delete_push_rule( user.to_string(), spec['rule_id'] ) @@ -306,7 +306,7 @@ def _add_empty_priority_class_arrays(d): def _profile_tag_from_conditions(conditions): """ - Given a list of conditions, return the instance handle of the + Given a list of conditions, return the profile tag of the device rule if there is one """ for c in conditions: From ae46f10fc5dba0f81518e3144ab8d9ed7a7d03bc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 16:28:12 +0000 Subject: [PATCH 27/33] Apply sanity to the transport client interface. 
Convert 'make_join' and 'send_join' to accept iterables of destinations --- synapse/api/errors.py | 8 ++- synapse/federation/federation_client.py | 82 +++++++++++++++--------- synapse/federation/transaction_queue.py | 23 +++++-- synapse/federation/transport/client.py | 84 +++++++++++-------------- synapse/handlers/federation.py | 4 +- synapse/http/matrixfederationclient.py | 42 +++++++++++-- 6 files changed, 151 insertions(+), 92 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index ad478aa6b7..5041828f18 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -39,7 +39,7 @@ class Codes(object): TOO_LARGE = "M_TOO_LARGE" -class CodeMessageException(Exception): +class CodeMessageException(RuntimeError): """An exception with integer code and message string attributes.""" def __init__(self, code, msg): @@ -227,3 +227,9 @@ class FederationError(RuntimeError): "affected": self.affected, "source": self.source if self.source else self.affected, } + + +class HttpResponseException(CodeMessageException): + def __init__(self, code, msg, response): + self.response = response + super(HttpResponseException, self).__init__(code, msg) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d6b8c43916..eb36ec040b 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -19,6 +19,7 @@ from twisted.internet import defer from .federation_base import FederationBase from .units import Edu +from synapse.api.errors import CodeMessageException from synapse.util.logutils import log_function from synapse.events import FrozenEvent @@ -180,7 +181,8 @@ class FederationClient(FederationBase): pdu = yield self._check_sigs_and_hash(pdu) break - + except CodeMessageException: + raise except Exception as e: logger.info( "Failed to get PDU %s from %s because %s", @@ -264,45 +266,63 @@ class FederationClient(FederationBase): defer.returnValue(self.event_from_pdu_json(pdu_dict)) break - except Exception as e: - logger.warn("Failed to make_join via %s", destination) + except CodeMessageException: + raise + except RuntimeError as e: + logger.warn( + "Failed to make_join via %s: %s", + destination, e.message + ) + + raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks - def send_join(self, destination, pdu): - time_now = self._clock.time_msec() - _, content = yield self.transport_layer.send_join( - destination=destination, - room_id=pdu.room_id, - event_id=pdu.event_id, - content=pdu.get_pdu_json(time_now), - ) + def send_join(self, destinations, pdu): + for destination in destinations: + try: + time_now = self._clock.time_msec() + _, content = yield self.transport_layer.send_join( + destination=destination, + room_id=pdu.room_id, + event_id=pdu.event_id, + content=pdu.get_pdu_json(time_now), + ) - logger.debug("Got content: %s", content) + logger.debug("Got content: %s", content) - state = [ - self.event_from_pdu_json(p, outlier=True) - for p in content.get("state", []) - ] + state = [ + self.event_from_pdu_json(p, outlier=True) + for p in content.get("state", []) + ] - auth_chain = [ - self.event_from_pdu_json(p, outlier=True) - for p in content.get("auth_chain", []) - ] + auth_chain = [ + self.event_from_pdu_json(p, outlier=True) + for p in content.get("auth_chain", []) + ] - signed_state = yield self._check_sigs_and_hash_and_fetch( - destination, state, outlier=True - ) + signed_state = yield self._check_sigs_and_hash_and_fetch( + destination, state, outlier=True + ) - signed_auth = 
yield self._check_sigs_and_hash_and_fetch( - destination, auth_chain, outlier=True - ) + signed_auth = yield self._check_sigs_and_hash_and_fetch( + destination, auth_chain, outlier=True + ) - auth_chain.sort(key=lambda e: e.depth) + auth_chain.sort(key=lambda e: e.depth) - defer.returnValue({ - "state": signed_state, - "auth_chain": signed_auth, - }) + defer.returnValue({ + "state": signed_state, + "auth_chain": signed_auth, + }) + except CodeMessageException: + raise + except RuntimeError as e: + logger.warn( + "Failed to send_join via %s: %s", + destination, e.message + ) + + raise RuntimeError("Failed to send to any server.") @defer.inlineCallbacks def send_invite(self, destination, room_id, event_id, pdu): diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 9d4f2c09a2..f38aeba7cc 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -19,6 +19,7 @@ from twisted.internet import defer from .persistence import TransactionActions from .units import Transaction +from synapse.api.errors import HttpResponseException from synapse.util.logutils import log_function from synapse.util.logcontext import PreserveLoggingContext @@ -238,9 +239,14 @@ class TransactionQueue(object): del p["age_ts"] return data - code, response = yield self.transport_layer.send_transaction( - transaction, json_data_cb - ) + try: + response = yield self.transport_layer.send_transaction( + transaction, json_data_cb + ) + code = 200 + except HttpResponseException as e: + code = e.code + response = e.response logger.info("TX [%s] got %d response", destination, code) @@ -274,8 +280,7 @@ class TransactionQueue(object): pass logger.debug("TX [%s] Yielded to callbacks", destination) - - except Exception as e: + except RuntimeError as e: # We capture this here as there as nothing actually listens # for this finishing functions deferred. logger.warn( @@ -283,6 +288,14 @@ class TransactionQueue(object): destination, e, ) + except Exception as e: + # We capture this here as there as nothing actually listens + # for this finishing functions deferred. + logger.exception( + "TX [%s] Problem in _attempt_transaction: %s", + destination, + e, + ) self.set_retrying(destination, retry_interval) diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 4cb1dea2de..8b137e7128 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -19,7 +19,6 @@ from synapse.api.urls import FEDERATION_PREFIX as PREFIX from synapse.util.logutils import log_function import logging -import json logger = logging.getLogger(__name__) @@ -129,7 +128,7 @@ class TransportLayerClient(object): # generated by the json_data_callback. 
json_data = transaction.get_dict() - code, response = yield self.client.put_json( + response = yield self.client.put_json( transaction.destination, path=PREFIX + "/send/%s/" % transaction.transaction_id, data=json_data, @@ -137,95 +136,86 @@ class TransportLayerClient(object): ) logger.debug( - "send_data dest=%s, txid=%s, got response: %d", - transaction.destination, transaction.transaction_id, code + "send_data dest=%s, txid=%s, got response: 200", + transaction.destination, transaction.transaction_id, ) - defer.returnValue((code, response)) + defer.returnValue(response) @defer.inlineCallbacks @log_function def make_query(self, destination, query_type, args, retry_on_dns_fail): path = PREFIX + "/query/%s" % query_type - response = yield self.client.get_json( + content = yield self.client.get_json( destination=destination, path=path, args=args, retry_on_dns_fail=retry_on_dns_fail, ) - defer.returnValue(response) + defer.returnValue(content) @defer.inlineCallbacks @log_function def make_join(self, destination, room_id, user_id, retry_on_dns_fail=True): path = PREFIX + "/make_join/%s/%s" % (room_id, user_id) - response = yield self.client.get_json( + content = yield self.client.get_json( destination=destination, path=path, retry_on_dns_fail=retry_on_dns_fail, ) - defer.returnValue(response) + defer.returnValue(content) @defer.inlineCallbacks @log_function def send_join(self, destination, room_id, event_id, content): path = PREFIX + "/send_join/%s/%s" % (room_id, event_id) - code, content = yield self.client.put_json( + response = yield self.client.put_json( destination=destination, path=path, data=content, ) - if not 200 <= code < 300: - raise RuntimeError("Got %d from send_join", code) - - defer.returnValue(json.loads(content)) - - @defer.inlineCallbacks - @log_function - def send_invite(self, destination, room_id, event_id, content): - path = PREFIX + "/invite/%s/%s" % (room_id, event_id) - - code, content = yield self.client.put_json( - destination=destination, - path=path, - data=content, - ) - - if not 200 <= code < 300: - raise RuntimeError("Got %d from send_invite", code) - - defer.returnValue(json.loads(content)) - - @defer.inlineCallbacks - @log_function - def get_event_auth(self, destination, room_id, event_id): - path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id) - - response = yield self.client.get_json( - destination=destination, - path=path, - ) - defer.returnValue(response) @defer.inlineCallbacks @log_function - def send_query_auth(self, destination, room_id, event_id, content): - path = PREFIX + "/query_auth/%s/%s" % (room_id, event_id) + def send_invite(self, destination, room_id, event_id, content): + path = PREFIX + "/invite/%s/%s" % (room_id, event_id) - code, content = yield self.client.post_json( + response = yield self.client.put_json( destination=destination, path=path, data=content, ) - if not 200 <= code < 300: - raise RuntimeError("Got %d from send_invite", code) + defer.returnValue(response) - defer.returnValue(json.loads(content)) + @defer.inlineCallbacks + @log_function + def get_event_auth(self, destination, room_id, event_id): + path = PREFIX + "/event_auth/%s/%s" % (room_id, event_id) + + content = yield self.client.get_json( + destination=destination, + path=path, + ) + + defer.returnValue(content) + + @defer.inlineCallbacks + @log_function + def send_query_auth(self, destination, room_id, event_id, content): + path = PREFIX + "/query_auth/%s/%s" % (room_id, event_id) + + content = yield self.client.post_json( + destination=destination, + path=path, 
+ data=content, + ) + + defer.returnValue(content) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0876589e31..a968a87360 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -288,7 +288,7 @@ class FederationHandler(BaseHandler): logger.debug("Joining %s to %s", joinee, room_id) pdu = yield self.replication_layer.make_join( - target_host, + [target_host], room_id, joinee ) @@ -331,7 +331,7 @@ class FederationHandler(BaseHandler): new_event = builder.build() ret = yield self.replication_layer.send_join( - target_host, + [target_host], new_event ) diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index c7bf1b47b8..8559d06b7f 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -27,7 +27,9 @@ from synapse.util.logcontext import PreserveLoggingContext from syutil.jsonutil import encode_canonical_json -from synapse.api.errors import CodeMessageException, SynapseError, Codes +from synapse.api.errors import ( + SynapseError, Codes, HttpResponseException, +) from syutil.crypto.jsonsign import sign_json @@ -163,13 +165,12 @@ class MatrixFederationHttpClient(object): ) if 200 <= response.code < 300: - # We need to update the transactions table to say it was sent? pass else: # :'( # Update transactions table? - raise CodeMessageException( - response.code, response.phrase + raise HttpResponseException( + response.code, response.phrase, response ) defer.returnValue(response) @@ -238,11 +239,20 @@ class MatrixFederationHttpClient(object): headers_dict={"Content-Type": ["application/json"]}, ) + if 200 <= response.code < 300: + # We need to update the transactions table to say it was sent? + c_type = response.headers.getRawHeaders("Content-Type") + + if "application/json" not in c_type: + raise RuntimeError( + "Content-Type not application/json" + ) + logger.debug("Getting resp body") body = yield readBody(response) logger.debug("Got resp body") - defer.returnValue((response.code, body)) + defer.returnValue(json.loads(body)) @defer.inlineCallbacks def post_json(self, destination, path, data={}): @@ -275,11 +285,20 @@ class MatrixFederationHttpClient(object): headers_dict={"Content-Type": ["application/json"]}, ) + if 200 <= response.code < 300: + # We need to update the transactions table to say it was sent? + c_type = response.headers.getRawHeaders("Content-Type") + + if "application/json" not in c_type: + raise RuntimeError( + "Content-Type not application/json" + ) + logger.debug("Getting resp body") body = yield readBody(response) logger.debug("Got resp body") - defer.returnValue((response.code, body)) + defer.returnValue(json.loads(body)) @defer.inlineCallbacks def get_json(self, destination, path, args={}, retry_on_dns_fail=True): @@ -321,7 +340,18 @@ class MatrixFederationHttpClient(object): retry_on_dns_fail=retry_on_dns_fail ) + if 200 <= response.code < 300: + # We need to update the transactions table to say it was sent? + c_type = response.headers.getRawHeaders("Content-Type") + + if "application/json" not in c_type: + raise RuntimeError( + "Content-Type not application/json" + ) + + logger.debug("Getting resp body") body = yield readBody(response) + logger.debug("Got resp body") defer.returnValue(json.loads(body)) From 6de799422db434bab4c687cbb465cfb730601d86 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 17:39:38 +0000 Subject: [PATCH 28/33] Mention new pydenticon dep. 
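pydenticon generates deterministic identicon images from a hash of an input string, which is what the new dependency provides here. A hedged usage sketch, assuming pydenticon's Generator API and a Pillow backend (neither is part of this patch; the colours, sizes and user ID are arbitrary examples):

    import hashlib

    import pydenticon  # the new dependency noted in CHANGES.rst

    # 5x5 block identicon generator.
    generator = pydenticon.Generator(
        5, 5,
        digest=hashlib.sha1,
        foreground=["rgb(45,79,255)", "rgb(254,180,44)", "rgb(226,121,234)"],
        background="rgb(224,224,224)",
    )

    # Returns PNG bytes derived deterministically from the input string.
    png_bytes = generator.generate("@alice:example.com", 96, 96, output_format="png")
    with open("identicon.png", "wb") as f:
        f.write(png_bytes)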
--- CHANGES.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGES.rst b/CHANGES.rst index 297ae914fd..922fa5b035 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -1,3 +1,8 @@ +Changes in develop +================== + + * pydenticon support -- adds dep on pydenticon + Changes in synapse 0.6.1 (2015-01-07) ===================================== From f292ad4b2bc8fbd3d86f26236714cff53c47e9c2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 4 Feb 2015 18:09:18 +0000 Subject: [PATCH 29/33] Add script to check and auth chain and current state of a room --- scripts/check_auth.py | 65 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 scripts/check_auth.py diff --git a/scripts/check_auth.py b/scripts/check_auth.py new file mode 100644 index 0000000000..341f00e719 --- /dev/null +++ b/scripts/check_auth.py @@ -0,0 +1,65 @@ +from synapse.events import FrozenEvent +from synapse.api.auth import Auth + +from mock import Mock + +import argparse +import itertools +import json +import sys + + + +def check_auth(auth, auth_chain, events): + auth_chain.sort(key=lambda e: e.depth) + + auth_map = { + e.event_id: e + for e in auth_chain + } + + create_events = {} + for e in auth_chain: + if e.type == "m.room.create": + create_events[e.room_id] = e + + for e in itertools.chain(auth_chain, events): + auth_events_list = [auth_map[i] for i, _ in e.auth_events] + + auth_events = { + (e.type, e.state_key): e + for e in auth_events_list + } + + auth_events[("m.room.create", "")] = create_events[e.room_id] + + try: + auth.check(e, auth_events=auth_events) + except Exception as ex: + print "Failed:", e.event_id, e.type, e.state_key + print ex + print json.dumps(e.get_dict(), sort_keys=True, indent=4) + # raise + print "Success:", e.event_id, e.type, e.state_key + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + + parser.add_argument( + 'json', + nargs='?', + type=argparse.FileType('r'), + default=sys.stdin, + ) + + args = parser.parse_args() + + js = json.load(args.json) + + + auth = Auth(Mock()) + check_auth( + auth, + [FrozenEvent(d) for d in js["auth_chain"]], + [FrozenEvent(d) for d in js["pdus"]], + ) From 6a7e168009b6631fb7deb6bac5351085e993e620 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Feb 2015 11:25:20 +0000 Subject: [PATCH 30/33] Print out the auth events on failure --- scripts/check_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/check_auth.py b/scripts/check_auth.py index 341f00e719..b889ac7fa7 100644 --- a/scripts/check_auth.py +++ b/scripts/check_auth.py @@ -9,7 +9,6 @@ import json import sys - def check_auth(auth, auth_chain, events): auth_chain.sort(key=lambda e: e.depth) @@ -37,6 +36,7 @@ def check_auth(auth, auth_chain, events): auth.check(e, auth_events=auth_events) except Exception as ex: print "Failed:", e.event_id, e.type, e.state_key + print "Auth_events:", auth_events print ex print json.dumps(e.get_dict(), sort_keys=True, indent=4) # raise From 26a041541baed887dc069c3667a86ddef81802bc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Feb 2015 13:17:05 +0000 Subject: [PATCH 31/33] SYN-202: Log as WARN the 404 'Presence information not visible' errors instead of as ERROR since they were spamming the logs --- synapse/handlers/message.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 6fbd2af4ab..3f51f38f18 100644 --- a/synapse/handlers/message.py +++ 
b/synapse/handlers/message.py @@ -16,7 +16,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.api.errors import RoomError +from synapse.api.errors import RoomError, SynapseError from synapse.streams.config import PaginationConfig from synapse.events.utils import serialize_event from synapse.events.validator import EventValidator @@ -372,10 +372,17 @@ class MessageHandler(BaseHandler): as_event=True, ) presence.append(member_presence) - except Exception: - logger.exception( - "Failed to get member presence of %r", m.user_id - ) + except SynapseError as e: + if e.code == 404: + # FIXME: We are doing this as a warn since this gets hit a + # lot and spams the logs. Why is this happening? + logger.warn( + "Failed to get member presence of %r", m.user_id + ) + else: + logger.exception( + "Failed to get member presence of %r", m.user_id + ) time_now = self.clock.time_msec() From e1515c3e91f2117adc3976b5e606728560ce9e96 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Feb 2015 13:43:28 +0000 Subject: [PATCH 32/33] Pass through list of room hosts from room alias query to federation so that it can retry against different room hosts --- synapse/federation/federation_client.py | 5 ++++- synapse/handlers/federation.py | 20 +++++++++++++------- synapse/handlers/room.py | 12 +++++------- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index eb36ec040b..9923b3fc0d 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -264,7 +264,9 @@ class FederationClient(FederationBase): logger.debug("Got response to make_join: %s", pdu_dict) - defer.returnValue(self.event_from_pdu_json(pdu_dict)) + defer.returnValue( + (destination, self.event_from_pdu_json(pdu_dict)) + ) break except CodeMessageException: raise @@ -313,6 +315,7 @@ class FederationClient(FederationBase): defer.returnValue({ "state": signed_state, "auth_chain": signed_auth, + "origin": destination, }) except CodeMessageException: raise diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 04a4689483..aba266c2bc 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -273,7 +273,7 @@ class FederationHandler(BaseHandler): @log_function @defer.inlineCallbacks - def do_invite_join(self, target_host, room_id, joinee, content, snapshot): + def do_invite_join(self, target_hosts, room_id, joinee, content, snapshot): """ Attempts to join the `joinee` to the room `room_id` via the server `target_host`. @@ -287,8 +287,8 @@ class FederationHandler(BaseHandler): """ logger.debug("Joining %s to %s", joinee, room_id) - pdu = yield self.replication_layer.make_join( - [target_host], + origin, pdu = yield self.replication_layer.make_join( + target_hosts, room_id, joinee ) @@ -330,11 +330,17 @@ class FederationHandler(BaseHandler): new_event = builder.build() + # Try the host we successfully got a response to /make_join/ + # request first. 
+ target_hosts.remove(origin) + target_hosts.insert(0, origin) + ret = yield self.replication_layer.send_join( - [target_host], + target_hosts, new_event ) + origin = ret["origin"] state = ret["state"] auth_chain = ret["auth_chain"] auth_chain.sort(key=lambda e: e.depth) @@ -371,7 +377,7 @@ class FederationHandler(BaseHandler): if e.event_id in auth_ids } yield self._handle_new_event( - target_host, e, auth_events=auth + origin, e, auth_events=auth ) except: logger.exception( @@ -391,7 +397,7 @@ class FederationHandler(BaseHandler): if e.event_id in auth_ids } yield self._handle_new_event( - target_host, e, auth_events=auth + origin, e, auth_events=auth ) except: logger.exception( @@ -406,7 +412,7 @@ class FederationHandler(BaseHandler): } yield self._handle_new_event( - target_host, + origin, new_event, state=state, current_state=state, diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 23821d321f..0369b907a5 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -389,8 +389,6 @@ class RoomMemberHandler(BaseHandler): if not hosts: raise SynapseError(404, "No known servers") - host = hosts[0] - # If event doesn't include a display name, add one. yield self.distributor.fire( "collect_presencelike_data", joinee, content @@ -407,12 +405,12 @@ class RoomMemberHandler(BaseHandler): }) event, context = yield self._create_new_client_event(builder) - yield self._do_join(event, context, room_host=host, do_auth=True) + yield self._do_join(event, context, room_hosts=hosts, do_auth=True) defer.returnValue({"room_id": room_id}) @defer.inlineCallbacks - def _do_join(self, event, context, room_host=None, do_auth=True): + def _do_join(self, event, context, room_hosts=None, do_auth=True): joinee = UserID.from_string(event.state_key) # room_id = RoomID.from_string(event.room_id, self.hs) room_id = event.room_id @@ -441,7 +439,7 @@ class RoomMemberHandler(BaseHandler): if is_host_in_room: should_do_dance = False - elif room_host: # TODO: Shouldn't this be remote_room_host? + elif room_hosts: # TODO: Shouldn't this be remote_room_host? 
should_do_dance = True else: # TODO(markjh): get prev_state from snapshot @@ -453,7 +451,7 @@ class RoomMemberHandler(BaseHandler): inviter = UserID.from_string(prev_state.user_id) should_do_dance = not self.hs.is_mine(inviter) - room_host = inviter.domain + room_hosts = [inviter.domain] else: # return the same error as join_room_alias does raise SynapseError(404, "No known servers") @@ -461,7 +459,7 @@ class RoomMemberHandler(BaseHandler): if should_do_dance: handler = self.hs.get_handlers().federation_handler yield handler.do_invite_join( - room_host, + room_hosts, room_id, event.user_id, event.get_dict()["content"], # FIXME To get a non-frozen dict From e9c85a4d5ab332021f93634182ad8ed93bd0091c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 5 Feb 2015 13:50:15 +0000 Subject: [PATCH 33/33] Connection errors in twisted aren't RuntimeErrors --- synapse/federation/federation_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index 9923b3fc0d..70c9a6f46b 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -270,7 +270,7 @@ class FederationClient(FederationBase): break except CodeMessageException: raise - except RuntimeError as e: + except Exception as e: logger.warn( "Failed to make_join via %s: %s", destination, e.message @@ -319,7 +319,7 @@ class FederationClient(FederationBase): }) except CodeMessageException: raise - except RuntimeError as e: + except Exception as e: logger.warn( "Failed to send_join via %s: %s", destination, e.message