From 42368ea8db5a2e6e15679cfc17b3c90075ea66ab Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 18 May 2016 11:38:00 +0100
Subject: [PATCH 01/18] Add desc to get_presence_for_users

---
 synapse/storage/presence.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/synapse/storage/presence.py b/synapse/storage/presence.py
index 07f5fae8dd..3fab57a7e8 100644
--- a/synapse/storage/presence.py
+++ b/synapse/storage/presence.py
@@ -149,6 +149,7 @@ class PresenceStore(SQLBaseStore):
                 "status_msg",
                 "currently_active",
             ),
+            desc="get_presence_for_users",
         )
 
         for row in rows:

From e837df6adb3a3b103b5fbc4f55e110af3a2fcc71 Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Wed, 18 May 2016 11:53:25 +0100
Subject: [PATCH 02/18] increment badge count per missed convo, not per msg

---
 synapse/push/push_tools.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/synapse/push/push_tools.py b/synapse/push/push_tools.py
index e71d01e77d..89a3b5e90a 100644
--- a/synapse/push/push_tools.py
+++ b/synapse/push/push_tools.py
@@ -38,7 +38,9 @@ def get_badge_count(store, user_id):
                     r.room_id, user_id, last_unread_event_id
                 )
             )
-            badge += notifs["notify_count"]
+            # return one badge count per conversation, as count per
+            # message is so noisy as to be almost useless
+            badge += 1 if notifs["notify_count"] else 0
 
     defer.returnValue(badge)
 

From 332d7e9b97ca8dabf2640c16709b5ac3a16559b7 Mon Sep 17 00:00:00 2001
From: Kegan Dougal
Date: Thu, 19 May 2016 13:50:52 +0100
Subject: [PATCH 03/18] Allow clients to specify a server_name to avoid 'No
 known servers'

Multiple server_names are supported via ?server_name=foo&server_name=bar
---
 synapse/rest/client/v1/room.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py
index cf478c6f79..644aa4e513 100644
--- a/synapse/rest/client/v1/room.py
+++ b/synapse/rest/client/v1/room.py
@@ -232,7 +232,10 @@ class JoinRoomAliasServlet(ClientV1RestServlet):
 
         if RoomID.is_valid(room_identifier):
             room_id = room_identifier
-            remote_room_hosts = None
+            try:
+                remote_room_hosts = request.args["server_name"]
+            except:
+                remote_room_hosts = None
         elif RoomAlias.is_valid(room_identifier):
             handler = self.handlers.room_member_handler
             room_alias = RoomAlias.from_string(room_identifier)

From 149fa411e24a30b738094e40f02f18c2b9230cbe Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 20 May 2016 15:25:12 +0100
Subject: [PATCH 04/18] Only delete push actions after 30 days

---
 synapse/storage/event_push_actions.py | 42 +++++++++++++++++++++++----
 synapse/storage/receipts.py           |  2 +-
 2 files changed, 38 insertions(+), 6 deletions(-)

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 9705db5c47..336c03c68a 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -22,6 +22,8 @@ import ujson as json
 
 logger = logging.getLogger(__name__)
 
+KEEP_PUSH_ACTIONS_FOR_MS = 30 * 24 * 60 * 60 * 1000
+
 
 class EventPushActionsStore(SQLBaseStore):
     def _set_push_actions_for_event_and_users_txn(self, txn, event, tuples):
@@ -224,16 +226,46 @@ class EventPushActionsStore(SQLBaseStore):
             (room_id, event_id)
         )
 
-    def _remove_push_actions_before_txn(self, txn, room_id, user_id,
-                                        topological_ordering):
+    def _remove_old_push_actions_before_txn(self, txn, room_id, user_id,
+                                            topological_ordering):
+        """
+        Purges old, stale push actions for a user and room before a given
+        topological_ordering
+        Args:
+            txn: The transcation
+            room_id: Room ID to delete from
+            user_id: user ID to delete for
+            topological_ordering: The lowest topological ordering which will
+                                  not be deleted.
+
+        Returns:
+
+        """
         txn.call_after(
             self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
             (room_id, user_id, )
         )
+
+        threshold = self._clock.time_msec() - KEEP_PUSH_ACTIONS_FOR_MS
+
+        # We need to join on the events table to get the received_ts for
+        # event_push_actions and sqlite won't let us use a join in a delete so
+        # we can't just delete where received_ts < x. Furthermore we can
+        # only identify event_push_actions by a tuple of room_id, event_id
+        # we we can't use a subquery.
+        # Instead, we look up the stream ordering for the last event in that
+        # room received before the threshold time and delete event_push_actions
+        # in the room with a stream_odering before that.
         txn.execute(
-            "DELETE FROM event_push_actions"
-            " WHERE room_id = ? AND user_id = ? AND topological_ordering < ?",
-            (room_id, user_id, topological_ordering,)
+            "DELETE FROM event_push_actions "
+            " WHERE user_id = ? AND room_id = ? AND "
+            " topological_ordering < ? AND stream_ordering < ("
+                " SELECT stream_ordering FROM events"
+                " WHERE room_id = ? AND received_ts < ?"
+                " ORDER BY stream_ordering DESC"
+                " LIMIT 1"
+            ")",
+            (user_id, room_id, topological_ordering, room_id, threshold)
         )
 
diff --git a/synapse/storage/receipts.py b/synapse/storage/receipts.py
index fdcf28f3e1..f1774f0e44 100644
--- a/synapse/storage/receipts.py
+++ b/synapse/storage/receipts.py
@@ -297,7 +297,7 @@ class ReceiptsStore(SQLBaseStore):
         )
 
         if receipt_type == "m.read" and topological_ordering:
-            self._remove_push_actions_before_txn(
+            self._remove_old_push_actions_before_txn(
                 txn,
                 room_id=room_id,
                 user_id=user_id,

From d4503e25ed01b6053bd5bb503f858a2ab934e350 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 20 May 2016 17:56:10 +0100
Subject: [PATCH 05/18] Make deleting push actions more efficient

There's no index on received_ts, so manually binary search using the
stream_ordering index, and only update it once an hour.
---
 synapse/storage/__init__.py           |  9 ++++
 synapse/storage/_base.py              |  1 -
 synapse/storage/event_push_actions.py | 71 ++++++++++++++++++++++-----
 3 files changed, 68 insertions(+), 13 deletions(-)

diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index d970fde9e8..49feb77779 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -88,6 +88,7 @@ class DataStore(RoomMemberStore, RoomStore,
     def __init__(self, db_conn, hs):
         self.hs = hs
+        self._clock = hs.get_clock()
         self.database_engine = hs.database_engine
 
         self.client_ip_last_seen = Cache(
@@ -173,6 +174,14 @@ class DataStore(RoomMemberStore, RoomStore,
             prefilled_cache=push_rules_prefill,
         )
 
+        cur = db_conn.cursor()
+        self._find_stream_orderings_for_times_txn(cur)
+        cur.close()
+
+        self.find_stream_orderings_looping_call = self._clock.looping_call(
+            self._find_stream_orderings_for_times, 60 * 60 * 1000
+        )
+
         super(DataStore, self).__init__(hs)
 
     def take_presence_startup_info(self):
diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py
index e0d7098692..56a0dd80f3 100644
--- a/synapse/storage/_base.py
+++ b/synapse/storage/_base.py
@@ -153,7 +153,6 @@ class SQLBaseStore(object):
     def __init__(self, hs):
         self.hs = hs
         self._db_pool = hs.get_db_pool()
-        self._clock = hs.get_clock()
 
         self._previous_txn_total_time = 0
         self._current_txn_total_time = 0
diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 336c03c68a..4425d4bce5 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -22,10 +22,12 @@ import ujson as json
 
 logger = logging.getLogger(__name__)
 
-KEEP_PUSH_ACTIONS_FOR_MS = 30 * 24 * 60 * 60 * 1000
-
 
 class EventPushActionsStore(SQLBaseStore):
+    def __init__(self, hs):
+        self.stream_ordering_month_ago = None
+        super(EventPushActionsStore, self).__init__(hs)
+
     def _set_push_actions_for_event_and_users_txn(self, txn, event, tuples):
         """
         Args:
@@ -237,9 +239,6 @@ class EventPushActionsStore(SQLBaseStore):
             user_id: user ID to delete for
             topological_ordering: The lowest topological ordering which will
                                   not be deleted.
-
-        Returns:
-
         """
         txn.call_after(
             self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
@@ -259,15 +258,63 @@ class EventPushActionsStore(SQLBaseStore):
         txn.execute(
             "DELETE FROM event_push_actions "
             " WHERE user_id = ? AND room_id = ? AND "
-            " topological_ordering < ? AND stream_ordering < ("
-                " SELECT stream_ordering FROM events"
-                " WHERE room_id = ? AND received_ts < ?"
-                " ORDER BY stream_ordering DESC"
-                " LIMIT 1"
-            ")",
-            (user_id, room_id, topological_ordering, room_id, threshold)
+            " topological_ordering < ? AND stream_ordering < ?"
+            (user_id, room_id, topological_ordering, self.stream_ordering_month_ago)
         )
 
+    @defer.inlineCallbacks
+    def _find_stream_orderings_for_times(self):
+        yield self.runInteraction(
+            "_find_stream_orderings_for_times",
+            self._find_stream_orderings_for_times_txn
+        )
+
+    def _find_stream_orderings_for_times_txn(self, txn):
+        logger.info("Searching for stream ordering 1 month ago")
+        self.stream_ordering_month_ago = self._find_first_stream_ordering_after_ts_txn(
+            txn, self._clock.time_msec() - 30 * 24 * 60 * 60 * 1000
+        )
+        logger.info(
+            "Found stream ordering 1 month ago: it's %d",
+            self.stream_ordering_month_ago
+        )
+
+    def _find_first_stream_ordering_after_ts_txn(self, txn, ts):
+        """
+        Find the stream_ordering of the first event that was received after
+        a given timestamp. This is relatively slow as there is no index on
+        received_ts but we can then use this to delete push actions before
+        this.
+
+        received_ts must necessarily be in the same order as stream_ordering
+        and stream_ordering is indexed, so we manually binary search using
+        stream_ordering
+        """
+        txn.execute("SELECT MAX(stream_ordering) FROM events")
+        max_stream_ordering = txn.fetchone()[0]
+
+        range_start = 0
+        range_end = max_stream_ordering
+
+        sql = (
+            "SELECT received_ts FROM events"
+            " WHERE stream_ordering > ?"
+            " ORDER BY stream_ordering"
+            " LIMIT 1"
+        )
+
+        while range_end - range_start > 1:
+            middle = int((range_end + range_start) / 2)
+            txn.execute(sql, (middle,))
+            middle_ts = txn.fetchone()[0]
+            if ts > middle_ts:
+                range_start = middle
+            else:
+                range_end = middle
+        logger.info("done: picking %d from %d and %d", range_end, range_start, range_end)
+
+        return range_end
+
 
 def _action_has_highlight(actions):
     for action in actions:

From 18d68bfee40078e4195adc43c93872d4a6facaf8 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 20 May 2016 17:58:09 +0100
Subject: [PATCH 06/18] Handle empty events table

---
 synapse/storage/event_push_actions.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 4425d4bce5..d630a846c9 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -293,6 +293,9 @@ class EventPushActionsStore(SQLBaseStore):
         txn.execute("SELECT MAX(stream_ordering) FROM events")
         max_stream_ordering = txn.fetchone()[0]
 
+        if max_stream_ordering is None:
+            return 0
+
         range_start = 0
         range_end = max_stream_ordering
 

From ccffb0965de8a567ba56eeae5c16945921268802 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 20 May 2016 17:59:10 +0100
Subject: [PATCH 07/18] Remove stale line

---
 synapse/storage/event_push_actions.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index d630a846c9..cb38d5f525 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -245,8 +245,6 @@ class EventPushActionsStore(SQLBaseStore):
         txn.call_after(
             self.get_unread_event_push_actions_by_room_for_user.invalidate_many,
             (room_id, user_id, )
         )
 
-        threshold = self._clock.time_msec() - KEEP_PUSH_ACTIONS_FOR_MS
-
         # We need to join on the events table to get the received_ts for

From c2da3406fcadefa3efbd302ae22a21c7ffbac9ce Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 20 May 2016 18:03:31 +0100
Subject: [PATCH 08/18] Oops, missing comma

---
 synapse/storage/event_push_actions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index cb38d5f525..88f9b4b642 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -256,7 +256,7 @@ class EventPushActionsStore(SQLBaseStore):
         txn.execute(
             "DELETE FROM event_push_actions "
             " WHERE user_id = ? AND room_id = ? AND "
-            " topological_ordering < ? AND stream_ordering < ?"
+            " topological_ordering < ? AND stream_ordering < ?",
             (user_id, room_id, topological_ordering, self.stream_ordering_month_ago)
         )
 

From 09804c98625187d289c0123908da2fc8eecec346 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Mon, 23 May 2016 16:29:38 +0100
Subject: [PATCH 09/18] Fix link to A-S spec

---
 docs/application_services.rst | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/application_services.rst b/docs/application_services.rst
index 7e87ac9ad6..fbc0c7e960 100644
--- a/docs/application_services.rst
+++ b/docs/application_services.rst
@@ -32,5 +32,4 @@ The format of the AS configuration file is as follows:
 
 See the spec_ for further details on how application services work.
 
-.. _spec: https://github.com/matrix-org/matrix-doc/blob/master/specification/25_application_service_api.rst#application-service-api
-
+.. _spec: https://matrix.org/docs/spec/application_service/unstable.html

From 31b5395ab6f1bec6a7198f2302ec43a891c95d03 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Mon, 23 May 2016 16:32:01 +0100
Subject: [PATCH 10/18] Remove debug logging

---
 synapse/storage/event_push_actions.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/synapse/storage/event_push_actions.py b/synapse/storage/event_push_actions.py
index 88f9b4b642..4dae51a172 100644
--- a/synapse/storage/event_push_actions.py
+++ b/synapse/storage/event_push_actions.py
@@ -312,7 +312,6 @@ class EventPushActionsStore(SQLBaseStore):
                 range_start = middle
             else:
                 range_end = middle
-        logger.info("done: picking %d from %d and %d", range_end, range_start, range_end)
 
         return range_end
 

From 6fe04ffef2fd44a3af61fe262266b25158c46db7 Mon Sep 17 00:00:00 2001
From: Negi Fazeli
Date: Mon, 23 May 2016 11:14:11 +0200
Subject: [PATCH 11/18] Fix set profile error with Requester.

Replace flush_user with delete access token due to function removal
Add a new test case for if the user is already registered
---
 synapse/handlers/register.py    |  9 +++++----
 tests/handlers/test_register.py | 34 ++++++++++++++++++++++++---------
 2 files changed, 30 insertions(+), 13 deletions(-)

diff --git a/synapse/handlers/register.py b/synapse/handlers/register.py
index 5883b9111e..16f33f8371 100644
--- a/synapse/handlers/register.py
+++ b/synapse/handlers/register.py
@@ -16,7 +16,7 @@
 """Contains functions for registering clients."""
 from twisted.internet import defer
 
-from synapse.types import UserID
+from synapse.types import UserID, Requester
 from synapse.api.errors import (
     AuthError, Codes, SynapseError, RegistrationError, InvalidCaptchaError
 )
@@ -360,7 +360,8 @@ class RegistrationHandler(BaseHandler):
 
     @defer.inlineCallbacks
     def get_or_create_user(self, localpart, displayname, duration_seconds):
-        """Creates a new user or returns an access token for an existing one
+        """Creates a new user if the user does not exist,
+        else revokes all previous access tokens and generates a new one.
 
         Args:
             localpart : The local part of the user ID to register. If None,
@@ -399,14 +400,14 @@ class RegistrationHandler(BaseHandler):
             yield registered_user(self.distributor, user)
         else:
-            yield self.store.flush_user(user_id=user_id)
+            yield self.store.user_delete_access_tokens(user_id=user_id)
             yield self.store.add_access_token_to_user(user_id=user_id, token=token)
 
         if displayname is not None:
             logger.info("setting user display name: %s -> %s", user_id, displayname)
             profile_handler = self.hs.get_handlers().profile_handler
             yield profile_handler.set_displayname(
-                user, user, displayname
+                user, Requester(user, token, False), displayname
             )
 
         defer.returnValue((user_id, token))
diff --git a/tests/handlers/test_register.py b/tests/handlers/test_register.py
index 8b7be96bd9..9d5c653b45 100644
--- a/tests/handlers/test_register.py
+++ b/tests/handlers/test_register.py
@@ -17,6 +17,7 @@ from twisted.internet import defer
 from .. import unittest
 
 from synapse.handlers.register import RegistrationHandler
+from synapse.types import UserID
 
 from tests.utils import setup_test_homeserver
 
@@ -36,25 +37,21 @@ class RegistrationTestCase(unittest.TestCase):
         self.mock_distributor = Mock()
         self.mock_distributor.declare("registered_user")
         self.mock_captcha_client = Mock()
-        hs = yield setup_test_homeserver(
+        self.hs = yield setup_test_homeserver(
             handlers=None,
             http_client=None,
             expire_access_token=True)
-        hs.handlers = RegistrationHandlers(hs)
-        self.handler = hs.get_handlers().registration_handler
-        hs.get_handlers().profile_handler = Mock()
+        self.hs.handlers = RegistrationHandlers(self.hs)
+        self.handler = self.hs.get_handlers().registration_handler
+        self.hs.get_handlers().profile_handler = Mock()
         self.mock_handler = Mock(spec=[
             "generate_short_term_login_token",
         ])
-        hs.get_handlers().auth_handler = self.mock_handler
+        self.hs.get_handlers().auth_handler = self.mock_handler
 
     @defer.inlineCallbacks
     def test_user_is_created_and_logged_in_if_doesnt_exist(self):
-        """
-        Returns:
-        The user doess not exist in this case so it will register and log it in
-        """
         duration_ms = 200
         local_part = "someone"
         display_name = "someone"
@@ -65,3 +62,22 @@ class RegistrationTestCase(unittest.TestCase):
             local_part, display_name, duration_ms)
         self.assertEquals(result_user_id, user_id)
         self.assertEquals(result_token, 'secret')
+
+    @defer.inlineCallbacks
+    def test_if_user_exists(self):
+        store = self.hs.get_datastore()
+        frank = UserID.from_string("@frank:test")
+        yield store.register(
+            user_id=frank.to_string(),
+            token="jkv;g498752-43gj['eamb!-5",
+            password_hash=None)
+        duration_ms = 200
+        local_part = "frank"
+        display_name = "Frank"
+        user_id = "@frank:test"
+        mock_token = self.mock_handler.generate_short_term_login_token
+        mock_token.return_value = 'secret'
+        result_user_id, result_token = yield self.handler.get_or_create_user(
+            local_part, display_name, duration_ms)
+        self.assertEquals(result_user_id, user_id)
+        self.assertEquals(result_token, 'secret')

From 989bdc9e569e6ca414369e90e7b88e2f1d99c753 Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Mon, 23 May 2016 19:24:11 +0100
Subject: [PATCH 12/18] Tune email notifs to make them quieter:

 * After initial 10 minute window, only alert every 24h for room notifs
 * Reset room state after 6h of idleness
 * Synchronise throttles for messages sent in the same notif, so the 24
   hourly notifs 'line up'
 * Fix the email subjects to say what triggered the notification
 * Order the rooms in reverse activity order in the email, so the
   'reason' room should always come first
---
 synapse/push/emailpusher.py | 26 +++++++++++++-------
 synapse/push/mailer.py      | 48 +++++++++++++++++++++++++++++--------
 2 files changed, 56 insertions(+), 18 deletions(-)

diff --git a/synapse/push/emailpusher.py b/synapse/push/emailpusher.py
index b4b728adc5..a72cba8306 100644
--- a/synapse/push/emailpusher.py
+++ b/synapse/push/emailpusher.py
@@ -32,12 +32,19 @@ DELAY_BEFORE_MAIL_MS = 10 * 60 * 1000
 # Each room maintains its own throttle counter, but each new mail notification
 # sends the pending notifications for all rooms.
 THROTTLE_START_MS = 10 * 60 * 1000
-THROTTLE_MAX_MS = 24 * 60 * 60 * 1000  # (2 * 60 * 1000) * (2 ** 11) # ~3 days
-THROTTLE_MULTIPLIER = 6  # 10 mins, 1 hour, 6 hours, 24 hours
+THROTTLE_MAX_MS = 24 * 60 * 60 * 1000  # 24h
+# THROTTLE_MULTIPLIER = 6  # 10 mins, 1 hour, 6 hours, 24 hours
+THROTTLE_MULTIPLIER = 144  # 10 mins, 24 hours - i.e. jump straight to 1 day
 
 # If no event triggers a notification for this long after the previous,
 # the throttle is released.
-THROTTLE_RESET_AFTER_MS = (2 * 60 * 1000) * (2 ** 11)  # ~3 days
+# 12 hours - a gap of 12 hours in conversation is surely enough to merit a new
+# notification when things get going again...
+THROTTLE_RESET_AFTER_MS = (12 * 60 * 60 * 1000)
+
+# does each email include all unread notifs, or just the ones which have happened
+# since the last mail?
+INCLUDE_ALL_UNREAD_NOTIFS = True
 
 
 class EmailPusher(object):
@@ -126,8 +133,9 @@ class EmailPusher(object):
         up logging, measures and guards against multiple instances of it
         being run.
         """
+        start = 0 if INCLUDE_ALL_UNREAD_NOTIFS else self.last_stream_ordering
         unprocessed = yield self.store.get_unread_push_actions_for_user_in_range(
-            self.user_id, self.last_stream_ordering, self.max_stream_ordering
+            self.user_id, start, self.max_stream_ordering
         )
 
         soonest_due_at = None
@@ -150,7 +158,6 @@ class EmailPusher(object):
             # we then consider all previously outstanding notifications
             # to be delivered.
 
-            # debugging:
             reason = {
                 'room_id': push_action['room_id'],
                 'now': self.clock.time_msec(),
@@ -165,9 +172,12 @@ class EmailPusher(object):
                 yield self.save_last_stream_ordering_and_success(max([
                     ea['stream_ordering'] for ea in unprocessed
                 ]))
-                yield self.sent_notif_update_throttle(
-                    push_action['room_id'], push_action
-                )
+
+                # we update the throttle on all the possible unprocessed push actions
+                for ea in unprocessed:
+                    yield self.sent_notif_update_throttle(
+                        ea['room_id'], ea
+                    )
                 break
             else:
                 if soonest_due_at is None or should_notify_at < soonest_due_at:
diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index c2c2ca3fa7..8a9e9895ba 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -45,7 +45,10 @@ MESSAGE_FROM_PERSON_IN_ROOM = "You have a message on %(app)s from %(person)s " \
 MESSAGE_FROM_PERSON = "You have a message on %(app)s from %(person)s..."
 MESSAGES_FROM_PERSON = "You have messages on %(app)s from %(person)s..."
 MESSAGES_IN_ROOM = "There are some messages on %(app)s for you in the %(room)s room..."
-MESSAGES_IN_ROOMS = "Here are some messages on %(app)s you may have missed..."
+MESSAGES_IN_ROOM_AND_OTHERS = \
+    "You have messages on %(app)s in the %(room)s room and others..."
+MESSAGES_FROM_PERSON_AND_OTHERS = \
+    "You have messages on %(app)s from %(person)s and others..."
 INVITE_FROM_PERSON_TO_ROOM = "%(person)s has invited you to join the " \
     "%(room)s room on %(app)s..."
 INVITE_FROM_PERSON = "%(person)s has invited you to chat on %(app)s..."
@@ -128,9 +131,14 @@ class Mailer(object):
             state_by_room[room_id] = room_state
 
         # Run at most 3 of these at once: sync does 10 at a time but email
-        # notifs are much realtime than sync so we can afford to wait a bit.
+        # notifs are much less realtime than sync so we can afford to wait a bit.
         yield concurrently_execute(_fetch_room_state, rooms_in_order, 3)
 
+        # actually sort our so-called rooms_in_order list, most recent room first
+        rooms_in_order = rooms_in_order.sort(
+            key=lambda r: -notifs_by_room[r]['received_ts']
+        )
+
         rooms = []
 
         for r in rooms_in_order:
@@ -139,12 +147,12 @@ class Mailer(object):
             )
             rooms.append(roomvars)
 
-        summary_text = self.make_summary_text(
-            notifs_by_room, state_by_room, notif_events, user_id
+        reason['room_name'] = calculate_room_name(
+            state_by_room[reason['room_id']], user_id, fallback_to_members=True
         )
 
-        reason['room_name'] = calculate_room_name(
-            state_by_room[reason['room_id']], user_id, fallback_to_members=False
+        summary_text = self.make_summary_text(
+            notifs_by_room, state_by_room, notif_events, user_id, reason
         )
 
         template_vars = {
@@ -296,7 +304,8 @@ class Mailer(object):
 
         return messagevars
 
-    def make_summary_text(self, notifs_by_room, state_by_room, notif_events, user_id):
+    def make_summary_text(self, notifs_by_room, state_by_room,
+                          notif_events, user_id, reason):
         if len(notifs_by_room) == 1:
             # Only one room has new stuff
             room_id = notifs_by_room.keys()[0]
@@ -371,9 +380,28 @@ class Mailer(object):
                 }
         else:
             # Stuff's happened in multiple different rooms
-            return MESSAGES_IN_ROOMS % {
-                "app": self.app_name,
-            }
+
+            # ...but we still refer to the 'reason' room which triggered the mail
+            if reason['room_name'] is not None:
+                return MESSAGES_IN_ROOM_AND_OTHERS % {
+                    "room": reason['room_name'],
+                    "app": self.app_name,
+                }
+            else:
+                # If the reason room doesn't have a name, say who the messages
+                # are from explicitly to avoid, "messages in the Bob room"
+                sender_ids = list(set([
+                    notif_events[n['event_id']].sender
+                    for n in notifs_by_room[reason['room_id']]
+                ]))
+
+                return MESSAGES_FROM_PERSON_AND_OTHERS % {
+                    "person": descriptor_from_member_events([
+                        state_by_room[reason['room_id']][("m.room.member", s)]
+                        for s in sender_ids
+                    ]),
+                    "app": self.app_name,
+                }
 
     def make_room_link(self, room_id):
         # need /beta for Universal Links to work on iOS

From 88ea5ab2c37345c1266db1446afa6d6472b7c9fe Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Mon, 23 May 2016 19:33:45 +0100
Subject: [PATCH 13/18] consistency is the better part of valour

---
 synapse/push/mailer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 8a9e9895ba..766888ca79 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -44,7 +44,7 @@ MESSAGE_FROM_PERSON_IN_ROOM = "You have a message on %(app)s from %(person)s " \
     "in the %s room..."
 MESSAGE_FROM_PERSON = "You have a message on %(app)s from %(person)s..."
 MESSAGES_FROM_PERSON = "You have messages on %(app)s from %(person)s..."
-MESSAGES_IN_ROOM = "There are some messages on %(app)s for you in the %(room)s room..."
+MESSAGES_IN_ROOM = "You have messages on %(app)s in the %(room)s room..."
 MESSAGES_IN_ROOM_AND_OTHERS = \
     "You have messages on %(app)s in the %(room)s room and others..."
 MESSAGES_FROM_PERSON_AND_OTHERS = \

From d4378825811eaee75acaa88ba6c9769af3159eb1 Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Mon, 23 May 2016 22:54:32 +0100
Subject: [PATCH 14/18] fix debug text

---
 res/templates/mail.css        | 5 +++++
 res/templates/notif_mail.html | 4 ++--
 2 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/res/templates/mail.css b/res/templates/mail.css
index f2b5e84abc..5ab3e1b06d 100644
--- a/res/templates/mail.css
+++ b/res/templates/mail.css
@@ -145,6 +145,11 @@ pre, code {
     text-decoration: none;
 }
 
+.debug {
+    font-size: 10px;
+    color: #888;
+}
+
 .footer {
     margin-top: 20px;
     text-align: center;
diff --git a/res/templates/notif_mail.html b/res/templates/notif_mail.html
index dc13398df1..1146ee133b 100644
--- a/res/templates/notif_mail.html
+++ b/res/templates/notif_mail.html
@@ -30,7 +30,7 @@
                 {% include 'room.html' with context %}
             {% endfor %}

From cb8a321bdd35278fc1214d2dd2c1a8163ce25871 Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Mon, 23 May 2016 22:54:56 +0100
Subject: [PATCH 15/18] fix NPE in room ordering

---
 synapse/push/mailer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index 766888ca79..d8a0c35c79 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -135,8 +135,8 @@ class Mailer(object):
         yield concurrently_execute(_fetch_room_state, rooms_in_order, 3)
 
         # actually sort our so-called rooms_in_order list, most recent room first
-        rooms_in_order = rooms_in_order.sort(
-            key=lambda r: -notifs_by_room[r]['received_ts']
+        rooms_in_order.sort(
+            key=lambda r: -(notifs_by_room[r][-1]['received_ts'] or 0)
         )
 
         rooms = []

From 680f1d9387f48ced9902f4e7d54758ab6a0aa9b0 Mon Sep 17 00:00:00 2001
From: Matthew Hodgson
Date: Mon, 23 May 2016 22:55:11 +0100
Subject: [PATCH 16/18] catch thinko in presentable names

---
 synapse/util/presentable_names.py | 19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/synapse/util/presentable_names.py b/synapse/util/presentable_names.py
index 3efa8a8206..a6866f6117 100644
--- a/synapse/util/presentable_names.py
+++ b/synapse/util/presentable_names.py
@@ -14,6 +14,9 @@
 # limitations under the License.
 
 import re
+import logging
+
+logger = logging.getLogger(__name__)
 
 # intentionally looser than what aliases we allow to be registered since
 # other HSes may allow aliases that we would not
@@ -105,13 +108,21 @@ def calculate_room_name(room_state, user_id, fallback_to_members=True):
         # or inbound invite, or outbound 3PID invite.
         if all_members[0].sender == user_id:
             if "m.room.third_party_invite" in room_state_bytype:
-                third_party_invites = room_state_bytype["m.room.third_party_invite"]
+                third_party_invites = (
+                    room_state_bytype["m.room.third_party_invite"].values()
+                )
+
                 if len(third_party_invites) > 0:
                     # technically third party invite events are not member
                     # events, but they are close enough
-                    return "Inviting %s" (
-                        descriptor_from_member_events(third_party_invites)
-                    )
+
+                    # FIXME: no they're not - they look nothing like a member;
+                    # they have a great big encrypted thing as their name to
+                    # prevent leaking the 3PID name...
+                    # return "Inviting %s" % (
+                    #     descriptor_from_member_events(third_party_invites)
+                    # )
+                    return "Inviting email address"
                 else:
                     return ALL_ALONE
             else:

From 95b86e6dada4135c2b30932809084510adb3a076 Mon Sep 17 00:00:00 2001
From: Matrix
Date: Wed, 18 May 2016 01:07:44 +0100
Subject: [PATCH 17/18] tweak mail notifs

---
 res/templates/notif_mail.html | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/res/templates/notif_mail.html b/res/templates/notif_mail.html
index 1146ee133b..8aee68b591 100644
--- a/res/templates/notif_mail.html
+++ b/res/templates/notif_mail.html
@@ -30,18 +30,20 @@
                 {% include 'room.html' with context %}
             {% endfor %}

From b007ee46065ed25ed1ca248cf47c19ee7f4b56c2 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Tue, 24 May 2016 15:11:28 +0100
Subject: [PATCH 18/18] Check for presence of 'avatar_url' key

---
 synapse/push/mailer.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/synapse/push/mailer.py b/synapse/push/mailer.py
index d8a0c35c79..3ae92d1574 100644
--- a/synapse/push/mailer.py
+++ b/synapse/push/mailer.py
@@ -259,7 +259,9 @@ class Mailer(object):
 
         sender_state_event = room_state[("m.room.member", event.sender)]
         sender_name = name_from_member_event(sender_state_event)
-        sender_avatar_url = sender_state_event.content["avatar_url"]
+        sender_avatar_url = None
+        if "avatar_url" in sender_state_event.content:
+            sender_avatar_url = sender_state_event.content["avatar_url"]
 
         # 'hash' for deterministically picking default images: use
         # sender_hash % the number of default images to choose from
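Editor's note: the least obvious technique in this series is the binary search introduced in PATCH 05, which bisects on the indexed stream_ordering column because received_ts has no index. The snippet below is a minimal, self-contained sketch of that idea and is not part of the patches above: it assumes a toy sqlite3 in-memory table rather than Synapse's schema and transaction wrappers, and the helper name find_first_stream_ordering_after_ts is ours, adapted from the patch's _find_first_stream_ordering_after_ts_txn.

```python
# Illustrative sketch only (assumed toy schema, not Synapse's): bisect on the
# indexed stream_ordering column to find roughly where a received_ts cutoff
# falls, instead of scanning the unindexed received_ts column.
import sqlite3
import time


def find_first_stream_ordering_after_ts(cur, ts):
    cur.execute("SELECT MAX(stream_ordering) FROM events")
    max_stream_ordering = cur.fetchone()[0]
    if max_stream_ordering is None:
        return 0  # empty events table, as handled in PATCH 06

    range_start, range_end = 0, max_stream_ordering
    while range_end - range_start > 1:
        middle = (range_start + range_end) // 2
        # received_ts increases with stream_ordering, so one indexed lookup
        # per step tells us which half of the range the cutoff falls in.
        cur.execute(
            "SELECT received_ts FROM events"
            " WHERE stream_ordering > ?"
            " ORDER BY stream_ordering"
            " LIMIT 1",
            (middle,),
        )
        middle_ts = cur.fetchone()[0]
        if ts > middle_ts:
            range_start = middle
        else:
            range_end = middle
    return range_end


if __name__ == "__main__":
    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute(
        "CREATE TABLE events ("
        " stream_ordering INTEGER PRIMARY KEY,"  # primary key is indexed
        " received_ts BIGINT)"
    )
    now = int(time.time() * 1000)
    hour = 60 * 60 * 1000
    # one event per hour for the last 90 days, oldest first
    cur.executemany(
        "INSERT INTO events VALUES (?, ?)",
        [(i + 1, now - (90 * 24 - i) * hour) for i in range(90 * 24)],
    )
    cutoff = now - 30 * 24 * hour  # "one month ago", as in the patch
    print(find_first_stream_ordering_after_ts(cur, cutoff))
```

Like the method in the patch, this returns a stream ordering just at or below the cutoff, which is good enough for its purpose: it is only used as a conservative lower bound when pruning old event_push_actions rows, and it is recomputed hourly rather than on every delete.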