From d528406cb875b78907c7f7dc9872c1d2c22dc46c Mon Sep 17 00:00:00 2001 From: Willem Mulder Date: Wed, 23 Jan 2019 18:41:59 +0100 Subject: [PATCH 01/18] Fix error message for optional dependencies Signed-off-by: Willem Mulder --- changelog.d/4450.bugfix | 2 ++ synapse/python_dependencies.py | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 changelog.d/4450.bugfix diff --git a/changelog.d/4450.bugfix b/changelog.d/4450.bugfix new file mode 100644 index 0000000000..b194e94c15 --- /dev/null +++ b/changelog.d/4450.bugfix @@ -0,0 +1,2 @@ +The dependency checker now correctly reports a version mismatch for optional +dependencies, instead of reporting the dependency missing. diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 756721e304..df3e94dee0 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -143,9 +143,12 @@ def check_requirements(for_feature=None, _get_distribution=get_distribution): for dependency in OPTS: try: _get_distribution(dependency) - except VersionConflict: + except VersionConflict as e: deps_needed.append(dependency) - errors.append("Needed %s but it was not installed" % (dependency,)) + errors.append( + "Needed optional %s, got %s==%s" + % (dependency, e.dist.project_name, e.dist.version) + ) except DistributionNotFound: # If it's not found, we don't care pass From 4588b0d64a954ed4fef1f9034dec9e8407867a5c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 8 Feb 2019 09:37:16 +0000 Subject: [PATCH 02/18] Update MSC1711_certificates_FAQ.md Fix incorrect heading level --- docs/MSC1711_certificates_FAQ.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/MSC1711_certificates_FAQ.md b/docs/MSC1711_certificates_FAQ.md index 0a781d00e3..c4a57d6ed1 100644 --- a/docs/MSC1711_certificates_FAQ.md +++ b/docs/MSC1711_certificates_FAQ.md @@ -125,7 +125,7 @@ doing one of the following: * Use Synapse's [ACME support](./ACME.md), and forward port 80 on the `server_name` domain to your Synapse instance. -### Option 2: run Synapse behind a reverse proxy +#### Option 2: run Synapse behind a reverse proxy If you have an existing reverse proxy set up with correct TLS certificates for your domain, you can simply route all traffic through the reverse proxy by From bb4fd8f927807bdd6efb08ee65ec01269e000417 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Wed, 13 Feb 2019 23:05:32 +1100 Subject: [PATCH 03/18] Run `black` on user directory code (#4635) --- changelog.d/4635.misc | 1 + synapse/handlers/user_directory.py | 94 +++++++++--------- synapse/storage/user_directory.py | 153 +++++++++++++---------------- 3 files changed, 117 insertions(+), 131 deletions(-) create mode 100644 changelog.d/4635.misc diff --git a/changelog.d/4635.misc b/changelog.d/4635.misc new file mode 100644 index 0000000000..0f45957b84 --- /dev/null +++ b/changelog.d/4635.misc @@ -0,0 +1 @@ +Run `black` to reformat user directory code. diff --git a/synapse/handlers/user_directory.py b/synapse/handlers/user_directory.py index 120815b09b..283c6c1b81 100644 --- a/synapse/handlers/user_directory.py +++ b/synapse/handlers/user_directory.py @@ -130,7 +130,7 @@ class UserDirectoryHandler(object): # Support users are for diagnostics and should not appear in the user directory. 
if not is_support: yield self.store.update_profile_in_user_dir( - user_id, profile.display_name, profile.avatar_url, None, + user_id, profile.display_name, profile.avatar_url, None ) @defer.inlineCallbacks @@ -166,8 +166,9 @@ class UserDirectoryHandler(object): self.pos = deltas[-1]["stream_id"] # Expose current event processing position to prometheus - synapse.metrics.event_processing_positions.labels( - "user_dir").set(self.pos) + synapse.metrics.event_processing_positions.labels("user_dir").set( + self.pos + ) yield self.store.update_user_directory_stream_pos(self.pos) @@ -191,21 +192,25 @@ class UserDirectoryHandler(object): logger.info("Handling room %d/%d", num_processed_rooms + 1, len(room_ids)) yield self._handle_initial_room(room_id) num_processed_rooms += 1 - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) logger.info("Processed all rooms.") if self.search_all_users: num_processed_users = 0 user_ids = yield self.store.get_all_local_users() - logger.info("Doing initial update of user directory. %d users", len(user_ids)) + logger.info( + "Doing initial update of user directory. %d users", len(user_ids) + ) for user_id in user_ids: # We add profiles for all users even if they don't match the # include pattern, just in case we want to change it in future - logger.info("Handling user %d/%d", num_processed_users + 1, len(user_ids)) + logger.info( + "Handling user %d/%d", num_processed_users + 1, len(user_ids) + ) yield self._handle_local_user(user_id) num_processed_users += 1 - yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_USER_SLEEP_MS / 1000.0) logger.info("Processed all users") @@ -224,24 +229,24 @@ class UserDirectoryHandler(object): if not is_in_room: return - is_public = yield self.store.is_room_world_readable_or_publicly_joinable(room_id) + is_public = yield self.store.is_room_world_readable_or_publicly_joinable( + room_id + ) users_with_profile = yield self.state.get_current_user_in_room(room_id) user_ids = set(users_with_profile) unhandled_users = user_ids - self.initially_handled_users yield self.store.add_profiles_to_user_dir( - room_id, { - user_id: users_with_profile[user_id] for user_id in unhandled_users - } + room_id, + {user_id: users_with_profile[user_id] for user_id in unhandled_users}, ) self.initially_handled_users |= unhandled_users if is_public: yield self.store.add_users_to_public_room( - room_id, - user_ids=user_ids - self.initially_handled_users_in_public + room_id, user_ids=user_ids - self.initially_handled_users_in_public ) self.initially_handled_users_in_public |= user_ids @@ -253,7 +258,7 @@ class UserDirectoryHandler(object): count = 0 for user_id in user_ids: if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) + yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) if not self.is_mine_id(user_id): count += 1 @@ -268,7 +273,7 @@ class UserDirectoryHandler(object): continue if count % self.INITIAL_ROOM_SLEEP_COUNT == 0: - yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.) 
+ yield self.clock.sleep(self.INITIAL_ROOM_SLEEP_MS / 1000.0) count += 1 user_set = (user_id, other_user_id) @@ -290,25 +295,23 @@ class UserDirectoryHandler(object): if len(to_insert) > self.INITIAL_ROOM_BATCH_SIZE: yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, + room_id, not is_public, to_insert ) to_insert.clear() if len(to_update) > self.INITIAL_ROOM_BATCH_SIZE: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) to_update.clear() if to_insert: - yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, - ) + yield self.store.add_users_who_share_room(room_id, not is_public, to_insert) to_insert.clear() if to_update: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) to_update.clear() @@ -329,11 +332,12 @@ class UserDirectoryHandler(object): # may have become public or not and add/remove the users in said room if typ in (EventTypes.RoomHistoryVisibility, EventTypes.JoinRules): yield self._handle_room_publicity_change( - room_id, prev_event_id, event_id, typ, + room_id, prev_event_id, event_id, typ ) elif typ == EventTypes.Member: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="membership", public_value=Membership.JOIN, ) @@ -342,14 +346,16 @@ class UserDirectoryHandler(object): # Need to check if the server left the room entirely, if so # we might need to remove all the users in that room is_in_room = yield self.store.is_host_joined( - room_id, self.server_name, + room_id, self.server_name ) if not is_in_room: logger.info("Server left room: %r", room_id) # Fetch all the users that we marked as being in user # directory due to being in the room and then check if # need to remove those users or not - user_ids = yield self.store.get_users_in_dir_due_to_room(room_id) + user_ids = yield self.store.get_users_in_dir_due_to_room( + room_id + ) for user_id in user_ids: yield self._handle_remove_user(room_id, user_id) return @@ -361,7 +367,7 @@ class UserDirectoryHandler(object): if change is None: # Handle any profile changes yield self._handle_profile_change( - state_key, room_id, prev_event_id, event_id, + state_key, room_id, prev_event_id, event_id ) continue @@ -393,13 +399,15 @@ class UserDirectoryHandler(object): if typ == EventTypes.RoomHistoryVisibility: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="history_visibility", public_value="world_readable", ) elif typ == EventTypes.JoinRules: change = yield self._get_key_change( - prev_event_id, event_id, + prev_event_id, + event_id, key_name="join_rule", public_value=JoinRules.PUBLIC, ) @@ -524,7 +532,7 @@ class UserDirectoryHandler(object): ) if self.is_mine_id(other_user_id) and not is_appservice: shared_is_private = yield self.store.get_if_users_share_a_room( - other_user_id, user_id, + other_user_id, user_id ) if shared_is_private is True: # We've already marked in the database they share a private room @@ -539,13 +547,11 @@ class UserDirectoryHandler(object): to_insert.add((other_user_id, user_id)) if to_insert: - yield self.store.add_users_who_share_room( - room_id, not is_public, to_insert, - ) + yield self.store.add_users_who_share_room(room_id, not is_public, to_insert) if to_update: yield self.store.update_users_who_share_room( - room_id, not is_public, to_update, + room_id, not is_public, to_update ) @defer.inlineCallbacks @@ 
-564,15 +570,15 @@ class UserDirectoryHandler(object): row = yield self.store.get_user_in_public_room(user_id) update_user_in_public = row and row["room_id"] == room_id - if (update_user_in_public or update_user_dir): + if update_user_in_public or update_user_dir: # XXX: Make this faster? rooms = yield self.store.get_rooms_for_user(user_id) for j_room_id in rooms: - if (not update_user_in_public and not update_user_dir): + if not update_user_in_public and not update_user_dir: break is_in_room = yield self.store.is_host_joined( - j_room_id, self.server_name, + j_room_id, self.server_name ) if not is_in_room: @@ -600,19 +606,19 @@ class UserDirectoryHandler(object): # Get a list of user tuples that were in the DB due to this room and # users (this includes tuples where the other user matches `user_id`) user_tuples = yield self.store.get_users_in_share_dir_with_room_id( - user_id, room_id, + user_id, room_id ) for user_id, other_user_id in user_tuples: # For each user tuple get a list of rooms that they still share, # trying to find a private room, and update the entry in the DB - rooms = yield self.store.get_rooms_in_common_for_users(user_id, other_user_id) + rooms = yield self.store.get_rooms_in_common_for_users( + user_id, other_user_id + ) # If they dont share a room anymore, remove the mapping if not rooms: - yield self.store.remove_user_who_share_room( - user_id, other_user_id, - ) + yield self.store.remove_user_who_share_room(user_id, other_user_id) continue found_public_share = None @@ -626,13 +632,13 @@ class UserDirectoryHandler(object): else: found_public_share = None yield self.store.update_users_who_share_room( - room_id, not is_public, [(user_id, other_user_id)], + room_id, not is_public, [(user_id, other_user_id)] ) break if found_public_share: yield self.store.update_users_who_share_room( - room_id, not is_public, [(user_id, other_user_id)], + room_id, not is_public, [(user_id, other_user_id)] ) @defer.inlineCallbacks @@ -660,7 +666,7 @@ class UserDirectoryHandler(object): if prev_name != new_name or prev_avatar != new_avatar: yield self.store.update_profile_in_user_dir( - user_id, new_name, new_avatar, room_id, + user_id, new_name, new_avatar, room_id ) @defer.inlineCallbacks diff --git a/synapse/storage/user_directory.py b/synapse/storage/user_directory.py index e8b574ee5e..fea866c043 100644 --- a/synapse/storage/user_directory.py +++ b/synapse/storage/user_directory.py @@ -44,7 +44,7 @@ class UserDirectoryStore(SQLBaseStore): ) current_state_ids = yield self.get_filtered_current_state_ids( - room_id, StateFilter.from_types(types_to_filter), + room_id, StateFilter.from_types(types_to_filter) ) join_rules_id = current_state_ids.get((EventTypes.JoinRules, "")) @@ -74,14 +74,8 @@ class UserDirectoryStore(SQLBaseStore): """ yield self._simple_insert_many( table="users_in_public_rooms", - values=[ - { - "user_id": user_id, - "room_id": room_id, - } - for user_id in user_ids - ], - desc="add_users_to_public_room" + values=[{"user_id": user_id, "room_id": room_id} for user_id in user_ids], + desc="add_users_to_public_room", ) for user_id in user_ids: self.get_user_in_public_room.invalidate((user_id,)) @@ -107,7 +101,9 @@ class UserDirectoryStore(SQLBaseStore): """ args = ( ( - user_id, get_localpart_from_id(user_id), get_domain_from_id(user_id), + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), profile.display_name, ) for user_id, profile in iteritems(users_with_profile) @@ -120,7 +116,7 @@ class UserDirectoryStore(SQLBaseStore): args = ( ( user_id, - "%s 
%s" % (user_id, p.display_name,) if p.display_name else user_id + "%s %s" % (user_id, p.display_name) if p.display_name else user_id, ) for user_id, p in iteritems(users_with_profile) ) @@ -141,12 +137,10 @@ class UserDirectoryStore(SQLBaseStore): "avatar_url": profile.avatar_url, } for user_id, profile in iteritems(users_with_profile) - ] + ], ) for user_id in users_with_profile: - txn.call_after( - self.get_user_in_directory.invalidate, (user_id,) - ) + txn.call_after(self.get_user_in_directory.invalidate, (user_id,)) return self.runInteraction( "add_profiles_to_user_dir", _add_profiles_to_user_dir_txn @@ -188,9 +182,11 @@ class UserDirectoryStore(SQLBaseStore): txn.execute( sql, ( - user_id, get_localpart_from_id(user_id), - get_domain_from_id(user_id), display_name, - ) + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), + display_name, + ), ) else: # TODO: Remove this code after we've bumped the minimum version @@ -208,9 +204,11 @@ class UserDirectoryStore(SQLBaseStore): txn.execute( sql, ( - user_id, get_localpart_from_id(user_id), - get_domain_from_id(user_id), display_name, - ) + user_id, + get_localpart_from_id(user_id), + get_domain_from_id(user_id), + display_name, + ), ) elif new_entry is False: sql = """ @@ -225,15 +223,16 @@ class UserDirectoryStore(SQLBaseStore): ( get_localpart_from_id(user_id), get_domain_from_id(user_id), - display_name, user_id, - ) + display_name, + user_id, + ), ) else: raise RuntimeError( "upsert returned None when 'can_native_upsert' is False" ) elif isinstance(self.database_engine, Sqlite3Engine): - value = "%s %s" % (user_id, display_name,) if display_name else user_id + value = "%s %s" % (user_id, display_name) if display_name else user_id self._simple_upsert_txn( txn, table="user_directory_search", @@ -264,29 +263,18 @@ class UserDirectoryStore(SQLBaseStore): def remove_from_user_dir(self, user_id): def _remove_from_user_dir_txn(txn): self._simple_delete_txn( - txn, - table="user_directory", - keyvalues={"user_id": user_id}, + txn, table="user_directory", keyvalues={"user_id": user_id} ) self._simple_delete_txn( - txn, - table="user_directory_search", - keyvalues={"user_id": user_id}, + txn, table="user_directory_search", keyvalues={"user_id": user_id} ) self._simple_delete_txn( - txn, - table="users_in_public_rooms", - keyvalues={"user_id": user_id}, + txn, table="users_in_public_rooms", keyvalues={"user_id": user_id} ) - txn.call_after( - self.get_user_in_directory.invalidate, (user_id,) - ) - txn.call_after( - self.get_user_in_public_room.invalidate, (user_id,) - ) - return self.runInteraction( - "remove_from_user_dir", _remove_from_user_dir_txn, - ) + txn.call_after(self.get_user_in_directory.invalidate, (user_id,)) + txn.call_after(self.get_user_in_public_room.invalidate, (user_id,)) + + return self.runInteraction("remove_from_user_dir", _remove_from_user_dir_txn) @defer.inlineCallbacks def remove_from_user_in_public_room(self, user_id): @@ -371,6 +359,7 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. 
""" + def _add_users_who_share_room_txn(txn): self._simple_insert_many_txn( txn, @@ -387,13 +376,12 @@ class UserDirectoryStore(SQLBaseStore): ) for user_id, other_user_id in user_id_tuples: txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) + return self.runInteraction( "add_users_who_share_room", _add_users_who_share_room_txn ) @@ -407,6 +395,7 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. """ + def _update_users_who_share_room_txn(txn): sql = """ UPDATE users_who_share_rooms @@ -414,21 +403,16 @@ class UserDirectoryStore(SQLBaseStore): WHERE user_id = ? AND other_user_id = ? """ txn.executemany( - sql, - ( - (room_id, share_private, uid, oid) - for uid, oid in user_id_sets - ) + sql, ((room_id, share_private, uid, oid) for uid, oid in user_id_sets) ) for user_id, other_user_id in user_id_sets: txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) + return self.runInteraction( "update_users_who_share_room", _update_users_who_share_room_txn ) @@ -442,22 +426,18 @@ class UserDirectoryStore(SQLBaseStore): share_private (bool): Is the room private user_id_tuples([(str, str)]): iterable of 2-tuple of user IDs. """ + def _remove_user_who_share_room_txn(txn): self._simple_delete_txn( txn, table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - "other_user_id": other_user_id, - }, + keyvalues={"user_id": user_id, "other_user_id": other_user_id}, ) txn.call_after( - self.get_users_who_share_room_from_dir.invalidate, - (user_id,), + self.get_users_who_share_room_from_dir.invalidate, (user_id,) ) txn.call_after( - self.get_if_users_share_a_room.invalidate, - (user_id, other_user_id), + self.get_if_users_share_a_room.invalidate, (user_id, other_user_id) ) return self.runInteraction( @@ -478,10 +458,7 @@ class UserDirectoryStore(SQLBaseStore): """ return self._simple_select_one_onecol( table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - "other_user_id": other_user_id, - }, + keyvalues={"user_id": user_id, "other_user_id": other_user_id}, retcol="share_private", allow_none=True, desc="get_if_users_share_a_room", @@ -499,17 +476,12 @@ class UserDirectoryStore(SQLBaseStore): """ rows = yield self._simple_select_list( table="users_who_share_rooms", - keyvalues={ - "user_id": user_id, - }, - retcols=("other_user_id", "share_private",), + keyvalues={"user_id": user_id}, + retcols=("other_user_id", "share_private"), desc="get_users_who_share_room_with_user", ) - defer.returnValue({ - row["other_user_id"]: row["share_private"] - for row in rows - }) + defer.returnValue({row["other_user_id"]: row["share_private"] for row in rows}) def get_users_in_share_dir_with_room_id(self, user_id, room_id): """Get all user tuples that are in the users_who_share_rooms due to the @@ -556,6 +528,7 @@ class UserDirectoryStore(SQLBaseStore): def delete_all_from_user_dir(self): """Delete the entire user directory """ + def _delete_all_from_user_dir_txn(txn): txn.execute("DELETE FROM 
user_directory") txn.execute("DELETE FROM user_directory_search") @@ -565,6 +538,7 @@ class UserDirectoryStore(SQLBaseStore): txn.call_after(self.get_user_in_public_room.invalidate_all) txn.call_after(self.get_users_who_share_room_from_dir.invalidate_all) txn.call_after(self.get_if_users_share_a_room.invalidate_all) + return self.runInteraction( "delete_all_from_user_dir", _delete_all_from_user_dir_txn ) @@ -574,7 +548,7 @@ class UserDirectoryStore(SQLBaseStore): return self._simple_select_one( table="user_directory", keyvalues={"user_id": user_id}, - retcols=("room_id", "display_name", "avatar_url",), + retcols=("room_id", "display_name", "avatar_url"), allow_none=True, desc="get_user_in_directory", ) @@ -607,7 +581,9 @@ class UserDirectoryStore(SQLBaseStore): def get_current_state_deltas(self, prev_stream_id): prev_stream_id = int(prev_stream_id) - if not self._curr_state_delta_stream_cache.has_any_entity_changed(prev_stream_id): + if not self._curr_state_delta_stream_cache.has_any_entity_changed( + prev_stream_id + ): return [] def get_current_state_deltas_txn(txn): @@ -641,7 +617,7 @@ class UserDirectoryStore(SQLBaseStore): WHERE ? < stream_id AND stream_id <= ? ORDER BY stream_id ASC """ - txn.execute(sql, (prev_stream_id, max_stream_id,)) + txn.execute(sql, (prev_stream_id, max_stream_id)) return self.cursor_to_dict(txn) return self.runInteraction( @@ -731,8 +707,11 @@ class UserDirectoryStore(SQLBaseStore): display_name IS NULL, avatar_url IS NULL LIMIT ? - """ % (join_clause, where_clause) - args = join_args + (full_query, exact_query, prefix_query, limit + 1,) + """ % ( + join_clause, + where_clause, + ) + args = join_args + (full_query, exact_query, prefix_query, limit + 1) elif isinstance(self.database_engine, Sqlite3Engine): search_query = _parse_query_sqlite(search_term) @@ -749,7 +728,10 @@ class UserDirectoryStore(SQLBaseStore): display_name IS NULL, avatar_url IS NULL LIMIT ? - """ % (join_clause, where_clause) + """ % ( + join_clause, + where_clause, + ) args = join_args + (search_query, limit + 1) else: # This should be unreachable. @@ -761,10 +743,7 @@ class UserDirectoryStore(SQLBaseStore): limited = len(results) > limit - defer.returnValue({ - "limited": limited, - "results": results, - }) + defer.returnValue({"limited": limited, "results": results}) def _parse_query_sqlite(search_term): @@ -779,7 +758,7 @@ def _parse_query_sqlite(search_term): # Pull out the individual words, discarding any non-word characters. results = re.findall(r"([\w\-]+)", search_term, re.UNICODE) - return " & ".join("(%s* OR %s)" % (result, result,) for result in results) + return " & ".join("(%s* OR %s)" % (result, result) for result in results) def _parse_query_postgres(search_term): @@ -792,7 +771,7 @@ def _parse_query_postgres(search_term): # Pull out the individual words, discarding any non-word characters. 
results = re.findall(r"([\w\-]+)", search_term, re.UNICODE) - both = " & ".join("(%s:* | %s)" % (result, result,) for result in results) + both = " & ".join("(%s:* | %s)" % (result, result) for result in results) exact = " & ".join("%s" % (result,) for result in results) prefix = " & ".join("%s:*" % (result,) for result in results) From 3bc238629eb61e6a3f6c652e3ddae3261179d624 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 13 Feb 2019 14:46:18 +0000 Subject: [PATCH 04/18] 0.99.1rc2 --- CHANGES.md | 9 +++++++++ changelog.d/4636.bugfix | 1 - synapse/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/4636.bugfix diff --git a/CHANGES.md b/CHANGES.md index cc629978c2..d0b3437b2d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.99.1rc2 (2019-02-13) +============================== + +Bugfixes +-------- + +- Fix errors when using default bind_addresses with replication/metrics listeners. ([\#4636](https://github.com/matrix-org/synapse/issues/4636)) + + Synapse 0.99.1rc1 (2019-02-12) ============================== diff --git a/changelog.d/4636.bugfix b/changelog.d/4636.bugfix deleted file mode 100644 index 7607aa1d53..0000000000 --- a/changelog.d/4636.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix errors when using default bind_addresses with replication/metrics listeners. \ No newline at end of file diff --git a/synapse/__init__.py b/synapse/__init__.py index c211cb4e6f..4629fa76ec 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -27,4 +27,4 @@ try: except ImportError: pass -__version__ = "0.99.1rc1" +__version__ = "0.99.1rc2" From a214ba93e0770f87c1a13a62375a11fd6c60bf64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=94=D0=B0=D0=BC=D1=98=D0=B0=D0=BD=20=D0=93=D0=B5=D0=BE?= =?UTF-8?q?=D1=80=D0=B3=D0=B8=D0=B5=D0=B2=D1=81=D0=BA=D0=B8?= Date: Thu, 14 Feb 2019 14:44:22 +0100 Subject: [PATCH 05/18] implement `reload` by sending the HUP signal (#4622) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * implement `reload` by sending the HUP signal According to the 0.99 release info* synapse now uses the HUP signal to reload certificates: > Synapse will now reload TLS certificates from disk upon SIGHUP. (#4495, #4524) So the matrix-synapse.service unit file should include a reload directive. 
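With the `ExecReload=` line added below, the reload can be driven through
systemd instead of signalling the process by hand. A minimal usage sketch
(assuming the unit is installed as `matrix-synapse.service`, as in this
package; the commands are an illustration, not part of the patch):

    systemctl daemon-reload                  # pick up the changed unit file
    systemctl reload matrix-synapse.service  # runs ExecReload=, i.e. /bin/kill -HUP $MAINPID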
Signed-off-by: Дамјан Георгиевски --- debian/changelog | 6 ++++++ debian/matrix-synapse.service | 1 + 2 files changed, 7 insertions(+) diff --git a/debian/changelog b/debian/changelog index 04b5d69053..d4b9ed8ed7 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (0.99.1) UNRELEASED; urgency=medium + + * Added ExecReload= in service unit file to send a HUP signal + + -- Damjan Georgievski Thu, 14 Feb 2019 12:53:13 +0000 + matrix-synapse-py3 (0.99.0) stable; urgency=medium * New synapse release 0.99.0 diff --git a/debian/matrix-synapse.service b/debian/matrix-synapse.service index 2e9cd83b5f..942e4b83fe 100644 --- a/debian/matrix-synapse.service +++ b/debian/matrix-synapse.service @@ -8,6 +8,7 @@ WorkingDirectory=/var/lib/matrix-synapse EnvironmentFile=/etc/default/matrix-synapse ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ +ExecReload=/bin/kill -HUP $MAINPID Restart=always RestartSec=3 From 06cd757ae718ca4ec01c8e6480547f48f9410cfa Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Feb 2019 14:24:24 +0000 Subject: [PATCH 06/18] 0.99.1 --- CHANGES.md | 15 +++------------ debian/changelog | 8 ++++++-- synapse/__init__.py | 4 ++-- 3 files changed, 11 insertions(+), 16 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index d0b3437b2d..872ac440b5 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,14 +1,5 @@ -Synapse 0.99.1rc2 (2019-02-13) -============================== - -Bugfixes --------- - -- Fix errors when using default bind_addresses with replication/metrics listeners. ([\#4636](https://github.com/matrix-org/synapse/issues/4636)) - - -Synapse 0.99.1rc1 (2019-02-12) -============================== +Synapse 0.99.1 (2019-02-14) +=========================== Features -------- @@ -19,7 +10,7 @@ Features - Add ability to update backup versions ([\#4580](https://github.com/matrix-org/synapse/issues/4580)) - Allow the "unavailable" presence status for /sync. This change makes Synapse compliant with r0.4.0 of the Client-Server specification. ([\#4592](https://github.com/matrix-org/synapse/issues/4592)) -- There is no longer any need to specify `no_tls`: it is inferred from the absence of TLS listeners ([\#4613](https://github.com/matrix-org/synapse/issues/4613), [\#4615](https://github.com/matrix-org/synapse/issues/4615), [\#4617](https://github.com/matrix-org/synapse/issues/4617)) +- There is no longer any need to specify `no_tls`: it is inferred from the absence of TLS listeners ([\#4613](https://github.com/matrix-org/synapse/issues/4613), [\#4615](https://github.com/matrix-org/synapse/issues/4615), [\#4617](https://github.com/matrix-org/synapse/issues/4617), [\#4636](https://github.com/matrix-org/synapse/issues/4636)) - The default configuration no longer requires TLS certificates. 
([\#4614](https://github.com/matrix-org/synapse/issues/4614)) diff --git a/debian/changelog b/debian/changelog index d4b9ed8ed7..bc1ba153e3 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,8 +1,12 @@ -matrix-synapse-py3 (0.99.1) UNRELEASED; urgency=medium +matrix-synapse-py3 (0.99.1) stable; urgency=medium + [ Damjan Georgievski ] * Added ExecReload= in service unit file to send a HUP signal - -- Damjan Georgievski Thu, 14 Feb 2019 12:53:13 +0000 + [ Synapse Packaging team ] + * New synapse release 0.99.1 + + -- Synapse Packaging team Thu, 14 Feb 2019 14:12:26 +0000 matrix-synapse-py3 (0.99.0) stable; urgency=medium diff --git a/synapse/__init__.py b/synapse/__init__.py index 4629fa76ec..f7bac0ea4e 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd -# Copyright 2018 New Vector Ltd +# Copyright 2018-9 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -27,4 +27,4 @@ try: except ImportError: pass -__version__ = "0.99.1rc2" +__version__ = "0.99.1" From eaf4d11af9da7d6d9ce71cb83f70424bb38e0703 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 16:02:23 +0000 Subject: [PATCH 07/18] Add configurable room list publishing rules This allows specifying who and what is allowed to be published onto the public room list --- synapse/config/room_directory.py | 94 +++++++++++++++++++++++------ synapse/handlers/directory.py | 29 +++++++-- synapse/storage/state.py | 25 ++++++++ tests/config/test_room_directory.py | 73 ++++++++++++++++++++++ tests/handlers/test_directory.py | 1 + 5 files changed, 200 insertions(+), 22 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 9da13ab11b..a0869ed6ab 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -23,70 +23,121 @@ class RoomDirectoryConfig(Config): alias_creation_rules = config["alias_creation_rules"] self._alias_creation_rules = [ - _AliasRule(rule) + _RoomDirectoryRule("alias_creation_rules", rule) for rule in alias_creation_rules ] + room_list_publication_rules = config["room_list_publication_rules"] + + self._room_list_publication_rules = [ + _RoomDirectoryRule("room_list_publication_rules", rule) + for rule in room_list_publication_rules + ] + def default_config(self, config_dir_path, server_name, **kwargs): return """ # The `alias_creation` option controls who's allowed to create aliases # on this server. # # The format of this option is a list of rules that contain globs that - # match against user_id and the new alias (fully qualified with server - # name). The action in the first rule that matches is taken, which can - # currently either be "allow" or "deny". + # match against user_id, room_id and the new alias (fully qualified with + # server name). The action in the first rule that matches is taken, + # which can currently either be "allow" or "deny". + # + # Missing user_id/room_id/alias fields default to "*". # # If no rules match the request is denied. alias_creation_rules: - user_id: "*" - alias: "*" + alias: "*" # This matches alias being created + room_id: "*" + action: allow + + # The `room_list_publication_rules` option control who and what can be + # published in the public room list. 
+ # + # The format of this option is the same as that for + # `alias_creation_rules` + room_list_publication_rules: + - user_id: "*" + alias: "*" # This matches any local or canonical alias + # associated with the room + room_id: "*" action: allow """ - def is_alias_creation_allowed(self, user_id, alias): + def is_alias_creation_allowed(self, user_id, room_id, alias): """Checks if the given user is allowed to create the given alias Args: user_id (str) + room_id (str) alias (str) Returns: boolean: True if user is allowed to crate the alias """ for rule in self._alias_creation_rules: - if rule.matches(user_id, alias): + if rule.matches(user_id, room_id, [alias]): + return rule.action == "allow" + + return False + + def is_publishing_room_allowed(self, user_id, room_id, aliases): + """Checks if the given user is allowed to publish the room + + Args: + user_id (str) + room_id (str) + aliases (list[str]): any local aliases associated with the room + + Returns: + boolean: True if user can publish room + """ + for rule in self._room_list_publication_rules: + if rule.matches(user_id, room_id, aliases): return rule.action == "allow" return False -class _AliasRule(object): - def __init__(self, rule): +class _RoomDirectoryRule(object): + """Helper class to test whether a room directory action is allowed, like + creating an alias or publishing a room. + """ + + def __init__(self, option_name, rule): action = rule["action"] - user_id = rule["user_id"] - alias = rule["alias"] + user_id = rule.get("user_id", "*") + room_id = rule.get("room_id", "*") + alias = rule.get("alias", "*") if action in ("allow", "deny"): self.action = action else: raise ConfigError( - "alias_creation_rules rules can only have action of 'allow'" - " or 'deny'" + "%s rules can only have action of 'allow'" + " or 'deny'" % (option_name,) ) + self._alias_matches_all = alias == "*" + try: self._user_id_regex = glob_to_regex(user_id) self._alias_regex = glob_to_regex(alias) + self._room_id_regex = glob_to_regex(room_id) except Exception as e: raise ConfigError("Failed to parse glob into regex: %s", e) - def matches(self, user_id, alias): - """Tests if this rule matches the given user_id and alias. + def matches(self, user_id, room_id, aliases): + """Tests if this rule matches the given user_id, room_id and aliases. Args: user_id (str) - alias (str) + room_id (str) + aliases (list[str]): The associated aliases to the room. Will be a + single element for testing alias creation, and can be empty for + testing room publishing. 
Returns: boolean @@ -96,7 +147,16 @@ class _AliasRule(object): if not self._user_id_regex.match(user_id): return False - if not self._alias_regex.match(alias): + # If we are not given any aliases then this rule only matches if the + # alias glob matches all aliases + if not aliases and not self._alias_matches_all: + return False + + for alias in aliases: + if not self._alias_regex.match(alias): + return False + + if not self._room_id_regex.match(room_id): return False return True diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 6bb254f899..e5319b42a6 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -112,7 +112,9 @@ class DirectoryHandler(BaseHandler): 403, "This user is not permitted to create this alias", ) - if not self.config.is_alias_creation_allowed(user_id, room_alias.to_string()): + if not self.config.is_alias_creation_allowed( + user_id, room_id, room_alias.to_string(), + ): # Lets just return a generic message, as there may be all sorts of # reasons why we said no. TODO: Allow configurable error messages # per alias creation rule? @@ -395,9 +397,9 @@ class DirectoryHandler(BaseHandler): room_id (str) visibility (str): "public" or "private" """ - if not self.spam_checker.user_may_publish_room( - requester.user.to_string(), room_id - ): + user_id = requester.user.to_string() + + if not self.spam_checker.user_may_publish_room(user_id, room_id): raise AuthError( 403, "This user is not permitted to publish rooms to the room list" @@ -415,7 +417,24 @@ class DirectoryHandler(BaseHandler): yield self.auth.check_can_change_room_list(room_id, requester.user) - yield self.store.set_room_is_public(room_id, visibility == "public") + room_aliases = yield self.store.get_aliases_for_room(room_id) + canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) + if canonical_alias: + room_aliases.append(canonical_alias) + + making_public = visibility == "public" + + if making_public and not self.config.is_publishing_room_allowed( + user_id, room_id, room_aliases, + ): + # Lets just return a generic message, as there may be all sorts of + # reasons why we said no. TODO: Allow configurable error messages + # per alias creation rule? 
+ raise SynapseError( + 403, "Not allowed to publish room", + ) + + yield self.store.set_room_is_public(room_id, making_public) @defer.inlineCallbacks def edit_published_appservice_room_list(self, appservice_id, network_id, diff --git a/synapse/storage/state.py b/synapse/storage/state.py index d14a7b2538..6ddc4055d2 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -548,6 +548,31 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): _get_filtered_current_state_ids_txn, ) + @defer.inlineCallbacks + def get_canonical_alias_for_room(self, room_id): + """Get canonical alias for room, if any + + Args: + room_id (str) + + Returns: + Deferred[str|None]: The canonical alias, if any + """ + + state = yield self.get_filtered_current_state_ids(room_id, StateFilter.from_types( + [(EventTypes.CanonicalAlias, "")] + )) + + event_id = state.get((EventTypes.CanonicalAlias, "")) + if not event_id: + return + + event = yield self.get_event(event_id, allow_none=True) + if not event: + return + + defer.returnValue(event.content.get("canonical_alias")) + @cached(max_entries=10000, iterable=True) def get_state_group_delta(self, state_group): """Given a state group try to return a previous group and a delta between diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index f37a17d618..1d4ca0055c 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -36,6 +36,8 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): - user_id: "@gah:example.com" alias: "#goo:example.com" action: "allow" + + room_list_publication_rules: [] """) rd_config = RoomDirectoryConfig() @@ -43,25 +45,96 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): self.assertFalse(rd_config.is_alias_creation_allowed( user_id="@bob:example.com", + room_id="!test", alias="#test:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@test:example.com", + room_id="!test", alias="#unofficial_st:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@foobar:example.com", + room_id="!test", alias="#test:example.com", )) self.assertTrue(rd_config.is_alias_creation_allowed( user_id="@gah:example.com", + room_id="!test", alias="#goo:example.com", )) self.assertFalse(rd_config.is_alias_creation_allowed( user_id="@test:example.com", + room_id="!test", alias="#test:example.com", )) + + def test_room_publish_acl(self): + config = yaml.load(""" + alias_creation_rules: [] + + room_list_publication_rules: + - user_id: "*bob*" + alias: "*" + action: "deny" + - user_id: "*" + alias: "#unofficial_*" + action: "allow" + - user_id: "@foo*:example.com" + alias: "*" + action: "allow" + - user_id: "@gah:example.com" + alias: "#goo:example.com" + action: "allow" + - room_id: "!test-deny" + action: "deny" + """) + + rd_config = RoomDirectoryConfig() + rd_config.read_config(config) + + self.assertFalse(rd_config.is_publishing_room_allowed( + user_id="@bob:example.com", + room_id="!test", + aliases=["#test:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@test:example.com", + room_id="!test", + aliases=["#unofficial_st:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", + room_id="!test", + aliases=[], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", + room_id="!test", + aliases=["#goo:example.com"], + )) + + self.assertFalse(rd_config.is_publishing_room_allowed( + 
user_id="@test:example.com", + room_id="!test", + aliases=["#test:example.com"], + )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@foobar:example.com", + room_id="!test-deny", + aliases=[], + )) + + self.assertFalse(rd_config.is_publishing_room_allowed( + user_id="@gah:example.com", + room_id="!test-deny", + aliases=[], + )) diff --git a/tests/handlers/test_directory.py b/tests/handlers/test_directory.py index 8ae6556c0a..9bf395e923 100644 --- a/tests/handlers/test_directory.py +++ b/tests/handlers/test_directory.py @@ -121,6 +121,7 @@ class TestCreateAliasACL(unittest.HomeserverTestCase): "action": "allow", } ] + config["room_list_publication_rules"] = [] rd_config = RoomDirectoryConfig() rd_config.read_config(config) From 4074c8b968ee95fd61b610b6434ab38dfb18f623 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 16:04:12 +0000 Subject: [PATCH 08/18] Newsfile --- changelog.d/4647.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/4647.feature diff --git a/changelog.d/4647.feature b/changelog.d/4647.feature new file mode 100644 index 0000000000..5a5b1dcebb --- /dev/null +++ b/changelog.d/4647.feature @@ -0,0 +1 @@ +Add configurable room list publishing rules From f31101882391aa37bc6fdeb71d1559d5dcdfd4e7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 14 Feb 2019 17:10:36 +0000 Subject: [PATCH 09/18] Fix errors in acme provisioning (#4648) * Better logging for errors on startup * Fix "TypeError: '>' not supported" when starting without an existing certificate * Fix a bug where an existing certificate would be reprovisoned every day --- changelog.d/4648.bugfix | 2 ++ synapse/app/homeserver.py | 19 +++++++++++++------ synapse/config/logger.py | 2 ++ 3 files changed, 17 insertions(+), 6 deletions(-) create mode 100644 changelog.d/4648.bugfix diff --git a/changelog.d/4648.bugfix b/changelog.d/4648.bugfix new file mode 100644 index 0000000000..bd117e8664 --- /dev/null +++ b/changelog.d/4648.bugfix @@ -0,0 +1,2 @@ +Fix "TypeError: '>' not supported" when starting without an existing certificate. +Fix a bug where an existing certificate would be reprovisoned every day. diff --git a/synapse/app/homeserver.py b/synapse/app/homeserver.py index dbd9a83877..05a97979ec 100755 --- a/synapse/app/homeserver.py +++ b/synapse/app/homeserver.py @@ -1,6 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2019 New Vector Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,11 +15,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function + import gc import logging import os import sys -import traceback from six import iteritems @@ -27,6 +29,7 @@ from prometheus_client import Gauge from twisted.application import service from twisted.internet import defer, reactor +from twisted.python.failure import Failure from twisted.web.resource import EncodingResourceWrapper, NoResource from twisted.web.server import GzipEncoderFactory from twisted.web.static import File @@ -394,10 +397,10 @@ def setup(config_options): # is less than our re-registration threshold. 
provision = False - if (cert_days_remaining is None): - provision = True - - if cert_days_remaining > hs.config.acme_reprovision_threshold: + if ( + cert_days_remaining is None or + cert_days_remaining < hs.config.acme_reprovision_threshold + ): provision = True if provision: @@ -438,7 +441,11 @@ def setup(config_options): hs.get_datastore().start_doing_background_updates() except Exception: # Print the exception and bail out. - traceback.print_exc(file=sys.stderr) + print("Error during startup:", file=sys.stderr) + + # this gives better tracebacks than traceback.print_exc() + Failure().printTraceback(file=sys.stderr) + if reactor.running: reactor.stop() sys.exit(1) diff --git a/synapse/config/logger.py b/synapse/config/logger.py index 4b938053fb..9b5994d55e 100644 --- a/synapse/config/logger.py +++ b/synapse/config/logger.py @@ -242,3 +242,5 @@ def setup_logging(config, use_worker_options=False): [_log], redirectStandardIO=not config.no_redirect_stdio, ) + if not config.no_redirect_stdio: + print("Redirected stdout/stderr to logs") From f595d6ac577696744e56da3429a53b2a0eb1d952 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Feb 2019 17:20:02 +0000 Subject: [PATCH 10/18] 0.99.1.1 --- CHANGES.md | 10 ++++++++++ changelog.d/4648.bugfix | 2 -- debian/changelog | 6 ++++++ synapse/__init__.py | 2 +- 4 files changed, 17 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/4648.bugfix diff --git a/CHANGES.md b/CHANGES.md index 872ac440b5..f1a9d58e4d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,13 @@ +Synapse 0.99.1.1 (2019-02-14) +============================= + +Bugfixes +-------- + +- Fix "TypeError: '>' not supported" when starting without an existing certificate. + Fix a bug where an existing certificate would be reprovisoned every day. ([\#4648](https://github.com/matrix-org/synapse/issues/4648)) + + Synapse 0.99.1 (2019-02-14) =========================== diff --git a/changelog.d/4648.bugfix b/changelog.d/4648.bugfix deleted file mode 100644 index bd117e8664..0000000000 --- a/changelog.d/4648.bugfix +++ /dev/null @@ -1,2 +0,0 @@ -Fix "TypeError: '>' not supported" when starting without an existing certificate. -Fix a bug where an existing certificate would be reprovisoned every day. 
diff --git a/debian/changelog b/debian/changelog index bc1ba153e3..124128920b 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (0.99.1.1) stable; urgency=medium + + * New synapse release 0.99.1.1 + + -- Synapse Packaging team Thu, 14 Feb 2019 17:19:44 +0000 + matrix-synapse-py3 (0.99.1) stable; urgency=medium [ Damjan Georgievski ] diff --git a/synapse/__init__.py b/synapse/__init__.py index f7bac0ea4e..2004375f98 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -27,4 +27,4 @@ try: except ImportError: pass -__version__ = "0.99.1" +__version__ = "0.99.1.1" From 649fe1c2be69676ffd73c9ad1807ea0bc747b003 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 14 Feb 2019 17:29:40 +0000 Subject: [PATCH 11/18] Fix debian build dockerfile Make sure it refreshes the apt cache before trying to install stuff --- docker/Dockerfile-dhvirtualenv | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile-dhvirtualenv b/docker/Dockerfile-dhvirtualenv index ab28e49291..224c92352d 100644 --- a/docker/Dockerfile-dhvirtualenv +++ b/docker/Dockerfile-dhvirtualenv @@ -58,7 +58,11 @@ RUN apt-get update -qq -o Acquire::Languages=none \ sqlite3 COPY --from=builder /dh-virtualenv_1.1-1_all.deb / -RUN apt-get install -yq /dh-virtualenv_1.1-1_all.deb + +# install dhvirtualenv. Update the apt cache again first, in case we got a +# cached cache from docker the first time. +RUN apt-get update -qq -o Acquire::Languages=none \ + && apt-get install -yq /dh-virtualenv_1.1-1_all.deb WORKDIR /synapse/source ENTRYPOINT ["bash","/synapse/source/docker/build_debian.sh"] From f666fe36d7a17d12e962461aeba61f5b7ca43ccf Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 18:07:24 +0000 Subject: [PATCH 12/18] Fixup comments --- synapse/config/room_directory.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index a0869ed6ab..719892be7a 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -48,20 +48,21 @@ class RoomDirectoryConfig(Config): # # If no rules match the request is denied. alias_creation_rules: - - user_id: "*" - alias: "*" # This matches alias being created - room_id: "*" + - user_id: "*" # Matches agaisnt the creator of the alias + alias: "*" # Matches against the alias being created + room_id: "*" # Matches against the room ID the alias is being + # pointed at action: allow - # The `room_list_publication_rules` option control who and what can be - # published in the public room list. + # The `room_list_publication_rules` option controls who can publish and + # which rooms can be published in the public room list. 
# # The format of this option is the same as that for # `alias_creation_rules` room_list_publication_rules: - - user_id: "*" - alias: "*" # This matches any local or canonical alias - # associated with the room + - user_id: "*" # Matches against the user publishing the room + alias: "*" # Matches against any current local or canonical + # aliases associated with the room room_id: "*" action: allow """ @@ -107,6 +108,12 @@ class _RoomDirectoryRule(object): """ def __init__(self, option_name, rule): + """ + Args: + option_name (str): Name of the config option this rule belongs to + rule (dict): The rule as specified in the config + """ + action = rule["action"] user_id = rule.get("user_id", "*") room_id = rule.get("room_id", "*") From f61b2068e633b8ea11453992749f696d0e35e7d5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 18:08:09 +0000 Subject: [PATCH 13/18] Only fetch aliases when publishing rooms --- synapse/handlers/directory.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index e5319b42a6..8b113307d2 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -417,22 +417,22 @@ class DirectoryHandler(BaseHandler): yield self.auth.check_can_change_room_list(room_id, requester.user) - room_aliases = yield self.store.get_aliases_for_room(room_id) - canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) - if canonical_alias: - room_aliases.append(canonical_alias) - making_public = visibility == "public" + if making_public: + room_aliases = yield self.store.get_aliases_for_room(room_id) + canonical_alias = yield self.store.get_canonical_alias_for_room(room_id) + if canonical_alias: + room_aliases.append(canonical_alias) - if making_public and not self.config.is_publishing_room_allowed( - user_id, room_id, room_aliases, - ): - # Lets just return a generic message, as there may be all sorts of - # reasons why we said no. TODO: Allow configurable error messages - # per alias creation rule? - raise SynapseError( - 403, "Not allowed to publish room", - ) + if not self.config.is_publishing_room_allowed( + user_id, room_id, room_aliases, + ): + # Lets just return a generic message, as there may be all sorts of + # reasons why we said no. TODO: Allow configurable error messages + # per alias creation rule? + raise SynapseError( + 403, "Not allowed to publish room", + ) yield self.store.set_room_is_public(room_id, making_public) From cb12a377082f2241ee8a8280810e9e38fb69778d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 18:16:32 +0000 Subject: [PATCH 14/18] Clarify and fix behaviour when there are multiple aliases --- synapse/config/room_directory.py | 22 +++++++++++++++++----- tests/config/test_room_directory.py | 6 ++++++ 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 719892be7a..af84adf5fa 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -58,7 +58,11 @@ class RoomDirectoryConfig(Config): # which rooms can be published in the public room list. # # The format of this option is the same as that for - # `alias_creation_rules` + # `alias_creation_rules`. + # + # If the room has one or more aliases associated with it, the rules are + # run against each alias. If there are no aliases then only rules with + # `alias: *` match. 
room_list_publication_rules: - user_id: "*" # Matches against the user publishing the room alias: "*" # Matches against any current local or canonical @@ -156,11 +160,19 @@ class _RoomDirectoryRule(object): # If we are not given any aliases then this rule only matches if the # alias glob matches all aliases - if not aliases and not self._alias_matches_all: - return False + matched = False + if not aliases: + if not self._alias_matches_all: + return False + else: + # Otherwise, we just need one alias to match + matched = False + for alias in aliases: + if self._alias_regex.match(alias): + matched = True + break - for alias in aliases: - if not self._alias_regex.match(alias): + if not matched: return False if not self._room_id_regex.match(room_id): diff --git a/tests/config/test_room_directory.py b/tests/config/test_room_directory.py index 1d4ca0055c..3dc2631523 100644 --- a/tests/config/test_room_directory.py +++ b/tests/config/test_room_directory.py @@ -138,3 +138,9 @@ class RoomDirectoryConfigTestCase(unittest.TestCase): room_id="!test-deny", aliases=[], )) + + self.assertTrue(rd_config.is_publishing_room_allowed( + user_id="@test:example.com", + room_id="!test", + aliases=["#unofficial_st:example.com", "#blah:example.com"], + )) From 8e32f26cb8f8de8554b8b75870026fe80f45add3 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 14 Feb 2019 18:21:24 +0000 Subject: [PATCH 15/18] Clarify comments --- synapse/config/room_directory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index af84adf5fa..f86626c354 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -60,9 +60,9 @@ class RoomDirectoryConfig(Config): # The format of this option is the same as that for # `alias_creation_rules`. # - # If the room has one or more aliases associated with it, the rules are - # run against each alias. If there are no aliases then only rules with - # `alias: *` match. + # If the room has one or more aliases associated with it, only one of + # the aliases needs to match the alias rule. If there are no aliases + # then only rules with `alias: *` match. room_list_publication_rules: - user_id: "*" # Matches against the user publishing the room alias: "*" # Matches against any current local or canonical From 02c46acc6a63b9e56039b5d7e24cc97ed9a4636d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 15 Feb 2019 10:17:13 +0000 Subject: [PATCH 16/18] Fixup comments --- synapse/config/room_directory.py | 35 ++++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index f86626c354..6815d6e9b7 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -46,12 +46,20 @@ class RoomDirectoryConfig(Config): # # Missing user_id/room_id/alias fields default to "*". # - # If no rules match the request is denied. + # If no rules match the request is denied. An empty list means no one + # can create aliases. 
+ # + # Options for the rules include: + # + # user_id: Matches against the creator of the alias + # alias: Matches against the alias being created + # room_id: Matches against the room ID the alias is being pointed at + # action: Whether to "allow" or "deny" the request if the rule matches + # alias_creation_rules: - - user_id: "*" # Matches agaisnt the creator of the alias - alias: "*" # Matches against the alias being created - room_id: "*" # Matches against the room ID the alias is being - # pointed at + - user_id: "*" + alias: "*" + room_id: "*" action: allow # The `room_list_publication_rules` option controls who can publish and @@ -63,10 +71,21 @@ class RoomDirectoryConfig(Config): # If the room has one or more aliases associated with it, only one of # the aliases needs to match the alias rule. If there are no aliases # then only rules with `alias: *` match. + # + # If no rules match the request is denied. An empty list means no one + # can publish rooms. + # + # Options for the rules include: + # + # user_id: Matches agaisnt the creator of the alias + # room_id: Matches against the room ID being published + # alias: Matches against any current local or canonical aliases + # associated with the room + # action: Whether to "allow" or "deny" the request if the rule matches + # room_list_publication_rules: - - user_id: "*" # Matches against the user publishing the room - alias: "*" # Matches against any current local or canonical - # aliases associated with the room + - user_id: "*" + alias: "*" room_id: "*" action: allow """ From 02c729d6b0f6b8f41455737cde0b8aeb39782a7e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 15 Feb 2019 10:17:43 +0000 Subject: [PATCH 17/18] Hoist up checks to reduce overall work --- synapse/config/room_directory.py | 34 +++++++++++++++----------------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index 6815d6e9b7..aa113f0edf 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -177,24 +177,22 @@ class _RoomDirectoryRule(object): if not self._user_id_regex.match(user_id): return False - # If we are not given any aliases then this rule only matches if the - # alias glob matches all aliases - matched = False - if not aliases: - if not self._alias_matches_all: - return False - else: - # Otherwise, we just need one alias to match - matched = False - for alias in aliases: - if self._alias_regex.match(alias): - matched = True - break - - if not matched: - return False - if not self._room_id_regex.match(room_id): return False - return True + # We only have alias checks left, so we can short circuit if the alias + # rule matches everything. + if self._alias_matches_all: + return True + + # If we are not given any aliases then this rule only matches if the + # alias glob matches all aliases, which we checked above. 
+ if not aliases: + return False + + # Otherwise, we just need one alias to match + for alias in aliases: + if self._alias_regex.match(alias): + return True + + return False From b99c532c1c5d79cdaea20e68bf65945f056a156d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 15 Feb 2019 10:53:39 +0000 Subject: [PATCH 18/18] Move defaults up into code --- synapse/config/room_directory.py | 62 +++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 20 deletions(-) diff --git a/synapse/config/room_directory.py b/synapse/config/room_directory.py index aa113f0edf..c8e0abbae7 100644 --- a/synapse/config/room_directory.py +++ b/synapse/config/room_directory.py @@ -20,19 +20,37 @@ from ._base import Config, ConfigError class RoomDirectoryConfig(Config): def read_config(self, config): - alias_creation_rules = config["alias_creation_rules"] + alias_creation_rules = config.get("alias_creation_rules") - self._alias_creation_rules = [ - _RoomDirectoryRule("alias_creation_rules", rule) - for rule in alias_creation_rules - ] + if alias_creation_rules is not None: + self._alias_creation_rules = [ + _RoomDirectoryRule("alias_creation_rules", rule) + for rule in alias_creation_rules + ] + else: + self._alias_creation_rules = [ + _RoomDirectoryRule( + "alias_creation_rules", { + "action": "allow", + } + ) + ] - room_list_publication_rules = config["room_list_publication_rules"] + room_list_publication_rules = config.get("room_list_publication_rules") - self._room_list_publication_rules = [ - _RoomDirectoryRule("room_list_publication_rules", rule) - for rule in room_list_publication_rules - ] + if room_list_publication_rules is not None: + self._room_list_publication_rules = [ + _RoomDirectoryRule("room_list_publication_rules", rule) + for rule in room_list_publication_rules + ] + else: + self._room_list_publication_rules = [ + _RoomDirectoryRule( + "room_list_publication_rules", { + "action": "allow", + } + ) + ] def default_config(self, config_dir_path, server_name, **kwargs): return """ @@ -56,11 +74,13 @@ class RoomDirectoryConfig(Config): # room_id: Matches against the room ID the alias is being pointed at # action: Whether to "allow" or "deny" the request if the rule matches # - alias_creation_rules: - - user_id: "*" - alias: "*" - room_id: "*" - action: allow + # The default is: + # + # alias_creation_rules: + # - user_id: "*" + # alias: "*" + # room_id: "*" + # action: allow # The `room_list_publication_rules` option controls who can publish and # which rooms can be published in the public room list. @@ -83,11 +103,13 @@ class RoomDirectoryConfig(Config): # associated with the room # action: Whether to "allow" or "deny" the request if the rule matches # - room_list_publication_rules: - - user_id: "*" - alias: "*" - room_id: "*" - action: allow + # The default is: + # + # room_list_publication_rules: + # - user_id: "*" + # alias: "*" + # room_id: "*" + # action: allow """ def is_alias_creation_allowed(self, user_id, room_id, alias):
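Taken together, these patches leave `room_list_publication_rules` (and
`alias_creation_rules`) behaving as follows: rules are checked in order and
the first match wins; missing `user_id`/`room_id`/`alias` fields default to
"*"; publication rules are matched against each local or canonical alias of
the room, and rooms with no aliases only match rules whose `alias` is "*";
and, after the final patch, omitting the option entirely allows everything
while an explicitly empty list denies everything. A purely illustrative
`homeserver.yaml` sketch (the user, room and alias identifiers below are
invented, not taken from the patches):

    room_list_publication_rules:
      - user_id: "@admin:example.com"            # the server admin may publish any room
        action: allow
      - room_id: "!private-room-id:example.com"  # this room may never be published
        action: deny
      - alias: "#official_*:example.com"         # rooms carrying an "official" alias may be published
        action: allow
      # anything that matches no rule is denied

Rule order matters here: for anyone other than the admin, the room-specific
deny fires before the alias rule, so attaching an official alias to that room
does not make it publishable.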