diff --git a/synapse/storage/databases/main/schema/delta/58/19instance_map.sql.postgres b/synapse/storage/databases/main/schema/delta/58/19instance_map.sql.postgres
index bdbbab5738..c5788598ce 100644
--- a/synapse/storage/databases/main/schema/delta/58/19instance_map.sql.postgres
+++ b/synapse/storage/databases/main/schema/delta/58/19instance_map.sql.postgres
@@ -15,7 +15,7 @@
 
 -- A unique and immutable mapping between instance name and an integer ID. This
--- let's us refer to instances via a small ID in e.g. stream tokens, without
+-- lets us refer to instances via a small ID in e.g. stream tokens, without
 -- having to encode the full name.
 CREATE TABLE instance_map (
     instance_id SERIAL PRIMARY KEY,
diff --git a/synapse/storage/databases/main/stream.py b/synapse/storage/databases/main/stream.py
index 6eff289a0d..d280607f0d 100644
--- a/synapse/storage/databases/main/stream.py
+++ b/synapse/storage/databases/main/stream.py
@@ -219,7 +219,7 @@ def _filter_results(
 ) -> bool:
     """Filter results from fetching events in the DB against the given tokens.
 
-    This is necessary to handle the case where the tokens include positions
+    This is necessary to handle the case where the tokens include position
     maps, which we handle by fetching more than necessary from the DB and then
     filtering (rather than attempting to construct a complicated SQL query).
     """
@@ -477,7 +477,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):
 
         def f(txn):
             # To handle tokens with a non-empty instance_map we fetch more
-            # results than necessary and the filter down
+            # results than necessary and then filter down
             min_from_id = from_key.stream
             max_to_id = to_key.get_max_stream_pos()
 
@@ -538,7 +538,7 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):
 
         def f(txn):
             # To handle tokens with a non-empty instance_map we fetch more
-            # results than necessary and the filter down
+            # results than necessary and then filter down
             min_from_id = from_key.stream
             max_to_id = to_key.get_max_stream_pos()
 
@@ -1065,10 +1065,10 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore, metaclass=abc.ABCMeta):
         else:
             order = "ASC"
 
-        # The bounds for the stream tokens are complicated by the fact the fact
+        # The bounds for the stream tokens are complicated by the fact
         # that we need to handle the instance_map part of the tokens. We do this
         # by fetching all events between the min stream token and the maximum
-        # stream token (as return by `RoomStreamToken.get_max_stream_pos`) and
+        # stream token (as returned by `RoomStreamToken.get_max_stream_pos`) and
         # then filtering the results.
         if from_token.topological is not None:
             from_bound = from_token.as_tuple()
diff --git a/synapse/types.py b/synapse/types.py
index b2f18fe55e..dd8160f355 100644
--- a/synapse/types.py
+++ b/synapse/types.py
@@ -406,7 +406,7 @@ class RoomStreamToken:
     respective streams.
 
     The format of the token in such case is an initial integer min position,
-    followed by the mapping of instance ID to position seperate by '.' and '~':
+    followed by the mapping of instance ID to position separated by '.' and '~':
 
         m{min_pos}.{writer1}~{pos1}.{writer2}~{pos2}. ...
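
A side note on the `RoomStreamToken` docstring touched in the last hunk: the multi-writer token format it describes, m{min_pos}.{writer1}~{pos1}.{writer2}~{pos2}. ..., can be illustrated with a small round-trip sketch. The snippet below is not Synapse's actual parser or serializer; the helper names `serialize_token` and `parse_token` are invented for the example, and it assumes instance names contain neither '.' nor '~'.

# Illustrative sketch only -- not Synapse's implementation. Assumes instance
# names never contain '.' or '~'; helper names are invented for this example.
from typing import Dict, Tuple


def serialize_token(min_pos: int, instance_map: Dict[str, int]) -> str:
    """Render e.g. (3, {"writer1": 5, "writer2": 9}) as "m3.writer1~5.writer2~9"."""
    parts = ["m%d" % min_pos]
    parts.extend("%s~%d" % (name, pos) for name, pos in sorted(instance_map.items()))
    return ".".join(parts)


def parse_token(token: str) -> Tuple[int, Dict[str, int]]:
    """Split "m3.writer1~5.writer2~9" back into (3, {"writer1": 5, "writer2": 9})."""
    head, *entries = token.split(".")
    min_pos = int(head[1:])  # strip the leading "m"
    instance_map = {}
    for entry in entries:
        name, _, pos = entry.partition("~")
        instance_map[name] = int(pos)
    return min_pos, instance_map


if __name__ == "__main__":
    print(serialize_token(3, {"writer1": 5, "writer2": 9}))  # m3.writer1~5.writer2~9
    print(parse_token("m3.writer1~5.writer2~9"))             # (3, {'writer1': 5, 'writer2': 9})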