erikj/paginate_sync
Erik Johnston 2016-06-27 14:52:08 +01:00
parent f07f99387e
commit 4b7abedfd9
3 changed files with 18 additions and 12 deletions

View File

@@ -47,6 +47,7 @@ class SyncPaginationConfig(collections.namedtuple("SyncPaginationConfig", [
     "limit",
     "tags",
 ])):
+    "Initial pagination configuration from initial sync."
     def __init__(self, order, limit, tags):
         if order not in SYNC_PAGINATION_VALID_ORDERS:
             raise SynapseError(400, "Invalid 'order'")
@@ -838,6 +839,8 @@ class SyncHandler(object):
         for r in room_entries:
             if r.room_id in missing_state:
                 if include_all_tags:
+                    # If we're always including tagged rooms, then only
+                    # resync rooms which are newly tagged.
                     change = tag_changes.get(r.room_id)
                     if change == TAG_CHANGE_NEWLY_TAGGED:
                         r.always_include = True
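The new comment describes a narrowing of behaviour: when tagged rooms are always included, a room that needs a state resync is only force-included if its tag was added since the last sync. A rough, self-contained sketch of that rule follows; the helper name, the sentinel value, and the example data are assumptions for illustration, not the handler's actual code.

TAG_CHANGE_NEWLY_TAGGED = "newly_tagged"  # assumed sentinel value

def should_always_include(room_id, missing_state, tag_changes):
    # Mirrors the branch in the diff: of the rooms with missing state,
    # only those whose tag was just added get always_include set.
    return (
        room_id in missing_state
        and tag_changes.get(room_id) == TAG_CHANGE_NEWLY_TAGGED
    )

missing_state = {"!a:hs.example", "!b:hs.example"}
tag_changes = {"!a:hs.example": TAG_CHANGE_NEWLY_TAGGED}
assert should_always_include("!a:hs.example", missing_state, tag_changes)
assert not should_always_include("!b:hs.example", missing_state, tag_changes)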
@@ -908,13 +911,11 @@ class SyncHandler(object):
             _, bottom_ts = cutoff_list[-1]
             new_pagination_value = bottom_ts
-            logger.info("old pagination value: %r", old_pagination_value)
-            logger.info("New pagination value: %r", new_pagination_value)
-
-            # Are there any rooms that fall into the range between the
-            # old and new value?
+            # We're limited if there are any rooms that are after cutoff
+            # in the list, but still have an origin server ts from after
+            # the pagination value from the since token.
             limited = any(
-                old_pagination_value < r[1] < new_pagination_value
+                old_pagination_value < r[1]
                 for r in sorted_list[pagination_limit + extra_limit:]
             )
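The replacement check above can be exercised in isolation. A minimal sketch with made-up data, assuming sorted_list holds (room_id, origin_server_ts) pairs ordered newest first and old_pagination_value is the timestamp carried in the since token:

# Hypothetical data: three rooms ordered newest-first by origin_server_ts.
sorted_list = [("!a:hs.example", 300), ("!b:hs.example", 250), ("!c:hs.example", 120)]
pagination_limit = 2        # rooms the client asked for
extra_limit = 0
old_pagination_value = 100  # pagination value from the since token

# Same shape as the new expression: the response is limited if any room past
# the cutoff still has an origin server ts newer than the old value.
limited = any(
    old_pagination_value < r[1]
    for r in sorted_list[pagination_limit + extra_limit:]
)
assert limited  # !c:hs.example falls past the cutoff but its ts (120) > 100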
@@ -924,7 +925,7 @@ class SyncHandler(object):
                 tags=pagination_config.tags,
             )
-            to_sync_map = {key: value for key, value in cutoff_list}
+            to_sync_map = dict(cutoff_list)
         else:
             to_sync_map = {}
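The dict(cutoff_list) change is a pure simplification: for an iterable of (key, value) pairs, the dict constructor builds the same mapping as the comprehension it replaces. A tiny check with made-up entries (the actual key and value types in cutoff_list are not shown in this diff):

cutoff_list = [("!a:hs.example", 1467000000000), ("!b:hs.example", 1467000100000)]
assert {key: value for key, value in cutoff_list} == dict(cutoff_list)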

View File

@@ -98,11 +98,8 @@ class SyncRestServlet(RestServlet):
         body = parse_json_object_from_request(request)
         timeout = body.get("timeout", 0)
         since = body.get("since", None)
-        logger.info("Since: %r", since)
         extras = body.get("extras", {})
         extras = SyncExtras(
             paginate=extras.get("paginate", {}),
@@ -177,8 +174,6 @@ class SyncRestServlet(RestServlet):
             extras=extras,
         )
-        logger.info("next_batch: %r", sync_result[1]["next_batch"])
         defer.returnValue(sync_result)

     @defer.inlineCallbacks

View File

@@ -526,6 +526,16 @@ class StreamStore(SQLBaseStore):
         )

     def get_last_event_id_ts_for_room(self, room_id, token):
+        """Get the latest event_id and origin_server_ts for a room_id before a
+        given token.
+
+        Args:
+            room_id (str)
+            token (str)
+
+        Returns:
+            Dictionary with ``event_id`` and ``origin_server_ts`` keys.
+        """
         stream_ordering = RoomStreamToken.parse_stream_token(token).stream

         sql = (