# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import Any, Dict, List, Optional, Tuple
import attr
from synapse.api.errors import SynapseError
from synapse.types import JsonDict
logger = logging.getLogger(__name__)


@attr.s(slots=True)
class PaginationChunk:
    """Returned by relation pagination APIs.

    Attributes:
        chunk: The rows returned by pagination
        next_batch: Token to fetch next set of results with, if
            None then there are no more results.
        prev_batch: Token to fetch previous set of results with, if
            None then there are no previous results.
    """

    chunk = attr.ib(type=List[JsonDict])
    next_batch = attr.ib(type=Optional[Any], default=None)
    prev_batch = attr.ib(type=Optional[Any], default=None)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this chunk into the JSON-compatible dict sent to clients.

        Boundary tokens are rendered via their ``to_string()`` and the
        corresponding key is omitted entirely when the token is unset.
        """
        serialized: Dict[str, Any] = {"chunk": self.chunk}

        # Render each optional boundary token, skipping absent ones so the
        # keys simply don't appear in the response.
        for field, token in (
            ("next_batch", self.next_batch),
            ("prev_batch", self.prev_batch),
        ):
            if token:
                serialized[field] = token.to_string()

        return serialized


@attr.s(frozen=True, slots=True)
class RelationPaginationToken:
    """Pagination token for relation pagination API.

    As the results are in topological order, we can use the
    `topological_ordering` and `stream_ordering` fields of the events at the
    boundaries of the chunk as pagination tokens.

    Attributes:
        topological: The topological ordering of the boundary event
        stream: The stream ordering of the boundary event.
    """

    topological = attr.ib(type=int)
    stream = attr.ib(type=int)

    @staticmethod
    def from_string(string: str) -> "RelationPaginationToken":
        """Parse a token of the form ``<topological>-<stream>``.

        Args:
            string: The serialized token, as produced by `to_string`.

        Raises:
            SynapseError: (400) if the string is not a valid token.
        """
        try:
            t, s = string.split("-")
            return RelationPaginationToken(int(t), int(s))
        except ValueError as e:
            # Chain the underlying parse error so it isn't lost from
            # tracebacks/debug logs when the SynapseError is reported.
            raise SynapseError(400, "Invalid token") from e

    def to_string(self) -> str:
        """Serialize to the wire format accepted by `from_string`."""
        return "%d-%d" % (self.topological, self.stream)

    def as_tuple(self) -> Tuple[Any, ...]:
        """Return ``(topological, stream)``, e.g. for use as a cache key."""
        return attr.astuple(self)


@attr.s(frozen=True, slots=True)
class AggregationPaginationToken:
    """Pagination token for relation aggregation pagination API.

    As the results are order by count and then MAX(stream_ordering) of the
    aggregation groups, we can just use them as our pagination token.

    Attributes:
        count: The count of relations in the boundary group.
        stream: The MAX stream ordering in the boundary group.
    """

    count = attr.ib(type=int)
    stream = attr.ib(type=int)

    @staticmethod
    def from_string(string: str) -> "AggregationPaginationToken":
        """Parse a token of the form ``<count>-<stream>``.

        Args:
            string: The serialized token, as produced by `to_string`.

        Raises:
            SynapseError: (400) if the string is not a valid token.
        """
        try:
            c, s = string.split("-")
            return AggregationPaginationToken(int(c), int(s))
        except ValueError as e:
            # Chain the underlying parse error so it isn't lost from
            # tracebacks/debug logs when the SynapseError is reported.
            raise SynapseError(400, "Invalid token") from e

    def to_string(self) -> str:
        """Serialize to the wire format accepted by `from_string`."""
        return "%d-%d" % (self.count, self.stream)

    def as_tuple(self) -> Tuple[Any, ...]:
        """Return ``(count, stream)``, e.g. for use as a cache key."""
        return attr.astuple(self)