# Copyright 2019 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import (
    TYPE_CHECKING,
    Collection,
    Dict,
    Iterable,
    List,
    Optional,
    Set,
    Tuple,
    Union,
    cast,
)

import attr
from frozendict import frozendict

from synapse.api.constants import RelationTypes
from synapse.events import EventBase
from synapse.storage._base import SQLBaseStore
from synapse.storage.database import (
    DatabasePool,
    LoggingDatabaseConnection,
    LoggingTransaction,
    make_in_list_sql_clause,
)
from synapse.storage.databases.main.stream import generate_pagination_where_clause
from synapse.storage.engines import PostgresEngine
from synapse.storage.relations import AggregationPaginationToken, PaginationChunk
from synapse.types import JsonDict, RoomStreamToken, StreamToken
from synapse.util.caches.descriptors import cached, cachedList

if TYPE_CHECKING:
    from synapse.server import HomeServer
    from synapse.storage.databases.main import DataStore

logger = logging.getLogger(__name__)


@attr.s(slots=True, frozen=True, auto_attribs=True)
class _ThreadAggregation:
    # The latest event in the thread.
    latest_event: EventBase
    # The latest edit to the latest event in the thread.
    latest_edit: Optional[EventBase]
    # The total number of events in the thread.
    count: int
    # True if the current user has sent an event to the thread.
    current_user_participated: bool


@attr.s(slots=True, auto_attribs=True)
class BundledAggregations:
    """
    The bundled aggregations for an event.

    Some values require additional processing during serialization.
    """

    annotations: Optional[JsonDict] = None
    references: Optional[JsonDict] = None
    replace: Optional[EventBase] = None
    thread: Optional[_ThreadAggregation] = None

    def __bool__(self) -> bool:
        return bool(self.annotations or self.references or self.replace or self.thread)
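

# A hedged sketch of how a BundledAggregations instance ends up on the wire
# (an assumption for illustration: the actual mapping lives in the event
# serializer, not in this module). Roughly, the parent event gains an
# "m.relations" entry such as:
#
#     {
#         RelationTypes.ANNOTATION: aggregations.annotations,
#         RelationTypes.REFERENCE: aggregations.references,
#         RelationTypes.REPLACE: <the serialized `replace` event>,
#         RelationTypes.THREAD: <the serialized `thread` summary>,
#     }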


class RelationsWorkerStore(SQLBaseStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        self._msc3440_enabled = hs.config.experimental.msc3440_enabled

    @cached(tree=True)
    async def get_relations_for_event(
        self,
        event_id: str,
        room_id: str,
        relation_type: Optional[str] = None,
        event_type: Optional[str] = None,
        aggregation_key: Optional[str] = None,
        limit: int = 5,
        direction: str = "b",
        from_token: Optional[StreamToken] = None,
        to_token: Optional[StreamToken] = None,
    ) -> PaginationChunk:
        """Get a list of relations for an event, ordered by topological ordering.

        Args:
            event_id: Fetch events that relate to this event ID.
            room_id: The room the event belongs to.
            relation_type: Only fetch events with this relation type, if given.
            event_type: Only fetch events with this event type, if given.
            aggregation_key: Only fetch events with this aggregation key, if given.
            limit: Only fetch the most recent `limit` events.
            direction: Whether to fetch the most recent first (`"b"`) or the
                oldest first (`"f"`).
            from_token: Fetch rows from the given token, or from the start if None.
            to_token: Fetch rows up to the given token, or up to the end if None.

        Returns:
            List of event IDs that match the requested relations. The rows are
            of the form `{"event_id": "..."}`.
        """

        where_clause = ["relates_to_id = ?", "room_id = ?"]
        where_args: List[Union[str, int]] = [event_id, room_id]

        if relation_type is not None:
            where_clause.append("relation_type = ?")
            where_args.append(relation_type)

        if event_type is not None:
            where_clause.append("type = ?")
            where_args.append(event_type)

        if aggregation_key:
            where_clause.append("aggregation_key = ?")
            where_args.append(aggregation_key)

        pagination_clause = generate_pagination_where_clause(
            direction=direction,
            column_names=("topological_ordering", "stream_ordering"),
            from_token=from_token.room_key.as_historical_tuple()
            if from_token
            else None,
            to_token=to_token.room_key.as_historical_tuple() if to_token else None,
            engine=self.database_engine,
        )

        if pagination_clause:
            where_clause.append(pagination_clause)

        if direction == "b":
            order = "DESC"
        else:
            order = "ASC"

        sql = """
            SELECT event_id, topological_ordering, stream_ordering
            FROM event_relations
            INNER JOIN events USING (event_id)
            WHERE %s
            ORDER BY topological_ordering %s, stream_ordering %s
            LIMIT ?
        """ % (
            " AND ".join(where_clause),
            order,
            order,
        )

        def _get_recent_references_for_event_txn(
            txn: LoggingTransaction,
        ) -> PaginationChunk:
            txn.execute(sql, where_args + [limit + 1])

            last_topo_id = None
            last_stream_id = None
            events = []
            for row in txn:
                events.append({"event_id": row[0]})
                last_topo_id = row[1]
                last_stream_id = row[2]

            # If there are more events, generate the next pagination key.
            next_token = None
            if len(events) > limit and last_topo_id and last_stream_id:
                next_key = RoomStreamToken(last_topo_id, last_stream_id)
                if from_token:
                    next_token = from_token.copy_and_replace("room_key", next_key)
                else:
                    next_token = StreamToken(
                        room_key=next_key,
                        presence_key=0,
                        typing_key=0,
                        receipt_key=0,
                        account_data_key=0,
                        push_rules_key=0,
                        to_device_key=0,
                        device_list_key=0,
                        groups_key=0,
                    )

            return PaginationChunk(
                chunk=list(events[:limit]), next_batch=next_token, prev_batch=from_token
            )

        return await self.db_pool.runInteraction(
            "get_recent_references_for_event", _get_recent_references_for_event_txn
        )
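
    # Illustrative usage, not part of the module (assumes a configured
    # datastore `store`; the IDs are placeholders):
    #
    #     chunk = await store.get_relations_for_event(
    #         "$parent_event", "!room:example.org",
    #         relation_type=RelationTypes.ANNOTATION, limit=3,
    #     )
    #     # chunk.chunk -> [{"event_id": "$reaction_1"}, ...]; chunk.next_batch
    #     # is only set when more than `limit` rows matched.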

    async def event_includes_relation(self, event_id: str) -> bool:
        """Check if the given event relates to another event.

        An event has a relation if it has a valid m.relates_to with a rel_type
        and event_id in the content:

        {
            "content": {
                "m.relates_to": {
                    "rel_type": "m.replace",
                    "event_id": "$other_event_id"
                }
            }
        }

        Args:
            event_id: The event to check.

        Returns:
            True if the event includes a valid relation.
        """

        result = await self.db_pool.simple_select_one_onecol(
            table="event_relations",
            keyvalues={"event_id": event_id},
            retcol="event_id",
            allow_none=True,
            desc="event_includes_relation",
        )
        return result is not None

    async def event_is_target_of_relation(self, parent_id: str) -> bool:
        """Check if the given event is the target of another event's relation.

        An event is the target of an event relation if it has a valid
        m.relates_to with a rel_type and event_id pointing to parent_id in the
        content:

        {
            "content": {
                "m.relates_to": {
                    "rel_type": "m.replace",
                    "event_id": "$parent_id"
                }
            }
        }

        Args:
            parent_id: The event to check.

        Returns:
            True if the event is the target of another event's relation.
        """

        result = await self.db_pool.simple_select_one_onecol(
            table="event_relations",
            keyvalues={"relates_to_id": parent_id},
            retcol="event_id",
            allow_none=True,
            desc="event_is_target_of_relation",
        )
        return result is not None

    @cached(tree=True)
    async def get_aggregation_groups_for_event(
        self,
        event_id: str,
        room_id: str,
        event_type: Optional[str] = None,
        limit: int = 5,
        direction: str = "b",
        from_token: Optional[AggregationPaginationToken] = None,
        to_token: Optional[AggregationPaginationToken] = None,
    ) -> PaginationChunk:
        """Get a list of annotations on the event, grouped by event type and
        aggregation key, sorted by count.

        This is used e.g. to get the type and count of reactions that have
        happened on an event.

        Args:
            event_id: Fetch events that relate to this event ID.
            room_id: The room the event belongs to.
            event_type: Only fetch events with this event type, if given.
            limit: Only fetch the `limit` groups.
            direction: Whether to fetch the highest count first (`"b"`) or
                the lowest count first (`"f"`).
            from_token: Fetch rows from the given token, or from the start if None.
            to_token: Fetch rows up to the given token, or up to the end if None.

        Returns:
            List of groups of annotations that match. Each row is a dict with
            `type`, `key` and `count` fields.
        """

        where_clause = ["relates_to_id = ?", "room_id = ?", "relation_type = ?"]
        where_args: List[Union[str, int]] = [
            event_id,
            room_id,
            RelationTypes.ANNOTATION,
        ]

        if event_type:
            where_clause.append("type = ?")
            where_args.append(event_type)

        having_clause = generate_pagination_where_clause(
            direction=direction,
            column_names=("COUNT(*)", "MAX(stream_ordering)"),
            from_token=attr.astuple(from_token) if from_token else None,  # type: ignore[arg-type]
            to_token=attr.astuple(to_token) if to_token else None,  # type: ignore[arg-type]
            engine=self.database_engine,
        )

        if direction == "b":
            order = "DESC"
        else:
            order = "ASC"

        if having_clause:
            having_clause = "HAVING " + having_clause
        else:
            having_clause = ""

        sql = """
            SELECT type, aggregation_key, COUNT(DISTINCT sender), MAX(stream_ordering)
            FROM event_relations
            INNER JOIN events USING (event_id)
            WHERE {where_clause}
            GROUP BY relation_type, type, aggregation_key
            {having_clause}
            ORDER BY COUNT(*) {order}, MAX(stream_ordering) {order}
            LIMIT ?
        """.format(
            where_clause=" AND ".join(where_clause),
            order=order,
            having_clause=having_clause,
        )

        def _get_aggregation_groups_for_event_txn(
            txn: LoggingTransaction,
        ) -> PaginationChunk:
            txn.execute(sql, where_args + [limit + 1])

            next_batch = None
            events = []
            for row in txn:
                events.append({"type": row[0], "key": row[1], "count": row[2]})
                next_batch = AggregationPaginationToken(row[2], row[3])

            if len(events) <= limit:
                next_batch = None

            return PaginationChunk(
                chunk=list(events[:limit]), next_batch=next_batch, prev_batch=from_token
            )

        return await self.db_pool.runInteraction(
            "get_aggregation_groups_for_event", _get_aggregation_groups_for_event_txn
        )
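
    # Illustrative result shape, not part of the module (placeholders only):
    #
    #     chunk = await store.get_aggregation_groups_for_event(
    #         "$parent_event", "!room:example.org"
    #     )
    #     # chunk.chunk -> [{"type": "m.reaction", "key": "👍", "count": 3}, ...],
    #     # sorted by count as described above.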

    @cached()
    def get_applicable_edit(self, event_id: str) -> Optional[EventBase]:
        raise NotImplementedError()

    @cachedList(cached_method_name="get_applicable_edit", list_name="event_ids")
    async def _get_applicable_edits(
        self, event_ids: Collection[str]
    ) -> Dict[str, Optional[EventBase]]:
        """Get the most recent edit (if any) that has happened for the given
        events.

        Correctly handles checking whether edits were allowed to happen.

        Args:
            event_ids: The original event IDs.

        Returns:
            A map of the most recent edit for each event. If there are no edits,
            the event will map to None.
        """

        # We only allow edits for `m.room.message` events that have the same sender
        # and event type. We can't assert these things during regular event auth so
        # we have to do the checks post hoc.

        # Fetches the latest edit that has the same type and sender as the
        # original, and is an `m.room.message`.
        if isinstance(self.database_engine, PostgresEngine):
            # The `DISTINCT ON` clause will pick the *first* row it encounters,
            # so ordering by origin server ts + event ID desc will ensure we get
            # the latest edit.
            sql = """
                SELECT DISTINCT ON (original.event_id) original.event_id, edit.event_id FROM events AS edit
                INNER JOIN event_relations USING (event_id)
                INNER JOIN events AS original ON
                    original.event_id = relates_to_id
                    AND edit.type = original.type
                    AND edit.sender = original.sender
                    AND edit.room_id = original.room_id
                WHERE
                    %s
                    AND relation_type = ?
                    AND edit.type = 'm.room.message'
                ORDER by original.event_id DESC, edit.origin_server_ts DESC, edit.event_id DESC
            """
        else:
            # SQLite uses a simplified query which returns all edits for an
            # original event. The results are then de-duplicated when turned into
            # a dict. Due to the chosen ordering, the latest edit stomps on
            # earlier edits.
            sql = """
                SELECT original.event_id, edit.event_id FROM events AS edit
                INNER JOIN event_relations USING (event_id)
                INNER JOIN events AS original ON
                    original.event_id = relates_to_id
                    AND edit.type = original.type
                    AND edit.sender = original.sender
                    AND edit.room_id = original.room_id
                WHERE
                    %s
                    AND relation_type = ?
                    AND edit.type = 'm.room.message'
                ORDER by edit.origin_server_ts, edit.event_id
            """

        def _get_applicable_edits_txn(txn: LoggingTransaction) -> Dict[str, str]:
            clause, args = make_in_list_sql_clause(
                txn.database_engine, "relates_to_id", event_ids
            )
            args.append(RelationTypes.REPLACE)

            txn.execute(sql % (clause,), args)
            return dict(cast(Iterable[Tuple[str, str]], txn.fetchall()))

        edit_ids = await self.db_pool.runInteraction(
            "get_applicable_edits", _get_applicable_edits_txn
        )

        edits = await self.get_events(edit_ids.values())  # type: ignore[attr-defined]

        # Map the original event IDs to the edit events.
        #
        # There might not be an edit event due to there being no edits or
        # due to the event not being known, either case is treated the same.
        return {
            original_event_id: edits.get(edit_ids.get(original_event_id))
            for original_event_id in event_ids
        }
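
    # A minimal sketch of why the two queries above agree, assuming three
    # edits of one original event. Postgres keeps only the newest
    # (original, edit) pair per original via `DISTINCT ON`; SQLite returns
    # every pair oldest-first, and the dict() construction lets later
    # (newer) pairs overwrite earlier ones:
    #
    #     dict([("$orig", "$edit1"), ("$orig", "$edit2"), ("$orig", "$edit3")])
    #     # -> {"$orig": "$edit3"}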

    @cached()
    def get_thread_summary(
        self, event_id: str
    ) -> Optional[Tuple[int, EventBase, Optional[EventBase]]]:
        raise NotImplementedError()

    @cachedList(cached_method_name="get_thread_summary", list_name="event_ids")
    async def _get_thread_summaries(
        self, event_ids: Collection[str]
    ) -> Dict[str, Optional[Tuple[int, EventBase, Optional[EventBase]]]]:
        """Get the number of threaded replies, the latest reply (if any), and
        the latest edit for that reply for the given events.

        Args:
            event_ids: Summarize the threads related to these event IDs.

        Returns:
            A map of the thread summary for each event. A missing event implies
            there are no threaded replies.

            Each summary is a tuple of:
                The number of events in the thread.
                The most recent event in the thread.
                The most recent edit to the most recent event in the thread, if applicable.
        """

        def _get_thread_summaries_txn(
            txn: LoggingTransaction,
        ) -> Tuple[Dict[str, int], Dict[str, str]]:
            # Fetch the count of threaded events and the latest event ID.
            # TODO Should this only allow m.room.message events?
            if isinstance(self.database_engine, PostgresEngine):
                # The `DISTINCT ON` clause will pick the *first* row it encounters,
                # so ordering by topological ordering + stream ordering desc will
                # ensure we get the latest event in the thread.
                sql = """
                    SELECT DISTINCT ON (parent.event_id) parent.event_id, child.event_id FROM events AS child
                    INNER JOIN event_relations USING (event_id)
                    INNER JOIN events AS parent ON
                        parent.event_id = relates_to_id
                        AND parent.room_id = child.room_id
                    WHERE
                        %s
                        AND relation_type = ?
                    ORDER BY parent.event_id, child.topological_ordering DESC, child.stream_ordering DESC
                """
            else:
                # SQLite uses a simplified query which returns all entries for a
                # thread. The first result for each thread is kept and subsequent
                # results for that thread are ignored.
                sql = """
                    SELECT parent.event_id, child.event_id FROM events AS child
                    INNER JOIN event_relations USING (event_id)
                    INNER JOIN events AS parent ON
                        parent.event_id = relates_to_id
                        AND parent.room_id = child.room_id
                    WHERE
                        %s
                        AND relation_type = ?
                    ORDER BY child.topological_ordering DESC, child.stream_ordering DESC
                """

            clause, args = make_in_list_sql_clause(
                txn.database_engine, "relates_to_id", event_ids
            )
            args.append(RelationTypes.THREAD)

            txn.execute(sql % (clause,), args)
            latest_event_ids = {}
            for parent_event_id, child_event_id in txn:
                # Only consider the latest threaded reply (by topological ordering).
                if parent_event_id not in latest_event_ids:
                    latest_event_ids[parent_event_id] = child_event_id

            # If no threads were found, bail.
            if not latest_event_ids:
                return {}, latest_event_ids

            # Fetch the number of threaded replies.
            sql = """
                SELECT parent.event_id, COUNT(child.event_id) FROM events AS child
                INNER JOIN event_relations USING (event_id)
                INNER JOIN events AS parent ON
                    parent.event_id = relates_to_id
                    AND parent.room_id = child.room_id
                WHERE
                    %s
                    AND relation_type = ?
                GROUP BY parent.event_id
            """

            # Regenerate the arguments since only threads found above could
            # possibly have any replies.
            clause, args = make_in_list_sql_clause(
                txn.database_engine, "relates_to_id", latest_event_ids.keys()
            )
            args.append(RelationTypes.THREAD)

            txn.execute(sql % (clause,), args)
            counts = dict(cast(List[Tuple[str, int]], txn.fetchall()))

            return counts, latest_event_ids

        counts, latest_event_ids = await self.db_pool.runInteraction(
            "get_thread_summaries", _get_thread_summaries_txn
        )

        latest_events = await self.get_events(latest_event_ids.values())  # type: ignore[attr-defined]

        # Check to see if any of those events are edited.
        latest_edits = await self._get_applicable_edits(latest_event_ids.values())

        # Map the event IDs to the thread summary.
        #
        # There might not be a summary due to there not being a thread or
        # due to the latest event not being known, either case is treated the same.
        summaries = {}
        for parent_event_id, latest_event_id in latest_event_ids.items():
            latest_event = latest_events.get(latest_event_id)

            summary = None
            if latest_event:
                latest_edit = latest_edits.get(latest_event_id)
                summary = (counts[parent_event_id], latest_event, latest_edit)
            summaries[parent_event_id] = summary

        return summaries
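
    # Illustrative summary value, not part of the module (placeholders only):
    #
    #     summaries = await store._get_thread_summaries(["$thread_root"])
    #     # summaries -> {"$thread_root": (2, <latest reply EventBase>, None)}
    #     # i.e. two events in the thread, the latest reply, and no edit to it.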

    @cached()
    def get_thread_participated(self, event_id: str, user_id: str) -> bool:
        raise NotImplementedError()

    @cachedList(cached_method_name="get_thread_participated", list_name="event_ids")
    async def _get_threads_participated(
        self, event_ids: Collection[str], user_id: str
    ) -> Dict[str, bool]:
        """Get whether the requesting user participated in the given threads.

        This is separate from get_thread_summaries since that can be cached across
        all users while this value is specific to the requester.

        Args:
            event_ids: The events whose threads to check.
            user_id: The user requesting the summary.

        Returns:
            A map of event ID to a boolean which is true if the requesting user
            participated in that event's thread.
        """

        def _get_threads_participated_txn(txn: LoggingTransaction) -> Set[str]:
            # Fetch whether the requester has participated or not.
            sql = """
                SELECT DISTINCT relates_to_id
                FROM events AS child
                INNER JOIN event_relations USING (event_id)
                INNER JOIN events AS parent ON
                    parent.event_id = relates_to_id
                    AND parent.room_id = child.room_id
                WHERE
                    %s
                    AND relation_type = ?
                    AND child.sender = ?
            """

            clause, args = make_in_list_sql_clause(
                txn.database_engine, "relates_to_id", event_ids
            )
            args.extend((RelationTypes.THREAD, user_id))

            txn.execute(sql % (clause,), args)
            return {row[0] for row in txn.fetchall()}

        participated_threads = await self.db_pool.runInteraction(
            "get_threads_participated", _get_threads_participated_txn
        )

        return {event_id: event_id in participated_threads for event_id in event_ids}
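
    # Illustrative result, not part of the module (placeholders only):
    #
    #     participated = await store._get_threads_participated(
    #         ["$thread_a", "$thread_b"], "@alice:example.org"
    #     )
    #     # participated -> {"$thread_a": True, "$thread_b": False}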

    async def events_have_relations(
        self,
        parent_ids: List[str],
        relation_senders: Optional[List[str]],
        relation_types: Optional[List[str]],
    ) -> List[str]:
        """Check which events have a relationship from the given senders of the
        given types.

        Args:
            parent_ids: The events being annotated.
            relation_senders: The relation senders to check.
            relation_types: The relation types to check.

        Returns:
            The IDs of the given events which have at least one relationship
            from one of the given senders of one of the given types.
        """
        # If no restrictions are given then every event trivially has the
        # required relations.
        if not relation_senders and not relation_types:
            return parent_ids

        sql = """
            SELECT relates_to_id FROM event_relations
            INNER JOIN events USING (event_id)
            WHERE
                %s;
        """

        def _get_if_events_have_relations(txn: LoggingTransaction) -> List[str]:
            clauses: List[str] = []
            clause, args = make_in_list_sql_clause(
                txn.database_engine, "relates_to_id", parent_ids
            )
            clauses.append(clause)

            if relation_senders:
                clause, temp_args = make_in_list_sql_clause(
                    txn.database_engine, "sender", relation_senders
                )
                clauses.append(clause)
                args.extend(temp_args)
            if relation_types:
                clause, temp_args = make_in_list_sql_clause(
                    txn.database_engine, "relation_type", relation_types
                )
                clauses.append(clause)
                args.extend(temp_args)

            txn.execute(sql % " AND ".join(clauses), args)

            return [row[0] for row in txn]

        return await self.db_pool.runInteraction(
            "get_if_events_have_relations", _get_if_events_have_relations
        )
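
    # Illustrative filter usage, not part of the module (placeholders only):
    #
    #     matched = await store.events_have_relations(
    #         ["$a", "$b"], relation_senders=None,
    #         relation_types=[RelationTypes.THREAD],
    #     )
    #     # matched -> the subset of ["$a", "$b"] with at least one threaded reply.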

    async def has_user_annotated_event(
        self, parent_id: str, event_type: str, aggregation_key: str, sender: str
    ) -> bool:
        """Check if a user has already annotated an event with the same key
        (e.g. already liked an event).

        Args:
            parent_id: The event being annotated
            event_type: The event type of the annotation
            aggregation_key: The aggregation key of the annotation
            sender: The sender of the annotation

        Returns:
            True if the event is already annotated.
        """

        sql = """
            SELECT 1 FROM event_relations
            INNER JOIN events USING (event_id)
            WHERE
                relates_to_id = ?
                AND relation_type = ?
                AND type = ?
                AND sender = ?
                AND aggregation_key = ?
            LIMIT 1;
        """

        def _get_if_user_has_annotated_event(txn: LoggingTransaction) -> bool:
            txn.execute(
                sql,
                (
                    parent_id,
                    RelationTypes.ANNOTATION,
                    event_type,
                    sender,
                    aggregation_key,
                ),
            )

            return bool(txn.fetchone())

        return await self.db_pool.runInteraction(
            "get_if_user_has_annotated_event", _get_if_user_has_annotated_event
        )
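
    # Illustrative duplicate-reaction guard, not part of the module
    # (placeholders only):
    #
    #     already = await store.has_user_annotated_event(
    #         "$parent_event", "m.reaction", "👍", "@alice:example.org"
    #     )
    #     # If True, the caller can reject the duplicate annotation.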

    async def _get_bundled_aggregation_for_event(
        self, event: EventBase, user_id: str
    ) -> Optional[BundledAggregations]:
        """Generate bundled aggregations for an event.

        Note that this does not use a cache, but depends on cached methods.

        Args:
            event: The event to calculate bundled aggregations for.
            user_id: The user requesting the bundled aggregations.

        Returns:
            The bundled aggregations for an event, if bundled aggregations are
            enabled and the event can have bundled aggregations.
        """

        # Do not bundle aggregations for an event which represents an edit or an
        # annotation. It does not make sense for them to have related events.
        relates_to = event.content.get("m.relates_to")
        if isinstance(relates_to, (dict, frozendict)):
            relation_type = relates_to.get("rel_type")
            if relation_type in (RelationTypes.ANNOTATION, RelationTypes.REPLACE):
                return None

        event_id = event.event_id
        room_id = event.room_id

        # The bundled aggregations to include; some values are usable directly
        # while others need more processing during serialization.
        aggregations = BundledAggregations()

        annotations = await self.get_aggregation_groups_for_event(event_id, room_id)
        if annotations.chunk:
            aggregations.annotations = await annotations.to_dict(
                cast("DataStore", self)
            )

        references = await self.get_relations_for_event(
            event_id, room_id, RelationTypes.REFERENCE, direction="f"
        )
        if references.chunk:
            aggregations.references = await references.to_dict(cast("DataStore", self))

        # Return the bundled aggregations for the caller to use during
        # serialization.
        return aggregations

    async def get_bundled_aggregations(
        self, events: Iterable[EventBase], user_id: str
    ) -> Dict[str, BundledAggregations]:
        """Generate bundled aggregations for events.

        Args:
            events: The iterable of events to calculate bundled aggregations for.
            user_id: The user requesting the bundled aggregations.

        Returns:
            A map of event ID to the bundled aggregation for the event. Not all
            events may have bundled aggregations in the results.
        """
        # The already processed event IDs. Tracked separately from the result
        # since the result omits events which do not have bundled aggregations.
        seen_event_ids = set()

        # State events and redacted events do not get bundled aggregations.
        events = [
            event
            for event in events
            if not event.is_state() and not event.internal_metadata.is_redacted()
        ]

        # event ID -> bundled aggregation in non-serialized form.
        results: Dict[str, BundledAggregations] = {}

        # Fetch other relations per event.
        for event in events:
            # De-duplicate events by ID to handle the same event requested multiple
            # times. The caches that _get_bundled_aggregation_for_event use should
            # capture this, but best to reduce work.
            if event.event_id in seen_event_ids:
                continue
            seen_event_ids.add(event.event_id)

            event_result = await self._get_bundled_aggregation_for_event(event, user_id)
            if event_result:
                results[event.event_id] = event_result

        # Fetch any edits.
        edits = await self._get_applicable_edits(seen_event_ids)
        for event_id, edit in edits.items():
            results.setdefault(event_id, BundledAggregations()).replace = edit

        # Fetch thread summaries.
        if self._msc3440_enabled:
            summaries = await self._get_thread_summaries(seen_event_ids)
            # Only fetch participated for a limited selection based on what had
            # summaries.
            participated = await self._get_threads_participated(
                summaries.keys(), user_id
            )
            for event_id, summary in summaries.items():
                if summary:
                    thread_count, latest_thread_event, edit = summary
                    results.setdefault(
                        event_id, BundledAggregations()
                    ).thread = _ThreadAggregation(
                        latest_event=latest_thread_event,
                        latest_edit=edit,
                        count=thread_count,
                        # If there's a thread summary it must also exist in the
                        # participated dictionary.
                        current_user_participated=participated[event_id],
                    )

        return results
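
    # Illustrative end-to-end usage, not part of the module (assumes a
    # configured datastore `store`, a list `events` of EventBase, and a
    # requesting `user_id`):
    #
    #     aggregations = await store.get_bundled_aggregations(events, user_id)
    #     for event in events:
    #         bundle = aggregations.get(event.event_id)
    #         if bundle and bundle.thread:
    #             logger.info(
    #                 "%s has %d threaded replies", event.event_id, bundle.thread.count
    #             )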


class RelationsStore(RelationsWorkerStore):
    pass