2014-08-27 18:59:36 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 19:12:37 +01:00
|
|
|
# Copyright 2014 - 2016 OpenMarket Ltd
|
2014-08-27 18:59:36 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
from twisted.internet import defer
|
|
|
|
|
2014-12-04 16:50:01 +01:00
|
|
|
from synapse.api.constants import EventTypes, Membership
|
2016-02-15 19:21:30 +01:00
|
|
|
from synapse.api.errors import AuthError, Codes, SynapseError
|
2014-08-27 18:59:36 +02:00
|
|
|
from synapse.streams.config import PaginationConfig
|
2015-01-26 17:11:28 +01:00
|
|
|
from synapse.events.utils import serialize_event
|
2014-12-10 18:59:47 +01:00
|
|
|
from synapse.events.validator import EventValidator
|
2015-05-12 14:14:29 +02:00
|
|
|
from synapse.util import unwrapFirstError
|
2015-12-22 19:27:56 +01:00
|
|
|
from synapse.util.caches.snapshot_cache import SnapshotCache
|
2015-09-09 14:25:22 +02:00
|
|
|
from synapse.types import UserID, RoomStreamToken, StreamToken
|
2014-12-10 18:59:47 +01:00
|
|
|
|
2014-09-05 22:35:56 +02:00
|
|
|
from ._base import BaseHandler
|
2014-08-27 18:59:36 +02:00
|
|
|
|
2015-12-02 16:50:50 +01:00
|
|
|
from canonicaljson import encode_canonical_json
|
|
|
|
|
2014-08-27 18:59:36 +02:00
|
|
|
import logging
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2015-12-01 21:53:04 +01:00
|
|
|
def collect_presencelike_data(distributor, user, content):
    """Fire the "collect_presencelike_data" signal for *user*.

    Observers registered on *distributor* may fill presence-like fields
    (e.g. a display name) into *content* in place.

    Args:
        distributor: the signal distributor to fire on.
        user (UserID): the user the data is collected for.
        content (dict): event content that observers may populate.

    Returns:
        Whatever distributor.fire returns (a Deferred).
    """
    result = distributor.fire("collect_presencelike_data", user, content)
    return result
|
2015-12-01 21:53:04 +01:00
|
|
|
|
|
|
|
|
2014-09-05 22:35:56 +02:00
|
|
|
class MessageHandler(BaseHandler):
|
2014-08-27 18:59:36 +02:00
|
|
|
|
|
|
|
    def __init__(self, hs):
        """
        Args:
            hs: the homeserver object; used below to look up the shared
                resources this handler needs.
        """
        super(MessageHandler, self).__init__(hs)
        self.hs = hs
        # Used to look up current room state (see _room_initial_sync_joined).
        self.state = hs.get_state_handler()
        # Clock for time_msec() timestamps used when serializing events.
        self.clock = hs.get_clock()
        # Validates client-supplied event dicts in create_event.
        self.validator = EventValidator()
        # Caches initial-sync snapshots (see snapshot_all_rooms).
        self.snapshot_cache = SnapshotCache()
|
2014-08-27 18:59:36 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def get_message(self, msg_id=None, room_id=None, sender_id=None,
                    user_id=None):
        """ Retrieve a message.

        NOTE: currently a stub -- it performs the room-membership auth check
        and then always returns None (see the TODO below).

        Args:
            msg_id (str): The message ID to obtain.
            room_id (str): The room where the message resides.
            sender_id (str): The user ID of the user who sent the message.
            user_id (str): The user ID of the user making this request.
        Returns:
            The message, or None if no message exists.
        Raises:
            SynapseError if something went wrong.
        """
        yield self.auth.check_joined_room(room_id, user_id)

        # Pull out the message from the db
        # msg = yield self.store.get_message(
        #     room_id=room_id,
        #     msg_id=msg_id,
        #     user_id=sender_id
        # )

        # TODO (erikj): Once we work out the correct c-s api we need to think
        # on how to do this.

        defer.returnValue(None)
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def get_messages(self, requester, room_id=None, pagin_config=None,
                     as_client_event=True):
        """Get messages in a room.

        Args:
            requester (Requester): The user requesting messages.
            room_id (str): The room they want messages from.
            pagin_config (synapse.api.streams.PaginationConfig): The pagination
                config rules to apply, if any.
            as_client_event (bool): True to get events in client-server format.
        Returns:
            dict: Pagination API results
        """
        user_id = requester.user.to_string()
        data_source = self.hs.get_event_sources().sources["room"]

        if pagin_config.from_token:
            room_token = pagin_config.from_token.room_key
        else:
            # No start token supplied: start from "now", paginating backwards.
            pagin_config.from_token = (
                yield self.hs.get_event_sources().get_current_token(
                    direction='b'
                )
            )
            room_token = pagin_config.from_token.room_key

        room_token = RoomStreamToken.parse(room_token)

        # Re-serialize the parsed token so from_token's room_key is in
        # canonical form.
        pagin_config.from_token = pagin_config.from_token.copy_and_replace(
            "room_key", str(room_token)
        )

        source_config = pagin_config.get_source_config("room")

        # Raises AuthError unless the user is/was in the room or the room is
        # world readable; member_event_id is None in the world-readable case.
        membership, member_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )

        if source_config.direction == 'b':
            # if we're going backwards, we might need to backfill. This
            # requires that we have a topo token.
            if room_token.topological:
                max_topo = room_token.topological
            else:
                max_topo = yield self.store.get_max_topological_token_for_stream_and_room(
                    room_id, room_token.stream
                )

            if membership == Membership.LEAVE:
                # If they have left the room then clamp the token to be before
                # they left the room, to save the effort of loading from the
                # database.
                leave_token = yield self.store.get_topological_token_for_event(
                    member_event_id
                )
                leave_token = RoomStreamToken.parse(leave_token)
                if leave_token.topological < max_topo:
                    source_config.from_key = str(leave_token)

            # NOTE(review): presumably fetches older events over federation
            # when our local history is short -- confirm in federation_handler.
            yield self.hs.get_handlers().federation_handler.maybe_backfill(
                room_id, max_topo
            )

        events, next_key = yield data_source.get_pagination_rows(
            requester.user, source_config, room_id
        )

        next_token = pagin_config.from_token.copy_and_replace(
            "room_key", next_key
        )

        if not events:
            # Early exit: defer.returnValue raises, ending the generator here.
            defer.returnValue({
                "chunk": [],
                "start": pagin_config.from_token.to_string(),
                "end": next_token.to_string(),
            })

        # Drop events this user is not allowed to see (helper on BaseHandler).
        events = yield self._filter_events_for_client(
            user_id,
            events,
            is_peeking=(member_event_id is None),
        )

        time_now = self.clock.time_msec()

        chunk = {
            "chunk": [
                serialize_event(e, time_now, as_client_event)
                for e in events
            ],
            "start": pagin_config.from_token.to_string(),
            "end": next_token.to_string(),
        }

        defer.returnValue(chunk)
|
|
|
|
|
2014-12-04 16:50:01 +01:00
|
|
|
    @defer.inlineCallbacks
    def create_event(self, event_dict, token_id=None, txn_id=None):
        """
        Given a dict from a client, create a new event.

        Creates an FrozenEvent object, filling out auth_events, prev_events,
        etc.

        Adds display names to Join membership events.

        Args:
            event_dict (dict): An entire event.
            token_id: If given, stored on the built event as
                internal_metadata.token_id.
            txn_id: If given, stored on the built event as
                internal_metadata.txn_id (presumably the client transaction
                ID -- confirm with callers).

        Returns:
            Tuple of created event (FrozenEvent), Context
        """
        builder = self.event_builder_factory.new(event_dict)

        # Reject malformed client-supplied events before building anything.
        self.validator.validate_new(builder)

        if builder.type == EventTypes.Member:
            membership = builder.content.get("membership", None)
            target = UserID.from_string(builder.state_key)

            if membership == Membership.JOIN:
                # If event doesn't include a display name, add one.
                yield collect_presencelike_data(
                    self.distributor, target, builder.content
                )
            elif membership == Membership.INVITE:
                profile = self.hs.get_handlers().profile_handler
                content = builder.content

                # Best effort: enrich the invite with the invitee's profile,
                # but never let a profile lookup failure block the invite.
                try:
                    content["displayname"] = yield profile.get_displayname(target)
                    content["avatar_url"] = yield profile.get_avatar_url(target)
                except Exception as e:
                    logger.info(
                        "Failed to get profile information for %r: %s",
                        target, e
                    )

        if token_id is not None:
            builder.internal_metadata.token_id = token_id

        if txn_id is not None:
            builder.internal_metadata.txn_id = txn_id

        event, context = yield self._create_new_client_event(
            builder=builder,
        )
        defer.returnValue((event, context))
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def send_nonmember_event(self, requester, event, context, ratelimit=True):
        """
        Persists and notifies local clients and federation of an event.

        Args:
            requester (Requester): The user sending the event.
            event (FrozenEvent): the event to send.
            context (Context): the context of the event.
            ratelimit (bool): Whether to rate limit this send.

        Raises:
            SynapseError: if the event is an m.room.member event; membership
                events must go through the member-event codepath instead.
        """
        if event.type == EventTypes.Member:
            raise SynapseError(
                500,
                "Tried to send member event through non-member codepath"
            )

        user = UserID.from_string(event.sender)

        assert self.hs.is_mine(user), "User must be our own: %s" % (user,)

        if event.is_state():
            # If an identical state event is already in place, return the
            # existing one instead of persisting a duplicate.
            prev_state = self.deduplicate_state_event(event, context)
            if prev_state is not None:
                defer.returnValue(prev_state)

        yield self.handle_new_client_event(
            requester=requester,
            event=event,
            context=context,
            ratelimit=ratelimit,
        )

        if event.type == EventTypes.Message:
            # Sending a message counts as user activity for presence purposes.
            presence = self.hs.get_handlers().presence_handler
            yield presence.bump_presence_active_time(user)
|
2014-12-19 15:30:57 +01:00
|
|
|
|
2016-02-15 19:21:30 +01:00
|
|
|
def deduplicate_state_event(self, event, context):
|
2016-02-16 15:25:23 +01:00
|
|
|
"""
|
|
|
|
Checks whether event is in the latest resolved state in context.
|
|
|
|
|
|
|
|
If so, returns the version of the event in context.
|
|
|
|
Otherwise, returns None.
|
|
|
|
"""
|
|
|
|
prev_event = context.current_state.get((event.type, event.state_key))
|
|
|
|
if prev_event and event.user_id == prev_event.user_id:
|
|
|
|
prev_content = encode_canonical_json(prev_event.content)
|
2016-02-15 19:21:30 +01:00
|
|
|
next_content = encode_canonical_json(event.content)
|
|
|
|
if prev_content == next_content:
|
2016-02-16 15:25:23 +01:00
|
|
|
return prev_event
|
2016-02-15 19:21:30 +01:00
|
|
|
return None
|
|
|
|
|
2016-01-15 17:27:26 +01:00
|
|
|
    @defer.inlineCallbacks
    def create_and_send_nonmember_event(
        self,
        requester,
        event_dict,
        ratelimit=True,
        txn_id=None
    ):
        """
        Creates an event, then sends it.

        See self.create_event and self.send_nonmember_event.

        Args:
            requester (Requester): The user sending the event; their access
                token ID is attached to the created event.
            event_dict (dict): An entire event, as for self.create_event.
            ratelimit (bool): Whether to rate limit this send.
            txn_id: Optional transaction ID, passed to self.create_event.

        Returns:
            The created event.
        """
        event, context = yield self.create_event(
            event_dict,
            token_id=requester.access_token_id,
            txn_id=txn_id
        )
        yield self.send_nonmember_event(
            requester,
            event,
            context,
            ratelimit=ratelimit,
        )
        defer.returnValue(event)
|
|
|
|
|
2014-08-27 18:59:36 +02:00
|
|
|
    @defer.inlineCallbacks
    def get_room_data(self, user_id=None, room_id=None,
                      event_type=None, state_key="", is_guest=False):
        """ Get data from a room.

        Args:
            user_id (str): The user making the request.
            room_id (str): The room to read state from.
            event_type (str): The state event type to look up.
            state_key (str): The state key of the event to look up.
            is_guest (bool): Unused in this method; kept for interface
                compatibility with callers.
        Returns:
            The path data content.
        Raises:
            SynapseError if something went wrong.
        """
        # Raises AuthError unless the user is/was in the room or the room is
        # world readable (reported as JOIN membership).
        membership, membership_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )

        if membership == Membership.JOIN:
            data = yield self.state_handler.get_current_state(
                room_id, event_type, state_key
            )
        elif membership == Membership.LEAVE:
            # The user has left: serve the state as it was at their leave
            # event rather than the room's current state.
            key = (event_type, state_key)
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], [key]
            )
            data = room_state[membership_event_id].get(key)

        defer.returnValue(data)
|
|
|
|
|
2014-09-23 16:35:58 +02:00
|
|
|
@defer.inlineCallbacks
|
2016-01-20 16:34:07 +01:00
|
|
|
def _check_in_room_or_world_readable(self, room_id, user_id):
|
2015-11-12 14:37:07 +01:00
|
|
|
try:
|
|
|
|
# check_user_was_in_room will return the most recent membership
|
|
|
|
# event for the user if:
|
|
|
|
# * The user is a non-guest user, and was ever in the room
|
|
|
|
# * The user is a guest user, and has joined the room
|
|
|
|
# else it will throw.
|
2015-11-05 15:32:26 +01:00
|
|
|
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
|
|
|
|
defer.returnValue((member_event.membership, member_event.event_id))
|
2015-11-12 14:37:07 +01:00
|
|
|
return
|
2016-01-20 16:34:07 +01:00
|
|
|
except AuthError:
|
2015-11-12 16:02:00 +01:00
|
|
|
visibility = yield self.state_handler.get_current_state(
|
|
|
|
room_id, EventTypes.RoomHistoryVisibility, ""
|
|
|
|
)
|
|
|
|
if (
|
|
|
|
visibility and
|
|
|
|
visibility.content["history_visibility"] == "world_readable"
|
|
|
|
):
|
|
|
|
defer.returnValue((Membership.JOIN, None))
|
|
|
|
return
|
2015-11-12 14:37:07 +01:00
|
|
|
raise AuthError(
|
|
|
|
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
|
|
|
)
|
2015-11-05 15:32:26 +01:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def get_state_events(self, user_id, room_id, is_guest=False):
        """Retrieve all state events for a given room. If the user is
        joined to the room then return the current state. If the user has
        left the room return the state events from when they left.

        Args:
            user_id(str): The user requesting state events.
            room_id(str): The room ID to get all state events from.
            is_guest(bool): Unused in this method; kept for interface
                compatibility with callers.
        Returns:
            A list of dicts representing state events. [{}, {}, {}]
        """
        # Raises AuthError unless the user is/was in the room or the room is
        # world readable (reported as JOIN membership).
        membership, membership_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id
        )

        if membership == Membership.JOIN:
            room_state = yield self.state_handler.get_current_state(room_id)
        elif membership == Membership.LEAVE:
            # State as of the user's own leave event.
            room_state = yield self.store.get_state_for_events(
                [membership_event_id], None
            )
            room_state = room_state[membership_event_id]

        now = self.clock.time_msec()
        defer.returnValue(
            [serialize_event(c, now) for c in room_state.values()]
        )
|
2014-09-23 16:35:58 +02:00
|
|
|
|
2015-10-08 18:19:42 +02:00
|
|
|
def snapshot_all_rooms(self, user_id=None, pagin_config=None,
|
|
|
|
as_client_event=True, include_archived=False):
|
2015-12-22 19:53:47 +01:00
|
|
|
"""Retrieve a snapshot of all rooms the user is invited or has joined.
|
|
|
|
|
|
|
|
This snapshot may include messages for all rooms where the user is
|
|
|
|
joined, depending on the pagination config.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The ID of the user making the request.
|
|
|
|
pagin_config (synapse.api.streams.PaginationConfig): The pagination
|
|
|
|
config used to determine how many messages *PER ROOM* to return.
|
|
|
|
as_client_event (bool): True to get events in client-server format.
|
|
|
|
include_archived (bool): True to get rooms that the user has left
|
|
|
|
Returns:
|
|
|
|
A list of dicts with "room_id" and "membership" keys for all rooms
|
|
|
|
the user is currently invited or joined in on. Rooms where the user
|
|
|
|
is joined on, may return a "messages" key with messages, depending
|
|
|
|
on the specified PaginationConfig.
|
|
|
|
"""
|
2015-12-22 19:27:56 +01:00
|
|
|
key = (
|
|
|
|
user_id,
|
|
|
|
pagin_config.from_token,
|
|
|
|
pagin_config.to_token,
|
|
|
|
pagin_config.direction,
|
|
|
|
pagin_config.limit,
|
|
|
|
as_client_event,
|
|
|
|
include_archived,
|
|
|
|
)
|
|
|
|
now_ms = self.clock.time_msec()
|
|
|
|
result = self.snapshot_cache.get(now_ms, key)
|
|
|
|
if result is not None:
|
|
|
|
return result
|
|
|
|
|
|
|
|
return self.snapshot_cache.set(now_ms, key, self._snapshot_all_rooms(
|
|
|
|
user_id, pagin_config, as_client_event, include_archived
|
|
|
|
))
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def _snapshot_all_rooms(self, user_id=None, pagin_config=None,
                            as_client_event=True, include_archived=False):
        """Compute the initial-sync snapshot served by snapshot_all_rooms.

        Builds a per-room dict (metadata, recent messages, state, per-room
        account data) for each room the user is invited to or joined in --
        plus rooms they left, when include_archived is set -- and combines
        them with the user's presence, receipts and global account data.
        """

        memberships = [Membership.INVITE, Membership.JOIN]
        if include_archived:
            memberships.append(Membership.LEAVE)

        room_list = yield self.store.get_rooms_for_user_where_membership_is(
            user_id=user_id, membership_list=memberships
        )

        user = UserID.from_string(user_id)

        # Filled in by handle_room below, one entry per room.
        rooms_ret = []

        now_token = yield self.hs.get_event_sources().get_current_token()

        presence_stream = self.hs.get_event_sources().sources["presence"]
        pagination_config = PaginationConfig(from_token=now_token)
        presence, _ = yield presence_stream.get_pagination_rows(
            user, pagination_config.get_source_config("presence"), None
        )

        receipt_stream = self.hs.get_event_sources().sources["receipt"]
        receipt, _ = yield receipt_stream.get_pagination_rows(
            user, pagination_config.get_source_config("receipt"), None
        )

        tags_by_room = yield self.store.get_tags_for_user(user_id)

        account_data, account_data_by_room = (
            yield self.store.get_account_data_for_user(user_id)
        )

        public_room_ids = yield self.store.get_public_room_ids()

        # Per-room message limit; default to 10 when no limit was configured.
        limit = pagin_config.limit
        if limit is None:
            limit = 10

        @defer.inlineCallbacks
        def handle_room(event):
            # `event` is the user's membership event in this room; build
            # this room's snapshot entry and append it to rooms_ret.
            d = {
                "room_id": event.room_id,
                "membership": event.membership,
                "visibility": (
                    "public" if event.room_id in public_room_ids
                    else "private"
                ),
            }

            if event.membership == Membership.INVITE:
                time_now = self.clock.time_msec()
                d["inviter"] = event.sender

                invite_event = yield self.store.get_event(event.event_id)
                d["invite"] = serialize_event(invite_event, time_now, as_client_event)

            rooms_ret.append(d)

            # Only joined/left rooms get messages and state attached.
            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                return

            try:
                if event.membership == Membership.JOIN:
                    room_end_token = now_token.room_key
                    deferred_room_state = self.state_handler.get_current_state(
                        event.room_id
                    )
                elif event.membership == Membership.LEAVE:
                    # Use the stream position of the leave event so we only
                    # report what the user could see before leaving.
                    room_end_token = "s%d" % (event.stream_ordering,)
                    deferred_room_state = self.store.get_state_for_events(
                        [event.event_id], None
                    )
                    deferred_room_state.addCallback(
                        lambda states: states[event.event_id]
                    )

                (messages, token), current_state = yield defer.gatherResults(
                    [
                        self.store.get_recent_events_for_room(
                            event.room_id,
                            limit=limit,
                            end_token=room_end_token,
                        ),
                        deferred_room_state,
                    ]
                ).addErrback(unwrapFirstError)

                messages = yield self._filter_events_for_client(
                    user_id, messages
                )

                start_token = now_token.copy_and_replace("room_key", token[0])
                end_token = now_token.copy_and_replace("room_key", token[1])
                time_now = self.clock.time_msec()

                d["messages"] = {
                    "chunk": [
                        serialize_event(m, time_now, as_client_event)
                        for m in messages
                    ],
                    "start": start_token.to_string(),
                    "end": end_token.to_string(),
                }

                d["state"] = [
                    serialize_event(c, time_now, as_client_event)
                    for c in current_state.values()
                ]

                account_data_events = []
                tags = tags_by_room.get(event.room_id)
                if tags:
                    account_data_events.append({
                        "type": "m.tag",
                        "content": {"tags": tags},
                    })

                # NOTE: this assignment is local to handle_room and does not
                # affect the outer `account_data` (the user's global data).
                account_data = account_data_by_room.get(event.room_id, {})
                for account_data_type, content in account_data.items():
                    account_data_events.append({
                        "type": account_data_type,
                        "content": content,
                    })

                d["account_data"] = account_data_events
            except:
                # Deliberate best-effort: one broken room must not break the
                # whole snapshot, so log and carry on.
                logger.exception("Failed to get snapshot")

        # Only do N rooms at once
        n = 5
        d_list = [handle_room(e) for e in room_list]
        for i in range(0, len(d_list), n):
            yield defer.gatherResults(
                d_list[i:i + n],
                consumeErrors=True
            ).addErrback(unwrapFirstError)

        # Global (non-room) account data for the user.
        account_data_events = []
        for account_data_type, content in account_data.items():
            account_data_events.append({
                "type": account_data_type,
                "content": content,
            })

        ret = {
            "rooms": rooms_ret,
            "presence": presence,
            "account_data": account_data_events,
            "receipts": receipt,
            "end": now_token.to_string(),
        }

        defer.returnValue(ret)
|
|
|
|
|
2014-11-10 20:02:19 +01:00
|
|
|
    @defer.inlineCallbacks
    def room_initial_sync(self, requester, room_id, pagin_config=None):
        """Capture a snapshot of a room. If user is currently a member of
        the room this will be what is currently in the room. If the user left
        the room this will be what was in the room when they left.

        Args:
            requester(Requester): The user to get a snapshot for.
            room_id(str): The room to get a snapshot of.
            pagin_config(synapse.streams.config.PaginationConfig):
                The pagination config used to determine how many messages to
                return.
        Raises:
            AuthError if the user wasn't in the room.
        Returns:
            A JSON serialisable dict with the snapshot of the room.
        """

        user_id = requester.user.to_string()

        # member_event_id is None when the room is only world readable
        # (i.e. the user is peeking, not a member).
        membership, member_event_id = yield self._check_in_room_or_world_readable(
            room_id, user_id,
        )
        is_peeking = member_event_id is None

        if membership == Membership.JOIN:
            result = yield self._room_initial_sync_joined(
                user_id, room_id, pagin_config, membership, is_peeking
            )
        elif membership == Membership.LEAVE:
            result = yield self._room_initial_sync_parted(
                user_id, room_id, pagin_config, membership, member_event_id, is_peeking
            )

        # Attach the user's per-room account data (tags plus anything else).
        account_data_events = []
        tags = yield self.store.get_tags_for_room(user_id, room_id)
        if tags:
            account_data_events.append({
                "type": "m.tag",
                "content": {"tags": tags},
            })

        account_data = yield self.store.get_account_data_for_room(user_id, room_id)
        for account_data_type, content in account_data.items():
            account_data_events.append({
                "type": account_data_type,
                "content": content,
            })

        result["account_data"] = account_data_events

        defer.returnValue(result)
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def _room_initial_sync_parted(self, user_id, room_id, pagin_config,
                                  membership, member_event_id, is_peeking):
        """Room snapshot for a user who has left the room: everything is
        computed as of their leave event (member_event_id), not "now".

        Returns (via Deferred) the room_initial_sync response dict.
        """
        room_state = yield self.store.get_state_for_events(
            [member_event_id], None
        )

        # get_state_for_events returns a map keyed by event ID.
        room_state = room_state[member_event_id]

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        # Stream position of the leave event: messages are capped there.
        stream_token = yield self.store.get_stream_token_for_event(
            member_event_id
        )

        messages, token = yield self.store.get_recent_events_for_room(
            room_id,
            limit=limit,
            end_token=stream_token
        )

        messages = yield self._filter_events_for_client(
            user_id, messages, is_peeking=is_peeking
        )

        start_token = StreamToken.START.copy_and_replace("room_key", token[0])
        end_token = StreamToken.START.copy_and_replace("room_key", token[1])

        time_now = self.clock.time_msec()

        defer.returnValue({
            "membership": membership,
            "room_id": room_id,
            "messages": {
                "chunk": [serialize_event(m, time_now) for m in messages],
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": [serialize_event(s, time_now) for s in room_state.values()],
            "presence": [],
            "receipts": [],
        })
|
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def _room_initial_sync_joined(self, user_id, room_id, pagin_config,
                                  membership, is_peeking):
        """Room snapshot for a user who is currently joined (or peeking at a
        world-readable room): built from the room's current state.

        Returns (via Deferred) the room_initial_sync response dict.
        """
        current_state = yield self.state.get_current_state(
            room_id=room_id,
        )

        # TODO: These concurrently
        time_now = self.clock.time_msec()
        state = [
            serialize_event(x, time_now)
            for x in current_state.values()
        ]

        now_token = yield self.hs.get_event_sources().get_current_token()

        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        # Currently-joined members, used to fetch presence below.
        room_members = [
            m for m in current_state.values()
            if m.type == EventTypes.Member
            and m.content["membership"] == Membership.JOIN
        ]

        presence_handler = self.hs.get_handlers().presence_handler

        @defer.inlineCallbacks
        def get_presence():
            # Presence for each joined member, as serialized events.
            states = yield presence_handler.get_states(
                [m.user_id for m in room_members],
                as_event=True,
            )

            defer.returnValue(states)

        @defer.inlineCallbacks
        def get_receipts():
            receipts_handler = self.hs.get_handlers().receipts_handler
            receipts = yield receipts_handler.get_receipts_for_room(
                room_id,
                now_token.receipt_key
            )
            defer.returnValue(receipts)

        # Run the three lookups concurrently; order of results matches the
        # order of the Deferreds in the list.
        presence, receipts, (messages, token) = yield defer.gatherResults(
            [
                get_presence(),
                get_receipts(),
                self.store.get_recent_events_for_room(
                    room_id,
                    limit=limit,
                    end_token=now_token.room_key,
                )
            ],
            consumeErrors=True,
        ).addErrback(unwrapFirstError)

        messages = yield self._filter_events_for_client(
            user_id, messages, is_peeking=is_peeking,
        )

        start_token = now_token.copy_and_replace("room_key", token[0])
        end_token = now_token.copy_and_replace("room_key", token[1])

        time_now = self.clock.time_msec()

        ret = {
            "room_id": room_id,
            "messages": {
                "chunk": [serialize_event(m, time_now) for m in messages],
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": state,
            "presence": presence,
            "receipts": receipts,
        }
        # Peeking (non-member) users have no membership to report.
        if not is_peeking:
            ret["membership"] = membership

        defer.returnValue(ret)
|