2016-09-21 12:46:28 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2016 OpenMarket Ltd
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
import logging
|
|
|
|
|
2016-09-21 12:46:28 +02:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
|
|
|
from synapse.api.constants import EventTypes, Membership
|
2019-03-21 12:20:13 +01:00
|
|
|
from synapse.api.errors import AuthError, Codes, SynapseError
|
2016-09-21 12:46:28 +02:00
|
|
|
from synapse.events.validator import EventValidator
|
2017-03-15 15:27:34 +01:00
|
|
|
from synapse.handlers.presence import format_user_presence_state
|
2019-07-03 16:07:04 +02:00
|
|
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
2016-09-21 12:46:28 +02:00
|
|
|
from synapse.streams.config import PaginationConfig
|
2018-07-09 08:09:20 +02:00
|
|
|
from synapse.types import StreamToken, UserID
|
2016-09-21 12:46:28 +02:00
|
|
|
from synapse.util import unwrapFirstError
|
2018-08-10 15:50:21 +02:00
|
|
|
from synapse.util.async_helpers import concurrently_execute
|
2016-09-21 12:46:28 +02:00
|
|
|
from synapse.util.caches.snapshot_cache import SnapshotCache
|
|
|
|
from synapse.visibility import filter_events_for_client
|
|
|
|
|
|
|
|
from ._base import BaseHandler
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class InitialSyncHandler(BaseHandler):
    """Handles the v1 `/initialSync` and `/rooms/{roomId}/initialSync` APIs.

    Builds snapshots of the rooms a user is in (or of a single room),
    including recent messages, room state, presence, receipts and
    account data.
    """

    def __init__(self, hs):
        """
        Args:
            hs (synapse.server.HomeServer): the homeserver, used to pull out
                the handlers, stores and clock this handler depends on.
        """
        super(InitialSyncHandler, self).__init__(hs)
        self.hs = hs
        # State handler, used to fetch the current state of joined rooms.
        self.state = hs.get_state_handler()
        self.clock = hs.get_clock()
        self.validator = EventValidator()
        # Caches in-flight/recent full initial-sync responses so identical
        # concurrent requests share one computation (see snapshot_all_rooms).
        self.snapshot_cache = SnapshotCache()
        # Serializes events into the client-server API format.
        self._event_serializer = hs.get_event_client_serializer()
        self.storage = hs.get_storage()
        # Storage layer for historical room state at a given event.
        self.state_store = self.storage.state
2019-06-20 11:32:02 +02:00
|
|
|
def snapshot_all_rooms(
|
|
|
|
self,
|
|
|
|
user_id=None,
|
|
|
|
pagin_config=None,
|
|
|
|
as_client_event=True,
|
|
|
|
include_archived=False,
|
|
|
|
):
|
2016-09-21 12:46:28 +02:00
|
|
|
"""Retrieve a snapshot of all rooms the user is invited or has joined.
|
|
|
|
|
|
|
|
This snapshot may include messages for all rooms where the user is
|
|
|
|
joined, depending on the pagination config.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The ID of the user making the request.
|
|
|
|
pagin_config (synapse.api.streams.PaginationConfig): The pagination
|
|
|
|
config used to determine how many messages *PER ROOM* to return.
|
|
|
|
as_client_event (bool): True to get events in client-server format.
|
|
|
|
include_archived (bool): True to get rooms that the user has left
|
|
|
|
Returns:
|
|
|
|
A list of dicts with "room_id" and "membership" keys for all rooms
|
|
|
|
the user is currently invited or joined in on. Rooms where the user
|
|
|
|
is joined on, may return a "messages" key with messages, depending
|
|
|
|
on the specified PaginationConfig.
|
|
|
|
"""
|
|
|
|
key = (
|
|
|
|
user_id,
|
|
|
|
pagin_config.from_token,
|
|
|
|
pagin_config.to_token,
|
|
|
|
pagin_config.direction,
|
|
|
|
pagin_config.limit,
|
|
|
|
as_client_event,
|
|
|
|
include_archived,
|
|
|
|
)
|
|
|
|
now_ms = self.clock.time_msec()
|
|
|
|
result = self.snapshot_cache.get(now_ms, key)
|
|
|
|
if result is not None:
|
|
|
|
return result
|
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
return self.snapshot_cache.set(
|
|
|
|
now_ms,
|
|
|
|
key,
|
|
|
|
self._snapshot_all_rooms(
|
|
|
|
user_id, pagin_config, as_client_event, include_archived
|
|
|
|
),
|
|
|
|
)
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def _snapshot_all_rooms(
        self,
        user_id=None,
        pagin_config=None,
        as_client_event=True,
        include_archived=False,
    ):
        """Uncached implementation of snapshot_all_rooms: gathers the rooms,
        presence, receipts and account data for the user's initial sync.

        Args/returns are as for snapshot_all_rooms.
        """
        # Which membership states to include rooms for.
        memberships = [Membership.INVITE, Membership.JOIN]
        if include_archived:
            memberships.append(Membership.LEAVE)

        room_list = yield self.store.get_rooms_for_user_where_membership_is(
            user_id=user_id, membership_list=memberships
        )

        user = UserID.from_string(user_id)

        # Accumulates one entry per room; appended to by handle_room below.
        rooms_ret = []

        # All per-room windows are anchored at this "now" token.
        now_token = yield self.hs.get_event_sources().get_current_token()

        presence_stream = self.hs.get_event_sources().sources["presence"]
        pagination_config = PaginationConfig(from_token=now_token)
        presence, _ = yield presence_stream.get_pagination_rows(
            user, pagination_config.get_source_config("presence"), None
        )

        receipt_stream = self.hs.get_event_sources().sources["receipt"]
        receipt, _ = yield receipt_stream.get_pagination_rows(
            user, pagination_config.get_source_config("receipt"), None
        )

        tags_by_room = yield self.store.get_tags_for_user(user_id)

        account_data, account_data_by_room = yield self.store.get_account_data_for_user(
            user_id
        )

        public_room_ids = yield self.store.get_public_room_ids()

        # Per-room message limit; default to 10 if the config doesn't say.
        limit = pagin_config.limit
        if limit is None:
            limit = 10

        @defer.inlineCallbacks
        def handle_room(event):
            # `event` is the user's membership event for this room. Builds
            # the per-room snapshot dict and appends it to rooms_ret.
            d = {
                "room_id": event.room_id,
                "membership": event.membership,
                "visibility": (
                    "public" if event.room_id in public_room_ids else "private"
                ),
            }

            if event.membership == Membership.INVITE:
                time_now = self.clock.time_msec()
                d["inviter"] = event.sender

                invite_event = yield self.store.get_event(event.event_id)
                d["invite"] = yield self._event_serializer.serialize_event(
                    invite_event, time_now, as_client_event
                )

            rooms_ret.append(d)

            # Only joined/left rooms get messages, state and account data.
            if event.membership not in (Membership.JOIN, Membership.LEAVE):
                return

            try:
                if event.membership == Membership.JOIN:
                    # Window of messages ends "now"; state is current state.
                    room_end_token = now_token.room_key
                    deferred_room_state = run_in_background(
                        self.state_handler.get_current_state, event.room_id
                    )
                elif event.membership == Membership.LEAVE:
                    # Freeze the snapshot at the user's leave event: both the
                    # message window and the state are taken as of that event.
                    room_end_token = "s%d" % (event.stream_ordering,)
                    deferred_room_state = run_in_background(
                        self.state_store.get_state_for_events, [event.event_id]
                    )
                    deferred_room_state.addCallback(
                        lambda states: states[event.event_id]
                    )

                # Fetch recent messages and room state concurrently.
                (messages, token), current_state = yield make_deferred_yieldable(
                    defer.gatherResults(
                        [
                            run_in_background(
                                self.store.get_recent_events_for_room,
                                event.room_id,
                                limit=limit,
                                end_token=room_end_token,
                            ),
                            deferred_room_state,
                        ]
                    )
                ).addErrback(unwrapFirstError)

                # Drop events this user is not allowed to see.
                messages = yield filter_events_for_client(
                    self.storage, user_id, messages
                )

                start_token = now_token.copy_and_replace("room_key", token)
                end_token = now_token.copy_and_replace("room_key", room_end_token)
                time_now = self.clock.time_msec()

                d["messages"] = {
                    "chunk": (
                        yield self._event_serializer.serialize_events(
                            messages, time_now=time_now, as_client_event=as_client_event
                        )
                    ),
                    "start": start_token.to_string(),
                    "end": end_token.to_string(),
                }

                d["state"] = yield self._event_serializer.serialize_events(
                    current_state.values(),
                    time_now=time_now,
                    as_client_event=as_client_event,
                )

                # Per-room account data: tags first, then everything else.
                account_data_events = []
                tags = tags_by_room.get(event.room_id)
                if tags:
                    account_data_events.append(
                        {"type": "m.tag", "content": {"tags": tags}}
                    )

                account_data = account_data_by_room.get(event.room_id, {})
                for account_data_type, content in account_data.items():
                    account_data_events.append(
                        {"type": account_data_type, "content": content}
                    )

                d["account_data"] = account_data_events
            except Exception:
                # Best-effort: a failure for one room should not break the
                # whole initial sync, so log and carry on.
                logger.exception("Failed to get snapshot")

        # Process up to 10 rooms at a time.
        yield concurrently_execute(handle_room, room_list, 10)

        # Global (non-room) account data for the user.
        account_data_events = []
        for account_data_type, content in account_data.items():
            account_data_events.append({"type": account_data_type, "content": content})

        now = self.clock.time_msec()

        ret = {
            "rooms": rooms_ret,
            "presence": [
                {
                    "type": "m.presence",
                    "content": format_user_presence_state(event, now),
                }
                for event in presence
            ],
            "account_data": account_data_events,
            "receipts": receipt,
            "end": now_token.to_string(),
        }

        return ret
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def room_initial_sync(self, requester, room_id, pagin_config=None):
|
|
|
|
"""Capture the a snapshot of a room. If user is currently a member of
|
|
|
|
the room this will be what is currently in the room. If the user left
|
|
|
|
the room this will be what was in the room when they left.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
requester(Requester): The user to get a snapshot for.
|
|
|
|
room_id(str): The room to get a snapshot of.
|
|
|
|
pagin_config(synapse.streams.config.PaginationConfig):
|
|
|
|
The pagination config used to determine how many messages to
|
|
|
|
return.
|
|
|
|
Raises:
|
|
|
|
AuthError if the user wasn't in the room.
|
|
|
|
Returns:
|
|
|
|
A JSON serialisable dict with the snapshot of the room.
|
|
|
|
"""
|
|
|
|
|
2019-03-21 12:20:13 +01:00
|
|
|
blocked = yield self.store.is_room_blocked(room_id)
|
|
|
|
if blocked:
|
|
|
|
raise SynapseError(403, "This room has been blocked on this server")
|
|
|
|
|
2016-09-21 12:46:28 +02:00
|
|
|
user_id = requester.user.to_string()
|
|
|
|
|
|
|
|
membership, member_event_id = yield self._check_in_room_or_world_readable(
|
2019-06-20 11:32:02 +02:00
|
|
|
room_id, user_id
|
2016-09-21 12:46:28 +02:00
|
|
|
)
|
|
|
|
is_peeking = member_event_id is None
|
|
|
|
|
|
|
|
if membership == Membership.JOIN:
|
|
|
|
result = yield self._room_initial_sync_joined(
|
|
|
|
user_id, room_id, pagin_config, membership, is_peeking
|
|
|
|
)
|
|
|
|
elif membership == Membership.LEAVE:
|
|
|
|
result = yield self._room_initial_sync_parted(
|
|
|
|
user_id, room_id, pagin_config, membership, member_event_id, is_peeking
|
|
|
|
)
|
|
|
|
|
|
|
|
account_data_events = []
|
|
|
|
tags = yield self.store.get_tags_for_room(user_id, room_id)
|
|
|
|
if tags:
|
2019-06-20 11:32:02 +02:00
|
|
|
account_data_events.append({"type": "m.tag", "content": {"tags": tags}})
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
account_data = yield self.store.get_account_data_for_room(user_id, room_id)
|
|
|
|
for account_data_type, content in account_data.items():
|
2019-06-20 11:32:02 +02:00
|
|
|
account_data_events.append({"type": account_data_type, "content": content})
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
result["account_data"] = account_data_events
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return result
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
2019-06-20 11:32:02 +02:00
|
|
|
def _room_initial_sync_parted(
|
|
|
|
self, user_id, room_id, pagin_config, membership, member_event_id, is_peeking
|
|
|
|
):
|
2019-10-23 18:25:54 +02:00
|
|
|
room_state = yield self.state_store.get_state_for_events([member_event_id])
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
room_state = room_state[member_event_id]
|
|
|
|
|
|
|
|
limit = pagin_config.limit if pagin_config else None
|
|
|
|
if limit is None:
|
|
|
|
limit = 10
|
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
stream_token = yield self.store.get_stream_token_for_event(member_event_id)
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
messages, token = yield self.store.get_recent_events_for_room(
|
2019-06-20 11:32:02 +02:00
|
|
|
room_id, limit=limit, end_token=stream_token
|
2016-09-21 12:46:28 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
messages = yield filter_events_for_client(
|
2019-10-23 18:25:54 +02:00
|
|
|
self.storage, user_id, messages, is_peeking=is_peeking
|
2016-09-21 12:46:28 +02:00
|
|
|
)
|
|
|
|
|
2018-05-09 12:55:34 +02:00
|
|
|
start_token = StreamToken.START.copy_and_replace("room_key", token)
|
|
|
|
end_token = StreamToken.START.copy_and_replace("room_key", stream_token)
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
time_now = self.clock.time_msec()
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return {
|
|
|
|
"membership": membership,
|
|
|
|
"room_id": room_id,
|
|
|
|
"messages": {
|
|
|
|
"chunk": (
|
|
|
|
yield self._event_serializer.serialize_events(messages, time_now)
|
2019-06-20 11:32:02 +02:00
|
|
|
),
|
2019-07-23 15:00:55 +02:00
|
|
|
"start": start_token.to_string(),
|
|
|
|
"end": end_token.to_string(),
|
|
|
|
},
|
|
|
|
"state": (
|
|
|
|
yield self._event_serializer.serialize_events(
|
|
|
|
room_state.values(), time_now
|
|
|
|
)
|
|
|
|
),
|
|
|
|
"presence": [],
|
|
|
|
"receipts": [],
|
|
|
|
}
|
2016-09-21 12:46:28 +02:00
|
|
|
|
|
|
|
    @defer.inlineCallbacks
    def _room_initial_sync_joined(
        self, user_id, room_id, pagin_config, membership, is_peeking
    ):
        """Build the room initial-sync response for a room the user is
        currently joined to (or peeking at, if world-readable): current
        state, recent messages, presence of joined members and receipts.
        """
        current_state = yield self.state.get_current_state(room_id=room_id)

        # TODO: These concurrently
        time_now = self.clock.time_msec()
        state = yield self._event_serializer.serialize_events(
            current_state.values(), time_now
        )

        # Anchor the message/receipt windows at "now".
        now_token = yield self.hs.get_event_sources().get_current_token()

        # Message limit; default to 10 if the config doesn't say.
        limit = pagin_config.limit if pagin_config else None
        if limit is None:
            limit = 10

        # Currently-joined members, used to look up their presence below.
        room_members = [
            m
            for m in current_state.values()
            if m.type == EventTypes.Member
            and m.content["membership"] == Membership.JOIN
        ]

        presence_handler = self.hs.get_presence_handler()

        @defer.inlineCallbacks
        def get_presence():
            # If presence is disabled, return an empty list
            if not self.hs.config.use_presence:
                return []

            states = yield presence_handler.get_states(
                [m.user_id for m in room_members], as_event=True
            )

            return states

        @defer.inlineCallbacks
        def get_receipts():
            # Read receipts up to the snapshot point.
            receipts = yield self.store.get_linearized_receipts_for_room(
                room_id, to_key=now_token.receipt_key
            )
            if not receipts:
                receipts = []
            return receipts

        # Fetch presence, receipts and recent messages concurrently.
        presence, receipts, (messages, token) = yield make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(get_presence),
                    run_in_background(get_receipts),
                    run_in_background(
                        self.store.get_recent_events_for_room,
                        room_id,
                        limit=limit,
                        end_token=now_token.room_key,
                    ),
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        # Strip out any events this user is not allowed to see.
        messages = yield filter_events_for_client(
            self.storage, user_id, messages, is_peeking=is_peeking
        )

        start_token = now_token.copy_and_replace("room_key", token)
        end_token = now_token

        time_now = self.clock.time_msec()

        ret = {
            "room_id": room_id,
            "messages": {
                "chunk": (
                    yield self._event_serializer.serialize_events(messages, time_now)
                ),
                "start": start_token.to_string(),
                "end": end_token.to_string(),
            },
            "state": state,
            "presence": presence,
            "receipts": receipts,
        }
        # Peeking users have no membership to report.
        if not is_peeking:
            ret["membership"] = membership

        return ret
|
|
|
@defer.inlineCallbacks
|
|
|
|
def _check_in_room_or_world_readable(self, room_id, user_id):
|
|
|
|
try:
|
|
|
|
# check_user_was_in_room will return the most recent membership
|
|
|
|
# event for the user if:
|
|
|
|
# * The user is a non-guest user, and was ever in the room
|
|
|
|
# * The user is a guest user, and has joined the room
|
|
|
|
# else it will throw.
|
|
|
|
member_event = yield self.auth.check_user_was_in_room(room_id, user_id)
|
2019-08-30 17:28:26 +02:00
|
|
|
return member_event.membership, member_event.event_id
|
2016-09-21 12:46:28 +02:00
|
|
|
except AuthError:
|
|
|
|
visibility = yield self.state_handler.get_current_state(
|
|
|
|
room_id, EventTypes.RoomHistoryVisibility, ""
|
|
|
|
)
|
|
|
|
if (
|
2019-06-20 11:32:02 +02:00
|
|
|
visibility
|
|
|
|
and visibility.content["history_visibility"] == "world_readable"
|
2016-09-21 12:46:28 +02:00
|
|
|
):
|
2019-08-30 17:28:26 +02:00
|
|
|
return Membership.JOIN, None
|
2016-09-21 12:46:28 +02:00
|
|
|
raise AuthError(
|
|
|
|
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
|
|
|
)
|