# -*- coding: utf-8 -*-
# Copyright 2014 - 2016 OpenMarket Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains functions for performing events on rooms."""

import itertools
import logging
import math
import string
from collections import OrderedDict

from six import iteritems, string_types

from twisted.internet import defer

from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset
from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.events.utils import copy_power_levels_contents
from synapse.http.endpoint import parse_and_validate_server_name
from synapse.storage.state import StateFilter
from synapse.types import (
    Requester,
    RoomAlias,
    RoomID,
    RoomStreamToken,
    StateMap,
    StreamToken,
    UserID,
)
from synapse.util import stringutils
from synapse.util.async_helpers import Linearizer
from synapse.util.caches.response_cache import ResponseCache
from synapse.visibility import filter_events_for_client

from ._base import BaseHandler

logger = logging.getLogger(__name__)

id_server_scheme = "https://"

FIVE_MINUTES_IN_MS = 5 * 60 * 1000


class RoomCreationHandler(BaseHandler):

    PRESETS_DICT = {
        RoomCreationPreset.PRIVATE_CHAT: {
            "join_rules": JoinRules.INVITE,
            "history_visibility": "shared",
            "original_invitees_have_ops": False,
            "guest_can_join": True,
            "power_level_content_override": {"invite": 0},
        },
        RoomCreationPreset.TRUSTED_PRIVATE_CHAT: {
            "join_rules": JoinRules.INVITE,
            "history_visibility": "shared",
            "original_invitees_have_ops": True,
            "guest_can_join": True,
            "power_level_content_override": {"invite": 0},
        },
        RoomCreationPreset.PUBLIC_CHAT: {
            "join_rules": JoinRules.PUBLIC,
            "history_visibility": "shared",
            "original_invitees_have_ops": False,
            "guest_can_join": False,
            "power_level_content_override": {},
        },
    }
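
    # Each preset above is translated into actual state events (join rules,
    # history visibility, guest access and power-level tweaks) by
    # _send_events_for_new_room below; "original_invitees_have_ops" gives the
    # initially invited users PL 100 in the default power-level content.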

    def __init__(self, hs):
        super(RoomCreationHandler, self).__init__(hs)

        self.spam_checker = hs.get_spam_checker()
        self.event_creation_handler = hs.get_event_creation_handler()
        self.room_member_handler = hs.get_room_member_handler()
        self.config = hs.config

        # linearizer to stop two upgrades happening at once
        self._upgrade_linearizer = Linearizer("room_upgrade_linearizer")

        # If a user tries to update the same room multiple times in quick
        # succession, only process the first attempt and return its result to
        # subsequent requests
        self._upgrade_response_cache = ResponseCache(
            hs, "room_upgrade", timeout_ms=FIVE_MINUTES_IN_MS
        )
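        # The cache is keyed on (old_room_id, user_id) tuples (see
        # upgrade_room below), so repeated requests from the same user for the
        # same room share a single upgrade attempt for up to five minutes.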

        self._server_notices_mxid = hs.config.server_notices_mxid

        self.third_party_event_rules = hs.get_third_party_event_rules()

    @defer.inlineCallbacks
    def upgrade_room(
        self, requester: Requester, old_room_id: str, new_version: RoomVersion
    ):
        """Replace a room with a new room with a different version

        Args:
            requester: the user requesting the upgrade
            old_room_id: the id of the room to be replaced
            new_version: the new room version to use

        Returns:
            Deferred[unicode]: the new room id
        """
        yield self.ratelimit(requester)

        user_id = requester.user.to_string()

        # Check if this room is already being upgraded by another person
        for key in self._upgrade_response_cache.pending_result_cache:
            if key[0] == old_room_id and key[1] != user_id:
                # Two different people are trying to upgrade the same room.
                # Send the second an error.
                #
                # Note that this of course only gets caught if both users are
                # on the same homeserver.
                raise SynapseError(
                    400, "An upgrade for this room is currently in progress"
                )

        # Upgrade the room
        #
        # If this user has sent multiple upgrade requests for the same room
        # and one of them is not complete yet, cache the response and
        # return it to all subsequent requests
        ret = yield self._upgrade_response_cache.wrap(
            (old_room_id, user_id),
            self._upgrade_room,
            requester,
            old_room_id,
            new_version,  # args for _upgrade_room
        )

        return ret

    @defer.inlineCallbacks
    def _upgrade_room(
        self, requester: Requester, old_room_id: str, new_version: RoomVersion
    ):
        user_id = requester.user.to_string()

        # start by allocating a new room id
        r = yield self.store.get_room(old_room_id)
        if r is None:
            raise NotFoundError("Unknown room id %s" % (old_room_id,))
        new_room_id = yield self._generate_room_id(
            creator_id=user_id, is_public=r["is_public"], room_version=new_version,
        )

        logger.info("Creating new room %s to replace %s", new_room_id, old_room_id)

        # we create and auth the tombstone event before properly creating the new
        # room, to check our user has perms in the old room.
        (
            tombstone_event,
            tombstone_context,
        ) = yield self.event_creation_handler.create_event(
            requester,
            {
                "type": EventTypes.Tombstone,
                "state_key": "",
                "room_id": old_room_id,
                "sender": user_id,
                "content": {
                    "body": "This room has been replaced",
                    "replacement_room": new_room_id,
                },
            },
            token_id=requester.access_token_id,
        )
        old_room_version = yield self.store.get_room_version_id(old_room_id)
        yield self.auth.check_from_context(
            old_room_version, tombstone_event, tombstone_context
        )

        yield self.clone_existing_room(
            requester,
            old_room_id=old_room_id,
            new_room_id=new_room_id,
            new_room_version=new_version,
            tombstone_event_id=tombstone_event.event_id,
        )

        # now send the tombstone
        yield self.event_creation_handler.send_nonmember_event(
            requester, tombstone_event, tombstone_context
        )

        old_room_state = yield tombstone_context.get_current_state_ids()

        # update any aliases
        yield self._move_aliases_to_new_room(
            requester, old_room_id, new_room_id, old_room_state
        )

        # Copy over user push rules, tags and migrate room directory state
        yield self.room_member_handler.transfer_room_state_on_room_upgrade(
            old_room_id, new_room_id
        )

        # finally, shut down the PLs in the old room, and update them in the new
        # room.
        yield self._update_upgraded_room_pls(
            requester, old_room_id, new_room_id, old_room_state,
        )

        return new_room_id

    @defer.inlineCallbacks
    def _update_upgraded_room_pls(
        self,
        requester: Requester,
        old_room_id: str,
        new_room_id: str,
        old_room_state: StateMap[str],
    ):
        """Send updated power levels in both rooms after an upgrade

        Args:
            requester: the user requesting the upgrade
            old_room_id: the id of the room to be replaced
            new_room_id: the id of the replacement room
            old_room_state: the state map for the old room

        Returns:
            Deferred
        """
        old_room_pl_event_id = old_room_state.get((EventTypes.PowerLevels, ""))

        if old_room_pl_event_id is None:
            logger.warning(
                "Not supported: upgrading a room with no PL event. Not setting PLs "
                "in old room."
            )
            return

        old_room_pl_state = yield self.store.get_event(old_room_pl_event_id)

        # we try to stop regular users from speaking by setting the PL required
        # to send regular events and invites to 'Moderator' level. That's normally
        # 50, but if the default PL in a room is 50 or more, then we set the
        # required PL above that.

        pl_content = dict(old_room_pl_state.content)
        users_default = int(pl_content.get("users_default", 0))
        restricted_level = max(users_default + 1, 50)
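        # For example: with the usual users_default of 0 the restricted level
        # is 50 (moderator); if users_default were 60, the restricted level
        # would become 61, just above what ordinary users hold.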

        updated = False
        for v in ("invite", "events_default"):
            current = int(pl_content.get(v, 0))
            if current < restricted_level:
                logger.debug(
                    "Setting level for %s in %s to %i (was %i)",
                    v,
                    old_room_id,
                    restricted_level,
                    current,
                )
                pl_content[v] = restricted_level
                updated = True
            else:
                logger.debug("Not setting level for %s (already %i)", v, current)

        if updated:
            try:
                yield self.event_creation_handler.create_and_send_nonmember_event(
                    requester,
                    {
                        "type": EventTypes.PowerLevels,
                        "state_key": "",
                        "room_id": old_room_id,
                        "sender": requester.user.to_string(),
                        "content": pl_content,
                    },
                    ratelimit=False,
                )
            except AuthError as e:
                logger.warning("Unable to update PLs in old room: %s", e)

        yield self.event_creation_handler.create_and_send_nonmember_event(
            requester,
            {
                "type": EventTypes.PowerLevels,
                "state_key": "",
                "room_id": new_room_id,
                "sender": requester.user.to_string(),
                "content": old_room_pl_state.content,
            },
            ratelimit=False,
        )

    @defer.inlineCallbacks
    def clone_existing_room(
        self,
        requester: Requester,
        old_room_id: str,
        new_room_id: str,
        new_room_version: RoomVersion,
        tombstone_event_id: str,
    ):
        """Populate a new room based on an old room

        Args:
            requester: the user requesting the upgrade
            old_room_id: the id of the room to be replaced
            new_room_id: the id to give the new room (should already have been
                created with _generate_room_id())
            new_room_version: the new room version to use
            tombstone_event_id: the ID of the tombstone event in the old room.
        Returns:
            Deferred
        """
        user_id = requester.user.to_string()

        if not self.spam_checker.user_may_create_room(user_id):
            raise SynapseError(403, "You are not permitted to create rooms")

        creation_content = {
            "room_version": new_room_version.identifier,
            "predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id},
        }

        # Check if old room was non-federatable

        # Get old room's create event
        old_room_create_event = yield self.store.get_create_event_for_room(old_room_id)

        # Check if the create event specified a non-federatable room
        if not old_room_create_event.content.get("m.federate", True):
            # If so, mark the new room as non-federatable as well
            creation_content["m.federate"] = False

        initial_state = {}

        # Replicate relevant room events
        types_to_copy = (
            (EventTypes.JoinRules, ""),
            (EventTypes.Name, ""),
            (EventTypes.Topic, ""),
            (EventTypes.RoomHistoryVisibility, ""),
            (EventTypes.GuestAccess, ""),
            (EventTypes.RoomAvatar, ""),
            (EventTypes.RoomEncryption, ""),
            (EventTypes.ServerACL, ""),
            (EventTypes.RelatedGroups, ""),
            (EventTypes.PowerLevels, ""),
        )
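        # Membership events are not in this list: bans are copied across
        # further down, and aliases / the canonical alias are handled by
        # _move_aliases_to_new_room instead.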

        old_room_state_ids = yield self.store.get_filtered_current_state_ids(
            old_room_id, StateFilter.from_types(types_to_copy)
        )
        # map from event_id to BaseEvent
        old_room_state_events = yield self.store.get_events(old_room_state_ids.values())

        for k, old_event_id in iteritems(old_room_state_ids):
            old_event = old_room_state_events.get(old_event_id)
            if old_event:
                initial_state[k] = old_event.content

        # deep-copy the power-levels event before we start modifying it
        # note that if frozen_dicts are enabled, `power_levels` will be a frozen
        # dict so we can't just copy.deepcopy it.
        initial_state[
            (EventTypes.PowerLevels, "")
        ] = power_levels = copy_power_levels_contents(
            initial_state[(EventTypes.PowerLevels, "")]
        )

        # Resolve the minimum power level required to send any state event
        # We will give the upgrading user this power level temporarily (if necessary) such that
        # they are able to copy all of the state events over, then revert them back to their
        # original power level afterwards in _update_upgraded_room_pls

        # Copy over user power levels now as this will not be possible with >100PL users once
        # the room has been created

        # Calculate the minimum power level needed to clone the room
        event_power_levels = power_levels.get("events", {})
        state_default = power_levels.get("state_default", 0)
        ban = power_levels.get("ban")
        needed_power_level = max(state_default, ban, max(event_power_levels.values()))
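        # For example, with a typical power-level content of
        # {"state_default": 50, "ban": 50, "events": {"m.room.power_levels": 100}}
        # the needed power level works out as 100, so the upgrading user is
        # temporarily bumped to 100 in the new room below if they hold less.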

        # Raise the requester's power level in the new room if necessary
        current_power_level = power_levels["users"][user_id]
        if current_power_level < needed_power_level:
            power_levels["users"][user_id] = needed_power_level

        yield self._send_events_for_new_room(
            requester,
            new_room_id,
            # we expect to override all the presets with initial_state, so this is
            # somewhat arbitrary.
            preset_config=RoomCreationPreset.PRIVATE_CHAT,
            invite_list=[],
            initial_state=initial_state,
            creation_content=creation_content,
        )

        # Transfer membership events
        old_room_member_state_ids = yield self.store.get_filtered_current_state_ids(
            old_room_id, StateFilter.from_types([(EventTypes.Member, None)])
        )

        # map from event_id to BaseEvent
        old_room_member_state_events = yield self.store.get_events(
            old_room_member_state_ids.values()
        )
        for k, old_event in iteritems(old_room_member_state_events):
            # Only transfer ban events
            if (
                "membership" in old_event.content
                and old_event.content["membership"] == "ban"
            ):
                yield self.room_member_handler.update_membership(
                    requester,
                    UserID.from_string(old_event["state_key"]),
                    new_room_id,
                    "ban",
                    ratelimit=False,
                    content=old_event.content,
                )

        # XXX invites/joins
        # XXX 3pid invites

    @defer.inlineCallbacks
    def _move_aliases_to_new_room(
        self,
        requester: Requester,
        old_room_id: str,
        new_room_id: str,
        old_room_state: StateMap[str],
    ):
        directory_handler = self.hs.get_handlers().directory_handler

        aliases = yield self.store.get_aliases_for_room(old_room_id)

        # check to see if we have a canonical alias.
        canonical_alias_event = None
        canonical_alias_event_id = old_room_state.get((EventTypes.CanonicalAlias, ""))
        if canonical_alias_event_id:
            canonical_alias_event = yield self.store.get_event(canonical_alias_event_id)

        # first we try to remove the aliases from the old room (we suppress sending
        # the room_aliases event until the end).
        #
        # Note that we'll only be able to remove aliases that (a) aren't owned by an
        # AS, and (b) the user themselves created (unless the user is a server admin).
        #
        # This is probably correct - given we don't allow such aliases to be deleted
        # normally, it would be odd to allow it in the case of doing a room upgrade -
        # but it makes the upgrade less effective, and you have to wonder why a room
        # admin can't remove aliases that point to that room anyway.
        # (cf https://github.com/matrix-org/synapse/issues/2360)
        #
        removed_aliases = []
        for alias_str in aliases:
            alias = RoomAlias.from_string(alias_str)
            try:
                yield directory_handler.delete_association(requester, alias)
                removed_aliases.append(alias_str)
            except SynapseError as e:
                logger.warning("Unable to remove alias %s from old room: %s", alias, e)

        # if we didn't find any aliases, or couldn't remove anyway, we can skip the rest
        # of this.
        if not removed_aliases:
            return

        # we can now add any aliases we successfully removed to the new room.
        for alias in removed_aliases:
            try:
                yield directory_handler.create_association(
                    requester,
                    RoomAlias.from_string(alias),
                    new_room_id,
                    servers=(self.hs.hostname,),
                    check_membership=False,
                )
                logger.info("Moved alias %s to new room", alias)
            except SynapseError as e:
                # I'm not really expecting this to happen, but it could if the spam
                # checking module decides it shouldn't, or similar.
                logger.error("Error adding alias %s to new room: %s", alias, e)

        # If a canonical alias event existed for the old room, fire a canonical
        # alias event for the new room with a copy of the information.
        try:
            if canonical_alias_event:
                yield self.event_creation_handler.create_and_send_nonmember_event(
                    requester,
                    {
                        "type": EventTypes.CanonicalAlias,
                        "state_key": "",
                        "room_id": new_room_id,
                        "sender": requester.user.to_string(),
                        "content": canonical_alias_event.content,
                    },
                    ratelimit=False,
                )
        except SynapseError as e:
            # again I'm not really expecting this to fail, but if it does, I'd rather
            # we returned the new room to the client at this point.
            logger.error("Unable to send updated alias events in new room: %s", e)

    @defer.inlineCallbacks
    def create_room(self, requester, config, ratelimit=True, creator_join_profile=None):
        """Creates a new room.

        Args:
            requester (synapse.types.Requester):
                The user who requested the room creation.
            config (dict): A dict of configuration options.
            ratelimit (bool): set to False to disable the rate limiter

            creator_join_profile (dict|None):
                Set to override the displayname and avatar for the creating
                user in this room. If unset, displayname and avatar will be
                derived from the user's profile. If set, should contain the
                values to go in the body of the 'join' event (typically
                `avatar_url` and/or `displayname`).

        Returns:
            Deferred[dict]:
                a dict containing the keys `room_id` and, if an alias was
                requested, `room_alias`.
        Raises:
            SynapseError if the room ID couldn't be stored, or something went
            horribly wrong.
            ResourceLimitError if the server is blocked due to a resource limit
            being exceeded.
        """
        user_id = requester.user.to_string()

        yield self.auth.check_auth_blocking(user_id)

        if (
            self._server_notices_mxid is not None
            and requester.user.to_string() == self._server_notices_mxid
        ):
            # allow the server notices mxid to create rooms
            is_requester_admin = True
        else:
            is_requester_admin = yield self.auth.is_server_admin(requester.user)

        # Check whether the third party rules allows/changes the room create
        # request.
        event_allowed = yield self.third_party_event_rules.on_create_room(
            requester, config, is_requester_admin=is_requester_admin
        )
        if not event_allowed:
            raise SynapseError(
                403, "You are not permitted to create rooms", Codes.FORBIDDEN
            )

        if not is_requester_admin and not self.spam_checker.user_may_create_room(
            user_id
        ):
            raise SynapseError(403, "You are not permitted to create rooms")

        if ratelimit:
            yield self.ratelimit(requester)

        room_version_id = config.get(
            "room_version", self.config.default_room_version.identifier
        )

        if not isinstance(room_version_id, string_types):
            raise SynapseError(400, "room_version must be a string", Codes.BAD_JSON)

        room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
        if room_version is None:
            raise SynapseError(
                400,
                "Your homeserver does not support this room version",
                Codes.UNSUPPORTED_ROOM_VERSION,
            )

        if "room_alias_name" in config:
            for wchar in string.whitespace:
                if wchar in config["room_alias_name"]:
                    raise SynapseError(400, "Invalid characters in room alias")

            room_alias = RoomAlias(config["room_alias_name"], self.hs.hostname)
            mapping = yield self.store.get_association_from_room_alias(room_alias)

            if mapping:
                raise SynapseError(400, "Room alias already taken", Codes.ROOM_IN_USE)
        else:
            room_alias = None

        invite_list = config.get("invite", [])
        for i in invite_list:
            try:
                uid = UserID.from_string(i)
                parse_and_validate_server_name(uid.domain)
            except Exception:
                raise SynapseError(400, "Invalid user_id: %s" % (i,))

        yield self.event_creation_handler.assert_accepted_privacy_policy(requester)

        power_level_content_override = config.get("power_level_content_override")
        if (
            power_level_content_override
            and "users" in power_level_content_override
            and user_id not in power_level_content_override["users"]
        ):
            raise SynapseError(
                400,
                "Not a valid power_level_content_override: 'users' did not contain %s"
                % (user_id,),
            )

        invite_3pid_list = config.get("invite_3pid", [])

        visibility = config.get("visibility", None)
        is_public = visibility == "public"

        room_id = yield self._generate_room_id(
            creator_id=user_id, is_public=is_public, room_version=room_version,
        )

        directory_handler = self.hs.get_handlers().directory_handler
        if room_alias:
            yield directory_handler.create_association(
                requester=requester,
                room_id=room_id,
                room_alias=room_alias,
                servers=[self.hs.hostname],
                check_membership=False,
            )

        if is_public:
            if not self.config.is_publishing_room_allowed(user_id, room_id, room_alias):
                # Let's just return a generic message, as there may be all sorts of
                # reasons why we said no. TODO: Allow configurable error messages
                # per alias creation rule?
                raise SynapseError(403, "Not allowed to publish room")

        preset_config = config.get(
            "preset",
            RoomCreationPreset.PRIVATE_CHAT
            if visibility == "private"
            else RoomCreationPreset.PUBLIC_CHAT,
        )

        raw_initial_state = config.get("initial_state", [])

        initial_state = OrderedDict()
        for val in raw_initial_state:
            initial_state[(val["type"], val.get("state_key", ""))] = val["content"]

        creation_content = config.get("creation_content", {})

        # override any attempt to set room versions via the creation_content
        creation_content["room_version"] = room_version.identifier

        yield self._send_events_for_new_room(
            requester,
            room_id,
            preset_config=preset_config,
            invite_list=invite_list,
            initial_state=initial_state,
            creation_content=creation_content,
            room_alias=room_alias,
            power_level_content_override=power_level_content_override,
            creator_join_profile=creator_join_profile,
        )

        if "name" in config:
            name = config["name"]
            yield self.event_creation_handler.create_and_send_nonmember_event(
                requester,
                {
                    "type": EventTypes.Name,
                    "room_id": room_id,
                    "sender": user_id,
                    "state_key": "",
                    "content": {"name": name},
                },
                ratelimit=False,
            )

        if "topic" in config:
            topic = config["topic"]
            yield self.event_creation_handler.create_and_send_nonmember_event(
                requester,
                {
                    "type": EventTypes.Topic,
                    "room_id": room_id,
                    "sender": user_id,
                    "state_key": "",
                    "content": {"topic": topic},
                },
                ratelimit=False,
            )

        for invitee in invite_list:
            content = {}
            is_direct = config.get("is_direct", None)
            if is_direct:
                content["is_direct"] = is_direct

            yield self.room_member_handler.update_membership(
                requester,
                UserID.from_string(invitee),
                room_id,
                "invite",
                ratelimit=False,
                content=content,
            )

        for invite_3pid in invite_3pid_list:
            id_server = invite_3pid["id_server"]
            id_access_token = invite_3pid.get("id_access_token")  # optional
            address = invite_3pid["address"]
            medium = invite_3pid["medium"]
            yield self.hs.get_room_member_handler().do_3pid_invite(
                room_id,
                requester.user,
                medium,
                address,
                id_server,
                requester,
                txn_id=None,
                id_access_token=id_access_token,
            )

        result = {"room_id": room_id}

        if room_alias:
            result["room_alias"] = room_alias.to_string()

        return result

    @defer.inlineCallbacks
    def _send_events_for_new_room(
        self,
        creator,  # A Requester object.
        room_id,
        preset_config,
        invite_list,
        initial_state,
        creation_content,
        room_alias=None,
        power_level_content_override=None,  # Doesn't apply when initial state has power level state event content
        creator_join_profile=None,
    ):
        def create(etype, content, **kwargs):
            e = {"type": etype, "content": content}

            e.update(event_keys)
            e.update(kwargs)

            return e

        @defer.inlineCallbacks
        def send(etype, content, **kwargs):
            event = create(etype, content, **kwargs)
            logger.debug("Sending %s in new room", etype)
            yield self.event_creation_handler.create_and_send_nonmember_event(
                creator, event, ratelimit=False
            )
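
        # Both helpers stamp each event with `event_keys` (room_id, sender and
        # an empty state_key, set below), so the callers only need to supply
        # the event type and content, plus the occasional state_key override.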

        config = RoomCreationHandler.PRESETS_DICT[preset_config]

        creator_id = creator.user.to_string()

        event_keys = {"room_id": room_id, "sender": creator_id, "state_key": ""}

        creation_content.update({"creator": creator_id})
        yield send(etype=EventTypes.Create, content=creation_content)

        logger.debug("Sending %s in new room", EventTypes.Member)
        yield self.room_member_handler.update_membership(
            creator,
            creator.user,
            room_id,
            "join",
            ratelimit=False,
            content=creator_join_profile,
        )

        # We treat the power levels override specially as this needs to be one
        # of the first events that get sent into a room.
        pl_content = initial_state.pop((EventTypes.PowerLevels, ""), None)
        if pl_content is not None:
            yield send(etype=EventTypes.PowerLevels, content=pl_content)
        else:
            power_level_content = {
                "users": {creator_id: 100},
                "users_default": 0,
                "events": {
                    EventTypes.Name: 50,
                    EventTypes.PowerLevels: 100,
                    EventTypes.RoomHistoryVisibility: 100,
                    EventTypes.CanonicalAlias: 50,
                    EventTypes.RoomAvatar: 50,
                    EventTypes.Tombstone: 100,
                    EventTypes.ServerACL: 100,
                    EventTypes.RoomEncryption: 100,
                },
                "events_default": 0,
                "state_default": 50,
                "ban": 50,
                "kick": 50,
                "redact": 50,
                "invite": 50,
            }

            if config["original_invitees_have_ops"]:
                for invitee in invite_list:
                    power_level_content["users"][invitee] = 100

            # Power levels overrides are defined per chat preset
            power_level_content.update(config["power_level_content_override"])

            if power_level_content_override:
                power_level_content.update(power_level_content_override)

            yield send(etype=EventTypes.PowerLevels, content=power_level_content)

        if room_alias and (EventTypes.CanonicalAlias, "") not in initial_state:
            yield send(
                etype=EventTypes.CanonicalAlias,
                content={"alias": room_alias.to_string()},
            )

        if (EventTypes.JoinRules, "") not in initial_state:
            yield send(
                etype=EventTypes.JoinRules, content={"join_rule": config["join_rules"]}
            )

        if (EventTypes.RoomHistoryVisibility, "") not in initial_state:
            yield send(
                etype=EventTypes.RoomHistoryVisibility,
                content={"history_visibility": config["history_visibility"]},
            )

        if config["guest_can_join"]:
            if (EventTypes.GuestAccess, "") not in initial_state:
                yield send(
                    etype=EventTypes.GuestAccess, content={"guest_access": "can_join"}
                )

        for (etype, state_key), content in initial_state.items():
            yield send(etype=etype, state_key=state_key, content=content)

    @defer.inlineCallbacks
    def _generate_room_id(
        self, creator_id: str, is_public: bool, room_version: RoomVersion,
    ):
        # autogen a room ID and try to create the room. We may clash, so just
        # try a few times till one goes through, giving up eventually.
        attempts = 0
        while attempts < 5:
            try:
                random_string = stringutils.random_string(18)
                gen_room_id = RoomID(random_string, self.hs.hostname).to_string()
                if isinstance(gen_room_id, bytes):
                    gen_room_id = gen_room_id.decode("utf-8")
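                # The generated ID is a standard Matrix room ID such as
                # "!abcdefghijklmnopqr:example.com" -- 18 random characters
                # plus this server's name (example.com here is illustrative).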
                yield self.store.store_room(
                    room_id=gen_room_id,
                    room_creator_user_id=creator_id,
                    is_public=is_public,
                    room_version=room_version,
                )
                return gen_room_id
            except StoreError:
                attempts += 1
        raise StoreError(500, "Couldn't generate a room ID.")


class RoomContextHandler(object):
    def __init__(self, hs):
        self.hs = hs
        self.store = hs.get_datastore()
        self.storage = hs.get_storage()
        self.state_store = self.storage.state

    @defer.inlineCallbacks
    def get_event_context(self, user, room_id, event_id, limit, event_filter):
        """Retrieves events, pagination tokens and state around a given event
        in a room.

        Args:
            user (UserID)
            room_id (str)
            event_id (str)
            limit (int): The maximum number of events to return in total
                (excluding state).
            event_filter (Filter|None): the filter to apply to the events returned
                (excluding the target event_id)

        Returns:
            dict, or None if the event isn't found
        """
        before_limit = math.floor(limit / 2.0)
        after_limit = limit - before_limit
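        # e.g. limit=10 gives 5 events before and 5 after the target event;
        # limit=7 gives 3 before and 4 after.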

        users = yield self.store.get_users_in_room(room_id)
        is_peeking = user.to_string() not in users

        def filter_evts(events):
            return filter_events_for_client(
                self.storage, user.to_string(), events, is_peeking=is_peeking
            )

        event = yield self.store.get_event(
            event_id, get_prev_content=True, allow_none=True
        )
        if not event:
            return None

        filtered = yield (filter_evts([event]))
        if not filtered:
            raise AuthError(403, "You don't have permission to access that event.")

        results = yield self.store.get_events_around(
            room_id, event_id, before_limit, after_limit, event_filter
        )

        if event_filter:
            results["events_before"] = event_filter.filter(results["events_before"])
            results["events_after"] = event_filter.filter(results["events_after"])

        results["events_before"] = yield filter_evts(results["events_before"])
        results["events_after"] = yield filter_evts(results["events_after"])
        # filter_evts can return a pruned event in case the user is allowed to see that
        # there's something there but not see the content, so use the event that's in
        # `filtered` rather than the event we retrieved from the datastore.
        results["event"] = filtered[0]

        if results["events_after"]:
            last_event_id = results["events_after"][-1].event_id
        else:
            last_event_id = event_id

        if event_filter and event_filter.lazy_load_members():
            state_filter = StateFilter.from_lazy_load_member_list(
                ev.sender
                for ev in itertools.chain(
                    results["events_before"],
                    (results["event"],),
                    results["events_after"],
                )
            )
        else:
            state_filter = StateFilter.all()

        # XXX: why do we return the state as of the last event rather than the
        # first? Shouldn't we be consistent with /sync?
        # https://github.com/matrix-org/matrix-doc/issues/687

        state = yield self.state_store.get_state_for_events(
            [last_event_id], state_filter=state_filter
        )

        state_events = list(state[last_event_id].values())
        if event_filter:
            state_events = event_filter.filter(state_events)

        results["state"] = yield filter_evts(state_events)

        # We use a dummy token here as we only care about the room portion of
        # the token, which we replace.
        token = StreamToken.START

        results["start"] = token.copy_and_replace(
            "room_key", results["start"]
        ).to_string()

        results["end"] = token.copy_and_replace("room_key", results["end"]).to_string()

        return results


class RoomEventSource(object):
    def __init__(self, hs):
        self.store = hs.get_datastore()

    @defer.inlineCallbacks
    def get_new_events(
        self, user, from_key, limit, room_ids, is_guest, explicit_room_id=None
    ):
        # We just ignore the key for now.

        to_key = yield self.get_current_key()

        from_token = RoomStreamToken.parse(from_key)
        if from_token.topological:
            logger.warning("Stream has topological part!!!! %r", from_key)
            from_key = "s%s" % (from_token.stream,)

        app_service = self.store.get_app_service_by_user_id(user.to_string())
        if app_service:
            # We no longer support AS users using /sync directly.
            # See https://github.com/matrix-org/matrix-doc/issues/1144
            raise NotImplementedError()
        else:
            room_events = yield self.store.get_membership_changes_for_user(
                user.to_string(), from_key, to_key
            )

            room_to_events = yield self.store.get_room_events_stream_for_rooms(
                room_ids=room_ids,
                from_key=from_key,
                to_key=to_key,
                limit=limit or 10,
                order="ASC",
            )

            events = list(room_events)
            events.extend(e for evs, _ in room_to_events.values() for e in evs)

            events.sort(key=lambda e: e.internal_metadata.order)

            if limit:
                events[:] = events[:limit]

            if events:
                end_key = events[-1].internal_metadata.after
            else:
                end_key = to_key

            return (events, end_key)

    def get_current_key(self):
        return self.store.get_room_events_max_id()

    def get_current_key_for_room(self, room_id):
        return self.store.get_room_events_max_id(room_id)