2014-08-12 16:10:52 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2019-07-25 17:08:24 +02:00
|
|
|
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
2014-08-12 16:10:52 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2020-08-12 15:03:08 +02:00
|
|
|
import abc
|
2018-12-07 13:10:07 +01:00
|
|
|
import re
|
2018-04-04 13:08:29 +02:00
|
|
|
import string
|
2020-01-03 17:16:09 +01:00
|
|
|
import sys
|
2018-07-09 08:09:20 +02:00
|
|
|
from collections import namedtuple
|
2020-09-08 17:48:15 +02:00
|
|
|
from typing import Any, Dict, Mapping, MutableMapping, Optional, Tuple, Type, TypeVar
|
2014-08-13 04:14:34 +02:00
|
|
|
|
2019-03-12 17:50:58 +01:00
|
|
|
import attr
|
2019-07-25 17:08:24 +02:00
|
|
|
from signedjson.key import decode_verify_key_bytes
|
|
|
|
from unpaddedbase64 import decode_base64
|
2019-03-12 17:50:58 +01:00
|
|
|
|
2020-03-03 13:12:45 +01:00
|
|
|
from synapse.api.errors import Codes, SynapseError
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2020-01-03 17:16:09 +01:00
|
|
|
# define a version of typing.Collection that works on python 3.5
# (typing.Collection was only added to the stdlib in Python 3.6; on older
# interpreters we synthesise an equivalent ABC from its three parents)
if sys.version_info[:3] >= (3, 6, 0):
    from typing import Collection
else:
    from typing import Container, Iterable, Sized

    T_co = TypeVar("T_co", covariant=True)

    # Structurally identical to typing.Collection: iterable, supports "in",
    # and has a length.
    class Collection(Iterable[T_co], Container[T_co], Sized):  # type: ignore
        __slots__ = ()
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2020-01-16 14:31:22 +01:00
|
|
|
# Define a state map type from type/state_key to T (usually an event ID or
# event)
T = TypeVar("T")
# (event type, state key) pair, e.g. ("m.room.member", "@user:domain")
StateKey = Tuple[str, str]
StateMap = Mapping[StateKey, T]
MutableStateMap = MutableMapping[StateKey, T]

# the type of a JSON-serialisable dict. This could be made stronger, but it will
# do for now.
JsonDict = Dict[str, Any]
|
|
|
|
|
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
class Requester(
    namedtuple(
        "Requester",
        [
            "user",
            "access_token_id",
            "is_guest",
            "shadow_banned",
            "device_id",
            "app_service",
        ],
    )
):
    """
    Represents the user making a request

    Attributes:
        user (UserID): id of the user making the request
        access_token_id (int|None): *ID* of the access token used for this
            request, or None if it came via the appservice API or similar
        is_guest (bool): True if the user making this request is a guest user
        shadow_banned (bool): True if the user making this request has been shadow-banned.
        device_id (str|None): device_id which was set at authentication time
        app_service (ApplicationService|None): the AS requesting on behalf of the user
    """

    def serialize(self):
        """Converts self to a type that can be serialized as JSON, and then
        deserialized by `deserialize`

        Returns:
            dict
        """
        app_service = self.app_service
        return {
            "user_id": self.user.to_string(),
            "access_token_id": self.access_token_id,
            "is_guest": self.is_guest,
            "shadow_banned": self.shadow_banned,
            "device_id": self.device_id,
            # only the AS's ID survives serialization; it is re-resolved to an
            # ApplicationService object in `deserialize`
            "app_server_id": app_service.id if app_service else None,
        }

    @staticmethod
    def deserialize(store, input):
        """Converts a dict that was produced by `serialize` back into a
        Requester.

        Args:
            store (DataStore): Used to convert AS ID to AS object
            input (dict): A dict produced by `serialize`

        Returns:
            Requester
        """
        app_service_id = input["app_server_id"]
        appservice = (
            store.get_app_service_by_id(app_service_id) if app_service_id else None
        )

        return Requester(
            user=UserID.from_string(input["user_id"]),
            access_token_id=input["access_token_id"],
            is_guest=input["is_guest"],
            shadow_banned=input["shadow_banned"],
            device_id=input["device_id"],
            app_service=appservice,
        )
|
2016-07-26 17:46:53 +02:00
|
|
|
|
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
def create_requester(
    user_id,
    access_token_id=None,
    is_guest=False,
    shadow_banned=False,
    device_id=None,
    app_service=None,
):
    """
    Create a new ``Requester`` object

    Args:
        user_id (str|UserID): id of the user making the request
        access_token_id (int|None): *ID* of the access token used for this
            request, or None if it came via the appservice API or similar
        is_guest (bool): True if the user making this request is a guest user
        shadow_banned (bool): True if the user making this request is shadow-banned.
        device_id (str|None): device_id which was set at authentication time
        app_service (ApplicationService|None): the AS requesting on behalf of the user

    Returns:
        Requester
    """
    # accept either a bare string or an already-parsed UserID
    user = user_id if isinstance(user_id, UserID) else UserID.from_string(user_id)
    return Requester(
        user, access_token_id, is_guest, shadow_banned, device_id, app_service
    )
|
2016-01-11 16:29:57 +01:00
|
|
|
|
|
|
|
|
2016-05-16 20:17:03 +02:00
|
|
|
def get_domain_from_id(string):
    """Return the domain part of a Matrix ID (everything after the first ":")."""
    sep = string.find(":")
    if sep == -1:
        raise SynapseError(400, "Invalid ID: %r" % (string,))
    return string[sep + 1 :]
|
2016-05-09 11:36:03 +02:00
|
|
|
|
|
|
|
|
2017-05-31 15:29:32 +02:00
|
|
|
def get_localpart_from_id(string):
    """Return the localpart of a Matrix ID (between the sigil and the first ":")."""
    sep = string.find(":")
    if sep == -1:
        raise SynapseError(400, "Invalid ID: %r" % (string,))
    return string[1:sep]
|
|
|
|
|
|
|
|
|
2020-08-12 15:03:08 +02:00
|
|
|
DS = TypeVar("DS", bound="DomainSpecificString")


class DomainSpecificString(
    namedtuple("DomainSpecificString", ("localpart", "domain")), metaclass=abc.ABCMeta
):
    """Common base class among ID/name strings that have a local part and a
    domain name, prefixed with a sigil.

    Has the fields:

        'localpart' : The local part of the name (without the leading sigil)
        'domain' : The domain part of the name
    """

    # each concrete subclass supplies its leading sigil character here
    SIGIL = abc.abstractproperty()  # type: str  # type: ignore

    # Deny iteration because it will bite you if you try to create a singleton
    # set by:
    #    users = set(user)
    def __iter__(self):
        raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))

    # Because this class is a namedtuple of strings and booleans, it is deeply
    # immutable.
    def __copy__(self):
        return self

    def __deepcopy__(self, memo):
        return self

    @classmethod
    def from_string(cls: Type[DS], s: str) -> DS:
        """Parse the string given by 's' into a structure object."""
        if not s or s[0:1] != cls.SIGIL:
            raise SynapseError(
                400,
                "Expected %s string to start with '%s'" % (cls.__name__, cls.SIGIL),
                Codes.INVALID_PARAM,
            )

        # split off the sigil, then split localpart from domain on the first ":"
        localpart, sep, domain = s[1:].partition(":")
        if not sep:
            raise SynapseError(
                400,
                "Expected %s of the form '%slocalname:domain'"
                % (cls.__name__, cls.SIGIL),
                Codes.INVALID_PARAM,
            )

        # This code will need changing if we want to support multiple domain
        # names on one HS
        return cls(localpart=localpart, domain=domain)

    def to_string(self) -> str:
        """Return a string encoding the fields of the structure object."""
        return self.SIGIL + self.localpart + ":" + self.domain

    @classmethod
    def is_valid(cls: Type[DS], s: str) -> bool:
        """Return True iff 's' parses cleanly via from_string."""
        try:
            cls.from_string(s)
        except Exception:
            return False
        return True

    __repr__ = to_string
|
2015-05-13 14:42:21 +02:00
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
class UserID(DomainSpecificString):
    """Structure representing a user ID."""

    SIGIL = "@"  # user IDs render as "@localpart:domain"
|
|
|
|
|
|
|
|
|
|
|
|
class RoomAlias(DomainSpecificString):
    """Structure representing a room name (alias)."""

    SIGIL = "#"  # room aliases render as "#localpart:domain"
|
|
|
|
|
|
|
|
|
|
|
|
class RoomID(DomainSpecificString):
    """Structure representing a room id."""

    SIGIL = "!"  # room IDs render as "!localpart:domain"
|
2014-08-21 11:55:54 +02:00
|
|
|
|
|
|
|
|
2014-10-30 18:00:11 +01:00
|
|
|
class EventID(DomainSpecificString):
    """Structure representing an event id."""

    SIGIL = "$"  # event IDs render as "$localpart:domain"
|
|
|
|
|
|
|
|
|
2017-07-18 10:47:25 +02:00
|
|
|
class GroupID(DomainSpecificString):
    """Structure representing a group ID."""

    SIGIL = "+"  # group IDs render as "+localpart:domain"

    @classmethod
    def from_string(cls: Type[DS], s: str) -> DS:
        # Parse as a generic DomainSpecificString first, then apply the
        # group-specific restrictions on the localpart.
        group_id = super().from_string(s)  # type: DS  # type: ignore

        # an empty localpart would make "+:domain", which is not a valid group ID
        if not group_id.localpart:
            raise SynapseError(400, "Group ID cannot be empty", Codes.INVALID_PARAM)

        # group localparts share the mxid localpart character restrictions
        if contains_invalid_mxid_characters(group_id.localpart):
            raise SynapseError(
                400,
                "Group ID can only contain characters a-z, 0-9, or '=_-./'",
                Codes.INVALID_PARAM,
            )

        return group_id
|
|
|
|
|
2017-07-18 10:47:25 +02:00
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
# The set of characters allowed in an mxid (or group ID) localpart:
# lowercase ascii letters, digits, and "_-./=".
mxid_localpart_allowed_characters = set(
    "_-./=" + string.ascii_lowercase + string.digits
)


def contains_invalid_mxid_characters(localpart):
    """Check for characters not allowed in an mxid or groupid localpart

    Args:
        localpart (str): the localpart to be checked

    Returns:
        bool: True if there are any naughty characters
    """
    return any(c not in mxid_localpart_allowed_characters for c in localpart)


# matches upper-case ascii letters and underscore, which get "_"-escaped in
# case-sensitive username mapping
UPPER_CASE_PATTERN = re.compile(b"[A-Z_]")

# the following is a pattern which matches '=', and bytes which are not allowed in a mxid
# localpart.
#
# It works by:
#  * building a string containing the allowed characters (excluding '=')
#  * escaping every special character with a backslash (to stop '-' being interpreted as a
#    range operator)
#  * wrapping it in a '[^...]' regex
#  * converting the whole lot to a 'bytes' sequence, so that we can use it to match
#    bytes rather than strings
#
NON_MXID_CHARACTER_PATTERN = re.compile(
    ("[^%s]" % (re.escape("".join(mxid_localpart_allowed_characters - {"="})),)).encode(
        "ascii"
    )
)


def map_username_to_mxid_localpart(username, case_sensitive=False):
    """Map a username onto a string suitable for a MXID

    This follows the algorithm laid out at
    https://matrix.org/docs/spec/appendices.html#mapping-from-other-character-sets.

    Args:
        username (str|bytes): username to be mapped
        case_sensitive (bool): true if TEST and test should be mapped
            onto different mxids

    Returns:
        str: string suitable for a mxid localpart
    """
    # work entirely in bytes so that non-ascii characters are escaped per byte
    if not isinstance(username, bytes):
        username = username.encode("utf-8")

    # first we sort out upper-case characters
    if case_sensitive:

        def f1(m):
            return b"_" + m.group().lower()

        username = UPPER_CASE_PATTERN.sub(f1, username)
    else:
        username = username.lower()

    # then we sort out non-ascii characters, replacing each disallowed byte
    # (and '=') with "=xx" where xx is its lowercase hex value
    def f2(m):
        # The pattern matches exactly one byte; indexing a bytes match group
        # yields that byte's integer value directly on Python 3. (A dead
        # Python-2 ord() fallback has been removed here.)
        return b"=%02x" % (m.group()[0],)

    username = NON_MXID_CHARACTER_PATTERN.sub(f2, username)

    # we also do the =-escaping to mxids starting with an underscore.
    username = re.sub(b"^_", b"=5f", username)

    # we should now only have ascii bytes left, so can decode back to a
    # unicode.
    return username.decode("ascii")
|
2018-12-07 13:10:07 +01:00
|
|
|
|
|
|
|
|
2020-09-08 17:48:15 +02:00
|
|
|
@attr.s(frozen=True, slots=True)
class RoomStreamToken:
    """Tokens are positions between events. The token "s1" comes after event 1.

            s0    s1
            |     |
        [0] V [1] V [2]

    Tokens can either be a point in the live event stream or a cursor going
    through historic events.

    When traversing the live event stream events are ordered by when they
    arrived at the homeserver.

    When traversing historic events the events are ordered by their depth in
    the event graph "topological_ordering" and then by when they arrived at the
    homeserver "stream_ordering".

    Live tokens start with an "s" followed by the "stream_ordering" id of the
    event it comes after. Historic tokens start with a "t" followed by the
    "topological_ordering" id of the event it comes after, followed by "-",
    followed by the "stream_ordering" id of the event it comes after.
    """

    # None for a live token; the topological ordering for a historic one.
    topological = attr.ib(
        type=Optional[int],
        validator=attr.validators.optional(attr.validators.instance_of(int)),
    )
    stream = attr.ib(type=int, validator=attr.validators.instance_of(int))

    @classmethod
    def parse(cls, string: str) -> "RoomStreamToken":
        """Parse either a live ("s<stream>") or historic ("t<topo>-<stream>") token."""
        try:
            kind, rest = string[0], string[1:]
            if kind == "s":
                return cls(topological=None, stream=int(rest))
            if kind == "t":
                topo, _, stream = rest.partition("-")
                return cls(topological=int(topo), stream=int(stream))
        except Exception:
            pass
        raise SynapseError(400, "Invalid token %r" % (string,))

    @classmethod
    def parse_stream_token(cls, string: str) -> "RoomStreamToken":
        """Parse a live ("s<stream>") token, rejecting historic ones."""
        try:
            if string[0] == "s":
                return cls(topological=None, stream=int(string[1:]))
        except Exception:
            pass
        raise SynapseError(400, "Invalid token %r" % (string,))

    def as_tuple(self) -> Tuple[Optional[int], int]:
        return self.topological, self.stream

    def __str__(self) -> str:
        if self.topological is None:
            return "s%d" % (self.stream,)
        return "t%d-%d" % (self.topological, self.stream)
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s(slots=True, frozen=True)
class StreamToken:
    """A position in every event stream at once, serialized as the per-stream
    positions joined with "_" (e.g. "s12_3_0_...").

    NOTE: the declaration order of the fields below is load-bearing: it must
    match the order of the components in the serialized token, since
    `from_string`/`to_string` rely on `attr.fields`/`attr.astuple`.
    """

    room_key = attr.ib(
        type=RoomStreamToken, validator=attr.validators.instance_of(RoomStreamToken)
    )
    presence_key = attr.ib(type=int)
    typing_key = attr.ib(type=int)
    receipt_key = attr.ib(type=int)
    account_data_key = attr.ib(type=int)
    push_rules_key = attr.ib(type=int)
    to_device_key = attr.ib(type=int)
    device_list_key = attr.ib(type=int)
    groups_key = attr.ib(type=int)

    _SEPARATOR = "_"
    START = None  # type: StreamToken

    @classmethod
    def from_string(cls, string):
        """Parse a serialized StreamToken; raises SynapseError(400) on garbage."""
        try:
            keys = string.split(cls._SEPARATOR)
            while len(keys) < len(attr.fields(cls)):
                # i.e. old token from before receipt_key
                keys.append("0")
            # the first component is a RoomStreamToken; the rest are plain ints
            return cls(RoomStreamToken.parse(keys[0]), *(int(k) for k in keys[1:]))
        except Exception:
            raise SynapseError(400, "Invalid Token")

    def to_string(self):
        # recurse=False so room_key stays a RoomStreamToken (str() gives "s.."/"t..-..")
        return self._SEPARATOR.join([str(k) for k in attr.astuple(self, recurse=False)])

    @property
    def room_stream_id(self):
        # the live stream_ordering component of the room position
        return self.room_key.stream

    def is_after(self, other):
        """Does this token contain events that the other doesn't?"""
        return (
            (other.room_stream_id < self.room_stream_id)
            or (int(other.presence_key) < int(self.presence_key))
            or (int(other.typing_key) < int(self.typing_key))
            or (int(other.receipt_key) < int(self.receipt_key))
            or (int(other.account_data_key) < int(self.account_data_key))
            or (int(other.push_rules_key) < int(self.push_rules_key))
            or (int(other.to_device_key) < int(self.to_device_key))
            or (int(other.device_list_key) < int(self.device_list_key))
            or (int(other.groups_key) < int(self.groups_key))
        )

    def copy_and_advance(self, key, new_value) -> "StreamToken":
        """Advance the given key in the token to a new value if and only if the
        new value is after the old value.
        """
        new_token = self.copy_and_replace(key, new_value)
        if key == "room_key":
            # room_key is a RoomStreamToken, so compare its stream component
            new_id = new_token.room_stream_id
            old_id = self.room_stream_id
        else:
            new_id = int(getattr(new_token, key))
            old_id = int(getattr(self, key))
        if old_id < new_id:
            return new_token
        else:
            return self

    def copy_and_replace(self, key, new_value) -> "StreamToken":
        # returns a new token (the class is frozen) with one field replaced
        return attr.evolve(self, **{key: new_value})
|
2019-06-20 11:32:02 +02:00
|
|
|
|
2015-05-11 19:00:33 +02:00
|
|
|
|
2020-09-08 17:48:15 +02:00
|
|
|
# The minimal token: room position "s0", with every other stream position
# padded out to 0 by from_string.
StreamToken.START = StreamToken.from_string("s0_0")
|
2016-12-06 11:43:48 +01:00
|
|
|
|
|
|
|
|
2020-09-24 14:24:17 +02:00
|
|
|
@attr.s(slots=True, frozen=True)
class PersistedEventPosition:
    """Position of a newly persisted event with instance that persisted it.

    This can be used to test whether the event is persisted before or after a
    RoomStreamToken.
    """

    # name of the worker instance that persisted the event
    instance_name = attr.ib(type=str)
    # the event's stream_ordering
    stream = attr.ib(type=int)

    def persisted_after(self, token: RoomStreamToken) -> bool:
        """Return True if this event was persisted after `token`."""
        return self.stream > token.stream
|
|
|
|
|
|
|
|
|
2016-12-06 11:43:48 +01:00
|
|
|
class ThirdPartyInstanceID(
    namedtuple("ThirdPartyInstanceID", ("appservice_id", "network_id"))
):
    """An (appservice_id, network_id) pair, serialized as "appservice|network"."""

    # Deny iteration because it will bite you if you try to create a singleton
    # set by:
    #    users = set(user)
    def __iter__(self):
        raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))

    # Because this class is a namedtuple of strings, it is deeply immutable.
    def __copy__(self):
        return self

    def __deepcopy__(self, memo):
        return self

    @classmethod
    def from_string(cls, s):
        """Parse "appservice_id|network_id"; rejects anything else."""
        parts = s.split("|", 2)
        if len(parts) != 2:
            raise SynapseError(400, "Invalid ID %r" % (s,))

        return cls(appservice_id=parts[0], network_id=parts[1])

    def to_string(self):
        return "%s|%s" % (self.appservice_id, self.network_id)

    __str__ = to_string

    @classmethod
    def create(cls, appservice_id, network_id):
        """Alternate constructor taking the two fields positionally."""
        return cls(appservice_id=appservice_id, network_id=network_id)
|
2019-03-12 17:50:58 +01:00
|
|
|
|
|
|
|
|
|
|
|
@attr.s(slots=True)
class ReadReceipt:
    """Information about a read-receipt"""

    room_id = attr.ib()  # the room the receipt was sent in
    receipt_type = attr.ib()  # the type of the receipt
    user_id = attr.ib()  # the user who sent the receipt
    event_ids = attr.ib()  # the event IDs the receipt refers to
    data = attr.ib()  # freeform receipt content dict
|
2019-07-25 17:08:24 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_verify_key_from_cross_signing_key(key_info):
    """Get the key ID and signedjson verify key from a cross-signing key dict

    Args:
        key_info (dict): a cross-signing key dict, which must have a "keys"
            property that has exactly one item in it

    Returns:
        (str, VerifyKey): the key ID and verify key for the cross-signing key
    """
    # make sure that exactly one key is provided
    if "keys" not in key_info:
        raise ValueError("Invalid key")
    keys = key_info["keys"]
    if len(keys) != 1:
        raise ValueError("Invalid key")

    # there is exactly one entry, so take it and decode its base64 key material
    key_id, key_data = next(iter(keys.items()))
    return (key_id, decode_verify_key_bytes(key_id, decode_base64(key_data)))
|