2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2019-01-29 18:26:24 +01:00
|
|
|
# Copyright 2019 New Vector Ltd
|
2020-02-07 14:22:17 +01:00
|
|
|
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
2014-12-02 12:40:22 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2022-05-03 14:59:28 +02:00
|
|
|
import abc
|
2022-05-16 14:42:45 +02:00
|
|
|
import collections.abc
|
2022-05-03 14:59:28 +02:00
|
|
|
import os
|
2021-11-02 14:55:52 +01:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Dict,
|
|
|
|
Generic,
|
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
|
|
|
overload,
|
|
|
|
)
|
|
|
|
|
2022-05-16 14:42:45 +02:00
|
|
|
import attr
|
2021-11-02 14:55:52 +01:00
|
|
|
from typing_extensions import Literal
|
2019-01-29 18:26:24 +01:00
|
|
|
from unpaddedbase64 import encode_base64
|
|
|
|
|
2022-05-16 14:42:45 +02:00
|
|
|
from synapse.api.constants import RelationTypes
|
2020-02-07 16:30:04 +01:00
|
|
|
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
|
2020-09-29 22:48:33 +02:00
|
|
|
from synapse.types import JsonDict, RoomStreamToken
|
2016-03-23 17:13:05 +01:00
|
|
|
from synapse.util.caches import intern_dict
|
2018-07-09 08:09:20 +02:00
|
|
|
from synapse.util.frozenutils import freeze
|
2021-01-15 16:59:20 +01:00
|
|
|
from synapse.util.stringutils import strtobool
|
2014-12-05 17:20:48 +01:00
|
|
|
|
2021-11-02 14:55:52 +01:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from synapse.events.builder import EventBuilder
|
|
|
|
|
2015-05-29 13:17:33 +02:00
|
|
|
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
# bugs where we accidentally share e.g. signature dicts. However, converting a
# dict to frozen_dicts is expensive.
#
# NOTE: This is overridden by the configuration by the Synapse worker apps, but
# for the sake of tests, it is set here while it cannot be configured on the
# homeserver object itself.

USE_FROZEN_DICTS = strtobool(os.environ.get("SYNAPSE_USE_FROZEN_DICTS", "0"))


# Generic value type for DictProperty/DefaultDictProperty below.
T = TypeVar("T")


# DictProperty (and DefaultDictProperty) require the classes they're used with to
# have a _dict property to pull properties from.
#
# TODO _DictPropertyInstance should not include EventBuilder but due to
# https://github.com/python/mypy/issues/5570 it thinks the DictProperty and
# DefaultDictProperty get applied to EventBuilder when it is in a Union with
# EventBase. This is the least invasive hack to get mypy to comply.
#
# Note that DictProperty/DefaultDictProperty cannot actually be used with
# EventBuilder as it lacks a _dict property.
_DictPropertyInstance = Union["_EventInternalMetadata", "EventBase", "EventBuilder"]
|
|
|
|
|
|
|
|
|
|
|
|
class DictProperty(Generic[T]):
    """An object property which delegates to the `_dict` within its parent object.

    The pair of `@overload`s on `__get__` lets mypy distinguish class-level
    access (which returns the descriptor itself) from instance access (which
    returns the stored value of type T).
    """

    __slots__ = ["key"]

    def __init__(self, key: str):
        # the key to look up in the parent object's `_dict`
        self.key = key

    @overload
    def __get__(
        self,
        instance: Literal[None],
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> "DictProperty":
        ...

    @overload
    def __get__(
        self,
        instance: _DictPropertyInstance,
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> T:
        ...

    def __get__(
        self,
        instance: Optional[_DictPropertyInstance],
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> Union[T, "DictProperty"]:
        # if the property is accessed as a class property rather than an instance
        # property, return the property itself rather than the value
        if instance is None:
            return self
        try:
            # runtime narrowing for mypy: EventBuilder cannot actually appear
            # here, as it lacks a `_dict` (see the module-level note on
            # _DictPropertyInstance)
            assert isinstance(instance, (EventBase, _EventInternalMetadata))
            return instance._dict[self.key]
        except KeyError as e1:
            # We want this to look like a regular attribute error (mostly so that
            # hasattr() works correctly), so we convert the KeyError into an
            # AttributeError.
            #
            # To exclude the KeyError from the traceback, we explicitly
            # 'raise from e1.__context__' (which is better than 'raise from None',
            # because that would omit any *earlier* exceptions).
            #
            raise AttributeError(
                "'%s' has no '%s' property" % (type(instance), self.key)
            ) from e1.__context__

    def __set__(self, instance: _DictPropertyInstance, v: T) -> None:
        """Store `v` under this property's key in the instance's `_dict`."""
        assert isinstance(instance, (EventBase, _EventInternalMetadata))
        instance._dict[self.key] = v

    def __delete__(self, instance: _DictPropertyInstance) -> None:
        """Remove this property's key from the instance's `_dict`.

        Raises AttributeError (rather than KeyError) if the key is absent, to
        match normal attribute semantics.
        """
        assert isinstance(instance, (EventBase, _EventInternalMetadata))
        try:
            del instance._dict[self.key]
        except KeyError as e1:
            raise AttributeError(
                "'%s' has no '%s' property" % (type(instance), self.key)
            ) from e1.__context__
|
|
|
|
|
|
|
|
|
2021-11-02 14:55:52 +01:00
|
|
|
class DefaultDictProperty(DictProperty, Generic[T]):
    """An extension of DictProperty which provides a default if the property is
    not present in the parent's _dict.

    Note that this means that hasattr() on the property always returns True.
    """

    __slots__ = ["default"]

    def __init__(self, key: str, default: T):
        # `default` is what __get__ returns when `key` is absent from the
        # parent's `_dict`
        super().__init__(key)
        self.default = default

    @overload
    def __get__(
        self,
        instance: Literal[None],
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> "DefaultDictProperty":
        ...

    @overload
    def __get__(
        self,
        instance: _DictPropertyInstance,
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> T:
        ...

    def __get__(
        self,
        instance: Optional[_DictPropertyInstance],
        owner: Optional[Type[_DictPropertyInstance]] = None,
    ) -> Union[T, "DefaultDictProperty"]:
        # class-level access returns the descriptor itself, as for DictProperty
        if instance is None:
            return self
        assert isinstance(instance, (EventBase, _EventInternalMetadata))
        return instance._dict.get(self.key, self.default)
|
|
|
|
|
|
|
|
|
2020-09-04 12:54:56 +02:00
|
|
|
class _EventInternalMetadata:
    """Synapse-internal (non-federated) metadata about an event.

    Most fields live in `_dict` and are exposed via the `DictProperty`
    attributes below; `stream_ordering` and `outlier` are kept as plain
    instance attributes.
    """

    __slots__ = ["_dict", "stream_ordering", "outlier"]

    def __init__(self, internal_metadata_dict: JsonDict):
        # we have to copy the dict, because it turns out that the same dict is
        # reused. TODO: fix that
        self._dict = dict(internal_metadata_dict)

        # the stream ordering of this event. None, until it has been persisted.
        self.stream_ordering: Optional[int] = None

        # whether this event is an outlier (ie, whether we have the state at that point
        # in the DAG)
        self.outlier = False

    out_of_band_membership: DictProperty[bool] = DictProperty("out_of_band_membership")
    send_on_behalf_of: DictProperty[str] = DictProperty("send_on_behalf_of")
    recheck_redaction: DictProperty[bool] = DictProperty("recheck_redaction")
    soft_failed: DictProperty[bool] = DictProperty("soft_failed")
    proactively_send: DictProperty[bool] = DictProperty("proactively_send")
    redacted: DictProperty[bool] = DictProperty("redacted")
    txn_id: DictProperty[str] = DictProperty("txn_id")
    token_id: DictProperty[int] = DictProperty("token_id")
    historical: DictProperty[bool] = DictProperty("historical")

    # XXX: These are set by StreamWorkerStore._set_before_and_after.
    # I'm pretty sure that these are never persisted to the database, so shouldn't
    # be here
    before: DictProperty[RoomStreamToken] = DictProperty("before")
    after: DictProperty[RoomStreamToken] = DictProperty("after")
    order: DictProperty[Tuple[int, int]] = DictProperty("order")

    def get_dict(self) -> JsonDict:
        """Return a (shallow) copy of the underlying metadata dict."""
        return dict(self._dict)

    def is_outlier(self) -> bool:
        """Whether this event has been marked as an outlier (see `outlier`)."""
        return self.outlier

    def is_out_of_band_membership(self) -> bool:
        """Whether this event is an out-of-band membership.

        OOB memberships are a special case of outlier events: they are membership events
        for federated rooms that we aren't full members of. Examples include invites
        received over federation, and rejections for such invites.

        The concept of an OOB membership is needed because these events need to be
        processed as if they're new regular events (e.g. updating membership state in
        the database, relaying to clients via /sync, etc) despite being outliers.

        See also https://matrix-org.github.io/synapse/develop/development/room-dag-concepts.html#out-of-band-membership-events.

        (Added in synapse 0.99.0, so may be unreliable for events received before that)
        """
        return self._dict.get("out_of_band_membership", False)

    def get_send_on_behalf_of(self) -> Optional[str]:
        """Whether this server should send the event on behalf of another server.
        This is used by the federation "send_join" API to forward the initial join
        event for a server in the room.

        returns a str with the name of the server this event is sent on behalf of.
        """
        return self._dict.get("send_on_behalf_of")

    def need_to_check_redaction(self) -> bool:
        """Whether the redaction event needs to be rechecked when fetching
        from the database.

        Starting in room v3 redaction events are accepted up front, and later
        checked to see if the redacter and redactee's domains match.

        If the sender of the redaction event is allowed to redact any event
        due to auth rules, then this will always return false.
        """
        return self._dict.get("recheck_redaction", False)

    def is_soft_failed(self) -> bool:
        """Whether the event has been soft failed.

        Soft failed events should be handled as usual, except:
            1. They should not go down sync or event streams, or generally
                sent to clients.
            2. They should not be added to the forward extremities (and
                therefore not to current state).
        """
        return self._dict.get("soft_failed", False)

    def should_proactively_send(self) -> bool:
        """Whether the event, if ours, should be sent to other clients and
        servers.

        This is used for sending dummy events internally. Servers and clients
        can still explicitly fetch the event.
        """
        return self._dict.get("proactively_send", True)

    def is_redacted(self) -> bool:
        """Whether the event has been redacted.

        This is used for efficiently checking whether an event has been
        marked as redacted without needing to make another database call.
        """
        return self._dict.get("redacted", False)

    def is_historical(self) -> bool:
        """Whether this is a historical message.
        This is used by the batchsend historical message endpoint and
        is needed to and mark the event as backfilled and skip some checks
        like push notifications.
        """
        return self._dict.get("historical", False)
|
|
|
|
|
2014-12-02 12:40:22 +01:00
|
|
|
|
2020-03-05 16:46:44 +01:00
|
|
|
class EventBase(metaclass=abc.ABCMeta):
    """Base class for all event objects.

    The raw event dict is held in `_dict`, and its well-known keys are exposed
    as attributes via the `DictProperty` class attributes below. `signatures`
    and `unsigned` are held separately and re-attached by `get_dict`.
    """

    @property
    @abc.abstractmethod
    def format_version(self) -> int:
        """The EventFormatVersion implemented by this event"""
        ...

    def __init__(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion,
        signatures: Dict[str, Dict[str, str]],
        unsigned: JsonDict,
        internal_metadata_dict: JsonDict,
        rejected_reason: Optional[str],
    ):
        # each concrete subclass declares a format_version which must match the
        # room version's event format
        assert room_version.event_format == self.format_version

        self.room_version = room_version
        self.signatures = signatures
        self.unsigned = unsigned
        self.rejected_reason = rejected_reason

        self._dict = event_dict

        self.internal_metadata = _EventInternalMetadata(internal_metadata_dict)

    depth: DictProperty[int] = DictProperty("depth")
    content: DictProperty[JsonDict] = DictProperty("content")
    hashes: DictProperty[Dict[str, str]] = DictProperty("hashes")
    origin: DictProperty[str] = DictProperty("origin")
    origin_server_ts: DictProperty[int] = DictProperty("origin_server_ts")
    redacts: DefaultDictProperty[Optional[str]] = DefaultDictProperty("redacts", None)
    room_id: DictProperty[str] = DictProperty("room_id")
    sender: DictProperty[str] = DictProperty("sender")
    # TODO state_key should be Optional[str]. This is generally asserted in Synapse
    # by calling is_state() first (which ensures it is not None), but it is hard (not possible?)
    # to properly annotate that calling is_state() asserts that state_key exists
    # and is non-None. It would be better to replace such direct references with
    # get_state_key() (and a check for None).
    state_key: DictProperty[str] = DictProperty("state_key")
    type: DictProperty[str] = DictProperty("type")
    # alias for `sender`: reads the same "sender" key from the event dict
    user_id: DictProperty[str] = DictProperty("sender")

    @property
    def event_id(self) -> str:
        """The event ID; stored or computed by each concrete subclass."""
        raise NotImplementedError()

    @property
    def membership(self) -> str:
        """The "membership" field of the event content.

        Raises KeyError if the content has no such field (i.e. this is not a
        membership event).
        """
        return self.content["membership"]

    def is_state(self) -> bool:
        """Whether this is a state event (i.e. it has a state_key)."""
        return self.get_state_key() is not None

    def get_state_key(self) -> Optional[str]:
        """Get the state key of this event, or None if it's not a state event"""
        return self._dict.get("state_key")

    def get_dict(self) -> JsonDict:
        """Return a copy of the event dict with signatures/unsigned re-attached."""
        d = dict(self._dict)
        d.update({"signatures": self.signatures, "unsigned": dict(self.unsigned)})

        return d

    def get(self, key: str, default: Optional[Any] = None) -> Any:
        """Look up `key` in the event dict, returning `default` if absent."""
        return self._dict.get(key, default)

    def get_internal_metadata_dict(self) -> JsonDict:
        """Return a copy of the internal-metadata dict."""
        return self.internal_metadata.get_dict()

    def get_pdu_json(self, time_now: Optional[int] = None) -> JsonDict:
        """Return the full event dict, as sent over federation.

        If `time_now` is given and the unsigned data contains "age_ts", it is
        replaced with a relative "age" computed against `time_now`.
        """
        pdu_json = self.get_dict()

        if time_now is not None and "age_ts" in pdu_json["unsigned"]:
            age = time_now - pdu_json["unsigned"]["age_ts"]
            pdu_json.setdefault("unsigned", {})["age"] = int(age)
            del pdu_json["unsigned"]["age_ts"]

        # This may be a frozen event
        pdu_json["unsigned"].pop("redacted_because", None)

        return pdu_json

    def get_templated_pdu_json(self) -> JsonDict:
        """
        Return a JSON object suitable for a templated event, as used in the
        make_{join,leave,knock} workflow.
        """
        # By using _dict directly we don't pull in signatures/unsigned.
        template_json = dict(self._dict)
        # The hashes (similar to the signature) need to be recalculated by the
        # joining/leaving/knocking server after (potentially) modifying the
        # event.
        template_json.pop("hashes")

        return template_json

    def __getitem__(self, field: str) -> Optional[Any]:
        return self._dict[field]

    def __contains__(self, field: str) -> bool:
        return field in self._dict

    def items(self) -> List[Tuple[str, Optional[Any]]]:
        return list(self._dict.items())

    def keys(self) -> Iterable[str]:
        return self._dict.keys()

    def prev_event_ids(self) -> Sequence[str]:
        """Returns the list of prev event IDs. The order matches the order
        specified in the event, though there is no meaning to it.

        Returns:
            The list of event IDs of this event's prev_events
        """
        return [e for e, _ in self._dict["prev_events"]]

    def auth_event_ids(self) -> Sequence[str]:
        """Returns the list of auth event IDs. The order matches the order
        specified in the event, though there is no meaning to it.

        Returns:
            The list of event IDs of this event's auth_events
        """
        return [e for e, _ in self._dict["auth_events"]]

    def freeze(self) -> None:
        """'Freeze' the event dict, so it cannot be modified by accident"""

        # this will be a no-op if the event dict is already frozen.
        self._dict = freeze(self._dict)

    def __str__(self) -> str:
        return self.__repr__()

    def __repr__(self) -> str:
        rejection = f"REJECTED={self.rejected_reason}, " if self.rejected_reason else ""

        return (
            f"<{self.__class__.__name__} "
            f"{rejection}"
            f"event_id={self.event_id}, "
            f"type={self.get('type')}, "
            f"state_key={self.get('state_key')}, "
            f"outlier={self.internal_metadata.is_outlier()}"
            ">"
        )
|
|
|
|
|
2014-12-03 17:07:21 +01:00
|
|
|
|
|
|
|
class FrozenEvent(EventBase):
    """An event in the original ("v1/v2 room") format, where the event ID is
    carried in the event dict itself.
    """

    format_version = EventFormatVersions.ROOM_V1_V2  # All events of this type are V1

    def __init__(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion,
        internal_metadata_dict: Optional[JsonDict] = None,
        rejected_reason: Optional[str] = None,
    ):
        # Work on a shallow copy so that popping keys below does not mutate the
        # caller's dict.
        event_dict = dict(event_dict)

        # Copy the signatures dict-of-dicts level by level; this is faster than
        # a copy.deepcopy.
        signatures = {
            server_name: dict(server_sigs)
            for server_name, server_sigs in event_dict.pop("signatures", {}).items()
        }

        unsigned = dict(event_dict.pop("unsigned", {}))

        # We intern these strings because they turn up a lot (especially when
        # caching).
        event_dict = intern_dict(event_dict)

        frozen_dict = freeze(event_dict) if USE_FROZEN_DICTS else event_dict

        # In this format the event ID is simply read out of the dict.
        self._event_id = event_dict["event_id"]

        super().__init__(
            frozen_dict,
            room_version=room_version,
            signatures=signatures,
            unsigned=unsigned,
            internal_metadata_dict=internal_metadata_dict or {},
            rejected_reason=rejected_reason,
        )

    @property
    def event_id(self) -> str:
        """The event ID, as supplied in the original event dict."""
        return self._event_id
|
|
|
|
|
2014-12-03 17:07:21 +01:00
|
|
|
|
2019-01-29 18:26:24 +01:00
|
|
|
class FrozenEventV2(EventBase):
    """An event in the second ("v3 room") format, where the event ID is derived
    from the event's reference hash rather than carried in the dict.
    """

    format_version = EventFormatVersions.ROOM_V3  # All events of this type are V2

    def __init__(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion,
        internal_metadata_dict: Optional[JsonDict] = None,
        rejected_reason: Optional[str] = None,
    ):
        # Work on a shallow copy so that popping keys below does not mutate the
        # caller's dict.
        event_dict = dict(event_dict)

        # Copy the signatures dict-of-dicts level by level; this is faster than
        # a copy.deepcopy.
        signatures = {
            server_name: dict(server_sigs)
            for server_name, server_sigs in event_dict.pop("signatures", {}).items()
        }

        # In this format the event ID is computed, never supplied.
        assert "event_id" not in event_dict

        unsigned = dict(event_dict.pop("unsigned", {}))

        # We intern these strings because they turn up a lot (especially when
        # caching).
        event_dict = intern_dict(event_dict)

        frozen_dict = freeze(event_dict) if USE_FROZEN_DICTS else event_dict

        # Computed lazily by the event_id property.
        self._event_id: Optional[str] = None

        super().__init__(
            frozen_dict,
            room_version=room_version,
            signatures=signatures,
            unsigned=unsigned,
            internal_metadata_dict=internal_metadata_dict or {},
            rejected_reason=rejected_reason,
        )

    @property
    def event_id(self) -> str:
        """The event ID, computed (once) from the event's reference hash."""
        # We have to import this here as otherwise we get an import loop which
        # is hard to break.
        from synapse.crypto.event_signing import compute_event_reference_hash

        if not self._event_id:
            self._event_id = "$" + encode_base64(
                compute_event_reference_hash(self)[1]
            )
        return self._event_id

    def prev_event_ids(self) -> Sequence[str]:
        """Return this event's prev_event IDs, in the order given in the event
        (the order carries no particular meaning).

        Returns:
            The list of event IDs of this event's prev_events
        """
        return self._dict["prev_events"]

    def auth_event_ids(self) -> Sequence[str]:
        """Return this event's auth_event IDs, in the order given in the event
        (the order carries no particular meaning).

        Returns:
            The list of event IDs of this event's auth_events
        """
        return self._dict["auth_events"]
|
2014-12-05 17:20:48 +01:00
|
|
|
|
2019-01-23 17:50:06 +01:00
|
|
|
|
2019-05-20 16:54:42 +02:00
|
|
|
class FrozenEventV3(FrozenEventV2):
    """FrozenEventV3, which differs from FrozenEventV2 only in the event_id format"""

    format_version = EventFormatVersions.ROOM_V4_PLUS  # All events of this type are V3

    @property
    def event_id(self) -> str:
        """The event ID: a URL-safe base64 encoding of the reference hash."""
        # We have to import this here as otherwise we get an import loop which
        # is hard to break.
        from synapse.crypto.event_signing import compute_event_reference_hash

        if not self._event_id:
            reference_hash = compute_event_reference_hash(self)[1]
            self._event_id = "$" + encode_base64(reference_hash, urlsafe=True)
        return self._event_id
|
|
|
|
|
|
|
|
|
2021-11-02 14:55:52 +01:00
|
|
|
def _event_type_from_format_version(
    format_version: int,
) -> Type[Union[FrozenEvent, FrozenEventV2, FrozenEventV3]]:
    """Returns the python type to use to construct an Event object for the
    given event format version.

    Args:
        format_version: The event format version

    Returns:
        type: A type that can be initialized as per the initializer of
        `FrozenEvent`
    """
    # Dispatch table from event format version to event class.
    event_type_for_format = {
        EventFormatVersions.ROOM_V1_V2: FrozenEvent,
        EventFormatVersions.ROOM_V3: FrozenEventV2,
        EventFormatVersions.ROOM_V4_PLUS: FrozenEventV3,
    }
    event_type = event_type_for_format.get(format_version)
    if event_type is None:
        raise Exception("No event format %r" % (format_version,))
    return event_type
|
2020-02-07 16:30:04 +01:00
|
|
|
|
|
|
|
|
|
|
|
def make_event_from_dict(
    event_dict: JsonDict,
    room_version: RoomVersion = RoomVersions.V1,
    internal_metadata_dict: Optional[JsonDict] = None,
    rejected_reason: Optional[str] = None,
) -> EventBase:
    """Construct an EventBase from the given event dict"""
    # Pick the event class matching the room version's event format.
    event_class = _event_type_from_format_version(room_version.event_format)
    return event_class(
        event_dict, room_version, internal_metadata_dict or {}, rejected_reason
    )
|
2022-05-16 14:42:45 +02:00
|
|
|
|
|
|
|
|
|
|
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
class _EventRelation:
    """Validated relation ("m.relates_to") information, as produced by
    `relation_from_event`.
    """

    # The target event of the relation.
    parent_id: str
    # The relation type.
    rel_type: str
    # The aggregation key. Will be None if the rel_type is not m.annotation or is
    # not a string.
    aggregation_key: Optional[str]
|
|
|
|
|
|
|
|
|
|
|
|
def relation_from_event(event: EventBase) -> Optional[_EventRelation]:
    """
    Attempt to parse relation information from an event's content.

    Returns:
        The event relation information, if it is valid. None, otherwise.
    """
    relates_to = event.content.get("m.relates_to")
    if not relates_to or not isinstance(relates_to, collections.abc.Mapping):
        # No relation information.
        return None

    # A valid relation carries both a relation type and a parent event ID,
    # each of which must be a string.
    rel_type = relates_to.get("rel_type")
    parent_id = relates_to.get("event_id")
    if not isinstance(rel_type, str) or not isinstance(parent_id, str):
        return None

    # Annotations may additionally carry a (string) aggregation key.
    aggregation_key = None
    if rel_type == RelationTypes.ANNOTATION:
        key = relates_to.get("key")
        if isinstance(key, str):
            aggregation_key = key

    return _EventRelation(parent_id, rel_type, aggregation_key)
|