2018-09-03 16:13:17 +02:00
|
|
|
# Copyright 2018 New Vector Ltd
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import heapq
|
|
|
|
import itertools
|
|
|
|
import logging
|
2020-08-24 20:25:27 +02:00
|
|
|
from typing import (
|
|
|
|
Any,
|
|
|
|
Callable,
|
2021-04-22 17:43:50 +02:00
|
|
|
Collection,
|
2020-08-24 20:25:27 +02:00
|
|
|
Dict,
|
|
|
|
Generator,
|
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Set,
|
|
|
|
Tuple,
|
|
|
|
overload,
|
|
|
|
)
|
|
|
|
|
|
|
|
from typing_extensions import Literal
|
2018-09-03 16:13:17 +02:00
|
|
|
|
2019-12-13 13:55:32 +01:00
|
|
|
import synapse.state
|
2018-09-03 16:13:17 +02:00
|
|
|
from synapse import event_auth
|
|
|
|
from synapse.api.constants import EventTypes
|
|
|
|
from synapse.api.errors import AuthError
|
2021-07-26 18:17:00 +02:00
|
|
|
from synapse.api.room_versions import RoomVersion
|
2019-12-13 13:55:32 +01:00
|
|
|
from synapse.events import EventBase
|
2021-04-22 17:43:50 +02:00
|
|
|
from synapse.types import MutableStateMap, StateMap
|
2020-06-24 19:48:18 +02:00
|
|
|
from synapse.util import Clock
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
# We want to await to the reactor occasionally during state res when dealing
|
2020-06-24 19:48:18 +02:00
|
|
|
# with large data sets, so that we don't exhaust the reactor. This is done by
|
2020-07-24 16:59:51 +02:00
|
|
|
# awaiting to reactor during loops every N iterations.
|
|
|
|
_AWAIT_AFTER_ITERATIONS = 100
|
2020-06-24 19:48:18 +02:00
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def resolve_events_with_store(
    clock: Clock,
    room_id: str,
    room_version: RoomVersion,
    state_sets: Sequence[StateMap[str]],
    event_map: Optional[Dict[str, EventBase]],
    state_res_store: "synapse.state.StateResolutionStore",
) -> StateMap[str]:
    """Resolves the state using the v2 state resolution algorithm

    Args:
        clock: used to yield control back to the reactor periodically
            during the long-running loops below
        room_id: the room we are working in
        room_version: The room version
        state_sets: List of dicts of (type, state_key) -> event_id,
            which are the different state groups to resolve.
        event_map:
            a dict from event_id to event, for any events that we happen to
            have in flight (eg, those currently being persisted). This will be
            used as a starting point for finding the state we need; any missing
            events will be requested via state_res_store.

            If None, all events will be fetched via state_res_store.

            Note: this dict is mutated in place as a cache of fetched events.

        state_res_store: used to fetch events and auth chain differences
            from the database

    Returns:
        A map from (type, state_key) to event_id.
    """
    logger.debug("Computing conflicted state")

    # We use event_map as a cache, so if its None we need to initialize it
    if event_map is None:
        event_map = {}

    # First split up the un/conflicted state
    unconflicted_state, conflicted_state = _seperate(state_sets)

    if not conflicted_state:
        # Nothing to resolve: every key agreed across all state sets.
        return unconflicted_state

    logger.debug("%d conflicted state entries", len(conflicted_state))
    logger.debug("Calculating auth chain difference")

    # Also fetch all auth events that appear in only some of the state sets'
    # auth chains.
    auth_diff = await _get_auth_chain_difference(
        room_id, state_sets, event_map, state_res_store
    )

    # The "full conflicted set" is the conflicted state events plus the auth
    # chain difference, per the v2 algorithm.
    full_conflicted_set = set(
        itertools.chain(
            itertools.chain.from_iterable(conflicted_state.values()), auth_diff
        )
    )

    events = await state_res_store.get_events(
        [eid for eid in full_conflicted_set if eid not in event_map],
        allow_rejected=True,
    )
    event_map.update(events)

    # everything in the event map should be in the right room
    for event in event_map.values():
        if event.room_id != room_id:
            raise Exception(
                "Attempting to state-resolve for room %s with event %s which is in %s"
                % (
                    room_id,
                    event.event_id,
                    event.room_id,
                )
            )

    # Drop any event IDs that we could not actually fetch.
    full_conflicted_set = {eid for eid in full_conflicted_set if eid in event_map}

    logger.debug("%d full_conflicted_set entries", len(full_conflicted_set))

    # Get and sort all the power events (kicks/bans/etc)
    power_events = (
        eid for eid in full_conflicted_set if _is_power_event(event_map[eid])
    )

    sorted_power_events = await _reverse_topological_power_sort(
        clock, room_id, power_events, event_map, state_res_store, full_conflicted_set
    )

    logger.debug("sorted %d power events", len(sorted_power_events))

    # Now sequentially auth each one
    resolved_state = await _iterative_auth_checks(
        clock,
        room_id,
        room_version,
        sorted_power_events,
        unconflicted_state,
        event_map,
        state_res_store,
    )

    logger.debug("resolved power events")

    # OK, so we've now resolved the power events. Now sort the remaining
    # events using the mainline of the resolved power level.

    set_power_events = set(sorted_power_events)
    leftover_events = [
        ev_id for ev_id in full_conflicted_set if ev_id not in set_power_events
    ]

    logger.debug("sorting %d remaining events", len(leftover_events))

    pl = resolved_state.get((EventTypes.PowerLevels, ""), None)
    leftover_events = await _mainline_sort(
        clock, room_id, leftover_events, pl, event_map, state_res_store
    )

    logger.debug("resolving remaining events")

    resolved_state = await _iterative_auth_checks(
        clock,
        room_id,
        room_version,
        leftover_events,
        resolved_state,
        event_map,
        state_res_store,
    )

    logger.debug("resolved")

    # We make sure that unconflicted state always still applies.
    resolved_state.update(unconflicted_state)

    logger.debug("done")

    return resolved_state
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
async def _get_power_level_for_sender(
    room_id: str,
    event_id: str,
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
) -> int:
    """Return the power level of the sender of the given event according to
    their auth events.

    Args:
        room_id: the room we are working in
        event_id: the event whose sender's power level we want
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`

    Returns:
        The power level.
    """
    event = await _get_event(room_id, event_id, event_map, state_res_store)

    # Find the power-levels event among this event's auth events, if any.
    power_level_event = None
    for auth_id in event.auth_event_ids():
        auth_event = await _get_event(
            room_id, auth_id, event_map, state_res_store, allow_none=True
        )
        if auth_event and (auth_event.type, auth_event.state_key) == (
            EventTypes.PowerLevels,
            "",
        ):
            power_level_event = auth_event
            break

    if power_level_event is None:
        # Couldn't find power level. Check if they're the creator of the room,
        # who implicitly has power level 100; everyone else defaults to 0.
        for auth_id in event.auth_event_ids():
            auth_event = await _get_event(
                room_id, auth_id, event_map, state_res_store, allow_none=True
            )
            if auth_event and (auth_event.type, auth_event.state_key) == (
                EventTypes.Create,
                "",
            ):
                if auth_event.content.get("creator") == event.sender:
                    return 100
                break
        return 0

    # Per-user entry first, falling back to the room's default user level.
    content = power_level_event.content
    sender_level = content.get("users", {}).get(event.sender)
    if sender_level is None:
        sender_level = content.get("users_default", 0)

    # An explicit null in the content still counts as "no power level".
    return 0 if sender_level is None else int(sender_level)
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
async def _get_auth_chain_difference(
    room_id: str,
    state_sets: Sequence[StateMap[str]],
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
) -> Set[str]:
    """Compare the auth chains of each state set and return the set of events
    that only appear in some but not all of the auth chains.

    Args:
        room_id: the room we are working in
        state_sets: the state sets whose auth chains we compare
        event_map: cache of in-flight (possibly unpersisted) events
        state_res_store: used to compute the auth chain difference of
            persisted events

    Returns:
        Set of event IDs
    """

    # The `StateResolutionStore.get_auth_chain_difference` function assumes that
    # all events passed to it (and their auth chains) have been persisted
    # previously. This is not the case for any events in the `event_map`, and so
    # we need to manually handle those events.
    #
    # We do this by:
    #   1. calculating the auth chain difference for the state sets based on the
    #      events in `event_map` alone
    #   2. replacing any events in the state_sets that are also in `event_map`
    #      with their auth events (recursively), and then calling
    #      `store.get_auth_chain_difference` as normal
    #   3. adding the results of 1 and 2 together.

    # Map from event ID in `event_map` to their auth event IDs, and their auth
    # event IDs if they appear in the `event_map`. This is the intersection of
    # the event's auth chain with the events in the `event_map` *plus* their
    # auth event IDs.
    events_to_auth_chain: Dict[str, Set[str]] = {}
    for event in event_map.values():
        chain = {event.event_id}
        events_to_auth_chain[event.event_id] = chain

        # Breadth-unordered traversal; only recurse into auth events that are
        # themselves in `event_map` (i.e. unpersisted).
        to_search = [event]
        while to_search:
            for auth_id in to_search.pop().auth_event_ids():
                chain.add(auth_id)
                auth_event = event_map.get(auth_id)
                if auth_event:
                    to_search.append(auth_event)

    # We now a) calculate the auth chain difference for the unpersisted events
    # and b) work out the state sets to pass to the store.
    #
    # Note: If the `event_map` is empty (which is the common case), we can do a
    # much simpler calculation.
    if event_map:
        # The list of state sets to pass to the store, where each state set is a set
        # of the event ids making up the state. This is similar to `state_sets`,
        # except that (a) we only have event ids, not the complete
        # ((type, state_key)->event_id) mappings; and (b) we have stripped out
        # unpersisted events and replaced them with the persisted events in
        # their auth chain.
        state_sets_ids: List[Set[str]] = []

        # For each state set, the unpersisted event IDs reachable (by their auth
        # chain) from the events in that set.
        unpersisted_set_ids: List[Set[str]] = []

        for state_set in state_sets:
            set_ids: Set[str] = set()
            state_sets_ids.append(set_ids)

            unpersisted_ids: Set[str] = set()
            unpersisted_set_ids.append(unpersisted_ids)

            for event_id in state_set.values():
                event_chain = events_to_auth_chain.get(event_id)
                if event_chain is not None:
                    # We have an event in `event_map`. We add all the auth
                    # events that it references (that aren't also in `event_map`).
                    set_ids.update(e for e in event_chain if e not in event_map)

                    # We also add the full chain of unpersisted event IDs
                    # referenced by this state set, so that we can work out the
                    # auth chain difference of the unpersisted events.
                    unpersisted_ids.update(e for e in event_chain if e in event_map)
                else:
                    set_ids.add(event_id)

        # The auth chain difference of the unpersisted events of the state sets
        # is calculated by taking the difference between the union and
        # intersections.
        union = unpersisted_set_ids[0].union(*unpersisted_set_ids[1:])
        intersection = unpersisted_set_ids[0].intersection(*unpersisted_set_ids[1:])

        difference_from_event_map: Collection[str] = union - intersection
    else:
        difference_from_event_map = ()
        state_sets_ids = [set(state_set.values()) for state_set in state_sets]

    difference = await state_res_store.get_auth_chain_difference(
        room_id, state_sets_ids
    )
    difference.update(difference_from_event_map)

    return difference
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
def _seperate(
    state_sets: Iterable[StateMap[str]],
) -> Tuple[StateMap[str], StateMap[Set[str]]]:
    """Return the unconflicted and conflicted state. This is different than in
    the original algorithm, as this defines a key to be conflicted if one of
    the state sets doesn't have that key.

    Args:
        state_sets: the state sets to split up

    Returns:
        A tuple of unconflicted and conflicted state. The conflicted state dict
        is a map from type/state_key to set of event IDs
    """
    unconflicted_state = {}
    conflicted_state = {}

    # Every (type, state_key) appearing in at least one state set.
    all_keys = set(itertools.chain.from_iterable(state_sets))
    for key in all_keys:
        # A missing key shows up here as None, which marks the key conflicted.
        candidate_ids = {state_set.get(key) for state_set in state_sets}
        if len(candidate_ids) == 1:
            unconflicted_state[key] = candidate_ids.pop()
        else:
            candidate_ids.discard(None)
            conflicted_state[key] = candidate_ids

    # mypy doesn't understand that discarding None above means that conflicted
    # state is StateMap[Set[str]], not StateMap[Set[Optional[Str]]].
    return unconflicted_state, conflicted_state  # type: ignore
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
def _is_power_event(event: EventBase) -> bool:
    """Return whether or not the event is a "power event", as defined by the
    v2 state resolution algorithm

    Args:
        event: the event to classify

    Returns:
        True if the event is a power event.
    """
    # Power levels, join rules and the create event are always power events.
    key = (event.type, event.state_key)
    if key in (
        (EventTypes.PowerLevels, ""),
        (EventTypes.JoinRules, ""),
        (EventTypes.Create, ""),
    ):
        return True

    # Kicks and bans count too: a leave/ban membership event sent by someone
    # other than the affected user.
    if event.type == EventTypes.Member and event.membership in ("leave", "ban"):
        return event.sender != event.state_key

    return False
|
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def _add_event_and_auth_chain_to_graph(
    graph: Dict[str, Set[str]],
    room_id: str,
    event_id: str,
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
    auth_diff: Set[str],
) -> None:
    """Helper function for _reverse_topological_power_sort that add the event
    and its auth chain (that is in the auth diff) to the graph

    Args:
        graph: A map from event ID to the events auth event IDs
        room_id: the room we are working in
        event_id: Event to add to the graph
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`
        auth_diff: Set of event IDs that are in the auth difference.
    """
    # Iterative worklist traversal of the auth chain, restricted to events
    # that appear in the auth difference.
    to_process = [event_id]
    while to_process:
        current_id = to_process.pop()
        edges = graph.setdefault(current_id, set())

        current = await _get_event(room_id, current_id, event_map, state_res_store)
        for auth_id in current.auth_event_ids():
            if auth_id not in auth_diff:
                continue

            # Only queue auth events we haven't visited yet.
            if auth_id not in graph:
                to_process.append(auth_id)

            edges.add(auth_id)
|
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def _reverse_topological_power_sort(
    clock: Clock,
    room_id: str,
    event_ids: Iterable[str],
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
    auth_diff: Set[str],
) -> List[str]:
    """Returns a list of the event_ids sorted by reverse topological ordering,
    and then by power level and origin_server_ts

    Args:
        clock: used to yield to the reactor periodically
        room_id: the room we are working in
        event_ids: The events to sort
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`
        auth_diff: Set of event IDs that are in the auth difference.

    Returns:
        The sorted list
    """

    # Build the auth-event graph over the events to sort (restricted to the
    # auth difference).
    graph: Dict[str, Set[str]] = {}
    for idx, event_id in enumerate(event_ids, start=1):
        await _add_event_and_auth_chain_to_graph(
            graph, room_id, event_id, event_map, state_res_store, auth_diff
        )

        # We await occasionally when we're working with large data sets to
        # ensure that we don't block the reactor loop for too long.
        if idx % _AWAIT_AFTER_ITERATIONS == 0:
            await clock.sleep(0)

    # Look up each sender's power level so it can be used as the tie-breaker
    # when the topological order is ambiguous.
    event_to_pl = {}
    for idx, event_id in enumerate(graph, start=1):
        pl = await _get_power_level_for_sender(
            room_id, event_id, event_map, state_res_store
        )
        event_to_pl[event_id] = pl

        # We await occasionally when we're working with large data sets to
        # ensure that we don't block the reactor loop for too long.
        if idx % _AWAIT_AFTER_ITERATIONS == 0:
            await clock.sleep(0)

    def _get_power_order(event_id):
        # Sort key: higher power level first (hence negation), then older
        # events first, then by event ID for determinism.
        ev = event_map[event_id]
        pl = event_to_pl[event_id]

        return -pl, ev.origin_server_ts, event_id

    # Note: graph is modified during the sort
    it = lexicographical_topological_sort(graph, key=_get_power_order)
    sorted_events = list(it)

    return sorted_events
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def _iterative_auth_checks(
    clock: Clock,
    room_id: str,
    room_version: RoomVersion,
    event_ids: List[str],
    base_state: StateMap[str],
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
) -> MutableStateMap[str]:
    """Sequentially apply auth checks to each event in given list, updating the
    state as it goes along.

    Args:
        clock: used to yield to the reactor periodically
        room_id: the room we are working in
        room_version: the version of the room, which determines the auth rules
        event_ids: Ordered list of events to apply auth checks to
        base_state: The set of state to start with
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`

    Returns:
        Returns the final updated state
    """
    # Work on a copy so `base_state` is left untouched.
    resolved_state = dict(base_state)

    for idx, event_id in enumerate(event_ids, start=1):
        event = event_map[event_id]

        # Build the auth events for this event: first the events it actually
        # cites as auth events...
        auth_events = {}
        for aid in event.auth_event_ids():
            ev = await _get_event(
                room_id, aid, event_map, state_res_store, allow_none=True
            )

            if not ev:
                logger.warning(
                    "auth_event id %s for event %s is missing", aid, event_id
                )
            else:
                if ev.rejected_reason is None:
                    auth_events[(ev.type, ev.state_key)] = ev

        # ...then overridden by the currently-resolved state for each auth
        # type this event needs (skipping rejected events).
        for key in event_auth.auth_types_for_event(room_version, event):
            if key in resolved_state:
                ev_id = resolved_state[key]
                ev = await _get_event(room_id, ev_id, event_map, state_res_store)

                if ev.rejected_reason is None:
                    auth_events[key] = event_map[ev_id]

        try:
            event_auth.check_auth_rules_for_event(
                room_version,
                event,
                auth_events,
            )

            resolved_state[(event.type, event.state_key)] = event_id
        except AuthError:
            # Events that fail auth are simply dropped from the resolution.
            pass

        # We await occasionally when we're working with large data sets to
        # ensure that we don't block the reactor loop for too long.
        if idx % _AWAIT_AFTER_ITERATIONS == 0:
            await clock.sleep(0)

    return resolved_state
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def _mainline_sort(
    clock: Clock,
    room_id: str,
    event_ids: List[str],
    resolved_power_event_id: Optional[str],
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
) -> List[str]:
    """Returns a sorted list of event_ids sorted by mainline ordering based on
    the given event resolved_power_event_id

    Args:
        clock: used to yield to the reactor periodically
        room_id: room we're working in
        event_ids: Events to sort
        resolved_power_event_id: The final resolved power level event ID
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`

    Returns:
        The sorted list
    """
    if not event_ids:
        # It's possible for there to be no event IDs here to sort, so we can
        # skip calculating the mainline in that case.
        return []

    # Build the "mainline": walk backwards from the resolved power-level event
    # through each predecessor power-level event in its auth events.
    mainline = []
    pl = resolved_power_event_id
    idx = 0
    while pl:
        mainline.append(pl)
        pl_ev = await _get_event(room_id, pl, event_map, state_res_store)
        auth_events = pl_ev.auth_event_ids()
        pl = None
        for aid in auth_events:
            ev = await _get_event(
                room_id, aid, event_map, state_res_store, allow_none=True
            )
            if ev and (ev.type, ev.state_key) == (EventTypes.PowerLevels, ""):
                pl = aid
                break

        # We await occasionally when we're working with large data sets to
        # ensure that we don't block the reactor loop for too long.
        if idx != 0 and idx % _AWAIT_AFTER_ITERATIONS == 0:
            await clock.sleep(0)

        idx += 1

    # Depth 1 is the oldest mainline event; events off the mainline get 0.
    mainline_map = {ev_id: i + 1 for i, ev_id in enumerate(reversed(mainline))}

    event_ids = list(event_ids)

    # Sort by (mainline depth, origin_server_ts, event_id) for determinism.
    order_map = {}
    for idx, ev_id in enumerate(event_ids, start=1):
        depth = await _get_mainline_depth_for_event(
            event_map[ev_id], mainline_map, event_map, state_res_store
        )
        order_map[ev_id] = (depth, event_map[ev_id].origin_server_ts, ev_id)

        # We await occasionally when we're working with large data sets to
        # ensure that we don't block the reactor loop for too long.
        if idx % _AWAIT_AFTER_ITERATIONS == 0:
            await clock.sleep(0)

    event_ids.sort(key=lambda ev_id: order_map[ev_id])

    return event_ids
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-07-24 16:59:51 +02:00
|
|
|
async def _get_mainline_depth_for_event(
    event: EventBase,
    mainline_map: Dict[str, int],
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
) -> int:
    """Get the mainline depths for the given event based on the mainline map

    Args:
        event: the event whose mainline depth we want
        mainline_map: Map from event_id to mainline depth for events in the mainline.
        event_map: cache of fetched events, updated in place
        state_res_store: used to fetch any events not in `event_map`

    Returns:
        The mainline depth
    """
    room_id = event.room_id

    # Iteratively walk from the event along the chain of power-level events
    # referenced via auth events, until one of them lies on the mainline.
    current: Optional[EventBase] = event
    while current is not None:
        depth = mainline_map.get(current.event_id)
        if depth is not None:
            return depth

        auth_ids = current.auth_event_ids()
        current = None

        for auth_id in auth_ids:
            auth_event = await _get_event(
                room_id, auth_id, event_map, state_res_store, allow_none=True
            )
            if auth_event and (auth_event.type, auth_event.state_key) == (
                EventTypes.PowerLevels,
                "",
            ):
                current = auth_event
                break

    # Never reached the mainline, so the event gets depth 0.
    return 0
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
@overload
|
|
|
|
async def _get_event(
|
|
|
|
room_id: str,
|
|
|
|
event_id: str,
|
|
|
|
event_map: Dict[str, EventBase],
|
|
|
|
state_res_store: "synapse.state.StateResolutionStore",
|
|
|
|
allow_none: Literal[False] = False,
|
|
|
|
) -> EventBase:
|
|
|
|
...
|
|
|
|
|
|
|
|
|
|
|
|
@overload
async def _get_event(
    room_id: str,
    event_id: str,
    event_map: Dict[str, EventBase],
    state_res_store: "synapse.state.StateResolutionStore",
    allow_none: Literal[True],
) -> Optional[EventBase]:
    # Overload for `allow_none=True`: a missing event yields None rather than
    # raising, hence the Optional return type.
    ...
|
|
|
|
|
|
|
|
|
|
|
|
async def _get_event(
|
|
|
|
room_id: str,
|
|
|
|
event_id: str,
|
|
|
|
event_map: Dict[str, EventBase],
|
|
|
|
state_res_store: "synapse.state.StateResolutionStore",
|
|
|
|
allow_none: bool = False,
|
|
|
|
) -> Optional[EventBase]:
|
2018-09-03 16:13:17 +02:00
|
|
|
"""Helper function to look up event in event_map, falling back to looking
|
|
|
|
it up in the store
|
|
|
|
|
|
|
|
Args:
|
2020-08-24 20:25:27 +02:00
|
|
|
room_id
|
|
|
|
event_id
|
|
|
|
event_map
|
|
|
|
state_res_store
|
|
|
|
allow_none: if the event is not found, return None rather than raising
|
2019-12-31 11:41:44 +01:00
|
|
|
an exception
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
Returns:
|
2020-08-24 20:25:27 +02:00
|
|
|
The event, or none if the event does not exist (and allow_none is True).
|
2018-09-03 16:13:17 +02:00
|
|
|
"""
|
|
|
|
if event_id not in event_map:
|
2020-07-24 16:59:51 +02:00
|
|
|
events = await state_res_store.get_events([event_id], allow_rejected=True)
|
2018-09-03 16:13:17 +02:00
|
|
|
event_map.update(events)
|
2019-12-31 11:41:44 +01:00
|
|
|
event = event_map.get(event_id)
|
|
|
|
|
|
|
|
if event is None:
|
|
|
|
if allow_none:
|
|
|
|
return None
|
|
|
|
raise Exception("Unknown event %s" % (event_id,))
|
|
|
|
|
2019-12-13 13:55:32 +01:00
|
|
|
if event.room_id != room_id:
|
|
|
|
raise Exception(
|
|
|
|
"In state res for room %s, event %s is in %s"
|
|
|
|
% (room_id, event_id, event.room_id)
|
|
|
|
)
|
|
|
|
return event
|
2018-09-03 16:13:17 +02:00
|
|
|
|
|
|
|
|
2020-08-24 20:25:27 +02:00
|
|
|
def lexicographical_topological_sort(
    graph: Dict[str, Set[str]], key: Callable[[str], Any]
) -> Generator[str, None, None]:
    """Performs a lexicographic reverse topological sort on the graph.

    This returns a reverse topological sort (i.e. if node A references B then B
    appears before A in the sort), with ties broken lexicographically based on
    return value of the `key` function.

    NOTE: `graph` is modified during the sort.

    Args:
        graph: A representation of the graph where each node is a key in the
            dict and its value are the nodes edges.
        key: A function that takes a node and returns a value that is comparable
            and used to order nodes

    Yields:
        The next node in the topological sort
    """

    # This is Kahn's algorithm run "backwards": instead of emitting nodes with
    # no incoming edges we emit nodes with no *outgoing* edges, c.f.
    # https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm
    outdegree_map = graph
    reverse_graph: Dict[str, Set[str]] = {}

    # Heap of `(key(node), node)` pairs for nodes whose outgoing-edge set is
    # empty; the tuple ordering gives the lexicographic tie-break for free.
    ready: List[Tuple[Any, str]] = []

    for source, targets in graph.items():
        if not targets:
            ready.append((key(source), source))

        reverse_graph.setdefault(source, set())
        for target in targets:
            reverse_graph.setdefault(target, set()).add(source)

    heapq.heapify(ready)

    while ready:
        _, current = heapq.heappop(ready)

        # Emitting `current` removes its edge from each predecessor; any
        # predecessor left with no outgoing edges becomes ready itself.
        for predecessor in reverse_graph[current]:
            remaining = outdegree_map[predecessor]
            remaining.discard(current)
            if not remaining:
                heapq.heappush(ready, (key(predecessor), predecessor))

        yield current
|