Make base insertion event float off on its own
See https://github.com/matrix-org/synapse/pull/10250#issuecomment-875711889

Conflicts:
    synapse/rest/client/v1/room.py
parent 04b1f7ec02
commit b703962095
@@ -518,6 +518,9 @@ class EventCreationHandler:
             outlier: Indicates whether the event is an `outlier`, i.e. if
                 it's from an arbitrary point and floating in the DAG as
                 opposed to being inline with the current DAG.
+            historical: Indicates whether the message is being inserted
+                back in time around some existing events. This is used to skip
+                a few checks and mark the event as backfilled.
             depth: Override the depth used to order the event in the DAG.
                 Should normally be set to None, which will cause the depth to be calculated
                 based on the prev_events.
@@ -772,6 +775,7 @@ class EventCreationHandler:
         txn_id: Optional[str] = None,
         ignore_shadow_ban: bool = False,
         outlier: bool = False,
+        historical: bool = False,
         depth: Optional[int] = None,
     ) -> Tuple[EventBase, int]:
         """
@@ -799,6 +803,9 @@ class EventCreationHandler:
             outlier: Indicates whether the event is an `outlier`, i.e. if
                 it's from an arbitrary point and floating in the DAG as
                 opposed to being inline with the current DAG.
+            historical: Indicates whether the message is being inserted
+                back in time around some existing events. This is used to skip
+                a few checks and mark the event as backfilled.
             depth: Override the depth used to order the event in the DAG.
                 Should normally be set to None, which will cause the depth to be calculated
                 based on the prev_events.
@@ -847,6 +854,7 @@ class EventCreationHandler:
                 prev_event_ids=prev_event_ids,
                 auth_event_ids=auth_event_ids,
                 outlier=outlier,
+                historical=historical,
                 depth=depth,
             )
 
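Taken together, the hunks above thread a new `historical` keyword through `EventCreationHandler`; it defaults to `False`, so existing callers are unaffected. Below is a minimal sketch of the new arguments in use, mirroring the call this same commit adds to the room batch servlet further down; `event_dict`, `prev_event_ids`, `auth_event_ids` and `inherited_depth` are assumed to be prepared by the caller.

# Sketch only (not part of the diff): the new keyword arguments in use.
event, _ = await self.event_creation_handler.create_and_send_nonmember_event(
    requester,
    event_dict,
    prev_event_ids=prev_event_ids,
    auth_event_ids=auth_event_ids,
    historical=True,  # skip a few checks and mark the event as backfilled
    depth=inherited_depth,  # override the depth instead of deriving it from prev_events
)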
@@ -350,12 +350,15 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
 
         return depth
 
-    def _create_insertion_event_dict(self, sender: str, origin_server_ts: int):
+    def _create_insertion_event_dict(
+        self, sender: str, room_id: str, origin_server_ts: int
+    ):
         """Creates an event dict for an "insertion" event with the proper fields
         and a random chunk ID.
 
         Args:
             sender: The event author MXID
+            room_id: The room ID that the event belongs to
             origin_server_ts: Timestamp when the event was sent
 
         Returns:
@@ -366,6 +369,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         insertion_event = {
             "type": EventTypes.MSC2716_INSERTION,
             "sender": sender,
+            "room_id": room_id,
             "content": {
                 EventContentFields.MSC2716_NEXT_CHUNK_ID: next_chunk_id,
                 EventContentFields.MSC2716_HISTORICAL: True,
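For orientation, here is a hand-written sketch of the full dict this helper plausibly builds after the change. Only `type`, `sender`, the new `room_id` and the two `content` fields are visible in the hunk; the `origin_server_ts` field and the use of `random_string` for the chunk ID are assumptions based on the docstring and the helper's parameters.

# Sketch only, with assumptions marked inline.
from synapse.api.constants import EventContentFields, EventTypes
from synapse.util.stringutils import random_string

next_chunk_id = random_string(8)  # assumption: "a random chunk ID" per the docstring
insertion_event = {
    "type": EventTypes.MSC2716_INSERTION,
    "sender": sender,
    "room_id": room_id,  # newly added so the floating insertion event still carries its room
    "origin_server_ts": origin_server_ts,  # assumption: copied from the helper's argument
    "content": {
        EventContentFields.MSC2716_NEXT_CHUNK_ID: next_chunk_id,
        EventContentFields.MSC2716_HISTORICAL: True,
    },
}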
@@ -479,11 +483,17 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
 
         events_to_create = body["events"]
 
+        prev_event_ids = prev_events_from_query
+        inherited_depth = await self._inherit_depth_from_prev_ids(
+            prev_events_from_query
+        )
+
         # Figure out which chunk to connect to. If they passed in
         # chunk_id_from_query let's use it. The chunk ID passed in comes
         # from the chunk_id in the "insertion" event from the previous chunk.
         last_event_in_chunk = events_to_create[-1]
         chunk_id_to_connect_to = chunk_id_from_query
+        base_insertion_event = None
         if chunk_id_from_query:
             # TODO: Verify the chunk_id_from_query corresponds to an insertion event
             pass
@@ -495,11 +505,25 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         # an insertion event), in which case we just create a new insertion event
         # that can then get pointed to by a "marker" event later.
         else:
-            base_insertion_event = self._create_insertion_event_dict(
+            base_insertion_event_dict = self._create_insertion_event_dict(
                 sender=requester.user.to_string(),
+                room_id=room_id,
                 origin_server_ts=last_event_in_chunk["origin_server_ts"],
             )
-            events_to_create.append(base_insertion_event)
+            base_insertion_event_dict["prev_events"] = prev_event_ids.copy()
+
+            (
+                base_insertion_event,
+                _,
+            ) = await self.event_creation_handler.create_and_send_nonmember_event(
+                requester,
+                base_insertion_event_dict,
+                prev_event_ids=base_insertion_event_dict.get("prev_events"),
+                auth_event_ids=auth_event_ids,
+                historical=True,
+                depth=inherited_depth,
+            )
+
             chunk_id_to_connect_to = base_insertion_event["content"][
                 EventContentFields.MSC2716_NEXT_CHUNK_ID
             ]
@@ -513,6 +537,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         # event in the chunk) so the next chunk can be connected to this one.
         insertion_event = self._create_insertion_event_dict(
             sender=requester.user.to_string(),
+            room_id=room_id,
             # Since the insertion event is put at the start of the chunk,
             # where the oldest-in-time event is, copy the origin_server_ts from
             # the first event we're inserting
@@ -521,12 +546,7 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
         # Prepend the insertion event to the start of the chunk
         events_to_create = [insertion_event] + events_to_create
 
-        inherited_depth = await self._inherit_depth_from_prev_ids(
-            prev_events_from_query
-        )
-
         event_ids = []
-        prev_event_ids = prev_events_from_query
         events_to_persist = []
         for ev in events_to_create:
             assert_params_in_dict(ev, ["type", "origin_server_ts", "content", "sender"])
@@ -580,6 +600,10 @@ class RoomBatchSendEventRestServlet(TransactionRestServlet):
                 context=context,
             )
 
+        # Add the base_insertion_event to the bottom of the list we return
+        if base_insertion_event is not None:
+            event_ids.append(base_insertion_event.event_id)
+
         return 200, {
             "state_events": auth_event_ids,
             "events": event_ids,
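Finally, a rough illustration of the response shape after this change, with the base insertion event appended at the bottom of `events`. The event IDs are placeholders, not real values.

# Illustrative only: the servlet returns (200, body) with a body shaped like this.
body = {
    "state_events": ["$authEvent1", "$authEvent2"],  # auth_event_ids
    "events": [
        "$insertionEventAtStartOfChunk",
        "$firstHistoricalEvent",
        "$lastHistoricalEvent",
        "$baseInsertionEvent",  # only present when no chunk_id was passed in the query
    ],
}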