Continue debugging
parent 281588f120
commit 4226165cc4
@@ -2128,7 +2128,9 @@ class FederationHandler(BaseHandler):
         events = await self.store.get_backfill_events(room_id, pdu_list, limit)
         logger.info(
-            "on_backfill_request get_backfill_events events(%d)=%s", len(events), events
+            "on_backfill_request get_backfill_events events(%d)=%s",
+            len(events),
+            [f'{ev.content.get("body")}: {ev.type} ({ev.event_id})' for ev in events],
         )
 
         events = await filter_events_for_server(self.storage, origin, events)
 
@@ -673,20 +673,6 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         )
 
     def get_oldest_events_with_depth_in_room_txn(self, txn, room_id):
-        # sql = (
-        #     "SELECT b.event_id, MAX(e.depth) FROM events as e"
-        #     " INNER JOIN event_edges as g"
-        #     " ON g.event_id = e.event_id"
-        #     " INNER JOIN event_backward_extremities as b"
-        #     " ON g.prev_event_id = b.event_id"
-        #     # TODO
-        #     # " INNER JOIN insertion_event_extremeties as i"
-        #     # " ON g.event_id = i.insertion_prev_event_id"
-        #     " WHERE b.room_id = ? AND g.is_state is ?"
-        #     " GROUP BY b.event_id"
-        # )
-        # txn.execute(sql, (room_id, False))
-
         sqlAsdf = "SELECT * FROM insertion_event_extremeties as i"
         txn.execute(sqlAsdf)
         logger.info("wfeafewawafeawg %s", dict(txn))
@@ -710,6 +696,20 @@ class EventFederationWorkerStore(EventsWorkerStore, SignatureWorkerStore, SQLBas
         txn.execute(sql, (room_id,))
 
+        sql = (
+            "SELECT b.event_id, MAX(e.depth) FROM events as e"
+            " INNER JOIN event_edges as g"
+            " ON g.event_id = e.event_id"
+            " INNER JOIN event_backward_extremities as b"
+            " ON g.prev_event_id = b.event_id"
+            # TODO
+            # " INNER JOIN insertion_event_extremeties as i"
+            # " ON g.event_id = i.insertion_prev_event_id"
+            " WHERE b.room_id = ? AND g.is_state is ?"
+            " GROUP BY b.event_id"
+        )
+        txn.execute(sql, (room_id, False))
+
         return dict(txn)
 
     async def get_max_depth_of(self, event_ids: List[str]) -> Tuple[str, int]:
@@ -1773,7 +1773,7 @@ class PersistEventsStore:
         logger.info("_handle_insertion_event %s", event)
 
-        for prev_event_id in event.prev_event_ids:
+        for prev_event_id in event.prev_events:
             self.db_pool.simple_insert_txn(
                 txn,
                 table="insertion_event_extremeties",