2019-03-25 10:37:08 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2018 Vector Creations Ltd
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import logging
|
2020-08-28 15:37:55 +02:00
|
|
|
from typing import Any, Dict, List, Tuple
|
2020-03-02 17:52:15 +01:00
|
|
|
|
2019-03-25 10:37:08 +01:00
|
|
|
from synapse.storage._base import SQLBaseStore
|
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class StateDeltasStore(SQLBaseStore):
    async def get_current_state_deltas(
        self, prev_stream_id: int, max_stream_id: int
    ) -> Tuple[int, List[Dict[str, Any]]]:
        """Fetch the room state changes that happened after a given stream id.

        Each row in the returned list has the following fields:
            - stream_id (int)
            - room_id (str)
            - type (str): event type
            - state_key (str)
            - event_id (str|None): new event_id for this state key; None if
              the state has been deleted.
            - prev_event_id (str|None): previous event_id for this state key;
              None if it is new state.

        Args:
            prev_stream_id: point to get changes since (exclusive)
            max_stream_id: a point that is known to have been correctly
                persisted - i.e. an upper limit on the changes to return.

        Returns:
            A tuple of:
                - the stream id which these results go up to
                - the list of current_state_delta_stream rows; if it is
                  empty, we are up to date.
        """
        prev_stream_id = int(prev_stream_id)

        # The caller must not ask for a range that runs backwards.
        assert prev_stream_id <= max_stream_id

        if not self._curr_state_delta_stream_cache.has_any_entity_changed(
            prev_stream_id
        ):
            # The stream cache says nothing changed after prev_stream_id, so
            # in particular nothing changed between prev_stream_id and
            # max_stream_id: report ourselves as caught up.
            return (max_stream_id, [])

        def get_current_state_deltas_txn(txn):
            # Step 1: find an upper bound on stream_id that keeps the result
            # set small. We walk the per-stream_id row counts in order and
            # stop as soon as the running total would exceed 100 rows; the
            # LIMIT of 100 distinct stream_ids is an arbitrary cap to avoid
            # scanning too far.
            sql = """
                SELECT stream_id, count(*)
                FROM current_state_delta_stream
                WHERE stream_id > ? AND stream_id <= ?
                GROUP BY stream_id
                ORDER BY stream_id ASC
                LIMIT 100
            """
            txn.execute(sql, (prev_stream_id, max_stream_id))

            # Unless we find we have to clip, we can go right up to
            # max_stream_id.
            clipped_stream_id = max_stream_id
            rows_so_far = 0
            for stream_id, count in txn:
                rows_so_far += count
                if rows_so_far > 100:
                    # Too many rows: clip the range at this stream_id so we
                    # don't select too many.
                    logger.debug(
                        "Clipping current_state_delta_stream rows to stream_id %i",
                        stream_id,
                    )
                    clipped_stream_id = stream_id
                    break

            # Step 2: fetch the deltas themselves, up to the (possibly
            # clipped) upper bound.
            sql = """
                SELECT stream_id, room_id, type, state_key, event_id, prev_event_id
                FROM current_state_delta_stream
                WHERE ? < stream_id AND stream_id <= ?
                ORDER BY stream_id ASC
            """
            txn.execute(sql, (prev_stream_id, clipped_stream_id))
            return clipped_stream_id, self.db_pool.cursor_to_dict(txn)

        return await self.db_pool.runInteraction(
            "get_current_state_deltas", get_current_state_deltas_txn
        )

    def _get_max_stream_id_in_current_state_deltas_txn(self, txn):
        # COALESCE to -1 so that an empty table still yields a well-defined
        # value rather than NULL.
        return self.db_pool.simple_select_one_onecol_txn(
            txn,
            table="current_state_delta_stream",
            keyvalues={},
            retcol="COALESCE(MAX(stream_id), -1)",
        )

    async def get_max_stream_id_in_current_state_deltas(self):
        """Return the highest stream_id in current_state_delta_stream (-1 if empty)."""
        return await self.db_pool.runInteraction(
            "get_max_stream_id_in_current_state_deltas",
            self._get_max_stream_id_in_current_state_deltas_txn,
        )
|