# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from typing import TYPE_CHECKING, Awaitable, Callable, Dict, Iterable, Optional

from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.storage.types import Connection
from synapse.types import JsonDict
from synapse.util import json_encoder

from . import engines

if TYPE_CHECKING:
    from synapse.app.homeserver import HomeServer
    from synapse.storage.database import DatabasePool, LoggingTransaction

logger = logging.getLogger(__name__)


class BackgroundUpdatePerformance:
    """Tracks how long a background update is taking to update its items"""

    def __init__(self, name: str):
        self.name = name
        self.total_item_count = 0
        self.total_duration_ms = 0.0
        self.avg_item_count = 0.0
        self.avg_duration_ms = 0.0

    def update(self, item_count: int, duration_ms: float) -> None:
        """Update the stats after doing an update"""
        self.total_item_count += item_count
        self.total_duration_ms += duration_ms

        # Exponential moving averages for the number of items updated and
        # the duration.
        self.avg_item_count += 0.1 * (item_count - self.avg_item_count)
        self.avg_duration_ms += 0.1 * (duration_ms - self.avg_duration_ms)
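        # Worked example (illustrative numbers, not from the source): with the
        # 0.1 smoothing factor above, a batch of 500 items moves avg_item_count
        # 10% of the way from its previous value towards 500, so recent batches
        # dominate the estimate.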

    def average_items_per_ms(self) -> Optional[float]:
        """An estimate of how many items we can update per millisecond, based
        on an exponential moving average of recent batches.

        Returns:
            A rate in items per millisecond, 0 if we have no timing
            information yet, or None if no items have been updated yet.
        """
        if self.avg_duration_ms == 0:
            return 0
        elif self.total_item_count == 0:
            return None
        else:
            # Use the exponential moving average so that we can adapt to
            # changes in how long the update process takes.
            return float(self.avg_item_count) / float(self.avg_duration_ms)

    def total_items_per_ms(self) -> Optional[float]:
        """An estimate of how many items we can update per millisecond, based
        on the total time spent and the total number of items processed.

        Returns:
            A rate in items per millisecond, 0 if we have no timing
            information yet, or None if no items have been updated yet.
        """
        if self.total_duration_ms == 0:
            return 0
        elif self.total_item_count == 0:
            return None
        else:
            return float(self.total_item_count) / float(self.total_duration_ms)


class BackgroundUpdater:
    """Background updates are updates to the database that run in the
    background. Each update processes a batch of data at once. We attempt to
    limit the impact of each update by monitoring how long each batch takes to
    process and autotuning the batch size.
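    For example (illustrative numbers, not from the source): if recent batches
    have been processing roughly 2 items per millisecond and each batch aims
    for BACKGROUND_UPDATE_DURATION_MS (100ms) of work, the next batch size
    will be about 200 items, but never less than
    MINIMUM_BACKGROUND_BATCH_SIZE.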
    """

    # The autotuned batch size is clamped so it never drops below this.
    MINIMUM_BACKGROUND_BATCH_SIZE = 100
    # The batch size used before we have any timing information for an update.
    DEFAULT_BACKGROUND_BATCH_SIZE = 100
    # How long to sleep between batches.
    BACKGROUND_UPDATE_INTERVAL_MS = 1000
    # How long we aim for each batch to take.
    BACKGROUND_UPDATE_DURATION_MS = 100

    def __init__(self, hs: "HomeServer", database: "DatabasePool"):
        self._clock = hs.get_clock()
        self.db_pool = database

        # if a background update is currently running, its name.
        self._current_background_update = None  # type: Optional[str]

        self._background_update_performance = (
            {}
        )  # type: Dict[str, BackgroundUpdatePerformance]
        self._background_update_handlers = (
            {}
        )  # type: Dict[str, Callable[[JsonDict, int], Awaitable[int]]]
        self._all_done = False

    def start_doing_background_updates(self) -> None:
        run_as_background_process("background_updates", self.run_background_updates)

    async def run_background_updates(self, sleep: bool = True) -> None:
        logger.info("Starting background schema updates")
        while True:
            if sleep:
                await self._clock.sleep(self.BACKGROUND_UPDATE_INTERVAL_MS / 1000.0)

            try:
                result = await self.do_next_background_update(
                    self.BACKGROUND_UPDATE_DURATION_MS
                )
            except Exception:
                logger.exception("Error doing update")
            else:
                if result:
                    logger.info(
                        "No more background updates to do."
                        " Unscheduling background update task."
                    )
                    self._all_done = True
                    return None

    async def has_completed_background_updates(self) -> bool:
        """Check if all the background updates have completed

        Returns:
            True if all background updates have completed
        """
        # if we've previously determined that there is nothing left to do, that
        # is easy
        if self._all_done:
            return True

        # obviously, if we are currently processing an update, we're not done.
        if self._current_background_update:
            return False

        # otherwise, check if there are updates to be run. This is important,
        # as we may be running on a worker which doesn't perform the bg updates
        # itself, but still wants to wait for them to happen.
        updates = await self.db_pool.simple_select_onecol(
            "background_updates",
            keyvalues=None,
            retcol="1",
            desc="has_completed_background_updates",
        )
        if not updates:
            self._all_done = True
            return True

        return False

    async def has_completed_background_update(self, update_name: str) -> bool:
        """Check if the given background update has finished running."""
        if self._all_done:
            return True

        if update_name == self._current_background_update:
            return False

        update_exists = await self.db_pool.simple_select_one_onecol(
            "background_updates",
            keyvalues={"update_name": update_name},
            retcol="1",
            desc="has_completed_background_update",
            allow_none=True,
        )

        return not update_exists

    async def do_next_background_update(self, desired_duration_ms: float) -> bool:
        """Does some amount of work on the next queued background update

        Returns once some amount of work is done.

        Args:
            desired_duration_ms: How long we want to spend updating.
        Returns:
            True if we have finished running all the background updates, otherwise False
        """

        def get_background_updates_txn(txn):
            txn.execute(
                """
                SELECT update_name, depends_on FROM background_updates
                ORDER BY ordering, update_name
                """
            )
            return self.db_pool.cursor_to_dict(txn)

        if not self._current_background_update:
            all_pending_updates = await self.db_pool.runInteraction(
                "background_updates",
                get_background_updates_txn,
            )
            if not all_pending_updates:
                # no work left to do
                return True

            # find the first update which isn't dependent on another one in the queue.
            pending = {update["update_name"] for update in all_pending_updates}
            for upd in all_pending_updates:
                depends_on = upd["depends_on"]
                if not depends_on or depends_on not in pending:
                    break
                logger.info(
                    "Not starting on bg update %s until %s is done",
                    upd["update_name"],
                    depends_on,
                )
            else:
                # The for loop completed without hitting `break`, which means
                # every pending update depends on another pending update: a
                # dependency cycle.
                raise Exception(
                    "Unable to find a background update which doesn't depend on "
                    "another: dependency cycle?"
                )

            self._current_background_update = upd["update_name"]

        await self._do_background_update(desired_duration_ms)
        return False

    async def _do_background_update(self, desired_duration_ms: float) -> int:
        assert self._current_background_update is not None
        update_name = self._current_background_update
        logger.info("Starting update batch on background update '%s'", update_name)

        update_handler = self._background_update_handlers[update_name]

        performance = self._background_update_performance.get(update_name)

        if performance is None:
            performance = BackgroundUpdatePerformance(update_name)
            self._background_update_performance[update_name] = performance

        items_per_ms = performance.average_items_per_ms()

        if items_per_ms is not None:
            batch_size = int(desired_duration_ms * items_per_ms)
            # Clamp the batch size so that we always make progress
            batch_size = max(batch_size, self.MINIMUM_BACKGROUND_BATCH_SIZE)
        else:
            batch_size = self.DEFAULT_BACKGROUND_BATCH_SIZE

        progress_json = await self.db_pool.simple_select_one_onecol(
            "background_updates",
            keyvalues={"update_name": update_name},
            retcol="progress_json",
        )

        # Avoid a circular import.
        from synapse.storage._base import db_to_json

        progress = db_to_json(progress_json)

        time_start = self._clock.time_msec()
        items_updated = await update_handler(progress, batch_size)
        time_stop = self._clock.time_msec()

        duration_ms = time_stop - time_start

        logger.info(
            "Running background update %r. Processed %r items in %rms."
            " (total_rate=%r/ms, current_rate=%r/ms, total_updated=%r, batch_size=%r)",
            update_name,
            items_updated,
            duration_ms,
            performance.total_items_per_ms(),
            performance.average_items_per_ms(),
            performance.total_item_count,
            batch_size,
        )

        performance.update(items_updated, duration_ms)

        return len(self._background_update_performance)

    def register_background_update_handler(
        self,
        update_name: str,
        update_handler: Callable[[JsonDict, int], Awaitable[int]],
    ):
        """Register a handler for doing a background update.

        The handler should take two arguments:

        * A dict of the current progress
        * An integer count of the number of items to update in this batch.

        The handler should return a deferred or coroutine which returns an integer count
        of items updated.

        The handler is responsible for updating the progress of the update.

        Args:
            update_name: The name of the update that this code handles.
            update_handler: The function that does the update.
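        Example (a minimal, illustrative sketch; the update name and handler
        below are hypothetical, not part of this module):

            async def _my_update(progress: JsonDict, batch_size: int) -> int:
                # Process up to `batch_size` items, persist the new progress,
                # and return how many items were actually updated.
                ...

            self.register_background_update_handler("my_update", _my_update)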
        """
        self._background_update_handlers[update_name] = update_handler

    def register_noop_background_update(self, update_name: str) -> None:
        """Register a noop handler for a background update.

        This is useful when we previously did a background update, but no
        longer wish to do the update. In this case the background update should
        be removed from the schema delta files, but there may still be some
        users who have the background update queued, so this method should
        also be called to clear the update.

        Args:
            update_name: The name of the background update to clear
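        Example (illustrative; the update name is hypothetical):

            self.register_noop_background_update("a_retired_update_name")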
        """

        async def noop_update(progress: JsonDict, batch_size: int) -> int:
            await self._end_background_update(update_name)
            return 1

        self.register_background_update_handler(update_name, noop_update)

    def register_background_index_update(
        self,
        update_name: str,
        index_name: str,
        table: str,
        columns: Iterable[str],
        where_clause: Optional[str] = None,
        unique: bool = False,
        psql_only: bool = False,
    ) -> None:
        """Helper for store classes to do a background index addition

        To use:

        1. use a schema delta file to add a background update. Example:

            INSERT INTO background_updates (update_name, progress_json) VALUES
                ('my_new_index', '{}');

        2. In the Store constructor, call this method
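           For example (an illustrative call; the update, table and column
           names are hypothetical):

               self.db_pool.updates.register_background_index_update(
                   "my_new_index",
                   index_name="my_new_index",
                   table="mytable",
                   columns=["my_column"],
               )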

        Args:
            update_name: update_name to register for
            index_name: name of index to add
            table: table to add index to
            columns: columns/expressions to include in index
            where_clause: an optional WHERE clause for a partial index
                (Postgres only; ignored on SQLite)
            unique: true to make a UNIQUE index
            psql_only: true to only create this index on psql databases (useful
                for virtual sqlite tables)
        """

        def create_index_psql(conn: Connection) -> None:
            conn.rollback()
            # postgres insists on autocommit for the index
            conn.set_session(autocommit=True)  # type: ignore

            try:
                c = conn.cursor()

                # If a previous attempt to create the index was interrupted,
                # we may already have a half-built index. Let's just drop it
                # before trying to create it again.
                sql = "DROP INDEX IF EXISTS %s" % (index_name,)
                logger.debug("[SQL] %s", sql)
                c.execute(sql)

                sql = (
                    "CREATE %(unique)s INDEX CONCURRENTLY %(name)s"
                    " ON %(table)s"
                    " (%(columns)s) %(where_clause)s"
                ) % {
                    "unique": "UNIQUE" if unique else "",
                    "name": index_name,
                    "table": table,
                    "columns": ", ".join(columns),
                    "where_clause": "WHERE " + where_clause if where_clause else "",
                }
                logger.debug("[SQL] %s", sql)
                c.execute(sql)
            finally:
                conn.set_session(autocommit=False)  # type: ignore

        def create_index_sqlite(conn: Connection) -> None:
            # Sqlite doesn't support concurrent creation of indexes.
            #
            # We don't use partial indices on SQLite as it wasn't introduced
            # until 3.8, and wheezy and CentOS 7 have 3.7
            #
            # We assume that sqlite doesn't give us invalid indices; however
            # we may still end up with the index existing but the
            # background_updates not having been recorded if synapse got shut
            # down at the wrong moment - hence we use IF NOT EXISTS. (SQLite
            # has supported CREATE TABLE|INDEX IF NOT EXISTS since 3.3.0.)
            sql = (
                "CREATE %(unique)s INDEX IF NOT EXISTS %(name)s ON %(table)s"
                " (%(columns)s)"
            ) % {
                "unique": "UNIQUE" if unique else "",
                "name": index_name,
                "table": table,
                "columns": ", ".join(columns),
            }

            c = conn.cursor()
            logger.debug("[SQL] %s", sql)
            c.execute(sql)

        if isinstance(self.db_pool.engine, engines.PostgresEngine):
            runner = create_index_psql  # type: Optional[Callable[[Connection], None]]
        elif psql_only:
            runner = None
        else:
            runner = create_index_sqlite

        async def updater(progress, batch_size):
            if runner is not None:
                logger.info("Adding index %s to %s", index_name, table)
                await self.db_pool.runWithConnection(runner)
            await self._end_background_update(update_name)
            return 1

        self.register_background_update_handler(update_name, updater)

    async def _end_background_update(self, update_name: str) -> None:
        """Removes a completed background update task from the queue.

        Args:
            update_name: The name of the completed task to remove

        Returns:
            None, completes once the task is removed.
        """
        if update_name != self._current_background_update:
            raise Exception(
                "Cannot end background update %s which isn't currently running"
                % update_name
            )
        self._current_background_update = None
        await self.db_pool.simple_delete_one(
            "background_updates", keyvalues={"update_name": update_name}
        )

    async def _background_update_progress(
        self, update_name: str, progress: dict
    ) -> None:
        """Update the progress of a background update

        Args:
            update_name: The name of the background update task
            progress: The progress of the update.
        """

        await self.db_pool.runInteraction(
            "background_update_progress",
            self._background_update_progress_txn,
            update_name,
            progress,
        )

    def _background_update_progress_txn(
        self, txn: "LoggingTransaction", update_name: str, progress: JsonDict
    ) -> None:
        """Update the progress of a background update

        Args:
            txn: The transaction.
            update_name: The name of the background update task
            progress: The progress of the update.
        """
        progress_json = json_encoder.encode(progress)

        self.db_pool.simple_update_one_txn(
            txn,
            "background_updates",
            keyvalues={"update_name": update_name},
            updatevalues={"progress_json": progress_json},
        )