Add more logging to debug slow startup (#8264)

I'm hoping this will provide some pointers for debugging
https://github.com/matrix-org/synapse/issues/7968.
Richard van der Hoff 2020-09-07 13:36:02 +01:00 committed by GitHub
parent 96312536f2
commit 0dae7d80bf
5 changed files with 33 additions and 7 deletions

changelog.d/8264.misc (new file)

@@ -0,0 +1 @@
+Add more logging to debug slow startup.

synapse/storage/databases/__init__.py

@@ -47,9 +47,14 @@ class Databases:
             engine = create_engine(database_config.config)
 
             with make_conn(database_config, engine) as db_conn:
-                logger.info("Preparing database %r...", db_name)
+                logger.info("[database config %r]: Checking database server", db_name)
                 engine.check_database(db_conn)
+                logger.info(
+                    "[database config %r]: Preparing for databases %r",
+                    db_name,
+                    database_config.databases,
+                )
                 prepare_database(
                     db_conn, engine, hs.config, databases=database_config.databases,
                 )
 
@@ -57,7 +62,9 @@ class Databases:
                 database = DatabasePool(hs, database_config, engine)
 
                 if "main" in database_config.databases:
-                    logger.info("Starting 'main' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'main' database", db_name
+                    )
 
                     # Sanity check we don't try and configure the main store on
                     # multiple databases.
@@ -72,7 +79,9 @@ class Databases:
                     persist_events = PersistEventsStore(hs, database, main)
 
                 if "state" in database_config.databases:
-                    logger.info("Starting 'state' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'state' database", db_name
+                    )
 
                     # Sanity check we don't try and configure the state store on
                     # multiple databases.
@@ -85,7 +94,7 @@ class Databases:
 
                 self.databases.append(database)
 
-                logger.info("Database %r prepared", db_name)
+                logger.info("[database config %r]: prepared", db_name)
 
             # Closing the context manager doesn't close the connection.
             # psycopg will close the connection when the object gets GCed, but *only*
@@ -98,10 +107,10 @@ class Databases:
 
         # Sanity check that we have actually configured all the required stores.
         if not main:
-            raise Exception("No 'main' data store configured")
+            raise Exception("No 'main' database configured")
 
         if not state:
-            raise Exception("No 'state' data store configured")
+            raise Exception("No 'state' database configured")
 
         # We use local variables here to ensure that the databases do not have
         # optional types.
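
For reference, here is roughly what these calls emit during startup. This is an illustrative sketch rather than captured output: it assumes a single database block named 'master' hosting both stores, and the exact names depend on the homeserver's database configuration.

    [database config 'master']: Checking database server
    [database config 'master']: Preparing for databases ['main', 'state']
    [database config 'master']: Starting 'main' database
    [database config 'master']: Starting 'state' database
    [database config 'master']: prepared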

synapse/storage/databases/main/__init__.py

@@ -591,6 +591,7 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
     """Called before upgrading an existing database to check that it is broadly sane
     compared with the configuration.
     """
+    logger.info("Running sanity-checks on database...")
     domain = config.server_name
 
     sql = database_engine.convert_param_style(

synapse/storage/prepare_database.py

@@ -82,10 +82,18 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state
 
     try:
         cur = db_conn.cursor()
+
+        logger.info("%r: Checking existing schema version", databases)
         version_info = _get_or_create_schema_state(cur, database_engine)
 
         if version_info:
             user_version, delta_files, upgraded = version_info
+            logger.info(
+                "%r: Existing schema is %i (+%i deltas)",
+                databases,
+                user_version,
+                len(delta_files),
+            )
 
             # config should only be None when we are preparing an in-memory SQLite db,
             # which should be empty.
@@ -111,6 +119,8 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state
                 databases=databases,
             )
         else:
+            logger.info("%r: Initialising new database", databases)
+
             # if it's a worker app, refuse to upgrade the database, to avoid multiple
             # workers doing it at once.
             if config and config.worker_app is not None:
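
Since the default databases argument is the list ["main", "state"], these lines render with the list's repr as a prefix. An illustrative sketch (the schema version and delta count here are made up):

    ['main', 'state']: Checking existing schema version
    ['main', 'state']: Existing schema is 58 (+0 deltas)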

synapse/storage/util/id_generators.py

@@ -15,6 +15,7 @@
 
 import contextlib
 import heapq
+import logging
 import threading
 from collections import deque
 from typing import Dict, List, Set
@@ -24,6 +25,8 @@ from typing_extensions import Deque
 from synapse.storage.database import DatabasePool, LoggingTransaction
 from synapse.storage.util.sequence import PostgresSequenceGenerator
 
+logger = logging.getLogger(__name__)
+
 
 class IdGenerator:
     def __init__(self, db_conn, table, column):
@@ -48,6 +51,8 @@ def _load_current_id(db_conn, table, column, step=1):
     Returns:
         int
     """
+    # debug logging for https://github.com/matrix-org/synapse/issues/7968
+    logger.info("initialising stream generator for %s(%s)", table, column)
     cur = db_conn.cursor()
     if step == 1:
         cur.execute("SELECT MAX(%s) FROM %s" % (column, table))