Add some crude measurement log lines

parent  b007ee4606
commit  49fda0f1de
synapse/handlers/message.py
@@ -29,6 +29,7 @@ from synapse.util import unwrapFirstError
 from synapse.util.async import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
 from synapse.util.logcontext import PreserveLoggingContext, preserve_fn
+from synapse.util.logutils import log_duration
 from synapse.visibility import filter_events_for_client

 from ._base import BaseHandler
@@ -734,6 +735,7 @@ class MessageHandler(BaseHandler):

     @defer.inlineCallbacks
     def _create_new_client_event(self, builder, prev_event_ids=None):
+        with log_duration("prev_events"):
             if prev_event_ids:
                 prev_events = yield self.store.add_event_hashes(prev_event_ids)
                 prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
@@ -758,6 +760,7 @@ class MessageHandler(BaseHandler):

         state_handler = self.state_handler

+        with log_duration("context"):
             context = yield state_handler.compute_event_context(builder)

         if builder.is_state():
@@ -765,8 +768,10 @@ class MessageHandler(BaseHandler):
                 context.prev_state_events
             )

+        with log_duration("add_auth"):
             yield self.auth.add_auth_events(builder, context)

+        with log_duration("signing"):
             signing_key = self.hs.config.signing_key[0]
             add_hashes_and_signatures(
                 builder, self.server_name, signing_key
@@ -798,6 +803,7 @@ class MessageHandler(BaseHandler):
         self.ratelimit(requester)

         try:
+            with log_duration("auth_check"):
                 self.auth.check(event, auth_events=context.current_state)
         except AuthError as err:
             logger.warn("Denying new event %r because %s", event, err)
@@ -882,22 +888,26 @@ class MessageHandler(BaseHandler):
                 "Changing the room create event is forbidden",
             )

+        with log_duration("action_generator"):
             action_generator = ActionGenerator(self.hs)
             yield action_generator.handle_push_actions_for_event(
                 event, context
             )

+        with log_duration("persist_event"):
             (event_stream_id, max_stream_id) = yield self.store.persist_event(
                 event, context=context
             )

         # this intentionally does not yield: we don't care about the result
         # and don't need to wait for it.
+        with log_duration("pusherpool"):
             preserve_fn(self.hs.get_pusherpool().on_new_notifications)(
                 event_stream_id, max_stream_id
             )

         destinations = set()
+        with log_duration("destination"):
             for k, s in context.current_state.items():
                 try:
                     if k[0] == EventTypes.Member:
@@ -908,6 +918,7 @@ class MessageHandler(BaseHandler):
                         "Failed to get destination from event %s", s.event_id
                     )

+        with log_duration("on_new_room_event"):
             with PreserveLoggingContext():
                 # Don't block waiting on waking up all the listeners.
                 self.notifier.on_new_room_event(
@@ -918,6 +929,7 @@ class MessageHandler(BaseHandler):
             # If invite, remove room_state from unsigned before sending.
             event.unsigned.pop("invite_room_state", None)

+        with log_duration("handle_new_event"):
             federation_handler.handle_new_event(
                 event, destinations=destinations,
             )
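Every hunk above follows the same pattern: a named with log_duration("..."): block around one stage of creating, persisting and distributing an event, so each stage's wall-clock cost shows up in the logs. The snippet below is a minimal stand-alone sketch of that pattern, not Synapse code: the local log_duration mirrors the helper added to synapse/util/logutils.py further down, and the deferLater calls are hypothetical placeholders for the real store and federation work. Because the context manager only exits once the generator resumes past the block, time spent waiting on yielded Deferreds is included in the reported duration.

import logging
import time
from contextlib import contextmanager

from twisted.internet import defer, reactor, task

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


@contextmanager
def log_duration(name):
    # Stand-in mirroring the helper this commit adds to synapse.util.logutils:
    # log the wall-clock milliseconds spent inside the block.
    start = time.time() * 1000
    yield
    end = time.time() * 1000
    logger.info("Timings: %s took %dms", name, int(end - start))


@defer.inlineCallbacks
def create_and_persist_event():
    # Each stage gets its own named measurement, as in the handler changes above.
    # The block only exits when the generator resumes past it, so time spent
    # waiting on the yielded Deferreds is counted in the logged duration.
    with log_duration("prev_events"):
        yield task.deferLater(reactor, 0.01, lambda: None)  # placeholder for store lookups
    with log_duration("persist_event"):
        yield task.deferLater(reactor, 0.02, lambda: None)  # placeholder for the DB write


if __name__ == "__main__":
    d = create_and_persist_event()
    d.addBoth(lambda _: reactor.stop())
    reactor.run()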
synapse/util/logutils.py
@@ -21,10 +21,23 @@ import logging
 import inspect
 import time

+from contextlib import contextmanager
+

 _TIME_FUNC_ID = 0


+logger = logging.getLogger(__name__)
+
+
+@contextmanager
+def log_duration(name):
+    start = time.time() * 1000
+    yield
+    end = time.time() * 1000
+    logger.info("Timings: %s took %dms", name, int(end - start))
+
+
 def _log_debug_as_f(f, msg, msg_args):
     name = f.__module__
     logger = logging.getLogger(name)
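As committed, the helper measures wall-clock time with time.time() and only logs when the block exits normally. Below is a variant sketch, assuming Python 3.3+ for time.monotonic(): a monotonic clock is immune to system clock adjustments, and the try/finally still emits a timing line when the wrapped code raises.

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)


@contextmanager
def log_duration(name):
    # Variant of the committed helper: a monotonic clock is unaffected by
    # wall-clock changes, and try/finally logs even if the block raises.
    start = time.monotonic()
    try:
        yield
    finally:
        elapsed_ms = int((time.monotonic() - start) * 1000)
        logger.info("Timings: %s took %dms", name, elapsed_ms)

Either form nests and repeats safely, since each with block captures its own start time.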