# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, Collection, Dict, Iterable, List, Optional, Union

from prometheus_client import Counter

from twisted.internet import defer

import synapse
from synapse.api.constants import EventTypes
from synapse.appservice import ApplicationService
from synapse.events import EventBase
from synapse.handlers.presence import format_user_presence_state
from synapse.logging.context import make_deferred_yieldable, run_in_background
from synapse.metrics import (
    event_processing_loop_counter,
    event_processing_loop_room_count,
)
from synapse.metrics.background_process_metrics import (
    run_as_background_process,
    wrap_as_background_process,
)
from synapse.storage.databases.main.directory import RoomAliasMapping
from synapse.types import JsonDict, RoomAlias, RoomStreamToken, UserID
from synapse.util.async_helpers import Linearizer
from synapse.util.metrics import Measure

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

events_processed_counter = Counter("synapse_handlers_appservice_events_processed", "")


class ApplicationServicesHandler:
    def __init__(self, hs: "HomeServer"):
        self.store = hs.get_datastore()
        self.is_mine_id = hs.is_mine_id
        self.appservice_api = hs.get_application_service_api()
        self.scheduler = hs.get_application_service_scheduler()
        self.started_scheduler = False
        self.clock = hs.get_clock()
        self.notify_appservices = hs.config.appservice.notify_appservices
        self.event_sources = hs.get_event_sources()
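
        # The highest room stream position we have been notified about, and
        # whether a background loop is currently draining events up to it.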
        self.current_max = 0
        self.is_processing = False

        self._ephemeral_events_linearizer = Linearizer(
            name="appservice_ephemeral_events"
        )

    def notify_interested_services(self, max_token: RoomStreamToken) -> None:
        """Notifies (pushes) all application services interested in this event.

        Pushing is done asynchronously, so this method won't block for any
        prolonged length of time.
        """
        # We just use the minimum stream ordering and ignore the vector clock
        # component. This is safe to do as long as we *always* ignore the vector
        # clock components.
        current_id = max_token.stream

        services = self.store.get_app_services()
        if not services or not self.notify_appservices:
            return
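
        # Record the new maximum position; if a processing loop is already
        # running it will pick this up the next time it checks self.current_max.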
        self.current_max = max(self.current_max, current_id)
        if self.is_processing:
            return

        # We only start a new background process if necessary rather than
        # optimistically (to cut down on overhead).
        self._notify_interested_services(max_token)

    @wrap_as_background_process("notify_interested_services")
    async def _notify_interested_services(self, max_token: RoomStreamToken) -> None:
        with Measure(self.clock, "notify_interested_services"):
            self.is_processing = True
            try:
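                # Drain new events in batches of `limit`, looping until we have
                # caught up with the latest position we were notified about.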
                limit = 100
                upper_bound = -1
                while upper_bound < self.current_max:
                    (
                        upper_bound,
                        events,
                    ) = await self.store.get_new_events_for_appservice(
                        self.current_max, limit
                    )
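
                    # Group the events by room so that rooms can be handled
                    # concurrently while events within a room stay in order.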
                    events_by_room: Dict[str, List[EventBase]] = {}
                    for event in events:
                        events_by_room.setdefault(event.room_id, []).append(event)

                    async def handle_event(event: EventBase) -> None:
                        # Gather interested services
                        services = await self._get_services_for_event(event)
                        if len(services) == 0:
                            return  # no services need notifying

                        # Do we know this user exists? If not, poke the user
                        # query API for all services which match that user regex.
                        # This needs to block as these user queries need to be
                        # made BEFORE pushing the event.
                        await self._check_user_exists(event.sender)
                        if event.type == EventTypes.Member:
                            await self._check_user_exists(event.state_key)

                        if not self.started_scheduler:

                            async def start_scheduler() -> None:
                                try:
                                    await self.scheduler.start()
                                except Exception:
                                    logger.error("Application Services Failure")

                            run_as_background_process("as_scheduler", start_scheduler)
                            self.started_scheduler = True

                        # Fork off pushes to these services
                        for service in services:
                            self.scheduler.submit_event_for_as(service, event)

                        now = self.clock.time_msec()
                        ts = await self.store.get_received_ts(event.event_id)
                        assert ts is not None

                        synapse.metrics.event_processing_lag_by_event.labels(
                            "appservice_sender"
                        ).observe((now - ts) / 1000)

                    async def handle_room_events(events: Iterable[EventBase]) -> None:
                        for event in events:
                            await handle_event(event)
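
                    # Kick off a task per room and wait for all of them to finish.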
                    await make_deferred_yieldable(
                        defer.gatherResults(
                            [
                                run_in_background(handle_room_events, evs)
                                for evs in events_by_room.values()
                            ],
                            consumeErrors=True,
                        )
                    )

                    await self.store.set_appservice_last_pos(upper_bound)

                    synapse.metrics.event_processing_positions.labels(
                        "appservice_sender"
                    ).set(upper_bound)

                    events_processed_counter.inc(len(events))

                    event_processing_loop_room_count.labels("appservice_sender").inc(
                        len(events_by_room)
                    )

                    event_processing_loop_counter.labels("appservice_sender").inc()

                    if events:
                        now = self.clock.time_msec()
                        ts = await self.store.get_received_ts(events[-1].event_id)
                        assert ts is not None

                        synapse.metrics.event_processing_lag.labels(
                            "appservice_sender"
                        ).set(now - ts)
                        synapse.metrics.event_processing_last_ts.labels(
                            "appservice_sender"
                        ).set(ts)
            finally:
                self.is_processing = False

    def notify_interested_services_ephemeral(
        self,
        stream_key: str,
        new_token: Union[int, RoomStreamToken],
        users: Optional[Collection[Union[str, UserID]]] = None,
    ) -> None:
        """
        This is called by the notifier in the background when an ephemeral event is handled
        by the homeserver.

        This will determine which appservices are interested in the event, and submit
        the event to them.

        Args:
            stream_key: The stream the event came from.

                `stream_key` can be "typing_key", "receipt_key" or "presence_key". Any other
                value for `stream_key` will cause this function to return early.

                Ephemeral events will only be pushed to appservices that have opted into
                receiving them.

                Appservices will only receive ephemeral events that fall within their
                registered user and room namespaces.

            new_token: The stream token of the event.
            users: The users that should be informed of the new event, if any.
        """
        if not self.notify_appservices:
            return

        if stream_key not in ("typing_key", "receipt_key", "presence_key"):
            return

        # Assert that new_token is an integer (and not a RoomStreamToken).
        # All of the supported streams that this function handles use an
        # integer to track progress (rather than a RoomStreamToken - a
        # vector clock implementation) as they don't support multiple
        # stream writers.
        #
        # As a result, we simply assert that new_token is an integer.
        # If we do end up needing to pass a RoomStreamToken down here
        # in the future, using RoomStreamToken.stream (the minimum stream
        # position) to convert to an ascending integer value should work.
        # Additional context: https://github.com/matrix-org/synapse/pull/11137
        assert isinstance(new_token, int)

        services = [
            service
            for service in self.store.get_app_services()
            if service.supports_ephemeral
        ]
        if not services:
            return

        # We only start a new background process if necessary rather than
        # optimistically (to cut down on overhead).
        self._notify_interested_services_ephemeral(
            services, stream_key, new_token, users or []
        )

    @wrap_as_background_process("notify_interested_services_ephemeral")
    async def _notify_interested_services_ephemeral(
        self,
        services: List[ApplicationService],
        stream_key: str,
        new_token: int,
        users: Collection[Union[str, UserID]],
    ) -> None:
        logger.debug("Checking interested services for %s" % (stream_key))
        with Measure(self.clock, "notify_interested_services_ephemeral"):
            for service in services:
                if stream_key == "typing_key":
                    # Note that we don't persist the token (via set_type_stream_id_for_appservice)
                    # for typing_key due to performance reasons and due to their highly
                    # ephemeral nature.
                    #
                    # Instead we simply grab the latest typing updates in _handle_typing
                    # and, if they apply to this application service, send it off.
                    events = await self._handle_typing(service, new_token)
                    if events:
                        self.scheduler.submit_ephemeral_events_for_as(service, events)
                    continue

                # Since we read/update the stream position for this AS/stream
                # pairing, hold the linearizer so that only one of these blocks
                # runs at a time for a given (appservice, stream) pair.
                with (
                    await self._ephemeral_events_linearizer.queue(
                        (service.id, stream_key)
                    )
                ):
                    if stream_key == "receipt_key":
                        events = await self._handle_receipts(service, new_token)
                        if events:
                            self.scheduler.submit_ephemeral_events_for_as(
                                service, events
                            )

                        # Persist the latest handled stream token for this appservice
                        await self.store.set_type_stream_id_for_appservice(
                            service, "read_receipt", new_token
                        )

                    elif stream_key == "presence_key":
                        events = await self._handle_presence(service, users, new_token)
                        if events:
                            self.scheduler.submit_ephemeral_events_for_as(
                                service, events
                            )

                        # Persist the latest handled stream token for this appservice
                        await self.store.set_type_stream_id_for_appservice(
                            service, "presence", new_token
                        )

    async def _handle_typing(
        self, service: ApplicationService, new_token: int
    ) -> List[JsonDict]:
        """
        Return the typing events since the given stream token that the given application
        service should receive.

        First fetch all typing events between the given typing stream token (non-inclusive)
        and the latest typing event stream token (inclusive). Then return only those typing
        events that the given application service may be interested in.

        Args:
            service: The application service to check for which events it should receive.
            new_token: A typing event stream token.

        Returns:
            A list of JSON dictionaries containing data derived from the typing events that
            should be sent to the given application service.
        """
        typing_source = self.event_sources.sources.typing
        # Get the typing events from just before current
        typing, _ = await typing_source.get_new_events_as(
            service=service,
            # For performance reasons, we don't persist the previous
            # token in the DB and instead fetch the latest typing event
            # for appservices.
            # TODO: It'd likely be more efficient to simply fetch the
            # typing event with the given 'new_token' stream token and
            # check if the given service was interested, rather than
            # iterating over all typing events and only grabbing the
            # latest few.
            from_key=new_token - 1,
        )
        return typing

    async def _handle_receipts(
        self, service: ApplicationService, new_token: Optional[int]
    ) -> List[JsonDict]:
        """
        Return the latest read receipts that the given application service should receive.

        First fetch all read receipts between the last receipt stream token that this
        application service should have previously received (non-inclusive) and the
        latest read receipt stream token (inclusive). Then from that set, return only
        those read receipts that the given application service may be interested in.

        Args:
            service: The application service to check for which events it should receive.
            new_token: The latest read receipt stream token. If it is not newer than the
                stream position already stored for this application service, no receipts
                are returned.

        Returns:
            A list of JSON dictionaries containing data derived from the read receipts that
            should be sent to the given application service.
        """
        from_key = await self.store.get_type_stream_id_for_appservice(
            service, "read_receipt"
        )
        if new_token is not None and new_token <= from_key:
            logger.debug(
                "Rejecting token lower than or equal to stored: %s" % (new_token,)
            )
            return []

        receipts_source = self.event_sources.sources.receipt
        receipts, _ = await receipts_source.get_new_events_as(
            service=service, from_key=from_key
        )
        return receipts

    async def _handle_presence(
        self,
        service: ApplicationService,
        users: Collection[Union[str, UserID]],
        new_token: Optional[int],
    ) -> List[JsonDict]:
        """
        Return the latest presence updates that the given application service should receive.

        First, filter the given users list to those that the application service is
        interested in. Then retrieve the latest presence updates since the last-known
        previously received presence stream token for the given application service.
        Return those presence updates.

        Args:
            service: The application service that ephemeral events are being sent to.
            users: The users that should receive the presence update.
            new_token: The latest presence stream token. If it is not newer than the
                stream position already stored for this application service, no presence
                updates are returned.

        Returns:
            A list of json dictionaries containing data derived from the presence events
            that should be sent to the given application service.
        """
        events: List[JsonDict] = []
        presence_source = self.event_sources.sources.presence
        from_key = await self.store.get_type_stream_id_for_appservice(
            service, "presence"
        )
        if new_token is not None and new_token <= from_key:
            logger.debug(
                "Rejecting token lower than or equal to stored: %s" % (new_token,)
            )
            return []

        for user in users:
            if isinstance(user, str):
                user = UserID.from_string(user)

            interested = await service.is_interested_in_presence(user, self.store)
            if not interested:
                continue

            presence_events, _ = await presence_source.get_new_events(
                user=user,
                from_key=from_key,
            )
            time_now = self.clock.time_msec()
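            # Convert the presence states into the content of m.presence events.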
            events.extend(
                {
                    "type": "m.presence",
                    "sender": event.user_id,
                    "content": format_user_presence_state(
                        event, time_now, include_user_id=False
                    ),
                }
                for event in presence_events
            )

        return events

    async def query_user_exists(self, user_id: str) -> bool:
        """Check if any application service knows this user_id exists.

        Args:
            user_id: The user to query if they exist on any AS.
        Returns:
            True if this user exists on at least one application service.
        """
        user_query_services = self._get_services_for_user(user_id=user_id)
        for user_service in user_query_services:
            is_known_user = await self.appservice_api.query_user(user_service, user_id)
            if is_known_user:
                return True
        return False

    async def query_room_alias_exists(
        self, room_alias: RoomAlias
    ) -> Optional[RoomAliasMapping]:
        """Check if an application service knows this room alias exists.

        Args:
            room_alias: The room alias to query.
        Returns:
            A RoomAliasMapping with the room ID and servers, or None if no
            association can be found.
        """
        room_alias_str = room_alias.to_string()
        services = self.store.get_app_services()
        alias_query_services = [
            s for s in services if (s.is_interested_in_alias(room_alias_str))
        ]
        for alias_service in alias_query_services:
            is_known_alias = await self.appservice_api.query_alias(
                alias_service, room_alias_str
            )
            if is_known_alias:
                # the alias exists now so don't query more ASes.
                return await self.store.get_association_from_room_alias(room_alias)

        return None

    async def query_3pe(
        self, kind: str, protocol: str, fields: Dict[bytes, List[bytes]]
    ) -> List[JsonDict]:
        services = self._get_services_for_3pn(protocol)

        results = await make_deferred_yieldable(
            defer.DeferredList(
                [
                    run_in_background(
                        self.appservice_api.query_3pe, service, kind, protocol, fields
                    )
                    for service in services
                ],
                consumeErrors=True,
            )
        )
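
        # DeferredList yields (success, result) pairs; only collect the results
        # from lookups that succeeded.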
        ret = []
        for (success, result) in results:
            if success:
                ret.extend(result)

        return ret

    async def get_3pe_protocols(
        self, only_protocol: Optional[str] = None
    ) -> Dict[str, JsonDict]:
        services = self.store.get_app_services()
        protocols: Dict[str, List[JsonDict]] = {}

        # Collect up all the individual protocol responses out of the ASes
        for s in services:
            for p in s.protocols:
                if only_protocol is not None and p != only_protocol:
                    continue

                if p not in protocols:
                    protocols[p] = []

                info = await self.appservice_api.get_3pe_protocol(s, p)

                if info is not None:
                    protocols[p].append(info)

        def _merge_instances(infos: List[JsonDict]) -> JsonDict:
            # Merge the 'instances' lists of multiple results, but just take
            # the other fields from the first as they ought to be identical
            # copy the result so as not to corrupt the cached one
            combined = dict(infos[0])
            combined["instances"] = list(combined["instances"])

            for info in infos[1:]:
                combined["instances"].extend(info["instances"])

            return combined
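
        # Collapse to one entry per protocol, merging the 'instances' from every
        # appservice that responded for that protocol.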
        return {
            p: _merge_instances(protocols[p]) for p in protocols.keys() if protocols[p]
        }

    async def _get_services_for_event(
        self, event: EventBase
    ) -> List[ApplicationService]:
        """Retrieve a list of application services interested in this event.

        Args:
            event: The event to check.
        Returns:
            A list of services interested in this event based on the service regex.
        """
        services = self.store.get_app_services()

        # we can't use a list comprehension here. Since python 3, list
        # comprehensions use a generator internally. This means you can't yield
        # inside of a list comprehension anymore.
        interested_list = []
        for s in services:
            if await s.is_interested(event, self.store):
                interested_list.append(s)

        return interested_list

    def _get_services_for_user(self, user_id: str) -> List[ApplicationService]:
        services = self.store.get_app_services()
        return [s for s in services if (s.is_interested_in_user(user_id))]

    def _get_services_for_3pn(self, protocol: str) -> List[ApplicationService]:
        services = self.store.get_app_services()
        return [s for s in services if s.is_interested_in_protocol(protocol)]

    async def _is_unknown_user(self, user_id: str) -> bool:
        if not self.is_mine_id(user_id):
            # we don't know if they are unknown or not since it isn't one of our
            # users. We can't poke ASes.
            return False

        user_info = await self.store.get_user_by_id(user_id)
        if user_info:
            return False

        # user not found; could be the AS though, so check.
        services = self.store.get_app_services()
        service_list = [s for s in services if s.sender == user_id]
        return len(service_list) == 0

    async def _check_user_exists(self, user_id: str) -> bool:
        unknown_user = await self._is_unknown_user(user_id)
        if unknown_user:
            return await self.query_user_exists(user_id)
        return True