# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Dict,
    Iterable,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    TypeVar,
)

from prometheus_client import Counter
from typing_extensions import Concatenate, ParamSpec, TypeGuard

from synapse.api.constants import EventTypes, Membership, ThirdPartyEntityKind
from synapse.api.errors import CodeMessageException, HttpResponseException
from synapse.appservice import (
    ApplicationService,
    TransactionOneTimeKeysCount,
    TransactionUnusedFallbackKeys,
)
from synapse.events import EventBase
from synapse.events.utils import SerializeEventConfig, serialize_event
from synapse.http.client import SimpleHttpClient, is_unknown_endpoint
from synapse.types import DeviceListUpdates, JsonDict, ThirdPartyInstanceID
from synapse.util.caches.response_cache import ResponseCache

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)

sent_transactions_counter = Counter(
    "synapse_appservice_api_sent_transactions",
    "Number of /transactions/ requests sent",
    ["service"],
)

failed_transactions_counter = Counter(
    "synapse_appservice_api_failed_transactions",
    "Number of /transactions/ requests that failed to send",
    ["service"],
)

sent_events_counter = Counter(
    "synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
)

sent_ephemeral_counter = Counter(
    "synapse_appservice_api_sent_ephemeral",
    "Number of ephemeral events sent to the AS",
    ["service"],
)

sent_todevice_counter = Counter(
    "synapse_appservice_api_sent_todevice",
    "Number of todevice messages sent to the AS",
    ["service"],
)

HOUR_IN_MS = 60 * 60 * 1000


APP_SERVICE_PREFIX = "/_matrix/app/v1"
APP_SERVICE_UNSTABLE_PREFIX = "/_matrix/app/unstable"

P = ParamSpec("P")
R = TypeVar("R")
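
# A concrete example of how these prefixes are used (values are illustrative):
# for an appservice registered with url "https://bridge.example.com", a user
# query is sent to
#     https://bridge.example.com/_matrix/app/v1/users/%40alice%3Aexample.org
# and, if that path is unknown to the appservice, the request falls back to a
# legacy or unstable prefix as appropriate for the endpoint.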


def _is_valid_3pe_metadata(info: JsonDict) -> bool:
    if "instances" not in info:
        return False
    if not isinstance(info["instances"], list):
        return False
    return True


def _is_valid_3pe_result(r: object, field: str) -> TypeGuard[JsonDict]:
    if not isinstance(r, dict):
        return False

    for k in (field, "protocol"):
        if k not in r:
            return False
        if not isinstance(r[k], str):
            return False

    if "fields" not in r:
        return False
    fields = r["fields"]
    if not isinstance(fields, dict):
        return False

    return True
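
# For reference, a result accepted by _is_valid_3pe_result(field="userid") looks
# roughly like the following (all values are made up for illustration):
#
#     {
#         "userid": "@_bridge_alice:example.org",
#         "protocol": "irc",
#         "fields": {"network": "example", "nickname": "alice"},
#     }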


class ApplicationServiceApi(SimpleHttpClient):
    """This class manages HS -> AS communications, including querying and
    pushing.
    """
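
    # Illustrative usage (a sketch, not how Synapse wires this up internally),
    # assuming `hs` is a HomeServer and `service` is a registered
    # ApplicationService:
    #
    #     api = ApplicationServiceApi(hs)
    #     exists = await api.query_user(service, "@bridge_alice:example.org")
    #
    # The user ID above is a made-up example value.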

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)
        self.clock = hs.get_clock()

        self.protocol_meta_cache: ResponseCache[Tuple[str, str]] = ResponseCache(
            hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
        )

    async def _send_with_fallbacks(
        self,
        service: "ApplicationService",
        prefixes: List[str],
        path: str,
        func: Callable[Concatenate[str, P], Awaitable[R]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> R:
        """
        Attempt to call an application service with multiple paths, falling back
        until one succeeds.

        Args:
            service: The application service; this provides the base URL.
            prefixes: A list of path prefixes to try, in order, for the requests.
            path: A suffix to append to each prefix.
            func: The function to call, the first argument will be the full
                endpoint to fetch. Other arguments are provided by args/kwargs.

        Returns:
            The return value of func.
        """
        for i, prefix in enumerate(prefixes, start=1):
            uri = f"{service.url}{prefix}{path}"
            try:
                return await func(uri, *args, **kwargs)
            except HttpResponseException as e:
                # If an error is received that is due to an unrecognised path,
                # fall back to the next path (if one exists). Otherwise, consider
                # it a legitimate error and raise.
                if i < len(prefixes) and is_unknown_endpoint(e):
                    continue
                raise
            except Exception:
                # Unexpected exceptions get sent to the caller.
                raise

        # The function should always exit via the return or raise above this.
        raise RuntimeError("Unexpected fallback behaviour. This should never be seen.")
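
    # For example (illustrative), query_user below issues a GET that first tries
    # the stable prefix and then falls back to the legacy unprefixed path:
    #
    #     await self._send_with_fallbacks(
    #         service, [APP_SERVICE_PREFIX, ""], "/users/...", self.get_json
    #     )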

    async def query_user(self, service: "ApplicationService", user_id: str) -> bool:
        if service.url is None:
            return False

        # This is required by the configuration.
        assert service.hs_token is not None

        try:
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/users/{urllib.parse.quote(user_id)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            if e.code == 404:
                return False
            logger.warning("query_user to %s received %s", service.url, e.code)
        except Exception as ex:
            logger.warning("query_user to %s threw exception %s", service.url, ex)
        return False

    async def query_alias(self, service: "ApplicationService", alias: str) -> bool:
        if service.url is None:
            return False

        # This is required by the configuration.
        assert service.hs_token is not None

        try:
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/rooms/{urllib.parse.quote(alias)}",
                self.get_json,
                {"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if response is not None:  # just an empty json object
                return True
        except CodeMessageException as e:
            logger.warning("query_alias to %s received %s", service.url, e.code)
            if e.code == 404:
                return False
        except Exception as ex:
            logger.warning("query_alias to %s threw exception %s", service.url, ex)
        return False

    async def query_3pe(
        self,
        service: "ApplicationService",
        kind: str,
        protocol: str,
        fields: Dict[bytes, List[bytes]],
    ) -> List[JsonDict]:
        if kind == ThirdPartyEntityKind.USER:
            required_field = "userid"
        elif kind == ThirdPartyEntityKind.LOCATION:
            required_field = "alias"
        else:
            raise ValueError("Unrecognised 'kind' argument %r to query_3pe()" % (kind,))
        if service.url is None:
            return []

        # This is required by the configuration.
        assert service.hs_token is not None

        try:
            args: Mapping[Any, Any] = {
                **fields,
                b"access_token": service.hs_token,
            }
            response = await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                f"/thirdparty/{kind}/{urllib.parse.quote(protocol)}",
                self.get_json,
                args=args,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if not isinstance(response, list):
                logger.warning(
                    "query_3pe to %s returned an invalid response %r",
                    service.url,
                    response,
                )
                return []

            ret = []
            for r in response:
                if _is_valid_3pe_result(r, field=required_field):
                    ret.append(r)
                else:
                    logger.warning(
                        "query_3pe to %s returned an invalid result %r", service.url, r
                    )

            return ret
        except Exception as ex:
            logger.warning("query_3pe to %s threw exception %s", service.url, ex)
            return []

    async def get_3pe_protocol(
        self, service: "ApplicationService", protocol: str
    ) -> Optional[JsonDict]:
        if service.url is None:
            return {}

        async def _get() -> Optional[JsonDict]:
            # This is required by the configuration.
            assert service.hs_token is not None
            try:
                info = await self._send_with_fallbacks(
                    service,
                    [APP_SERVICE_PREFIX, APP_SERVICE_UNSTABLE_PREFIX],
                    f"/thirdparty/protocol/{urllib.parse.quote(protocol)}",
                    self.get_json,
                    {"access_token": service.hs_token},
                    headers={"Authorization": [f"Bearer {service.hs_token}"]},
                )

                if not _is_valid_3pe_metadata(info):
                    logger.warning(
                        "query_3pe_protocol to %s did not return a valid result",
                        service.url,
                    )
                    return None

                for instance in info.get("instances", []):
                    network_id = instance.get("network_id", None)
                    if network_id is not None:
                        instance["instance_id"] = ThirdPartyInstanceID(
                            service.id, network_id
                        ).to_string()

                return info
            except Exception as ex:
                logger.warning(
                    "query_3pe_protocol to %s threw exception %s", service.url, ex
                )
                return None

        key = (service.id, protocol)
        return await self.protocol_meta_cache.wrap(key, _get)

    async def ping(self, service: "ApplicationService", txn_id: Optional[str]) -> None:
        # The caller should check that url is set
        assert service.url is not None, "ping called without URL being set"

        # This is required by the configuration.
        assert service.hs_token is not None

        await self.post_json_get_json(
            uri=f"{service.url}{APP_SERVICE_UNSTABLE_PREFIX}/fi.mau.msc2659/ping",
            post_json={"transaction_id": txn_id},
            headers={"Authorization": [f"Bearer {service.hs_token}"]},
        )

    async def push_bulk(
        self,
        service: "ApplicationService",
        events: Sequence[EventBase],
        ephemeral: List[JsonDict],
        to_device_messages: List[JsonDict],
        one_time_keys_count: TransactionOneTimeKeysCount,
        unused_fallback_keys: TransactionUnusedFallbackKeys,
        device_list_summary: DeviceListUpdates,
        txn_id: Optional[int] = None,
    ) -> bool:
        """
        Push data to an application service.

        Args:
            service: The application service to send to.
            events: The persistent events to send.
            ephemeral: The ephemeral events to send.
            to_device_messages: The to-device messages to send.
            one_time_keys_count: Counts of remaining one-time keys for the relevant
                users/devices (only sent if the service opted into MSC3202).
            unused_fallback_keys: The unused fallback key algorithms for the relevant
                users/devices (only sent if the service opted into MSC3202).
            device_list_summary: A summary of device list changes to send
                (only sent if the service opted into MSC3202).
            txn_id: A unique ID to assign to this transaction. Application services should
                deduplicate transactions received with identical IDs.

        Returns:
            True if the task succeeded, False if it failed.
        """
        if service.url is None:
            return True

        # This is required by the configuration.
        assert service.hs_token is not None

        serialized_events = self._serialize(service, events)

        if txn_id is None:
            logger.warning(
                "push_bulk: Missing txn ID sending events to %s", service.url
            )
            txn_id = 0

        # Never send ephemeral events to appservices that do not support it
        body: JsonDict = {"events": serialized_events}
        if service.supports_ephemeral:
            body.update(
                {
                    # TODO: Update to stable prefixes once MSC2409 completes FCP merge.
                    "de.sorunome.msc2409.ephemeral": ephemeral,
                    "de.sorunome.msc2409.to_device": to_device_messages,
                }
            )

        # TODO: Update to stable prefixes once MSC3202 completes FCP merge
        if service.msc3202_transaction_extensions:
            if one_time_keys_count:
                # The counts are sent under both field spellings.
                body[
                    "org.matrix.msc3202.device_one_time_key_counts"
                ] = one_time_keys_count
                body[
                    "org.matrix.msc3202.device_one_time_keys_count"
                ] = one_time_keys_count
            if unused_fallback_keys:
                body[
                    "org.matrix.msc3202.device_unused_fallback_key_types"
                ] = unused_fallback_keys
            if device_list_summary:
                body["org.matrix.msc3202.device_lists"] = {
                    "changed": list(device_list_summary.changed),
                    "left": list(device_list_summary.left),
                }
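
        # Roughly, the transaction body built above looks like this (values are
        # illustrative, and the MSC-prefixed fields are only present when the
        # appservice has opted in):
        #
        #     {
        #         "events": [...],
        #         "de.sorunome.msc2409.ephemeral": [...],
        #         "de.sorunome.msc2409.to_device": [...],
        #         "org.matrix.msc3202.device_one_time_keys_count": {...},
        #         "org.matrix.msc3202.device_unused_fallback_key_types": {...},
        #         "org.matrix.msc3202.device_lists": {"changed": [...], "left": [...]},
        #     }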

        try:
            await self._send_with_fallbacks(
                service,
                [APP_SERVICE_PREFIX, ""],
                f"/transactions/{urllib.parse.quote(str(txn_id))}",
                self.put_json,
                json_body=body,
                args={"access_token": service.hs_token},
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
            if logger.isEnabledFor(logging.DEBUG):
                logger.debug(
                    "push_bulk to %s succeeded! events=%s",
                    service.url,
                    [event.get("event_id") for event in events],
                )
            sent_transactions_counter.labels(service.id).inc()
            sent_events_counter.labels(service.id).inc(len(serialized_events))
            sent_ephemeral_counter.labels(service.id).inc(len(ephemeral))
            sent_todevice_counter.labels(service.id).inc(len(to_device_messages))
            return True
        except CodeMessageException as e:
            logger.warning(
                "push_bulk to %s received code=%s msg=%s",
                service.url,
                e.code,
                e.msg,
                exc_info=logger.isEnabledFor(logging.DEBUG),
            )
        except Exception as ex:
            logger.warning(
                "push_bulk to %s threw exception(%s) %s args=%s",
                service.url,
                type(ex).__name__,
                ex,
                ex.args,
                exc_info=logger.isEnabledFor(logging.DEBUG),
            )
        failed_transactions_counter.labels(service.id).inc()
        return False

    async def claim_client_keys(
        self, service: "ApplicationService", query: List[Tuple[str, str, str, int]]
    ) -> Tuple[
        Dict[str, Dict[str, Dict[str, JsonDict]]], List[Tuple[str, str, str, int]]
    ]:
        """Claim one-time keys from an application service.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: An iterable of tuples of (user ID, device ID, algorithm, count).

        Returns:
            A tuple of:
                A map of user ID -> a map of device ID -> a map of key ID -> JSON dict.

                A copy of the input which has not been fulfilled because the
                appservice doesn't support this endpoint or has not returned
                data for that tuple.
        """
        if service.url is None:
            return {}, query

        # This is required by the configuration.
        assert service.hs_token is not None

        # Create the expected payload shape.
        body: Dict[str, Dict[str, List[str]]] = {}
        for user_id, device, algorithm, count in query:
            body.setdefault(user_id, {}).setdefault(device, []).extend(
                [algorithm] * count
            )
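
        # For a query of [("@alice:example.org", "DEVICEID", "signed_curve25519", 2)]
        # (illustrative values), the body built above would be:
        #
        #     {"@alice:example.org": {"DEVICEID": ["signed_curve25519", "signed_curve25519"]}}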

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3983/keys/claim"
        try:
            response = await self.post_json_get_json(
                uri,
                body,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}, query
            logger.warning("claim_keys to %s received %s", uri, e.code)
            return {}, query
        except Exception as ex:
            logger.warning("claim_keys to %s threw exception %s", uri, ex)
            return {}, query

        # Check if the appservice fulfilled all of the queried user/device/algorithms
        # or if some are still missing.
        #
        # TODO This places a lot of faith in the response shape being correct.
        missing = []
        for user_id, device, algorithm, count in query:
            # Count the number of keys in the response for this algorithm by
            # checking which key IDs start with the algorithm. This uses that
            # True == 1 in Python to generate a count.
            response_count = sum(
                key_id.startswith(f"{algorithm}:")
                for key_id in response.get(user_id, {}).get(device, {})
            )
            count -= response_count
            # If the appservice responds with fewer keys than requested, then
            # consider the request unfulfilled.
            if count > 0:
                missing.append((user_id, device, algorithm, count))

        return response, missing

    async def query_keys(
        self, service: "ApplicationService", query: Dict[str, List[str]]
    ) -> Dict[str, Dict[str, Dict[str, JsonDict]]]:
        """Query the application service for keys.

        Note that any error (including a timeout) is treated as the application
        service having no information.

        Args:
            service: The application service to query.
            query: A map of user ID -> a list of the device IDs to query keys for.

        Returns:
            A map of device_keys/master_keys/self_signing_keys/user_signing_keys:

            device_keys is a map of user ID -> a map of device ID -> device info.
        """
        if service.url is None:
            return {}

        # This is required by the configuration.
        assert service.hs_token is not None

        uri = f"{service.url}/_matrix/app/unstable/org.matrix.msc3984/keys/query"
        try:
            response = await self.post_json_get_json(
                uri,
                query,
                headers={"Authorization": [f"Bearer {service.hs_token}"]},
            )
        except HttpResponseException as e:
            # The appservice doesn't support this endpoint.
            if is_unknown_endpoint(e):
                return {}
            logger.warning("query_keys to %s received %s", uri, e.code)
            return {}
        except Exception as ex:
            logger.warning("query_keys to %s threw exception %s", uri, ex)
            return {}

        return response
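
    # For reference, the `query` passed to query_keys above is a map of user ID
    # to device IDs, e.g. (illustrative values):
    #
    #     {"@alice:example.org": ["DEVICEID1", "DEVICEID2"]}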

    def _serialize(
        self, service: "ApplicationService", events: Iterable[EventBase]
    ) -> List[JsonDict]:
        time_now = self.clock.time_msec()
        return [
            serialize_event(
                e,
                time_now,
                config=SerializeEventConfig(
                    as_client_event=True,
                    # If this is an invite or a knock membership event, and we're interested
                    # in this user, then include any stripped state alongside the event.
                    include_stripped_room_state=(
                        e.type == EventTypes.Member
                        and (
                            e.membership == Membership.INVITE
                            or e.membership == Membership.KNOCK
                        )
                        and service.is_interested_in_user(e.state_key)
                    ),
                ),
            )
            for e in events
        ]