2021-11-19 11:55:09 +01:00
|
|
|
# Copyright 2014-2021 The Matrix.org Foundation C.I.C.
|
2014-09-30 16:15:10 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
import abc
|
2018-07-09 08:09:20 +02:00
|
|
|
import logging
|
2021-06-02 17:37:59 +02:00
|
|
|
from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Tuple
|
2015-08-24 17:17:38 +02:00
|
|
|
|
2019-04-01 13:28:40 +02:00
|
|
|
import attr
|
2015-08-24 17:17:38 +02:00
|
|
|
from signedjson.key import (
|
2018-07-09 08:09:20 +02:00
|
|
|
decode_verify_key_bytes,
|
2018-06-08 13:01:36 +02:00
|
|
|
encode_verify_key_base64,
|
2021-10-28 19:27:17 +02:00
|
|
|
get_verify_key,
|
2018-07-09 08:09:20 +02:00
|
|
|
is_signing_algorithm_supported,
|
|
|
|
)
|
|
|
|
from signedjson.sign import (
|
|
|
|
SignatureVerifyException,
|
|
|
|
encode_canonical_json,
|
|
|
|
signature_ids,
|
|
|
|
verify_signed_json,
|
2014-09-30 16:15:10 +02:00
|
|
|
)
|
2021-10-28 19:27:17 +02:00
|
|
|
from signedjson.types import VerifyKey
|
2019-01-22 12:04:20 +01:00
|
|
|
from unpaddedbase64 import decode_base64
|
2015-06-26 12:25:00 +02:00
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
from twisted.internet import defer
|
2014-09-30 16:15:10 +02:00
|
|
|
|
2019-02-23 16:06:02 +01:00
|
|
|
from synapse.api.errors import (
|
|
|
|
Codes,
|
|
|
|
HttpResponseException,
|
|
|
|
RequestSendFailed,
|
|
|
|
SynapseError,
|
|
|
|
)
|
2021-01-04 16:04:50 +01:00
|
|
|
from synapse.config.key import TrustedKeyServer
|
2021-05-20 17:25:11 +02:00
|
|
|
from synapse.events import EventBase
|
|
|
|
from synapse.events.utils import prune_event_dict
|
2021-06-02 17:37:59 +02:00
|
|
|
from synapse.logging.context import make_deferred_yieldable, run_in_background
|
2019-07-03 16:07:04 +02:00
|
|
|
from synapse.storage.keys import FetchKeyResult
|
2021-01-04 16:04:50 +01:00
|
|
|
from synapse.types import JsonDict
|
2019-07-03 16:07:04 +02:00
|
|
|
from synapse.util import unwrapFirstError
|
|
|
|
from synapse.util.async_helpers import yieldable_gather_results
|
2021-06-02 17:37:59 +02:00
|
|
|
from synapse.util.batching_queue import BatchingQueue
|
2019-02-23 16:06:02 +01:00
|
|
|
from synapse.util.retryutils import NotRetryingDestination
|
2014-09-30 16:15:10 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
if TYPE_CHECKING:
|
2021-03-23 12:12:48 +01:00
|
|
|
from synapse.server import HomeServer
|
2021-01-04 16:04:50 +01:00
|
|
|
|
2014-09-30 16:15:10 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2022-01-13 14:49:28 +01:00
|
|
|
@attr.s(slots=True, frozen=True, cmp=False, auto_attribs=True)
class VerifyJsonRequest:
    """
    A request to verify a JSON object.

    Attributes:
        server_name: The name of the server to verify against.

        get_json_object: A callback to fetch the JSON object to verify.
            A callback is used to allow deferring the creation of the JSON
            object to verify until needed, e.g. for events we can defer
            creating the redacted copy. This reduces the memory usage when
            there are large numbers of in flight requests.

        minimum_valid_until_ts: time at which we require the signing key to
            be valid. (0 implies we don't care)

        key_ids: The set of key_ids to that could be used to verify the JSON object
    """

    server_name: str
    get_json_object: Callable[[], JsonDict]
    minimum_valid_until_ts: int
    key_ids: List[str]

    @staticmethod
    def from_json_object(
        server_name: str,
        json_object: JsonDict,
        minimum_valid_until_ms: int,
    ) -> "VerifyJsonRequest":
        """Build a request which checks every signature that `server_name`
        has attached to the given signed JSON object.
        """
        return VerifyJsonRequest(
            server_name=server_name,
            get_json_object=lambda: json_object,
            minimum_valid_until_ts=minimum_valid_until_ms,
            key_ids=signature_ids(json_object, server_name),
        )

    @staticmethod
    def from_event(
        server_name: str,
        event: EventBase,
        minimum_valid_until_ms: int,
    ) -> "VerifyJsonRequest":
        """Build a request which checks every signature that `server_name`
        has attached to the given event.
        """
        signing_key_ids = list(event.signatures.get(server_name, []))
        # The redacted JSON is produced lazily via the callback: the pruned
        # dict uses a lot more memory than the Event object itself, so we
        # avoid materialising it while the request sits in a queue.
        return VerifyJsonRequest(
            server_name=server_name,
            get_json_object=lambda: prune_event_dict(
                event.room_version, event.get_pdu_json()
            ),
            minimum_valid_until_ts=minimum_valid_until_ms,
            key_ids=signing_key_ids,
        )
|
2019-06-04 17:12:57 +02:00
|
|
|
|
2015-06-26 10:52:24 +02:00
|
|
|
|
2016-08-10 11:44:37 +02:00
|
|
|
class KeyLookupError(ValueError):
    """Raised by the key fetchers when a server-keys response fails
    validation (e.g. it is not signed by the origin server).
    """

    pass
|
|
|
|
|
|
|
|
|
2022-01-13 14:49:28 +01:00
|
|
|
@attr.s(slots=True, frozen=True, auto_attribs=True)
class _FetchKeyRequest:
    """A request for keys for a given server.

    We will continue to try and fetch until we have all the keys listed under
    `key_ids` (with an appropriate `valid_until_ts` property) or we run out of
    places to fetch keys from.

    Instances of this class are the values queued into the `BatchingQueue`s
    used by `Keyring` and `KeyFetcher` below.

    Attributes:
        server_name: The name of the server that owns the keys.
        minimum_valid_until_ts: The timestamp which the keys must be valid until.
        key_ids: The IDs of the keys to attempt to fetch
    """

    server_name: str
    minimum_valid_until_ts: int
    key_ids: List[str]
|
2021-06-02 17:37:59 +02:00
|
|
|
|
|
|
|
|
2020-09-04 12:54:56 +02:00
|
|
|
class Keyring:
    """Handles verifying signed JSON objects and fetching the keys needed to do
    so.
    """

    def __init__(
        self, hs: "HomeServer", key_fetchers: "Optional[Iterable[KeyFetcher]]" = None
    ):
        self.clock = hs.get_clock()

        if key_fetchers is None:
            # Default lookup order: local datastore first, then the trusted
            # notary servers, then the origin server itself.
            key_fetchers = (
                StoreKeyFetcher(hs),
                PerspectivesKeyFetcher(hs),
                ServerKeyFetcher(hs),
            )
        self._key_fetchers = key_fetchers

        # Batches up key requests per server name so that we don't issue many
        # concurrent fetches for the same keys.
        self._server_queue: BatchingQueue[
            _FetchKeyRequest, Dict[str, Dict[str, FetchKeyResult]]
        ] = BatchingQueue(
            "keyring_server",
            clock=hs.get_clock(),
            process_batch_callback=self._inner_fetch_key_requests,
        )

        self._hostname = hs.hostname

        # build a FetchKeyResult for each of our own keys, to shortcircuit the
        # fetcher.
        self._local_verify_keys: Dict[str, FetchKeyResult] = {}
        for key_id, key in hs.config.key.old_signing_keys.items():
            self._local_verify_keys[key_id] = FetchKeyResult(
                verify_key=key, valid_until_ts=key.expired
            )

        vk = get_verify_key(hs.signing_key)
        self._local_verify_keys[f"{vk.alg}:{vk.version}"] = FetchKeyResult(
            verify_key=vk,
            valid_until_ts=2**63,  # fake future timestamp
        )

    async def verify_json_for_server(
        self,
        server_name: str,
        json_object: JsonDict,
        validity_time: int,
    ) -> None:
        """Verify that a JSON object has been signed by a given server

        Completes if the object was correctly signed, otherwise raises.

        Args:
            server_name: name of the server which must have signed this object

            json_object: object to be checked

            validity_time: timestamp at which we require the signing key to
                be valid. (0 implies we don't care)
        """

        request = VerifyJsonRequest.from_json_object(
            server_name,
            json_object,
            validity_time,
        )
        return await self.process_request(request)

    def verify_json_objects_for_server(
        self, server_and_json: Iterable[Tuple[str, dict, int]]
    ) -> List["defer.Deferred[None]"]:
        """Bulk verifies signatures of json objects, bulk fetching keys as
        necessary.

        Args:
            server_and_json:
                Iterable of (server_name, json_object, validity_time)
                tuples.

                validity_time is a timestamp at which the signing key must be
                valid.

        Returns:
            For each input triplet, a deferred indicating success or failure to
            verify each json object's signature for the given server_name. The
            deferreds run their callbacks in the sentinel logcontext.
        """
        return [
            run_in_background(
                self.process_request,
                VerifyJsonRequest.from_json_object(
                    server_name,
                    json_object,
                    validity_time,
                ),
            )
            for server_name, json_object, validity_time in server_and_json
        ]

    async def verify_event_for_server(
        self,
        server_name: str,
        event: EventBase,
        validity_time: int,
    ) -> None:
        """Verify that an event has been signed by the given server.

        Args:
            server_name: name of the server which must have signed this event

            event: event to be checked

            validity_time: timestamp at which we require the signing key to
                be valid. (0 implies we don't care)
        """
        await self.process_request(
            VerifyJsonRequest.from_event(
                server_name,
                event,
                validity_time,
            )
        )

    async def process_request(self, verify_request: VerifyJsonRequest) -> None:
        """Processes the `VerifyJsonRequest`. Raises if the object is not signed
        by the server, the signatures don't match or we failed to fetch the
        necessary keys.
        """

        if not verify_request.key_ids:
            raise SynapseError(
                400,
                f"Not signed by {verify_request.server_name}",
                Codes.UNAUTHORIZED,
            )

        found_keys: Dict[str, FetchKeyResult] = {}

        # If we are the originating server, short-circuit the key-fetch for any keys
        # we already have
        if verify_request.server_name == self._hostname:
            for key_id in verify_request.key_ids:
                if key_id in self._local_verify_keys:
                    found_keys[key_id] = self._local_verify_keys[key_id]

        key_ids_to_find = set(verify_request.key_ids) - found_keys.keys()
        if key_ids_to_find:
            # Add the keys we need to verify to the queue for retrieval. We queue
            # up requests for the same server so we don't end up with many in flight
            # requests for the same keys.
            key_request = _FetchKeyRequest(
                server_name=verify_request.server_name,
                minimum_valid_until_ts=verify_request.minimum_valid_until_ts,
                key_ids=list(key_ids_to_find),
            )
            found_keys_by_server = await self._server_queue.add_to_queue(
                key_request, key=verify_request.server_name
            )

            # Since we batch up requests the returned set of keys may contain keys
            # from other servers, so we pull out only the ones we care about.
            found_keys.update(found_keys_by_server.get(verify_request.server_name, {}))

        # Verify each signature we got valid keys for, raising if we can't
        # verify any of them.
        verified = False
        for key_id in verify_request.key_ids:
            key_result = found_keys.get(key_id)
            if not key_result:
                continue

            if key_result.valid_until_ts < verify_request.minimum_valid_until_ts:
                continue

            await self._process_json(key_result.verify_key, verify_request)
            verified = True

        if not verified:
            # NB: we must not reference `key_request` here: it is unbound when
            # all of the key IDs were satisfied by `self._local_verify_keys`
            # (e.g. an expired old local signing key), which would turn this
            # error path into an UnboundLocalError.
            raise SynapseError(
                401,
                f"Failed to find any key to satisfy: {verify_request}",
                Codes.UNAUTHORIZED,
            )

    async def _process_json(
        self, verify_key: VerifyKey, verify_request: VerifyJsonRequest
    ) -> None:
        """Processes the `VerifyJsonRequest`. Raises if the signature can't be
        verified.
        """
        try:
            verify_signed_json(
                verify_request.get_json_object(),
                verify_request.server_name,
                verify_key,
            )
        except SignatureVerifyException as e:
            logger.debug(
                "Error verifying signature for %s:%s:%s with key %s: %s",
                verify_request.server_name,
                verify_key.alg,
                verify_key.version,
                encode_verify_key_base64(verify_key),
                str(e),
            )
            raise SynapseError(
                401,
                "Invalid signature for server %s with key %s:%s: %s"
                % (
                    verify_request.server_name,
                    verify_key.alg,
                    verify_key.version,
                    str(e),
                ),
                Codes.UNAUTHORIZED,
            )

    async def _inner_fetch_key_requests(
        self, requests: List[_FetchKeyRequest]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """Processing function for the queue of `_FetchKeyRequest`."""

        logger.debug("Starting fetch for %s", requests)

        # First we need to deduplicate requests for the same key. We do this by
        # taking the *maximum* requested `minimum_valid_until_ts` for each pair
        # of server name/key ID.
        server_to_key_to_ts: Dict[str, Dict[str, int]] = {}
        for request in requests:
            by_server = server_to_key_to_ts.setdefault(request.server_name, {})
            for key_id in request.key_ids:
                existing_ts = by_server.get(key_id, 0)
                by_server[key_id] = max(request.minimum_valid_until_ts, existing_ts)

        deduped_requests = [
            _FetchKeyRequest(server_name, minimum_valid_ts, [key_id])
            for server_name, by_server in server_to_key_to_ts.items()
            for key_id, minimum_valid_ts in by_server.items()
        ]

        logger.debug("Deduplicated key requests to %s", deduped_requests)

        # For each key we call `_inner_verify_request` which will handle
        # fetching each key. Note these shouldn't throw if we fail to contact
        # other servers etc.
        results_per_request = await yieldable_gather_results(
            self._inner_fetch_key_request,
            deduped_requests,
        )

        # We now convert the returned list of results into a map from server
        # name to key ID to FetchKeyResult, to return.
        to_return: Dict[str, Dict[str, FetchKeyResult]] = {}
        for request, results in zip(deduped_requests, results_per_request):
            to_return_by_server = to_return.setdefault(request.server_name, {})
            for key_id, key_result in results.items():
                existing = to_return_by_server.get(key_id)
                if not existing or existing.valid_until_ts < key_result.valid_until_ts:
                    to_return_by_server[key_id] = key_result

        return to_return

    async def _inner_fetch_key_request(
        self, verify_request: _FetchKeyRequest
    ) -> Dict[str, FetchKeyResult]:
        """Attempt to fetch the given key by calling each key fetcher one by
        one.
        """
        logger.debug("Starting fetch for %s", verify_request)

        found_keys: Dict[str, FetchKeyResult] = {}
        missing_key_ids = set(verify_request.key_ids)

        for fetcher in self._key_fetchers:
            if not missing_key_ids:
                break

            logger.debug("Getting keys from %s for %s", fetcher, verify_request)
            keys = await fetcher.get_keys(
                verify_request.server_name,
                list(missing_key_ids),
                verify_request.minimum_valid_until_ts,
            )

            for key_id, key in keys.items():
                if not key:
                    continue

                # If we already have a result for the given key ID we keep the
                # one with the highest `valid_until_ts`.
                existing_key = found_keys.get(key_id)
                if existing_key:
                    if key.valid_until_ts <= existing_key.valid_until_ts:
                        continue

                # We always store the returned key even if it doesn't satisfy
                # the `minimum_valid_until_ts` requirement, as some verification
                # requests may still be able to be satisfied by it.
                #
                # We still keep looking for the key from other fetchers in that
                # case though.
                found_keys[key_id] = key

                if key.valid_until_ts < verify_request.minimum_valid_until_ts:
                    continue

                missing_key_ids.discard(key_id)

        return found_keys
|
2019-04-09 19:30:13 +02:00
|
|
|
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
class KeyFetcher(metaclass=abc.ABCMeta):
    """Abstract base for the key fetchers.

    Batches concurrent lookups through a `BatchingQueue` and hands each batch
    to the subclass's `_fetch_keys` implementation.
    """

    def __init__(self, hs: "HomeServer"):
        # The queue is named after the concrete subclass so that each fetcher
        # gets its own queue.
        self._queue = BatchingQueue(
            self.__class__.__name__, hs.get_clock(), self._fetch_keys
        )

    async def get_keys(
        self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        """Queue a fetch for the given keys and return the results for
        `server_name` (empty dict if nothing was found).
        """
        request = _FetchKeyRequest(
            server_name=server_name,
            key_ids=key_ids,
            minimum_valid_until_ts=minimum_valid_until_ts,
        )
        keys_by_server = await self._queue.add_to_queue(request)
        return keys_by_server.get(server_name, {})

    @abc.abstractmethod
    async def _fetch_keys(
        self, keys_to_fetch: List[_FetchKeyRequest]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """Fetch a batch of key requests; returns server name -> key ID ->
        result for the keys that were found.
        """
        pass
|
2019-04-09 19:28:17 +02:00
|
|
|
|
|
|
|
|
|
|
|
class StoreKeyFetcher(KeyFetcher):
    """KeyFetcher impl which fetches keys from our data store"""

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)

        self.store = hs.get_datastores().main

    async def _fetch_keys(
        self, keys_to_fetch: List[_FetchKeyRequest]
    ) -> Dict[str, Dict[str, FetchKeyResult]]:
        """Look the requested keys up in the local database."""
        # Flatten the batch into (server_name, key_id) pairs for the store.
        wanted = [
            (request.server_name, key_id)
            for request in keys_to_fetch
            for key_id in request.key_ids
        ]

        fetched = await self.store.get_server_verify_keys(wanted)

        # Re-group the flat result by server name.
        results: Dict[str, Dict[str, FetchKeyResult]] = {}
        for (server_name, key_id), key in fetched.items():
            per_server = results.setdefault(server_name, {})
            per_server[key_id] = key
        return results
|
2015-04-29 14:31:14 +02:00
|
|
|
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
class BaseV2KeyFetcher(KeyFetcher):
    """Shared implementation for fetchers which parse v2 'Server Keys'
    responses (`ServerKeyFetcher` and `PerspectivesKeyFetcher`).
    """

    def __init__(self, hs: "HomeServer"):
        super().__init__(hs)

        self.store = hs.get_datastores().main
        self.config = hs.config

    async def process_v2_response(
        self, from_server: str, response_json: JsonDict, time_added_ms: int
    ) -> Dict[str, FetchKeyResult]:
        """Parse a 'Server Keys' structure from the result of a /key request

        This is used to parse either the entirety of the response from
        GET /_matrix/key/v2/server, or a single entry from the list returned by
        POST /_matrix/key/v2/query.

        Checks that each signature in the response that claims to come from the origin
        server is valid, and that there is at least one such signature.

        Stores the json in server_keys_json so that it can be used for future responses
        to /_matrix/key/v2/query.

        Args:
            from_server: the name of the server producing this result: either
                the origin server for a /_matrix/key/v2/server request, or the notary
                for a /_matrix/key/v2/query.

            response_json: the json-decoded Server Keys response object

            time_added_ms: the timestamp to record in server_keys_json

        Returns:
            Map from key_id to result object

        Raises:
            KeyLookupError: if the response is not signed by the origin server
                with one of its current verify keys.
        """
        ts_valid_until_ms = response_json["valid_until_ts"]

        # start by extracting the keys from the response, since they may be required
        # to validate the signature on the response.
        verify_keys = {}
        for key_id, key_data in response_json["verify_keys"].items():
            # Skip keys whose algorithm we can't verify (e.g. unknown prefix).
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=ts_valid_until_ms
                )

        server_name = response_json["server_name"]
        # Require at least one valid signature from the origin server itself;
        # one is enough, so we stop at the first key that verifies.
        verified = False
        for key_id in response_json["signatures"].get(server_name, {}):
            key = verify_keys.get(key_id)
            if not key:
                # the key may not be present in verify_keys if:
                #  * we got the key from the notary server, and:
                #  * the key belongs to the notary server, and:
                #  * the notary server is using a different key to sign notary
                #    responses.
                continue

            # Raises SignatureVerifyException if the signature is bad.
            verify_signed_json(response_json, server_name, key.verify_key)
            verified = True
            break

        if not verified:
            raise KeyLookupError(
                "Key response for %s is not signed by the origin server"
                % (server_name,)
            )

        # Old (retired) keys are added *after* the signature check: they carry
        # their own `expired_ts` as the validity bound, and must not be used to
        # authenticate the response itself.
        for key_id, key_data in response_json["old_verify_keys"].items():
            if is_signing_algorithm_supported(key_id):
                key_base64 = key_data["key"]
                key_bytes = decode_base64(key_base64)
                verify_key = decode_verify_key_bytes(key_id, key_bytes)
                verify_keys[key_id] = FetchKeyResult(
                    verify_key=verify_key, valid_until_ts=key_data["expired_ts"]
                )

        key_json_bytes = encode_canonical_json(response_json)

        # Persist the (canonicalised) response once per key ID, in parallel,
        # so we can serve it to other servers via /_matrix/key/v2/query.
        await make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self.store.store_server_keys_json,
                        server_name=server_name,
                        key_id=key_id,
                        from_server=from_server,
                        ts_now_ms=time_added_ms,
                        ts_expires_ms=ts_valid_until_ms,
                        key_json_bytes=key_json_bytes,
                    )
                    for key_id in verify_keys
                ],
                consumeErrors=True,
            ).addErrback(unwrapFirstError)
        )

        return verify_keys
|
2019-04-09 19:28:17 +02:00
|
|
|
|
|
|
|
|
|
|
|
class PerspectivesKeyFetcher(BaseV2KeyFetcher):
|
|
|
|
"""KeyFetcher impl which fetches keys from the "perspectives" servers"""
|
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
def __init__(self, hs: "HomeServer"):
|
2020-09-18 15:56:44 +02:00
|
|
|
super().__init__(hs)
|
2019-04-09 19:28:17 +02:00
|
|
|
self.clock = hs.get_clock()
|
2020-12-02 17:09:24 +01:00
|
|
|
self.client = hs.get_federation_http_client()
|
2021-09-15 14:34:52 +02:00
|
|
|
self.key_servers = self.config.key.key_servers
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2021-06-02 17:37:59 +02:00
|
|
|
async def _fetch_keys(
|
|
|
|
self, keys_to_fetch: List[_FetchKeyRequest]
|
2021-01-04 16:04:50 +01:00
|
|
|
) -> Dict[str, Dict[str, FetchKeyResult]]:
|
2021-06-02 17:37:59 +02:00
|
|
|
"""see KeyFetcher._fetch_keys"""
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
async def get_key(key_server: TrustedKeyServer) -> Dict:
|
2015-05-19 12:56:18 +02:00
|
|
|
try:
|
2021-01-04 16:04:50 +01:00
|
|
|
return await self.get_server_verify_key_v2_indirect(
|
2019-06-06 18:33:11 +02:00
|
|
|
keys_to_fetch, key_server
|
2015-05-19 12:56:18 +02:00
|
|
|
)
|
2019-02-23 16:06:02 +01:00
|
|
|
except KeyLookupError as e:
|
2019-06-06 18:33:11 +02:00
|
|
|
logger.warning(
|
|
|
|
"Key lookup failed from %r: %s", key_server.server_name, e
|
|
|
|
)
|
2015-05-19 12:56:18 +02:00
|
|
|
except Exception as e:
|
2015-06-26 10:52:24 +02:00
|
|
|
logger.exception(
|
|
|
|
"Unable to get key from %r: %s %s",
|
2019-06-06 18:33:11 +02:00
|
|
|
key_server.server_name,
|
2019-05-22 19:39:33 +02:00
|
|
|
type(e).__name__,
|
|
|
|
str(e),
|
2015-05-19 12:56:18 +02:00
|
|
|
)
|
2019-02-23 16:06:02 +01:00
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return {}
|
2015-04-29 14:31:14 +02:00
|
|
|
|
2020-08-03 14:29:01 +02:00
|
|
|
results = await make_deferred_yieldable(
|
2019-05-22 19:39:33 +02:00
|
|
|
defer.gatherResults(
|
2019-06-20 11:32:02 +02:00
|
|
|
[run_in_background(get_key, server) for server in self.key_servers],
|
2019-05-22 19:39:33 +02:00
|
|
|
consumeErrors=True,
|
|
|
|
).addErrback(unwrapFirstError)
|
|
|
|
)
|
2015-04-29 14:31:14 +02:00
|
|
|
|
2021-07-15 12:02:43 +02:00
|
|
|
union_of_keys: Dict[str, Dict[str, FetchKeyResult]] = {}
|
2015-06-26 10:52:24 +02:00
|
|
|
for result in results:
|
|
|
|
for server_name, keys in result.items():
|
|
|
|
union_of_keys.setdefault(server_name, {}).update(keys)
|
2014-09-30 16:15:10 +02:00
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return union_of_keys
|
2014-09-30 16:15:10 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
async def get_server_verify_key_v2_indirect(
    self, keys_to_fetch: List[_FetchKeyRequest], key_server: TrustedKeyServer
) -> Dict[str, Dict[str, FetchKeyResult]]:
    """Fetch keys for a number of origin servers via a single notary server.

    Args:
        keys_to_fetch:
            the keys to be fetched.

        key_server: notary server to query for the keys

    Returns:
        Map from server_name -> key_id -> FetchKeyResult

    Raises:
        KeyLookupError if there was an error processing the entire response from
            the server
    """
    perspective_name = key_server.server_name
    logger.info(
        "Requesting keys %s from notary server %s",
        keys_to_fetch,
        perspective_name,
    )

    # Build the /key/v2/query request body: server_name -> key_id ->
    # {"minimum_valid_until_ts": ...}.
    request: JsonDict = {}
    for queue_value in keys_to_fetch:
        # there may be multiple requests for each server, so we have to merge
        # them intelligently.
        request_for_server = {
            key_id: {
                "minimum_valid_until_ts": queue_value.minimum_valid_until_ts,
            }
            for key_id in queue_value.key_ids
        }
        request.setdefault(queue_value.server_name, {}).update(request_for_server)

    logger.debug("Request to notary server %s: %s", perspective_name, request)

    try:
        query_response = await self.client.post_json(
            destination=perspective_name,
            path="/_matrix/key/v2/query",
            data={"server_keys": request},
        )
    except (NotRetryingDestination, RequestSendFailed) as e:
        # these both have str() representations which we can't really improve upon
        raise KeyLookupError(str(e))
    except HttpResponseException as e:
        raise KeyLookupError("Remote server returned an error: %s" % (e,))

    logger.debug(
        "Response from notary server %s: %s", perspective_name, query_response
    )

    # keys: the results to return; added_keys: flat (server, key_id, result)
    # tuples to persist in the database below.
    keys: Dict[str, Dict[str, FetchKeyResult]] = {}
    added_keys: List[Tuple[str, str, FetchKeyResult]] = []

    time_now_ms = self.clock.time_msec()

    assert isinstance(query_response, dict)
    for response in query_response["server_keys"]:
        # do this first, so that we can give useful errors thereafter
        server_name = response.get("server_name")
        if not isinstance(server_name, str):
            raise KeyLookupError(
                "Malformed response from key notary server %s: invalid server_name"
                % (perspective_name,)
            )

        try:
            # Check the notary's signature (if configured), then parse and
            # verify this origin server's key entry.
            self._validate_perspectives_response(key_server, response)

            processed_response = await self.process_v2_response(
                perspective_name, response, time_added_ms=time_now_ms
            )
        except KeyLookupError as e:
            logger.warning(
                "Error processing response from key notary server %s for origin "
                "server %s: %s",
                perspective_name,
                server_name,
                e,
            )
            # we continue to process the rest of the response
            continue

        added_keys.extend(
            (server_name, key_id, key) for key_id, key in processed_response.items()
        )
        keys.setdefault(server_name, {}).update(processed_response)

    # Persist everything we accepted, attributed to this notary server.
    await self.store.store_server_verify_keys(
        perspective_name, time_now_ms, added_keys
    )

    return keys
|
2015-04-20 17:23:47 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
def _validate_perspectives_response(
    self, key_server: TrustedKeyServer, response: JsonDict
) -> None:
    """Optionally check the signature on the result of a /key/query request

    Args:
        key_server: the notary server that produced this result

        response: the json-decoded Server Keys response object

    Raises:
        KeyLookupError: if checking is enabled and the response is either
            unsigned by this notary or signed only with unknown keys.
    """
    notary_name = key_server.server_name
    notary_keys = key_server.verify_keys

    # A key_server configured without verify_keys means signature checking
    # is disabled for this notary, so there is nothing to validate.
    if notary_keys is None:
        return

    signatures = response.get("signatures", {})
    if notary_name not in signatures:
        raise KeyLookupError("Response not signed by the notary server")

    # Verify every signature made with a key we know about; at least one
    # must have been checked for the response to be accepted.
    matched_any = False
    for sig_key_id in signatures[notary_name]:
        if sig_key_id in notary_keys:
            verify_signed_json(response, notary_name, notary_keys[sig_key_id])
            matched_any = True

    if not matched_any:
        raise KeyLookupError(
            "Response not signed with a known key: signed with: %r, known keys: %r"
            % (
                list(signatures[notary_name].keys()),
                list(notary_keys.keys()),
            )
        )
|
|
|
|
|
2019-04-09 19:28:17 +02:00
|
|
|
|
|
|
|
class ServerKeyFetcher(BaseV2KeyFetcher):
|
|
|
|
"""KeyFetcher impl which fetches keys from the origin servers"""
|
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
def __init__(self, hs: "HomeServer"):
    """Set up a fetcher that asks origin servers directly for their keys."""
    super().__init__(hs)
    # Direct fetches go over federation, so we need the federation HTTP
    # client; the clock supplies timestamps for stored key validity.
    self.client = hs.get_federation_http_client()
    self.clock = hs.get_clock()
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2021-01-04 16:04:50 +01:00
|
|
|
async def get_keys(
    self, server_name: str, key_ids: List[str], minimum_valid_until_ts: int
) -> Dict[str, FetchKeyResult]:
    """Fetch the given keys for one server via the batching queue.

    Returns this server's portion of the batched result (empty dict if the
    lookup produced nothing for it).
    """
    fetch_request = _FetchKeyRequest(
        server_name=server_name,
        key_ids=key_ids,
        minimum_valid_until_ts=minimum_valid_until_ts,
    )
    # Queue keyed by server_name — presumably so concurrent lookups for the
    # same server can be batched together; verify against the queue impl.
    batched_results = await self._queue.add_to_queue(fetch_request, key=server_name)
    return batched_results.get(server_name, {})
|
|
|
|
|
|
|
|
async def _fetch_keys(
    self, keys_to_fetch: List[_FetchKeyRequest]
) -> Dict[str, Dict[str, FetchKeyResult]]:
    """
    Args:
        keys_to_fetch:
            the keys to be fetched. server_name -> key_ids

    Returns:
        Map from server_name -> key_id -> FetchKeyResult
    """
    fetched: Dict[str, Dict[str, FetchKeyResult]] = {}

    async def fetch_one(request: _FetchKeyRequest) -> None:
        # Ask the origin server directly; failures are logged and skipped so
        # one unreachable server doesn't abort the whole batch.
        origin = request.server_name
        try:
            fetched[origin] = await self.get_server_verify_keys_v2_direct(origin)
        except KeyLookupError as e:
            logger.warning("Error looking up keys from %s: %s", origin, e)
        except Exception:
            logger.exception("Error getting keys from %s", origin)

    await yieldable_gather_results(fetch_one, keys_to_fetch)
    return fetched
|
2019-04-09 19:28:17 +02:00
|
|
|
|
2022-11-30 12:59:57 +01:00
|
|
|
async def get_server_verify_keys_v2_direct(
    self, server_name: str
) -> Dict[str, FetchKeyResult]:
    """Fetch a server's signing keys directly from that server.

    Makes a federation GET to ``/_matrix/key/v2/server`` on the origin
    server and processes the result.

    Args:
        server_name: Server to request keys from

    Returns:
        Map from key ID to lookup result

    Raises:
        KeyLookupError if there was a problem making the lookup
    """
    time_now_ms = self.clock.time_msec()
    try:
        response = await self.client.get_json(
            destination=server_name,
            path="/_matrix/key/v2/server",
            ignore_backoff=True,
            # we only give the remote server 10s to respond. It should be an
            # easy request to handle, so if it doesn't reply within 10s, it's
            # probably not going to.
            #
            # Furthermore, when we are acting as a notary server, we cannot
            # wait all day for all of the origin servers, as the requesting
            # server will otherwise time out before we can respond.
            #
            # (Note that get_json may make 4 attempts, so this can still take
            # almost 45 seconds to fetch the headers, plus up to another 60s to
            # read the response).
            timeout=10000,
        )
    except (NotRetryingDestination, RequestSendFailed) as e:
        # these both have str() representations which we can't really improve
        # upon
        raise KeyLookupError(str(e))
    except HttpResponseException as e:
        raise KeyLookupError("Remote server returned an error: %s" % (e,))

    assert isinstance(response, dict)
    # Sanity-check: the response must be for the server we asked about,
    # otherwise a misbehaving server could feed us keys for someone else.
    if response["server_name"] != server_name:
        raise KeyLookupError(
            "Expected a response for server %r not %r"
            % (server_name, response["server_name"])
        )

    return await self.process_v2_response(
        from_server=server_name,
        response_json=response,
        time_added_ms=time_now_ms,
    )
|