MatrixSynapse/synapse/crypto/keyring.py

779 lines
28 KiB
Python
Raw Normal View History

# -*- coding: utf-8 -*-
2016-01-07 05:26:29 +01:00
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017 New Vector Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.crypto.keyclient import fetch_server_key
from synapse.api.errors import SynapseError, Codes
from synapse.util import unwrapFirstError, logcontext
2016-02-04 11:22:44 +01:00
from synapse.util.logcontext import (
PreserveLoggingContext,
preserve_fn,
run_in_background,
2016-02-04 11:22:44 +01:00
)
2016-08-19 18:38:15 +02:00
from synapse.util.metrics import Measure
from twisted.internet import defer
from signedjson.sign import (
2015-04-22 15:21:08 +02:00
verify_signed_json, signature_ids, sign_json, encode_canonical_json
)
from signedjson.key import (
is_signing_algorithm_supported, decode_verify_key_bytes
)
from unpaddedbase64 import decode_base64, encode_base64
from OpenSSL import crypto
2015-06-26 10:52:24 +02:00
from collections import namedtuple
2015-04-22 15:21:08 +02:00
import urllib
import hashlib
import logging
logger = logging.getLogger(__name__)
# Fix: the namedtuple's typename was "VerifyRequest", which did not match the
# binding name; this made repr() and pickling report the wrong class name.
VerifyKeyRequest = namedtuple("VerifyKeyRequest", (
    "server_name", "key_ids", "json_object", "deferred"
))
"""
A request for a verify key to verify a JSON object.

Attributes:
    server_name(str): The name of the server to verify against.
    key_ids(set(str)): The set of key_ids that could be used to verify the
        JSON object
    json_object(dict): The JSON object to verify.
    deferred(twisted.internet.defer.Deferred):
        A deferred (server_name, key_id, verify_key) tuple that resolves when
        a verify key has been fetched. The deferreds' callbacks are run with no
        logcontext.
"""
2015-06-26 10:52:24 +02:00
class KeyLookupError(ValueError):
    """Raised when a key response from a remote or perspective server is
    missing, unsigned, or fails verification."""
    pass
class Keyring(object):
    """Fetches and caches server signing keys, and verifies the signatures
    on JSON objects against them."""

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.clock = hs.get_clock()
        self.client = hs.get_http_client()
        self.config = hs.get_config()
        self.perspective_servers = self.config.perspectives
        self.hs = hs

        # map from server name to Deferred. Has an entry for each server with
        # an ongoing key download; the Deferred completes once the download
        # completes.
        #
        # These are regular, logcontext-agnostic Deferreds.
        self.key_downloads = {}
def verify_json_for_server(self, server_name, json_object):
    """Verify the signature of a single JSON object for one server.

    Thin wrapper around verify_json_objects_for_server: submits a
    single-element batch and waits on its deferred, following the
    logcontext rules.
    """
    deferreds = self.verify_json_objects_for_server(
        [(server_name, json_object)]
    )
    return logcontext.make_deferred_yieldable(deferreds[0])
2015-06-26 10:52:24 +02:00
def verify_json_objects_for_server(self, server_and_json):
    """Bulk verifies signatures of json objects, bulk fetching keys as
    necessary.

    Args:
        server_and_json (list): List of pairs of (server_name, json_object)

    Returns:
        List<Deferred>: for each input pair, a deferred indicating success
            or failure to verify each json object's signature for the given
            server_name. The deferreds run their callbacks in the sentinel
            logcontext.
    """
    verify_requests = []

    for server_name, json_object in server_and_json:
        # key_ids is empty when the object carries no signature made with
        # an algorithm we support, in which case we fail it immediately.
        key_ids = signature_ids(json_object, server_name)
        if not key_ids:
            logger.warn("Request from %s: no supported signature keys",
                        server_name)
            deferred = defer.fail(SynapseError(
                400,
                "Not signed with a supported algorithm",
                Codes.UNAUTHORIZED,
            ))
        else:
            deferred = defer.Deferred()

        logger.debug("Verifying for %s with key_ids %s",
                     server_name, key_ids)

        verify_request = VerifyKeyRequest(
            server_name, key_ids, json_object, deferred
        )

        verify_requests.append(verify_request)

    # Kick off the fetches in the background; each request's deferred is
    # resolved (or errbacked) as the keys arrive.
    run_in_background(self._start_key_lookups, verify_requests)

    # Pass those keys to handle_key_deferred so that the json object
    # signatures can be verified
    handle = preserve_fn(_handle_key_deferred)
    return [
        handle(rq) for rq in verify_requests
    ]
2016-08-19 18:38:15 +02:00
@defer.inlineCallbacks
def _start_key_lookups(self, verify_requests):
    """Sets off the key fetches for each verify request

    Once each fetch completes, verify_request.deferred will be resolved.

    Args:
        verify_requests (List[VerifyKeyRequest]):
    """
    try:
        # create a deferred for each server we're going to look up the keys
        # for; we'll resolve them once we have completed our lookups.
        # These will be passed into wait_for_previous_lookups to block
        # any other lookups until we have finished.
        # The deferreds are called with no logcontext.
        server_to_deferred = {
            rq.server_name: defer.Deferred()
            for rq in verify_requests
        }

        # We want to wait for any previous lookups to complete before
        # proceeding.
        yield self.wait_for_previous_lookups(
            [rq.server_name for rq in verify_requests],
            server_to_deferred,
        )

        # Actually start fetching keys.
        self._get_server_verify_keys(verify_requests)

        # When we've finished fetching all the keys for a given server_name,
        # resolve the deferred passed to `wait_for_previous_lookups` so that
        # any lookups waiting will proceed.
        #
        # map from server name to a set of request ids
        server_to_request_ids = {}

        for verify_request in verify_requests:
            server_name = verify_request.server_name
            request_id = id(verify_request)
            server_to_request_ids.setdefault(server_name, set()).add(request_id)

        def remove_deferreds(res, verify_request):
            # passthrough callback: drop this request from its server's
            # pending set; once the set is empty, fire the per-server
            # deferred so queued lookups can proceed.
            server_name = verify_request.server_name
            request_id = id(verify_request)
            server_to_request_ids[server_name].discard(request_id)
            if not server_to_request_ids[server_name]:
                d = server_to_deferred.pop(server_name, None)
                if d:
                    d.callback(None)
            return res

        for verify_request in verify_requests:
            verify_request.deferred.addBoth(
                remove_deferreds, verify_request,
            )
    except Exception:
        # this runs in the background, so there is nobody to propagate
        # the failure to; just log it.
        logger.exception("Error starting key lookups")
@defer.inlineCallbacks
def wait_for_previous_lookups(self, server_names, server_to_deferred):
    """Waits for any previous key lookups for the given servers to finish.

    Args:
        server_names (list): list of server_names we want to lookup
        server_to_deferred (dict): server_name to deferred which gets
            resolved once we've finished looking up keys for that server.
            The Deferreds should be regular twisted ones which call their
            callbacks with no logcontext.

    Returns: a Deferred which resolves once all key lookups for the given
        servers have completed. Follows the synapse rules of logcontext
        preservation.
    """
    # loop rather than waiting once: by the time the awaited downloads
    # complete, new ones for the same servers may have been registered.
    while True:
        wait_on = [
            self.key_downloads[server_name]
            for server_name in server_names
            if server_name in self.key_downloads
        ]
        if wait_on:
            with PreserveLoggingContext():
                yield defer.DeferredList(wait_on)
        else:
            break

    def rm(r, server_name_):
        # passthrough callback: remove our entry from key_downloads once
        # the lookup for this server completes (success or failure).
        self.key_downloads.pop(server_name_, None)
        return r

    for server_name, deferred in server_to_deferred.items():
        self.key_downloads[server_name] = deferred
        deferred.addBoth(rm, server_name)
def _get_server_verify_keys(self, verify_requests):
    """Tries to find at least one key for each verify request

    For each verify_request, verify_request.deferred is called back with
    params (server_name, key_id, VerifyKey) if a key is found, or errbacked
    with a SynapseError if none of the keys are found.

    Args:
        verify_requests (list[VerifyKeyRequest]): list of verify requests
    """
    # These are functions that produce keys given a list of key ids
    key_fetch_fns = (
        self.get_keys_from_store,  # First try the local store
        self.get_keys_from_perspectives,  # Then try via perspectives
        self.get_keys_from_server,  # Then try directly
    )

    @defer.inlineCallbacks
    def do_iterations():
        with Measure(self.clock, "get_server_verify_keys"):
            # dict[str, dict[str, VerifyKey]]: results so far.
            # map server_name -> key_id -> VerifyKey
            merged_results = {}

            # dict[str, set(str)]: keys to fetch for each server
            missing_keys = {}
            for verify_request in verify_requests:
                missing_keys.setdefault(verify_request.server_name, set()).update(
                    verify_request.key_ids
                )

            # try each fetcher in turn, stopping as soon as nothing is
            # missing any more.
            for fn in key_fetch_fns:
                results = yield fn(missing_keys.items())
                merged_results.update(results)

                # We now need to figure out which verify requests we have keys
                # for and which we don't
                missing_keys = {}
                requests_missing_keys = []
                for verify_request in verify_requests:
                    server_name = verify_request.server_name
                    result_keys = merged_results[server_name]

                    if verify_request.deferred.called:
                        # We've already called this deferred, which probably
                        # means that we've already found a key for it.
                        continue

                    for key_id in verify_request.key_ids:
                        if key_id in result_keys:
                            # any of the request's key_ids will do; resolve
                            # with the first one we have.
                            with PreserveLoggingContext():
                                verify_request.deferred.callback((
                                    server_name,
                                    key_id,
                                    result_keys[key_id],
                                ))
                            break
                    else:
                        # The else block is only reached if the loop above
                        # doesn't break.
                        missing_keys.setdefault(server_name, set()).update(
                            verify_request.key_ids
                        )
                        requests_missing_keys.append(verify_request)

                if not missing_keys:
                    break

            # anything still unresolved after all fetchers have been tried
            # is a hard failure.
            with PreserveLoggingContext():
                for verify_request in requests_missing_keys:
                    verify_request.deferred.errback(SynapseError(
                        401,
                        "No key for %s with id %s" % (
                            verify_request.server_name, verify_request.key_ids,
                        ),
                        Codes.UNAUTHORIZED,
                    ))

    def on_err(err):
        # propagate the failure to every request that has not already
        # completed.
        with PreserveLoggingContext():
            for verify_request in verify_requests:
                if not verify_request.deferred.called:
                    verify_request.deferred.errback(err)

    run_in_background(do_iterations).addErrback(on_err)
2015-06-26 10:52:24 +02:00
2015-04-27 15:37:24 +02:00
@defer.inlineCallbacks
def get_keys_from_store(self, server_name_and_key_ids):
    """Fetch keys from the local datastore.

    Args:
        server_name_and_key_ids (list[(str, iterable[str])]):
            list of (server_name, iterable[key_id]) tuples to fetch keys for

    Returns:
        Deferred: resolves to dict[str, dict[str, VerifyKey]]: map from
            server_name -> key_id -> VerifyKey
    """
    res = yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            # the extra `server` param keeps the server name paired with
            # its result when the gathered list comes back.
            run_in_background(
                self.store.get_server_verify_keys,
                server_name, key_ids,
            ).addCallback(lambda ks, server: (server, ks), server_name)
            for server_name, key_ids in server_name_and_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    defer.returnValue(dict(res))
2015-06-26 10:52:24 +02:00
@defer.inlineCallbacks
def get_keys_from_perspectives(self, server_name_and_key_ids):
    """Ask each configured perspective (notary) server for the keys.

    Args:
        server_name_and_key_ids (list[(str, iterable[str])]):
            list of (server_name, iterable[key_id]) tuples to fetch keys for

    Returns:
        Deferred: resolves to dict[str, dict[str, VerifyKey]]: map from
            server_name -> key_id -> VerifyKey, merged across perspectives
    """
    @defer.inlineCallbacks
    def get_key(perspective_name, perspective_keys):
        try:
            result = yield self.get_server_verify_key_v2_indirect(
                server_name_and_key_ids, perspective_name, perspective_keys
            )
            defer.returnValue(result)
        except Exception as e:
            # a failing perspective is best-effort: log and return nothing
            # rather than failing the whole lookup.
            logger.exception(
                "Unable to get key from %r: %s %s",
                perspective_name,
                type(e).__name__, str(e),
            )
            defer.returnValue({})

    results = yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(get_key, p_name, p_keys)
            for p_name, p_keys in self.perspective_servers.items()
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    # merge the per-perspective result dicts
    union_of_keys = {}
    for result in results:
        for server_name, keys in result.items():
            union_of_keys.setdefault(server_name, {}).update(keys)

    defer.returnValue(union_of_keys)
2015-06-26 10:52:24 +02:00
@defer.inlineCallbacks
def get_keys_from_server(self, server_name_and_key_ids):
    """Fetch keys directly from the origin servers, trying the v2 API and
    falling back to v1.

    Args:
        server_name_and_key_ids (list[(str, iterable[str])]):
            list of (server_name, iterable[key_id]) tuples to fetch keys for

    Returns:
        Deferred: resolves to dict[str, dict[str, VerifyKey]]: map from
            server_name -> key_id -> VerifyKey (servers with no keys omitted)
    """
    @defer.inlineCallbacks
    def get_key(server_name, key_ids):
        keys = None
        try:
            keys = yield self.get_server_verify_key_v2_direct(
                server_name, key_ids
            )
        except Exception as e:
            logger.info(
                "Unable to get key %r for %r directly: %s %s",
                key_ids, server_name,
                type(e).__name__, str(e),
            )

        if not keys:
            # v2 failed; fall back to the legacy v1 API. v1 returns a
            # plain key dict, so wrap it into the server_name -> keys
            # shape that v2 already uses.
            keys = yield self.get_server_verify_key_v1_direct(
                server_name, key_ids
            )

            keys = {server_name: keys}

        defer.returnValue(keys)

    results = yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(get_key, server_name, key_ids)
            for server_name, key_ids in server_name_and_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    merged = {}
    for result in results:
        merged.update(result)

    defer.returnValue({
        server_name: keys
        for server_name, keys in merged.items()
        if keys
    })
2015-04-20 17:23:47 +02:00
@defer.inlineCallbacks
def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
                                      perspective_name,
                                      perspective_keys):
    """Query a perspective (notary) server for other servers' keys.

    Args:
        server_names_and_key_ids (list[(str, iterable[str])]):
            list of (server_name, iterable[key_id]) tuples to fetch keys for
        perspective_name (str): the perspective server to query
        perspective_keys (dict[str, VerifyKey]): the perspective server's
            own keys, used to check its signature on the response

    Returns:
        Deferred[dict[str, dict[str, VerifyKey]]]:
            map from server_name -> key_id -> VerifyKey

    Raises:
        KeyLookupError: if a response is not (validly) signed by the
            perspective server
    """
    # TODO(mark): Set the minimum_valid_until_ts to that needed by
    # the events being validated or the current time if validating
    # an incoming request.
    query_response = yield self.client.post_json(
        destination=perspective_name,
        path=b"/_matrix/key/v2/query",
        data={
            u"server_keys": {
                server_name: {
                    key_id: {
                        u"minimum_valid_until_ts": 0
                    } for key_id in key_ids
                }
                for server_name, key_ids in server_names_and_key_ids
            }
        },
        long_retries=True,
    )

    keys = {}

    responses = query_response["server_keys"]

    for response in responses:
        if (u"signatures" not in response
                or perspective_name not in response[u"signatures"]):
            raise KeyLookupError(
                "Key response not signed by perspective server"
                " %r" % (perspective_name,)
            )

        verified = False
        for key_id in response[u"signatures"][perspective_name]:
            if key_id in perspective_keys:
                verify_signed_json(
                    response,
                    perspective_name,
                    perspective_keys[key_id]
                )
                verified = True

        if not verified:
            # Fix: this previously called logging.info (the root logger)
            # instead of this module's logger.
            logger.info(
                "Response from perspective server %r not signed with a"
                " known key, signed with: %r, known keys: %r",
                perspective_name,
                list(response[u"signatures"][perspective_name]),
                list(perspective_keys)
            )
            raise KeyLookupError(
                "Response not signed with a known key for perspective"
                " server %r" % (perspective_name,)
            )

        # only_from_server=False: a perspective response legitimately
        # contains keys for servers other than the one we queried.
        processed_response = yield self.process_v2_response(
            perspective_name, response, only_from_server=False
        )

        for server_name, response_keys in processed_response.items():
            keys.setdefault(server_name, {}).update(response_keys)

    # persist everything we learned, in parallel
    yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store_keys,
                server_name=server_name,
                from_server=perspective_name,
                verify_keys=response_keys,
            )
            for server_name, response_keys in keys.items()
        ],
        consumeErrors=True
    ).addErrback(unwrapFirstError))

    defer.returnValue(keys)
@defer.inlineCallbacks
def get_server_verify_key_v2_direct(self, server_name, key_ids):
    """Fetch keys from a server's own /_matrix/key/v2/server endpoint.

    Args:
        server_name (str): the server to fetch keys from
        key_ids (iterable[str]): the key_ids to fetch

    Returns:
        Deferred[dict[str, dict[str, VerifyKey]]]:
            map from server_name -> key_id -> VerifyKey

    Raises:
        KeyLookupError: if a response is unsigned, missing TLS
            fingerprints, or served over a connection whose certificate
            does not match the advertised fingerprints
    """
    keys = {}

    for requested_key_id in key_ids:
        # we may already have found this key as a side effect of a
        # previous request in this loop.
        if requested_key_id in keys:
            continue

        (response, tls_certificate) = yield fetch_server_key(
            server_name, self.hs.tls_server_context_factory,
            path=(b"/_matrix/key/v2/server/%s" % (
                urllib.quote(requested_key_id),
            )).encode("ascii"),
        )

        if (u"signatures" not in response
                or server_name not in response[u"signatures"]):
            raise KeyLookupError("Key response not signed by remote server")

        if "tls_fingerprints" not in response:
            raise KeyLookupError("Key response missing TLS fingerprints")

        # check that the certificate the connection was actually made with
        # is one the server advertises in the response.
        certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1, tls_certificate
        )
        sha256_fingerprint = hashlib.sha256(certificate_bytes).digest()
        sha256_fingerprint_b64 = encode_base64(sha256_fingerprint)

        response_sha256_fingerprints = set()
        for fingerprint in response[u"tls_fingerprints"]:
            if u"sha256" in fingerprint:
                response_sha256_fingerprints.add(fingerprint[u"sha256"])

        if sha256_fingerprint_b64 not in response_sha256_fingerprints:
            raise KeyLookupError("TLS certificate not allowed by fingerprints")

        response_keys = yield self.process_v2_response(
            from_server=server_name,
            requested_ids=[requested_key_id],
            response_json=response,
        )

        keys.update(response_keys)

    # persist everything we learned, in parallel
    yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store_keys,
                server_name=key_server_name,
                from_server=server_name,
                verify_keys=verify_keys,
            )
            for key_server_name, verify_keys in keys.items()
        ],
        consumeErrors=True
    ).addErrback(unwrapFirstError))

    defer.returnValue(keys)
2015-04-20 17:23:47 +02:00
@defer.inlineCallbacks
def process_v2_response(self, from_server, response_json,
                        requested_ids=None, only_from_server=True):
    """Parse and check a v2 key response, and persist the signed JSON.

    Args:
        from_server (str): the server the response was fetched from
        response_json (dict): the parsed v2 key response
        requested_ids (list[str]|None): key_ids that were explicitly
            requested; these are persisted even if absent from the
            response. Defaults to the empty list.
        only_from_server (bool): if True, reject a response whose
            server_name differs from from_server

    Returns:
        Deferred[dict[str, dict[str, VerifyKey]]]:
            map from server_name -> key_id -> VerifyKey

    Raises:
        KeyLookupError: on a mismatched server_name or a signature made
            with a key missing from verify_keys
    """
    # Fix: the signature previously used a mutable default argument
    # (`requested_ids=[]`); use a None sentinel instead. Behaviour is
    # unchanged for all callers.
    if requested_ids is None:
        requested_ids = []

    time_now_ms = self.clock.time_msec()
    response_keys = {}
    verify_keys = {}
    for key_id, key_data in response_json["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    old_verify_keys = {}
    for key_id, key_data in response_json["old_verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_base64 = key_data["key"]
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.expired = key_data["expired_ts"]
            verify_key.time_added = time_now_ms
            old_verify_keys[key_id] = verify_key

    results = {}
    server_name = response_json["server_name"]
    if only_from_server:
        if server_name != from_server:
            raise KeyLookupError(
                "Expected a response for server %r not %r" % (
                    from_server, server_name
                )
            )
    for key_id in response_json["signatures"].get(server_name, {}):
        if key_id not in response_json["verify_keys"]:
            raise KeyLookupError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response_json,
                server_name,
                verify_keys[key_id]
            )

    # re-sign the response with our own key before caching it, so we can
    # later serve it to others as a perspective server.
    signed_key_json = sign_json(
        response_json,
        self.config.server_name,
        self.config.signing_key[0],
    )

    signed_key_json_bytes = encode_canonical_json(signed_key_json)
    ts_valid_until_ms = signed_key_json[u"valid_until_ts"]

    updated_key_ids = set(requested_ids)
    updated_key_ids.update(verify_keys)
    updated_key_ids.update(old_verify_keys)

    response_keys.update(verify_keys)
    response_keys.update(old_verify_keys)

    yield logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store.store_server_keys_json,
                server_name=server_name,
                key_id=key_id,
                # NOTE(review): this records from_server=server_name rather
                # than the `from_server` argument, so the originating
                # perspective server is not recorded — confirm this is
                # intended before changing it.
                from_server=server_name,
                ts_now_ms=time_now_ms,
                ts_expires_ms=ts_valid_until_ms,
                key_json_bytes=signed_key_json_bytes,
            )
            for key_id in updated_key_ids
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))

    results[server_name] = response_keys

    defer.returnValue(results)
2015-04-20 17:23:47 +02:00
@defer.inlineCallbacks
def get_server_verify_key_v1_direct(self, server_name, key_ids):
    """Finds a verification key for the server with one of the key ids.

    Args:
        server_name (str): The name of the server to fetch a key for.
        keys_ids (list of str): The key_ids to check for.

    Returns:
        Deferred[dict[str, VerifyKey]]: map from key_id -> VerifyKey

    Raises:
        KeyLookupError: if the response is unsigned, is missing the TLS
            certificate, or the certificate does not match the connection
    """
    # Try to fetch the key from the remote server.
    (response, tls_certificate) = yield fetch_server_key(
        server_name, self.hs.tls_server_context_factory
    )

    # Check the response.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, tls_certificate
    )

    if ("signatures" not in response
            or server_name not in response["signatures"]):
        raise KeyLookupError("Key response not signed by remote server")

    if "tls_certificate" not in response:
        raise KeyLookupError("Key response missing TLS certificate")

    tls_certificate_b64 = response["tls_certificate"]

    # v1 pins the whole certificate, not just a fingerprint
    if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
        raise KeyLookupError("TLS certificate doesn't match")

    # Cache the result in the datastore.
    time_now_ms = self.clock.time_msec()

    verify_keys = {}
    for key_id, key_base64 in response["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    for key_id in response["signatures"][server_name]:
        if key_id not in response["verify_keys"]:
            raise KeyLookupError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response,
                server_name,
                verify_keys[key_id]
            )

    yield self.store.store_server_certificate(
        server_name,
        server_name,
        time_now_ms,
        tls_certificate,
    )

    yield self.store_keys(
        server_name=server_name,
        from_server=server_name,
        verify_keys=verify_keys,
    )

    defer.returnValue(verify_keys)
def store_keys(self, server_name, from_server, verify_keys):
    """Store a collection of verify keys for a given server

    Args:
        server_name(str): The name of the server the keys are for.
        from_server(str): The server the keys were downloaded from.
        verify_keys(dict): A mapping of key_id to VerifyKey.

    Returns:
        A deferred that completes when the keys are stored.
    """
    # TODO(markjh): Store whether the keys have expired.
    #
    # NOTE(review): the store call below passes server_name twice, so the
    # `from_server` argument is never persisted — looks like a latent bug;
    # left as-is pending confirmation of store_server_verify_key's
    # expected arguments.
    return logcontext.make_deferred_yieldable(defer.gatherResults(
        [
            run_in_background(
                self.store.store_server_verify_key,
                server_name, server_name, key.time_added, key
            )
            # key_id was previously bound but unused; iterate values only
            for key in verify_keys.values()
        ],
        consumeErrors=True,
    ).addErrback(unwrapFirstError))
@defer.inlineCallbacks
def _handle_key_deferred(verify_request):
    """Waits for a verify key to arrive, then checks the JSON signature.

    Args:
        verify_request (VerifyKeyRequest): the request whose deferred
            supplies the (server_name, key_id, verify_key) tuple

    Raises:
        SynapseError: 502 if the key download hit an IOError, 401 if no key
            could be found or the signature is invalid
    """
    server_name = verify_request.server_name
    try:
        with PreserveLoggingContext():
            _, key_id, verify_key = yield verify_request.deferred
    except IOError as e:
        logger.warn(
            "Got IOError when downloading keys for %s: %s %s",
            server_name, type(e).__name__, str(e),
        )
        raise SynapseError(
            502,
            "Error downloading keys for %s" % (server_name,),
            Codes.UNAUTHORIZED,
        )
    except Exception as e:
        logger.exception(
            "Got Exception when downloading keys for %s: %s %s",
            server_name, type(e).__name__, str(e),
        )
        raise SynapseError(
            401,
            "No key for %s with id %s" % (server_name, verify_request.key_ids),
            Codes.UNAUTHORIZED,
        )

    json_object = verify_request.json_object

    # Fix: pass lazy %-args instead of pre-formatting with "%" so the
    # message is only built when debug logging is enabled.
    logger.debug(
        "Got key %s %s:%s for server %s, verifying",
        key_id, verify_key.alg, verify_key.version, server_name,
    )
    try:
        verify_signed_json(json_object, server_name, verify_key)
    except Exception:
        raise SynapseError(
            401,
            "Invalid signature for server %s with key %s:%s" % (
                server_name, verify_key.alg, verify_key.version
            ),
            Codes.UNAUTHORIZED,
        )