# -*- coding: utf-8 -*-
# Copyright 2017, 2018 New Vector Ltd
# Copyright 2019 Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from twisted.internet import defer

from synapse.api.errors import (
    Codes,
    NotFoundError,
    RoomKeysVersionError,
    StoreError,
    SynapseError,
)
from synapse.logging.opentracing import log_kv, trace
from synapse.util.async_helpers import Linearizer

logger = logging.getLogger(__name__)


class E2eRoomKeysHandler(object):
    """
    Implements an optional realtime backup mechanism for encrypted E2E megolm room keys.
    This gives a way for users to store and recover their megolm keys if they lose all
    their clients. It should also extend easily to future room key mechanisms.
    The actual payload of the encrypted keys is completely opaque to the handler.
    """

    def __init__(self, hs):
        self.store = hs.get_datastore()

        # Used to lock whenever a client is uploading key data. This prevents collisions
        # between clients trying to upload the details of a new session, given all
        # clients belonging to a user will receive and try to upload a new session at
        # roughly the same time. Also used to lock out uploads when the key is being
        # changed.
        self._upload_linearizer = Linearizer("upload_room_keys_lock")

    @trace
    @defer.inlineCallbacks
    def get_room_keys(self, user_id, version, room_id=None, session_id=None):
        """Bulk get the E2E room keys for a given backup, optionally filtered to a given
        room, or a given session.
        See EndToEndRoomKeyStore.get_e2e_room_keys for full details.

        Args:
            user_id(str): the user whose keys we're getting
            version(str): the version ID of the backup we're getting keys from
            room_id(string): room ID to get keys for, or None to get keys for all rooms
            session_id(string): session ID to get keys for, or None to get keys for all
                sessions
        Raises:
            NotFoundError: if the backup version does not exist
        Returns:
            A deferred list of dicts giving the session_data and message metadata for
            these room keys.
        """

        # we deliberately take the lock to get keys so that changing the version
        # works atomically
        with (yield self._upload_linearizer.queue(user_id)):
            # make sure the backup version exists
            try:
                yield self.store.get_e2e_room_keys_version_info(user_id, version)
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Unknown backup version")
                else:
                    raise

            results = yield self.store.get_e2e_room_keys(
                user_id, version, room_id, session_id
            )

            log_kv(results)
            return results

    @trace
    @defer.inlineCallbacks
    def delete_room_keys(self, user_id, version, room_id=None, session_id=None):
        """Bulk delete the E2E room keys for a given backup, optionally filtered to a given
        room or a given session.
        See EndToEndRoomKeyStore.delete_e2e_room_keys for full details.

        Args:
            user_id(str): the user whose backup we're deleting
            version(str): the version ID of the backup we're deleting
            room_id(string): room ID to delete keys for, or None to delete keys for all
                rooms
            session_id(string): session ID to delete keys for, or None to delete keys
                for all sessions
        Raises:
            NotFoundError: if the backup version does not exist
        Returns:
            A dict containing the count and etag for the backup version
        """

        # lock for consistency with uploading
        with (yield self._upload_linearizer.queue(user_id)):
            # make sure the backup version exists
            try:
                version_info = yield self.store.get_e2e_room_keys_version_info(
                    user_id, version
                )
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Unknown backup version")
                else:
                    raise

            yield self.store.delete_e2e_room_keys(user_id, version, room_id, session_id)

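            # Deleting keys changes the contents of the backup, so bump the
            # version etag and report the new key count, which lets clients
            # detect that the backup has changed.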
            version_etag = version_info["etag"] + 1
            yield self.store.update_e2e_room_keys_version(
                user_id, version, None, version_etag
            )

            count = yield self.store.count_e2e_room_keys(user_id, version)
            return {"etag": str(version_etag), "count": count}

    @trace
    @defer.inlineCallbacks
    def upload_room_keys(self, user_id, version, room_keys):
        """Bulk upload a list of room keys into a given backup version, asserting
        that the given version is the current backup version. room_keys are merged
        into the current backup as described in RoomKeysServlet.on_PUT().

        Args:
            user_id(str): the user whose backup we're setting
            version(str): the version ID of the backup we're updating
            room_keys(dict): a nested dict describing the room_keys we're setting:

        {
            "rooms": {
                "!abc:matrix.org": {
                    "sessions": {
                        "c0ff33": {
                            "first_message_index": 1,
                            "forwarded_count": 1,
                            "is_verified": false,
                            "session_data": "SSBBTSBBIEZJU0gK"
                        }
                    }
                }
            }
        }

        Returns:
            A dict containing the count and etag for the backup version

        Raises:
            NotFoundError: if there are no versions defined
            RoomKeysVersionError: if the uploaded version is not the current version
        """

        # TODO: Validate the JSON to make sure it has the right keys.

        # XXX: perhaps we should use a finer grained lock here?
        with (yield self._upload_linearizer.queue(user_id)):

            # Check that the version we're trying to upload is the current version
            try:
                version_info = yield self.store.get_e2e_room_keys_version_info(user_id)
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Version '%s' not found" % (version,))
                else:
                    raise

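            # If the client is uploading against a version other than the current
            # one, work out whether that version exists at all, so we can return
            # the right error: RoomKeysVersionError for a stale (but real)
            # version, NotFoundError for one that never existed.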
            if version_info["version"] != version:
                # Check that the version we're trying to upload actually exists
                try:
                    version_info = yield self.store.get_e2e_room_keys_version_info(
                        user_id, version
                    )
                    # if we get this far, the version must exist
                    raise RoomKeysVersionError(current_version=version_info["version"])
                except StoreError as e:
                    if e.code == 404:
                        raise NotFoundError("Version '%s' not found" % (version,))
                    else:
                        raise

            # Fetch any existing room keys for the sessions that have been
            # submitted. Then compare them with the submitted keys. If the
            # key is new, insert it; if the key should be updated, then update
            # it; otherwise, drop it.
            existing_keys = yield self.store.get_e2e_room_keys_multi(
                user_id, version, room_keys["rooms"]
            )
            to_insert = []  # batch the inserts together
            changed = False  # if anything has changed, we need to update the etag
            for room_id, room in room_keys["rooms"].items():
                for session_id, room_key in room["sessions"].items():
                    if not isinstance(room_key["is_verified"], bool):
                        msg = (
                            "is_verified must be a boolean in keys for session %s in"
                            " room %s" % (session_id, room_id)
                        )
                        raise SynapseError(400, msg, Codes.INVALID_PARAM)

                    log_kv(
                        {
                            "message": "Trying to upload room key",
                            "room_id": room_id,
                            "session_id": session_id,
                            "user_id": user_id,
                        }
                    )
                    current_room_key = existing_keys.get(room_id, {}).get(session_id)
                    if current_room_key:
                        if self._should_replace_room_key(current_room_key, room_key):
                            log_kv({"message": "Replacing room key."})
                            # updates are done one at a time in the DB, so send
                            # updates right away rather than batching them up,
                            # like we do with the inserts
                            yield self.store.update_e2e_room_key(
                                user_id, version, room_id, session_id, room_key
                            )
                            changed = True
                        else:
                            log_kv({"message": "Not replacing room_key."})
                    else:
                        log_kv(
                            {
                                "message": "Room key not found.",
                                "room_id": room_id,
                                "user_id": user_id,
                            }
                        )
                        log_kv({"message": "Replacing room key."})
                        to_insert.append((room_id, session_id, room_key))
                        changed = True

            if len(to_insert):
                yield self.store.add_e2e_room_keys(user_id, version, to_insert)

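            # Only bump the etag if we actually inserted or updated a key; an
            # upload that changes nothing leaves the backup version (and its
            # etag) untouched.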
            version_etag = version_info["etag"]
            if changed:
                version_etag = version_etag + 1
                yield self.store.update_e2e_room_keys_version(
                    user_id, version, None, version_etag
                )

            count = yield self.store.count_e2e_room_keys(user_id, version)
            return {"etag": str(version_etag), "count": count}

    @staticmethod
    def _should_replace_room_key(current_room_key, room_key):
        """
        Determine whether to replace a given current_room_key (if any)
        with a newly uploaded room_key backup

        Args:
            current_room_key (dict): Optional, the current room_key dict if any
            room_key (dict): The new room_key dict which may or may not be fit to
                replace the current_room_key

        Returns:
            True if current_room_key should be replaced by room_key in the backup
        """

        if current_room_key:
            # spelt out with if/elifs rather than nested boolean expressions
            # purely for legibility.

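            # A new key wins if it is verified while the current one is not, or
            # if it covers earlier messages (a lower first_message_index), or if
            # it has been forwarded fewer times (a lower forwarded_count).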
            if room_key["is_verified"] and not current_room_key["is_verified"]:
                return True
            elif (
                room_key["first_message_index"]
                < current_room_key["first_message_index"]
            ):
                return True
            elif room_key["forwarded_count"] < current_room_key["forwarded_count"]:
                return True
            else:
                return False
        return True

    @trace
    @defer.inlineCallbacks
    def create_version(self, user_id, version_info):
        """Create a new backup version. This automatically becomes the new
        backup version for the user's keys; previous backups will no longer be
        writeable.

        Args:
            user_id(str): the user whose backup version we're creating
            version_info(dict): metadata about the new version being created

        {
            "algorithm": "m.megolm_backup.v1",
            "auth_data": "dGhpcyBzaG91bGQgYWN0dWFsbHkgYmUgZW5jcnlwdGVkIGpzb24K"
        }

        Returns:
            A deferred of a string that gives the new version number.
        """

        # TODO: Validate the JSON to make sure it has the right keys.

        # lock everyone out until we've switched version
        with (yield self._upload_linearizer.queue(user_id)):
            new_version = yield self.store.create_e2e_room_keys_version(
                user_id, version_info
            )
            return new_version

    @defer.inlineCallbacks
    def get_version_info(self, user_id, version=None):
        """Get the info about a given version of the user's backup

        Args:
            user_id(str): the user whose current backup version we're querying
            version(str): Optional; if None gives the most recent version
                otherwise a historical one.
        Raises:
            NotFoundError: if the requested backup version doesn't exist
        Returns:
            A deferred of an info dict that gives the info about the requested version.

        {
            "version": "1234",
            "algorithm": "m.megolm_backup.v1",
            "auth_data": "dGhpcyBzaG91bGQgYWN0dWFsbHkgYmUgZW5jcnlwdGVkIGpzb24K"
        }
        """

        with (yield self._upload_linearizer.queue(user_id)):
            try:
                res = yield self.store.get_e2e_room_keys_version_info(user_id, version)
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Unknown backup version")
                else:
                    raise

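            # Annotate the result with the current key count, and return the
            # etag as a string, matching what upload_room_keys and
            # delete_room_keys return.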
            res["count"] = yield self.store.count_e2e_room_keys(user_id, res["version"])
            res["etag"] = str(res["etag"])
            return res

    @trace
    @defer.inlineCallbacks
    def delete_version(self, user_id, version=None):
        """Deletes a given version of the user's e2e_room_keys backup

        Args:
            user_id(str): the user whose current backup version we're deleting
            version(str): the version id of the backup being deleted
        Raises:
            NotFoundError: if this backup version doesn't exist
        """

        with (yield self._upload_linearizer.queue(user_id)):
            try:
                yield self.store.delete_e2e_room_keys_version(user_id, version)
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Unknown backup version")
                else:
                    raise

    @trace
    @defer.inlineCallbacks
    def update_version(self, user_id, version, version_info):
        """Update the info about a given version of the user's backup

        Args:
            user_id(str): the user whose current backup version we're updating
            version(str): the backup version we're updating
            version_info(dict): the new information about the backup
        Raises:
            NotFoundError: if the requested backup version doesn't exist
        Returns:
            A deferred of an empty dict.
        """
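        # If the body supplies a version it must match the version being
        # updated; otherwise default it to that version.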
        if "version" not in version_info:
            version_info["version"] = version
        elif version_info["version"] != version:
            raise SynapseError(
                400, "Version in body does not match", Codes.INVALID_PARAM
            )
        with (yield self._upload_linearizer.queue(user_id)):
            try:
                old_info = yield self.store.get_e2e_room_keys_version_info(
                    user_id, version
                )
            except StoreError as e:
                if e.code == 404:
                    raise NotFoundError("Unknown backup version")
                else:
                    raise
            if old_info["algorithm"] != version_info["algorithm"]:
                raise SynapseError(400, "Algorithm does not match", Codes.INVALID_PARAM)

            yield self.store.update_e2e_room_keys_version(
                user_id, version, version_info
            )

            return {}