# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
# Copyright 2019,2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc
from typing import (
    TYPE_CHECKING,
    Collection,
    Dict,
    Iterable,
    List,
    Optional,
    Tuple,
    cast,
)

import attr
from canonicaljson import encode_canonical_json

from synapse.api.constants import DeviceKeyAlgorithms
from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import (
    DatabasePool,
    LoggingDatabaseConnection,
    LoggingTransaction,
    make_in_list_sql_clause,
)
from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
from synapse.storage.engines import PostgresEngine
from synapse.storage.util.id_generators import StreamIdGenerator
from synapse.types import JsonDict
from synapse.util import json_encoder
from synapse.util.caches.descriptors import cached, cachedList
from synapse.util.iterutils import batch_iter

if TYPE_CHECKING:
    from synapse.handlers.e2e_keys import SignatureListItem
    from synapse.server import HomeServer


@attr.s(slots=True, auto_attribs=True)
class DeviceKeyLookupResult:
    """The type returned by get_e2e_device_keys_and_signatures"""

    display_name: Optional[str]

    # the key data from e2e_device_keys_json. Typically includes fields like
    # "algorithm", "keys" (including the curve25519 identity key and the ed25519 signing
    # key) and "signatures" (a map from (user id) to (key id/device_id) to signature.)
    keys: Optional[JsonDict]
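
    # For illustration only: per the Matrix device keys format, the `keys` JSON
    # stored here typically looks something like the following (IDs and base64
    # values are made up):
    #
    #     {
    #         "user_id": "@alice:example.com",
    #         "device_id": "JLAFKJWSCS",
    #         "algorithms": ["m.olm.v1.curve25519-aes-sha2", "m.megolm.v1.aes-sha2"],
    #         "keys": {
    #             "curve25519:JLAFKJWSCS": "base64+curve25519+key",
    #             "ed25519:JLAFKJWSCS": "base64+ed25519+key",
    #         },
    #         "signatures": {
    #             "@alice:example.com": {"ed25519:JLAFKJWSCS": "base64+signature"}
    #         },
    #     }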


class EndToEndKeyBackgroundStore(SQLBaseStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        self.db_pool.updates.register_background_index_update(
            "e2e_cross_signing_keys_idx",
            index_name="e2e_cross_signing_keys_stream_idx",
            table="e2e_cross_signing_keys",
            columns=["stream_id"],
            unique=True,
        )


class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorkerStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        self._allow_device_name_lookup_over_federation = (
            self.hs.config.federation.allow_device_name_lookup_over_federation
        )

    async def get_e2e_device_keys_for_federation_query(
        self, user_id: str
    ) -> Tuple[int, List[JsonDict]]:
        """Get all devices (with any device keys) for a user

        Returns:
            (stream_id, devices)
        """
        now_stream_id = self.get_device_stream_token()

        devices = await self.get_e2e_device_keys_and_signatures([(user_id, None)])

        if devices:
            user_devices = devices[user_id]
            results = []
            for device_id, device in user_devices.items():
                result = {"device_id": device_id}

                keys = device.keys
                if keys:
                    result["keys"] = keys

                device_display_name = None
                if self._allow_device_name_lookup_over_federation:
                    device_display_name = device.display_name
                if device_display_name:
                    result["device_display_name"] = device_display_name

                results.append(result)

            return now_stream_id, results

        return now_stream_id, []

    @trace
    async def get_e2e_device_keys_for_cs_api(
        self, query_list: List[Tuple[str, Optional[str]]]
    ) -> Dict[str, Dict[str, JsonDict]]:
        """Fetch a list of device keys, formatted suitably for the C/S API.

        Args:
            query_list: List of pairs of user_ids and device_ids.

        Returns:
            Dict mapping from user-id to dict mapping from device_id to
            key data. The key data will be a dict in the same format as the
            DeviceKeys type returned by POST /_matrix/client/r0/keys/query.
        """
        set_tag("query_list", query_list)
        if not query_list:
            return {}

        results = await self.get_e2e_device_keys_and_signatures(query_list)

        # Build the result structure, un-jsonify the results, and add the
        # "unsigned" section
        rv: Dict[str, Dict[str, JsonDict]] = {}
        for user_id, device_keys in results.items():
            rv[user_id] = {}
            for device_id, device_info in device_keys.items():
                r = device_info.keys
                r["unsigned"] = {}
                display_name = device_info.display_name
                if display_name is not None:
                    r["unsigned"]["device_display_name"] = display_name
                rv[user_id][device_id] = r

        return rv

    @trace
    async def get_e2e_device_keys_and_signatures(
        self,
        query_list: List[Tuple[str, Optional[str]]],
        include_all_devices: bool = False,
        include_deleted_devices: bool = False,
    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]:
        """Fetch a list of device keys

        Any cross-signatures made on the keys by the owner of the device are also
        included.

        The cross-signatures are added to the `signatures` field within the `keys`
        object in the response.

        Args:
            query_list: List of pairs of user_ids and device_ids. Device id can be None
                to indicate "all devices for this user"

            include_all_devices: whether to return devices without device keys

            include_deleted_devices: whether to include null entries for
                devices which no longer exist (but were in the query_list).
                This option only takes effect if include_all_devices is true.

        Returns:
            Dict mapping from user-id to dict mapping from device_id to
            key data.
        """
        set_tag("include_all_devices", include_all_devices)
        set_tag("include_deleted_devices", include_deleted_devices)

        result = await self.db_pool.runInteraction(
            "get_e2e_device_keys",
            self._get_e2e_device_keys_txn,
            query_list,
            include_all_devices,
            include_deleted_devices,
        )

        # get the (user_id, device_id) tuples to look up cross-signatures for
        signature_query = (
            (user_id, device_id)
            for user_id, dev in result.items()
            for device_id, d in dev.items()
            if d is not None and d.keys is not None
        )

        for batch in batch_iter(signature_query, 50):
            cross_sigs_result = await self.db_pool.runInteraction(
                "get_e2e_cross_signing_signatures",
                self._get_e2e_cross_signing_signatures_for_devices_txn,
                batch,
            )

            # add each cross-signing signature to the correct device in the result dict.
            for (user_id, key_id, device_id, signature) in cross_sigs_result:
                target_device_result = result[user_id][device_id]
                # We've only looked up cross-signatures for non-deleted devices with key
                # data.
                assert target_device_result is not None
                assert target_device_result.keys is not None
                target_device_signatures = target_device_result.keys.setdefault(
                    "signatures", {}
                )
                signing_user_signatures = target_device_signatures.setdefault(
                    user_id, {}
                )
                signing_user_signatures[key_id] = signature

        log_kv(result)
        return result
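
    # A rough usage sketch (assumed data, for illustration): fetching all of one
    # user's devices, then reading the owner's cross-signatures off the result.
    #
    #     result = await store.get_e2e_device_keys_and_signatures(
    #         [("@alice:example.com", None)]
    #     )
    #     lookup = result["@alice:example.com"]["JLAFKJWSCS"]
    #     # lookup.keys["signatures"] now includes any cross-signatures made by
    #     # @alice:example.com on that device's keys.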

    def _get_e2e_device_keys_txn(
        self,
        txn: LoggingTransaction,
        query_list: Collection[Tuple[str, str]],
        include_all_devices: bool = False,
        include_deleted_devices: bool = False,
    ) -> Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]:
        """Get information on devices from the database

        The results include the device's keys and self-signatures, but *not* any
        cross-signing signatures which have been added subsequently (for which, see
        get_e2e_device_keys_and_signatures)
        """
        query_clauses = []
        query_params = []

        if include_all_devices is False:
            include_deleted_devices = False

        if include_deleted_devices:
            deleted_devices = set(query_list)

        for (user_id, device_id) in query_list:
            query_clause = "user_id = ?"
            query_params.append(user_id)

            if device_id is not None:
                query_clause += " AND device_id = ?"
                query_params.append(device_id)

            query_clauses.append(query_clause)

        sql = (
            "SELECT user_id, device_id, "
            " d.display_name, "
            " k.key_json"
            " FROM devices d"
            " %s JOIN e2e_device_keys_json k USING (user_id, device_id)"
            " WHERE %s AND NOT d.hidden"
        ) % (
            "LEFT" if include_all_devices else "INNER",
            " OR ".join("(" + q + ")" for q in query_clauses),
        )

        txn.execute(sql, query_params)

        result: Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]] = {}
        for (user_id, device_id, display_name, key_json) in txn:
            if include_deleted_devices:
                deleted_devices.remove((user_id, device_id))
            result.setdefault(user_id, {})[device_id] = DeviceKeyLookupResult(
                display_name, db_to_json(key_json) if key_json else None
            )

        if include_deleted_devices:
            for user_id, device_id in deleted_devices:
                result.setdefault(user_id, {})[device_id] = None

        return result
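
    # For a hypothetical query_list of [("@alice:example.com", None)] with
    # include_all_devices=True, the SQL built above expands to roughly:
    #
    #     SELECT user_id, device_id, d.display_name, k.key_json
    #     FROM devices d
    #     LEFT JOIN e2e_device_keys_json k USING (user_id, device_id)
    #     WHERE (user_id = ?) AND NOT d.hidden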

    def _get_e2e_cross_signing_signatures_for_devices_txn(
        self, txn: LoggingTransaction, device_query: Iterable[Tuple[str, str]]
    ) -> List[Tuple[str, str, str, str]]:
        """Get cross-signing signatures for a given list of devices

        Returns signatures made by the owners of the devices.

        Returns: a list of results; each entry in the list is a tuple of
            (user_id, key_id, target_device_id, signature).
        """
        signature_query_clauses = []
        signature_query_params = []

        for (user_id, device_id) in device_query:
            signature_query_clauses.append(
                "target_user_id = ? AND target_device_id = ? AND user_id = ?"
            )
            signature_query_params.extend([user_id, device_id, user_id])

        signature_sql = """
            SELECT user_id, key_id, target_device_id, signature
            FROM e2e_cross_signing_signatures WHERE %s
            """ % (
            " OR ".join("(" + q + ")" for q in signature_query_clauses)
        )

        txn.execute(signature_sql, signature_query_params)
        return cast(List[Tuple[str, str, str, str]], txn.fetchall())

    async def get_e2e_one_time_keys(
        self, user_id: str, device_id: str, key_ids: List[str]
    ) -> Dict[Tuple[str, str], str]:
        """Retrieve a number of one-time keys for a user

        Args:
            user_id: id of user to get keys for
            device_id: id of device to get keys for
            key_ids: list of key ids (excluding algorithm) to retrieve

        Returns:
            A map from (algorithm, key_id) to json string for key
        """

        rows = await self.db_pool.simple_select_many_batch(
            table="e2e_one_time_keys_json",
            column="key_id",
            iterable=key_ids,
            retcols=("algorithm", "key_id", "key_json"),
            keyvalues={"user_id": user_id, "device_id": device_id},
            desc="add_e2e_one_time_keys_check",
        )
        result = {(row["algorithm"], row["key_id"]): row["key_json"] for row in rows}
        log_kv({"message": "Fetched one time keys for user", "one_time_keys": result})
        return result

    async def add_e2e_one_time_keys(
        self,
        user_id: str,
        device_id: str,
        time_now: int,
        new_keys: Iterable[Tuple[str, str, str]],
    ) -> None:
        """Insert some new one time keys for a device. Errors if any of the
        keys already exist.

        Args:
            user_id: id of user to get keys for
            device_id: id of device to get keys for
            time_now: insertion time to record (ms since epoch)
            new_keys: keys to add - each a tuple of (algorithm, key_id, key json)
        """

        def _add_e2e_one_time_keys(txn: LoggingTransaction) -> None:
            set_tag("user_id", user_id)
            set_tag("device_id", device_id)
            set_tag("new_keys", new_keys)
            # We are protected from race between lookup and insertion due to
            # a unique constraint. If there is a race of two calls to
            # `add_e2e_one_time_keys` then they'll conflict and we will only
            # insert one set.
            self.db_pool.simple_insert_many_txn(
                txn,
                table="e2e_one_time_keys_json",
                keys=(
                    "user_id",
                    "device_id",
                    "algorithm",
                    "key_id",
                    "ts_added_ms",
                    "key_json",
                ),
                values=[
                    (user_id, device_id, algorithm, key_id, time_now, json_bytes)
                    for algorithm, key_id, json_bytes in new_keys
                ],
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )

        await self.db_pool.runInteraction(
            "add_e2e_one_time_keys_insert", _add_e2e_one_time_keys
        )
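
    # Illustrative call (made-up key material): each entry of `new_keys` is an
    # (algorithm, key_id, key_json) triple, e.g.
    #
    #     await store.add_e2e_one_time_keys(
    #         "@alice:example.com",
    #         "JLAFKJWSCS",
    #         time_now_ms,
    #         [("signed_curve25519", "AAAAHQ", '{"key": "base64+key"}')],
    #     )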

    @cached(max_entries=10000)
    async def count_e2e_one_time_keys(
        self, user_id: str, device_id: str
    ) -> Dict[str, int]:
        """Count the number of one time keys the server has for a device

        Returns:
            A mapping from algorithm to number of keys for that algorithm.
        """

        def _count_e2e_one_time_keys(txn: LoggingTransaction) -> Dict[str, int]:
            sql = (
                "SELECT algorithm, COUNT(key_id) FROM e2e_one_time_keys_json"
                " WHERE user_id = ? AND device_id = ?"
                " GROUP BY algorithm"
            )
            txn.execute(sql, (user_id, device_id))

            # Initially set the key count to 0. This ensures that the client will always
            # receive *some count*, even if it's 0.
            result = {DeviceKeyAlgorithms.SIGNED_CURVE25519: 0}

            # Override entries with the count of any keys we pulled from the database
            for algorithm, key_count in txn:
                result[algorithm] = key_count

            return result

        return await self.db_pool.runInteraction(
            "count_e2e_one_time_keys", _count_e2e_one_time_keys
        )
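
    # The returned mapping looks like {"signed_curve25519": 20}; the default
    # entry above guarantees "signed_curve25519" is present (possibly as 0)
    # even when the device has no keys of that algorithm left.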

    async def set_e2e_fallback_keys(
        self, user_id: str, device_id: str, fallback_keys: JsonDict
    ) -> None:
        """Set the user's e2e fallback keys.

        Args:
            user_id: the user whose keys are being set
            device_id: the device whose keys are being set
            fallback_keys: the keys to set. This is a map from key ID (which is
                of the form "algorithm:id") to key data.
        """
        await self.db_pool.runInteraction(
            "set_e2e_fallback_keys_txn",
            self._set_e2e_fallback_keys_txn,
            user_id,
            device_id,
            fallback_keys,
        )

        await self.invalidate_cache_and_stream(
            "get_e2e_unused_fallback_key_types", (user_id, device_id)
        )

    def _set_e2e_fallback_keys_txn(
        self,
        txn: LoggingTransaction,
        user_id: str,
        device_id: str,
        fallback_keys: JsonDict,
    ) -> None:
        # fallback_keys will usually only have one item in it, so using a for
        # loop (as opposed to calling simple_upsert_many_txn) won't be too bad
        # FIXME: make sure that only one key per algorithm is uploaded
        for key_id, fallback_key in fallback_keys.items():
            algorithm, key_id = key_id.split(":", 1)
            old_key_json = self.db_pool.simple_select_one_onecol_txn(
                txn,
                table="e2e_fallback_keys_json",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                    "algorithm": algorithm,
                },
                retcol="key_json",
                allow_none=True,
            )

            new_key_json = encode_canonical_json(fallback_key).decode("utf-8")

            # If the uploaded key is the same as the current fallback key,
            # don't do anything. This prevents marking the key as unused if it
            # was already used.
            if old_key_json != new_key_json:
                self.db_pool.simple_upsert_txn(
                    txn,
                    table="e2e_fallback_keys_json",
                    keyvalues={
                        "user_id": user_id,
                        "device_id": device_id,
                        "algorithm": algorithm,
                    },
                    values={
                        "key_id": key_id,
                        "key_json": json_encoder.encode(fallback_key),
                        "used": False,
                    },
                )
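
    # For illustration (made-up values): a client upload of a single fallback
    # key arrives here as a map like
    #
    #     {"signed_curve25519:AAAAHg": {"key": "base64+key", "fallback": True}}
    #
    # which the loop above splits into algorithm "signed_curve25519" and
    # key ID "AAAAHg" before upserting.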

    @cached(max_entries=10000)
    async def get_e2e_unused_fallback_key_types(
        self, user_id: str, device_id: str
    ) -> List[str]:
        """Returns the fallback key types that have an unused key.

        Args:
            user_id: the user whose keys are being queried
            device_id: the device whose keys are being queried

        Returns:
            a list of key types
        """
        return await self.db_pool.simple_select_onecol(
            "e2e_fallback_keys_json",
            keyvalues={"user_id": user_id, "device_id": device_id, "used": False},
            retcol="algorithm",
            desc="get_e2e_unused_fallback_key_types",
        )

    async def get_e2e_cross_signing_key(
        self, user_id: str, key_type: str, from_user_id: Optional[str] = None
    ) -> Optional[JsonDict]:
        """Returns a user's cross-signing key.

        Args:
            user_id: the user whose key is being requested
            key_type: the type of key that is being requested: either 'master'
                for a master key, 'self_signing' for a self-signing key, or
                'user_signing' for a user-signing key
            from_user_id: if specified, signatures made by this user on
                the self-signing key will be included in the result

        Returns:
            dict of the key data or None if not found
        """
        res = await self.get_e2e_cross_signing_keys_bulk([user_id], from_user_id)
        user_keys = res.get(user_id)
        if not user_keys:
            return None
        return user_keys.get(key_type)
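
    # Usage sketch (illustrative IDs): fetch Alice's master key, including any
    # signatures Alice herself has made on it.
    #
    #     key = await store.get_e2e_cross_signing_key(
    #         "@alice:example.com", "master", from_user_id="@alice:example.com"
    #     )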

    @cached(num_args=1)
    def _get_bare_e2e_cross_signing_keys(self, user_id: str) -> Dict[str, JsonDict]:
        """Dummy function. Only used to make a cache for
        _get_bare_e2e_cross_signing_keys_bulk.
        """
        raise NotImplementedError()

    @cachedList(
        cached_method_name="_get_bare_e2e_cross_signing_keys",
        list_name="user_ids",
        num_args=1,
    )
    async def _get_bare_e2e_cross_signing_keys_bulk(
        self, user_ids: Iterable[str]
    ) -> Dict[str, Optional[Dict[str, JsonDict]]]:
        """Returns the cross-signing keys for a set of users. The output of this
        function should be passed to _get_e2e_cross_signing_signatures_txn if
        the signatures for the calling user need to be fetched.

        Args:
            user_ids: the users whose keys are being requested

        Returns:
            A mapping from user ID to key type to key data. If a user's cross-signing
            keys were not found, either their user ID will not be in the dict, or
            their user ID will map to None.
        """
        result = await self.db_pool.runInteraction(
            "get_bare_e2e_cross_signing_keys_bulk",
            self._get_bare_e2e_cross_signing_keys_bulk_txn,
            user_ids,
        )

        # The `Optional` comes from the `@cachedList` decorator.
        return cast(Dict[str, Optional[Dict[str, JsonDict]]], result)

    def _get_bare_e2e_cross_signing_keys_bulk_txn(
        self,
        txn: LoggingTransaction,
        user_ids: Iterable[str],
    ) -> Dict[str, Dict[str, JsonDict]]:
        """Returns the cross-signing keys for a set of users. The output of this
        function should be passed to _get_e2e_cross_signing_signatures_txn if
        the signatures for the calling user need to be fetched.

        Args:
            txn: db connection
            user_ids: the users whose keys are being requested

        Returns:
            Mapping from user ID to key type to key data.
            If a user's cross-signing keys were not found, their user ID will not be in
            the dict.
        """
        result: Dict[str, Dict[str, JsonDict]] = {}

        for user_chunk in batch_iter(user_ids, 100):
            clause, params = make_in_list_sql_clause(
                txn.database_engine, "user_id", user_chunk
            )

            # Fetch the latest key for each type per user.
            if isinstance(self.database_engine, PostgresEngine):
                # The `DISTINCT ON` clause will pick the *first* row it
                # encounters, so ordering by stream ID desc will ensure we get
                # the latest key.
                sql = """
                    SELECT DISTINCT ON (user_id, keytype) user_id, keytype, keydata, stream_id
                    FROM e2e_cross_signing_keys
                    WHERE %(clause)s
                    ORDER BY user_id, keytype, stream_id DESC
                """ % {
                    "clause": clause
                }
            else:
                # SQLite has special handling for bare columns when using
                # MIN/MAX with a `GROUP BY` clause where it picks the value from
                # a row that matches the MIN/MAX.
                sql = """
                    SELECT user_id, keytype, keydata, MAX(stream_id)
                    FROM e2e_cross_signing_keys
                    WHERE %(clause)s
                    GROUP BY user_id, keytype
                """ % {
                    "clause": clause
                }

            txn.execute(sql, params)
            rows = self.db_pool.cursor_to_dict(txn)

            for row in rows:
                user_id = row["user_id"]
                key_type = row["keytype"]
                key = db_to_json(row["keydata"])
                user_keys = result.setdefault(user_id, {})
                user_keys[key_type] = key

        return result
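
    # Shape of the result, for illustration (key data elided):
    #
    #     {
    #         "@alice:example.com": {
    #             "master": {...},
    #             "self_signing": {...},
    #             "user_signing": {...},
    #         }
    #     }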

    def _get_e2e_cross_signing_signatures_txn(
        self,
        txn: LoggingTransaction,
        keys: Dict[str, Optional[Dict[str, JsonDict]]],
        from_user_id: str,
    ) -> Dict[str, Optional[Dict[str, JsonDict]]]:
        """Returns the cross-signing signatures made by a user on a set of keys.

        Args:
            txn: db connection
            keys: a map of user ID to key type to key data.
                This dict will be modified to add signatures.
            from_user_id: fetch the signatures made by this user

        Returns:
            Mapping from user ID to key type to key data.
            The return value will be the same as the keys argument, with the
            modifications included.
        """

        # find out what cross-signing keys (a.k.a. devices) we need to get
        # signatures for. This is a map of (user_id, device_id) to key type
        # (device_id is the key's public part).
        devices: Dict[Tuple[str, str], str] = {}

        for user_id, user_keys in keys.items():
            if user_keys is None:
                continue
            for key_type, key in user_keys.items():
                device_id = None
                for k in key["keys"].values():
                    device_id = k
                # `key` ought to be a `CrossSigningKey`, whose .keys property is a
                # dictionary with a single entry:
                #     "algorithm:base64_public_key": "base64_public_key"
                # See https://spec.matrix.org/v1.1/client-server-api/#cross-signing
                assert isinstance(device_id, str)
                devices[(user_id, device_id)] = key_type

        for batch in batch_iter(devices.keys(), size=100):
            sql = """
                SELECT target_user_id, target_device_id, key_id, signature
                FROM e2e_cross_signing_signatures
                WHERE user_id = ?
                AND (%s)
            """ % (
                " OR ".join(
                    "(target_user_id = ? AND target_device_id = ?)" for _ in batch
                )
            )
            query_params = [from_user_id]
            for item in batch:
                # item is a (user_id, device_id) tuple
                query_params.extend(item)

            txn.execute(sql, query_params)
            rows = self.db_pool.cursor_to_dict(txn)

            # and add the signatures to the appropriate keys
            for row in rows:
                key_id: str = row["key_id"]
                target_user_id: str = row["target_user_id"]
                target_device_id: str = row["target_device_id"]
                key_type = devices[(target_user_id, target_device_id)]
                # We need to copy everything, because the result may have come
                # from the cache. dict.copy only does a shallow copy, so we
                # need to recursively copy the dicts that will be modified.
                user_keys = keys[target_user_id]
                # `user_keys` cannot be `None` because we only fetched signatures for
                # users with keys
                assert user_keys is not None
                user_keys = keys[target_user_id] = user_keys.copy()

                target_user_key = user_keys[key_type] = user_keys[key_type].copy()
                if "signatures" in target_user_key:
                    signatures = target_user_key["signatures"] = target_user_key[
                        "signatures"
                    ].copy()
                    if from_user_id in signatures:
                        user_sigs = signatures[from_user_id] = signatures[
                            from_user_id
                        ].copy()
                        user_sigs[key_id] = row["signature"]
                    else:
                        signatures[from_user_id] = {key_id: row["signature"]}
                else:
                    target_user_key["signatures"] = {
                        from_user_id: {key_id: row["signature"]}
                    }

        return keys

    async def get_e2e_cross_signing_keys_bulk(
        self, user_ids: List[str], from_user_id: Optional[str] = None
    ) -> Dict[str, Optional[Dict[str, JsonDict]]]:
        """Returns the cross-signing keys for a set of users.

        Args:
            user_ids: the users whose keys are being requested
            from_user_id: if specified, signatures made by this user on
                the self-signing keys will be included in the result

        Returns:
            A map of user ID to key type to key data. If a user's cross-signing
            keys were not found, either their user ID will not be in the dict,
            or their user ID will map to None.
        """

        result = await self._get_bare_e2e_cross_signing_keys_bulk(user_ids)

        if from_user_id:
            result = await self.db_pool.runInteraction(
                "get_e2e_cross_signing_signatures",
                self._get_e2e_cross_signing_signatures_txn,
                result,
                from_user_id,
            )

        return result

    async def get_all_user_signature_changes_for_remotes(
        self, instance_name: str, last_id: int, current_id: int, limit: int
    ) -> Tuple[List[Tuple[int, tuple]], int, bool]:
        """Get updates for the user signature replication stream.

        Note that the user signature stream represents when a user signs their
        device with their user-signing key, which is not published to other
        users or servers, so no `destination` is needed in the returned
        list. However, this is needed to poke workers.

        Args:
            instance_name: The writer we want to fetch updates from. Unused
                here since there is only ever one writer.
            last_id: The token to fetch updates from. Exclusive.
            current_id: The token to fetch updates up to. Inclusive.
            limit: The requested limit for the number of rows to return. The
                function may return more or fewer rows.

        Returns:
            A tuple consisting of: the updates, a token to use to fetch
            subsequent updates, and whether we returned fewer rows than exists
            between the requested tokens due to the limit.

            The token returned can be used in a subsequent call to this
            function to get further updates.

            The updates are a list of 2-tuples of stream ID and the row data
        """

        if last_id == current_id:
            return [], current_id, False

        def _get_all_user_signature_changes_for_remotes_txn(
            txn: LoggingTransaction,
        ) -> Tuple[List[Tuple[int, tuple]], int, bool]:
            sql = """
                SELECT stream_id, from_user_id AS user_id
                FROM user_signature_stream
                WHERE ? < stream_id AND stream_id <= ?
                ORDER BY stream_id ASC
                LIMIT ?
            """
            txn.execute(sql, (last_id, current_id, limit))

            updates = [(row[0], (row[1:])) for row in txn]

            limited = False
            upto_token = current_id
            if len(updates) >= limit:
                upto_token = updates[-1][0]
                limited = True

            return updates, upto_token, limited

        return await self.db_pool.runInteraction(
            "get_all_user_signature_changes_for_remotes",
            _get_all_user_signature_changes_for_remotes_txn,
        )

    @abc.abstractmethod
    def get_device_stream_token(self) -> int:
        """Get the current stream id from the _device_list_id_gen"""
        ...

    async def claim_e2e_one_time_keys(
        self, query_list: Iterable[Tuple[str, str, str]]
    ) -> Dict[str, Dict[str, Dict[str, str]]]:
        """Take a list of one time keys out of the database.

        Args:
            query_list: An iterable of tuples of (user ID, device ID, algorithm).

        Returns:
            A map of user ID -> a map device ID -> a map of key ID -> JSON bytes.
        """

        @trace
        def _claim_e2e_one_time_key_simple(
            txn: LoggingTransaction, user_id: str, device_id: str, algorithm: str
        ) -> Optional[Tuple[str, str]]:
            """Claim OTK for device for DBs that don't support RETURNING.

            Returns:
                A tuple of key name (algorithm + key ID) and key JSON, if an
                OTK was found.
            """

            sql = """
                SELECT key_id, key_json FROM e2e_one_time_keys_json
                WHERE user_id = ? AND device_id = ? AND algorithm = ?
                LIMIT 1
            """

            txn.execute(sql, (user_id, device_id, algorithm))
            otk_row = txn.fetchone()
            if otk_row is None:
                return None

            key_id, key_json = otk_row

            self.db_pool.simple_delete_one_txn(
                txn,
                table="e2e_one_time_keys_json",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                    "algorithm": algorithm,
                    "key_id": key_id,
                },
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )

            return f"{algorithm}:{key_id}", key_json

        @trace
        def _claim_e2e_one_time_key_returning(
            txn: LoggingTransaction, user_id: str, device_id: str, algorithm: str
        ) -> Optional[Tuple[str, str]]:
            """Claim OTK for device for DBs that support RETURNING.

            Returns:
                A tuple of key name (algorithm + key ID) and key JSON, if an
                OTK was found.
            """

            # We can use RETURNING to do the fetch and DELETE in one step.
            sql = """
                DELETE FROM e2e_one_time_keys_json
                WHERE user_id = ? AND device_id = ? AND algorithm = ?
                    AND key_id IN (
                        SELECT key_id FROM e2e_one_time_keys_json
                        WHERE user_id = ? AND device_id = ? AND algorithm = ?
                        LIMIT 1
                    )
                RETURNING key_id, key_json
            """

            txn.execute(
                sql, (user_id, device_id, algorithm, user_id, device_id, algorithm)
            )
            otk_row = txn.fetchone()
            if otk_row is None:
                return None

            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )

            key_id, key_json = otk_row
            return f"{algorithm}:{key_id}", key_json

        results: Dict[str, Dict[str, Dict[str, str]]] = {}
        for user_id, device_id, algorithm in query_list:
            if self.database_engine.supports_returning:
                # If we support RETURNING clause we can use a single query that
                # allows us to use autocommit mode.
                _claim_e2e_one_time_key = _claim_e2e_one_time_key_returning
                db_autocommit = True
            else:
                _claim_e2e_one_time_key = _claim_e2e_one_time_key_simple
                db_autocommit = False

            row = await self.db_pool.runInteraction(
                "claim_e2e_one_time_keys",
                _claim_e2e_one_time_key,
                user_id,
                device_id,
                algorithm,
                db_autocommit=db_autocommit,
            )
            if row:
                device_results = results.setdefault(user_id, {}).setdefault(
                    device_id, {}
                )
                device_results[row[0]] = row[1]
                continue

            # No one-time key available, so see if there's a fallback
            # key
            row = await self.db_pool.simple_select_one(
                table="e2e_fallback_keys_json",
                keyvalues={
                    "user_id": user_id,
                    "device_id": device_id,
                    "algorithm": algorithm,
                },
                retcols=("key_id", "key_json", "used"),
                desc="_get_fallback_key",
                allow_none=True,
            )
            if row is None:
                continue

            key_id = row["key_id"]
            key_json = row["key_json"]
            used = row["used"]

            # Mark fallback key as used if not already.
            if not used:
                await self.db_pool.simple_update_one(
                    table="e2e_fallback_keys_json",
                    keyvalues={
                        "user_id": user_id,
                        "device_id": device_id,
                        "algorithm": algorithm,
                        "key_id": key_id,
                    },
                    updatevalues={"used": True},
                    desc="_get_fallback_key_set_used",
                )
                await self.invalidate_cache_and_stream(
                    "get_e2e_unused_fallback_key_types", (user_id, device_id)
                )

            device_results = results.setdefault(user_id, {}).setdefault(device_id, {})
            device_results[f"{algorithm}:{key_id}"] = key_json

        return results
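
    # Usage sketch (assumed data): claiming one signed_curve25519 key for one
    # device. If no one-time key is left, the result falls back to the device's
    # fallback key (when one has been uploaded).
    #
    #     claimed = await store.claim_e2e_one_time_keys(
    #         [("@alice:example.com", "JLAFKJWSCS", "signed_curve25519")]
    #     )
    #     # e.g. {"@alice:example.com": {"JLAFKJWSCS": {"signed_curve25519:AAAAHQ": "..."}}}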


class EndToEndKeyStore(EndToEndKeyWorkerStore, SQLBaseStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        super().__init__(database, db_conn, hs)

        self._cross_signing_id_gen = StreamIdGenerator(
            db_conn, "e2e_cross_signing_keys", "stream_id"
        )

    async def set_e2e_device_keys(
        self, user_id: str, device_id: str, time_now: int, device_keys: JsonDict
    ) -> bool:
        """Stores device keys for a device. Returns True if there was a change,
        or False if the keys were already in the database.
        """

        def _set_e2e_device_keys_txn(txn: LoggingTransaction) -> bool:
            set_tag("user_id", user_id)
            set_tag("device_id", device_id)
            set_tag("time_now", time_now)
            set_tag("device_keys", device_keys)

            old_key_json = self.db_pool.simple_select_one_onecol_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
                retcol="key_json",
                allow_none=True,
            )

            # In py3 we need old_key_json to match new_key_json type. The DB
            # returns unicode while encode_canonical_json returns bytes.
            new_key_json = encode_canonical_json(device_keys).decode("utf-8")

            if old_key_json == new_key_json:
                log_kv({"Message": "Device key already stored."})
                return False

            self.db_pool.simple_upsert_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
                values={"ts_added_ms": time_now, "key_json": new_key_json},
            )
            log_kv({"message": "Device keys stored."})
            return True

        return await self.db_pool.runInteraction(
            "set_e2e_device_keys", _set_e2e_device_keys_txn
        )
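
    # Usage sketch (made-up payload): storing a device's uploaded keys; a second
    # identical call returns False because the canonical JSON is unchanged.
    #
    #     changed = await store.set_e2e_device_keys(
    #         "@alice:example.com", "JLAFKJWSCS", time_now_ms, device_keys_json
    #     )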

    async def delete_e2e_keys_by_device(self, user_id: str, device_id: str) -> None:
        def delete_e2e_keys_by_device_txn(txn: LoggingTransaction) -> None:
            log_kv(
                {
                    "message": "Deleting keys for device",
                    "device_id": device_id,
                    "user_id": user_id,
                }
            )
            self.db_pool.simple_delete_txn(
                txn,
                table="e2e_device_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self.db_pool.simple_delete_txn(
                txn,
                table="e2e_one_time_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._invalidate_cache_and_stream(
                txn, self.count_e2e_one_time_keys, (user_id, device_id)
            )
            self.db_pool.simple_delete_txn(
                txn,
                table="dehydrated_devices",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self.db_pool.simple_delete_txn(
                txn,
                table="e2e_fallback_keys_json",
                keyvalues={"user_id": user_id, "device_id": device_id},
            )
            self._invalidate_cache_and_stream(
                txn, self.get_e2e_unused_fallback_key_types, (user_id, device_id)
            )

        await self.db_pool.runInteraction(
            "delete_e2e_keys_by_device", delete_e2e_keys_by_device_txn
        )

    def _set_e2e_cross_signing_key_txn(
        self,
        txn: LoggingTransaction,
        user_id: str,
        key_type: str,
        key: JsonDict,
        stream_id: int,
    ) -> None:
        """Set a user's cross-signing key.

        Args:
            txn: db connection
            user_id: the user to set the signing key for
            key_type: the type of key that is being set: either 'master'
                for a master key, 'self_signing' for a self-signing key, or
                'user_signing' for a user-signing key
            key: the key data
            stream_id
        """
        # the 'key' dict will look something like:
        # {
        #   "user_id": "@alice:example.com",
        #   "usage": ["self_signing"],
        #   "keys": {
        #     "ed25519:base64+self+signing+public+key": "base64+self+signing+public+key",
        #   },
        #   "signatures": {
        #     "@alice:example.com": {
        #       "ed25519:base64+master+public+key": "base64+signature"
        #     }
        #   }
        # }
        # The "keys" property must only have one entry, which will be the public
        # key, so we just grab the first value in there
        pubkey = next(iter(key["keys"].values()))

        # The cross-signing keys need to occupy the same namespace as devices,
        # since signatures are identified by device ID. So add an entry to the
        # device table to make sure that we don't have a collision with device
        # IDs.
        # We only need to do this for local users, since remote servers should be
        # responsible for checking this for their own users.
        if self.hs.is_mine_id(user_id):
            self.db_pool.simple_insert_txn(
                txn,
                "devices",
                values={
                    "user_id": user_id,
                    "device_id": pubkey,
                    "display_name": key_type + " signing key",
                    "hidden": True,
                },
            )

        # and finally, store the key itself
        self.db_pool.simple_insert_txn(
            txn,
            "e2e_cross_signing_keys",
            values={
                "user_id": user_id,
                "keytype": key_type,
                "keydata": json_encoder.encode(key),
                "stream_id": stream_id,
            },
        )

        self._invalidate_cache_and_stream(
            txn, self._get_bare_e2e_cross_signing_keys, (user_id,)
        )

    async def set_e2e_cross_signing_key(
        self, user_id: str, key_type: str, key: JsonDict
    ) -> None:
        """Set a user's cross-signing key.

        Args:
            user_id: the user to set the user-signing key for
            key_type: the type of cross-signing key to set
            key: the key data
        """

        async with self._cross_signing_id_gen.get_next() as stream_id:
            return await self.db_pool.runInteraction(
                "add_e2e_cross_signing_key",
                self._set_e2e_cross_signing_key_txn,
                user_id,
                key_type,
                key,
                stream_id,
            )

    async def store_e2e_cross_signing_signatures(
        self, user_id: str, signatures: "Iterable[SignatureListItem]"
    ) -> None:
        """Stores cross-signing signatures.

        Args:
            user_id: the user who made the signatures
            signatures: signatures to add
        """
        await self.db_pool.simple_insert_many(
            "e2e_cross_signing_signatures",
            keys=(
                "user_id",
                "key_id",
                "target_user_id",
                "target_device_id",
                "signature",
            ),
            values=[
                (
                    user_id,
                    item.signing_key_id,
                    item.target_user_id,
                    item.target_device_id,
                    item.signature,
                )
                for item in signatures
            ],
            desc="add_e2e_signing_key",
        )