# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import List, Tuple

from synapse.logging.opentracing import log_kv, set_tag, trace
from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
from synapse.storage.database import DatabasePool
from synapse.util import json_encoder
from synapse.util.caches.expiringcache import ExpiringCache

logger = logging.getLogger(__name__)


class DeviceInboxWorkerStore(SQLBaseStore):
    def get_to_device_stream_token(self):
        return self._device_inbox_id_gen.get_current_token()

    async def get_new_messages_for_device(
        self,
        user_id: str,
        device_id: str,
        last_stream_id: int,
        current_stream_id: int,
        limit: int = 100,
    ) -> Tuple[List[dict], int]:
        """Retrieve up to `limit` to-device messages for a device.

        Args:
            user_id: The recipient user_id.
            device_id: The recipient device_id.
            last_stream_id: The last stream ID checked.
            current_stream_id: The current position of the to device
                message stream.
            limit: The maximum number of messages to retrieve.

        Returns:
            A list of messages for the device and where in the stream the
            messages got to.
        """
        has_changed = self._device_inbox_stream_cache.has_entity_changed(
            user_id, last_stream_id
        )
        if not has_changed:
            return ([], current_stream_id)

        def get_new_messages_for_device_txn(txn):
            sql = (
                "SELECT stream_id, message_json FROM device_inbox"
                " WHERE user_id = ? AND device_id = ?"
                " AND ? < stream_id AND stream_id <= ?"
                " ORDER BY stream_id ASC"
                " LIMIT ?"
            )
            txn.execute(
                sql, (user_id, device_id, last_stream_id, current_stream_id, limit)
            )
            messages = []
            for row in txn:
                stream_pos = row[0]
                messages.append(db_to_json(row[1]))
            # Fewer rows than the limit means we exhausted the requested
            # range, so the caller can advance to current_stream_id.
            if len(messages) < limit:
                stream_pos = current_stream_id
            return messages, stream_pos

        return await self.db_pool.runInteraction(
            "get_new_messages_for_device", get_new_messages_for_device_txn
        )
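
    # A minimal usage sketch, for illustration only -- `store`, `since_token`
    # and `current_token` are assumed names, not part of this module:
    #
    #     messages, stream_pos = await store.get_new_messages_for_device(
    #         "@alice:example.com", "DEVICEID", since_token, current_token
    #     )
    #     # Feed `stream_pos` back in as `last_stream_id` on the next call to
    #     # page through the inbox without re-reading old messages.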

    @trace
    async def delete_messages_for_device(
        self, user_id: str, device_id: str, up_to_stream_id: int
    ) -> int:
        """Delete a device's to-device messages up to a stream position.

        Args:
            user_id: The recipient user_id.
            device_id: The recipient device_id.
            up_to_stream_id: Where to delete messages up to.

        Returns:
            The number of messages deleted.
        """
        # If we have cached the last stream id we've deleted up to, we can
        # check if there is likely to be anything that needs deleting.
        last_deleted_stream_id = self._last_device_delete_cache.get(
            (user_id, device_id), None
        )

        set_tag("last_deleted_stream_id", last_deleted_stream_id)

        if last_deleted_stream_id:
            has_changed = self._device_inbox_stream_cache.has_entity_changed(
                user_id, last_deleted_stream_id
            )
            if not has_changed:
                log_kv({"message": "No changes in cache since last check"})
                return 0

        def delete_messages_for_device_txn(txn):
            sql = (
                "DELETE FROM device_inbox"
                " WHERE user_id = ? AND device_id = ?"
                " AND stream_id <= ?"
            )
            txn.execute(sql, (user_id, device_id, up_to_stream_id))
            return txn.rowcount

        count = await self.db_pool.runInteraction(
            "delete_messages_for_device", delete_messages_for_device_txn
        )

        log_kv(
            {"message": "deleted {} messages for device".format(count), "count": count}
        )

        # Update the cache, ensuring that we only ever increase the value.
        last_deleted_stream_id = self._last_device_delete_cache.get(
            (user_id, device_id), 0
        )
        self._last_device_delete_cache[(user_id, device_id)] = max(
            last_deleted_stream_id, up_to_stream_id
        )

        return count
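
    # Illustrative only: a hypothetical caller (e.g. a sync handler) would
    # delete everything up to the stream position the client has acknowledged:
    #
    #     deleted = await store.delete_messages_for_device(
    #         user_id, device_id, up_to_stream_id=acked_stream_pos
    #     )
    #     # Repeated calls with the same position are typically cheap no-ops
    #     # thanks to _last_device_delete_cache.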

    @trace
    async def get_new_device_msgs_for_remote(
        self, destination, last_stream_id, current_stream_id, limit
    ) -> Tuple[List[dict], int]:
        """Retrieve queued to-device messages destined for a remote server.

        Args:
            destination(str): The name of the remote server.
            last_stream_id(int): The last position of the device message stream
                that the server sent up to.
            current_stream_id(int): The current position of the device
                message stream.
            limit(int): The maximum number of messages to retrieve.

        Returns:
            A list of messages for the destination and where in the stream
            the messages got to.
        """

        set_tag("destination", destination)
        set_tag("last_stream_id", last_stream_id)
        set_tag("current_stream_id", current_stream_id)
        set_tag("limit", limit)

        has_changed = self._device_federation_outbox_stream_cache.has_entity_changed(
            destination, last_stream_id
        )
        if not has_changed or last_stream_id == current_stream_id:
            log_kv({"message": "No new messages in stream"})
            return ([], current_stream_id)

        if limit <= 0:
            # This can happen if we run out of room for EDUs in the transaction.
            return ([], last_stream_id)

        @trace
        def get_new_messages_for_remote_destination_txn(txn):
            sql = (
                "SELECT stream_id, messages_json FROM device_federation_outbox"
                " WHERE destination = ?"
                " AND ? < stream_id AND stream_id <= ?"
                " ORDER BY stream_id ASC"
                " LIMIT ?"
            )
            txn.execute(sql, (destination, last_stream_id, current_stream_id, limit))
            messages = []
            for row in txn:
                stream_pos = row[0]
                messages.append(db_to_json(row[1]))
            if len(messages) < limit:
                log_kv({"message": "Set stream position to current position"})
                stream_pos = current_stream_id
            return messages, stream_pos

        return await self.db_pool.runInteraction(
            "get_new_device_msgs_for_remote",
            get_new_messages_for_remote_destination_txn,
        )
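
    # Sketch of the expected calling pattern (hypothetical names; the real
    # caller is the federation sender, which lives outside this module):
    #
    #     edus, stream_pos = await store.get_new_device_msgs_for_remote(
    #         "remote.example.com", last_acked_pos, current_pos, limit
    #     )
    #     # Once the remote acknowledges receipt, the rows can be purged via
    #     # delete_device_msgs_for_remote("remote.example.com", stream_pos).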

    @trace
    def delete_device_msgs_for_remote(self, destination, up_to_stream_id):
        """Used to delete messages when the remote destination acknowledges
        their receipt.

        Args:
            destination(str): The destination server_name
            up_to_stream_id(int): Where to delete messages up to.

        Returns:
            A deferred that resolves when the messages have been deleted.
        """

        def delete_messages_for_remote_destination_txn(txn):
            sql = (
                "DELETE FROM device_federation_outbox"
                " WHERE destination = ?"
                " AND stream_id <= ?"
            )
            txn.execute(sql, (destination, up_to_stream_id))

        return self.db_pool.runInteraction(
            "delete_device_msgs_for_remote", delete_messages_for_remote_destination_txn
        )

    async def get_all_new_device_messages(
        self, instance_name: str, last_id: int, current_id: int, limit: int
    ) -> Tuple[List[Tuple[int, tuple]], int, bool]:
        """Get updates for the to-device replication stream.

        Args:
            instance_name: The writer we want to fetch updates from. Unused
                here since there is only ever one writer.
            last_id: The token to fetch updates from. Exclusive.
            current_id: The token to fetch updates up to. Inclusive.
            limit: The requested limit for the number of rows to return. The
                function may return more or fewer rows.

        Returns:
            A tuple consisting of: the updates, a token to use to fetch
            subsequent updates, and whether we returned fewer rows than exist
            between the requested tokens due to the limit.

            The token returned can be used in a subsequent call to this
            function to get further updates.

            The updates are a list of 2-tuples of stream ID and the row data.
        """

        if last_id == current_id:
            return [], current_id, False

        def get_all_new_device_messages_txn(txn):
            # We limit like this as we might have multiple rows per stream_id, and
            # we want to make sure we always get all entries for any stream_id
            # we return.
            upper_pos = min(current_id, last_id + limit)
            sql = (
                "SELECT max(stream_id), user_id"
                " FROM device_inbox"
                " WHERE ? < stream_id AND stream_id <= ?"
                " GROUP BY user_id"
            )
            txn.execute(sql, (last_id, upper_pos))
            updates = [(row[0], row[1:]) for row in txn]

            sql = (
                "SELECT max(stream_id), destination"
                " FROM device_federation_outbox"
                " WHERE ? < stream_id AND stream_id <= ?"
                " GROUP BY destination"
            )
            txn.execute(sql, (last_id, upper_pos))
            updates.extend((row[0], row[1:]) for row in txn)

            # Order by ascending stream ordering.
            updates.sort()

            limited = False
            upto_token = current_id
            if len(updates) >= limit:
                upto_token = updates[-1][0]
                limited = True

            return updates, upto_token, limited

        return await self.db_pool.runInteraction(
            "get_all_new_device_messages", get_all_new_device_messages_txn
        )
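
    # Hedged sketch of how a replication consumer might page through this
    # stream (the names here are assumptions, not Synapse's replication API):
    #
    #     last_id = token
    #     while True:
    #         updates, last_id, limited = await store.get_all_new_device_messages(
    #             "master", last_id, current_id, limit=100
    #         )
    #         if not limited:
    #             break  # caught up to current_id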


class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
    DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"

    def __init__(self, database: DatabasePool, db_conn, hs):
        super().__init__(database, db_conn, hs)

        self.db_pool.updates.register_background_index_update(
            "device_inbox_stream_index",
            index_name="device_inbox_stream_id_user_id",
            table="device_inbox",
            columns=["stream_id", "user_id"],
        )

        self.db_pool.updates.register_background_update_handler(
            self.DEVICE_INBOX_STREAM_ID, self._background_drop_index_device_inbox
        )

    async def _background_drop_index_device_inbox(self, progress, batch_size):
        def reindex_txn(conn):
            txn = conn.cursor()
            txn.execute("DROP INDEX IF EXISTS device_inbox_stream_id")
            txn.close()

        await self.db_pool.runWithConnection(reindex_txn)

        await self.db_pool.updates._end_background_update(self.DEVICE_INBOX_STREAM_ID)

        return 1


class DeviceInboxStore(DeviceInboxWorkerStore, DeviceInboxBackgroundUpdateStore):
    DEVICE_INBOX_STREAM_ID = "device_inbox_stream_drop"

    def __init__(self, database: DatabasePool, db_conn, hs):
        super().__init__(database, db_conn, hs)

        # Map of (user_id, device_id) to the last stream_id that has been
        # deleted up to. This is so that we can no-op deletions.
        self._last_device_delete_cache = ExpiringCache(
            cache_name="last_device_delete_cache",
            clock=self._clock,
            max_len=10000,
            expiry_ms=30 * 60 * 1000,
        )

    @trace
    async def add_messages_to_device_inbox(
        self,
        local_messages_by_user_then_device: dict,
        remote_messages_by_destination: dict,
    ) -> int:
        """Used to send messages from this server.

        Args:
            local_messages_by_user_then_device:
                Dictionary of user_id to device_id to message.
            remote_messages_by_destination:
                Dictionary of destination server_name to the EDU JSON to send.

        Returns:
            The new stream_id.
        """

        def add_messages_txn(txn, now_ms, stream_id):
            # Add the local messages directly to the local inbox.
            self._add_messages_to_local_device_inbox_txn(
                txn, stream_id, local_messages_by_user_then_device
            )

            # Add the remote messages to the federation outbox.
            # We'll send them to a remote server when we next send a
            # federation transaction to that destination.
            sql = (
                "INSERT INTO device_federation_outbox"
                " (destination, stream_id, queued_ts, messages_json)"
                " VALUES (?,?,?,?)"
            )
            rows = []
            for destination, edu in remote_messages_by_destination.items():
                edu_json = json_encoder.encode(edu)
                rows.append((destination, stream_id, now_ms, edu_json))
            txn.executemany(sql, rows)

        with await self._device_inbox_id_gen.get_next() as stream_id:
            now_ms = self.clock.time_msec()
            await self.db_pool.runInteraction(
                "add_messages_to_device_inbox", add_messages_txn, now_ms, stream_id
            )
            for user_id in local_messages_by_user_then_device.keys():
                self._device_inbox_stream_cache.entity_has_changed(user_id, stream_id)
            for destination in remote_messages_by_destination.keys():
                self._device_federation_outbox_stream_cache.entity_has_changed(
                    destination, stream_id
                )

        return self._device_inbox_id_gen.get_current_token()
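
    # Expected shape of the arguments, for illustration (IDs and contents are
    # made up):
    #
    #     local_messages_by_user_then_device = {
    #         "@alice:example.com": {
    #             "DEVICEID": {"content": "..."},
    #             # A sole "*" key instead fans the message out to all of the
    #             # user's local devices (see
    #             # _add_messages_to_local_device_inbox_txn below).
    #         }
    #     }
    #     remote_messages_by_destination = {
    #         "remote.example.com": {"edu": "..."},
    #     }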

    async def add_messages_from_remote_to_device_inbox(
        self, origin: str, message_id: str, local_messages_by_user_then_device: dict
    ) -> int:
        """Used to add messages received over federation to local device
        inboxes, deduplicating on (origin, message_id).

        Returns:
            The new stream_id.
        """

        def add_messages_txn(txn, now_ms, stream_id):
            # Check if we've already inserted a matching message_id for that
            # origin. This can happen if the origin doesn't receive our
            # acknowledgement from the first time we received the message.
            already_inserted = self.db_pool.simple_select_one_txn(
                txn,
                table="device_federation_inbox",
                keyvalues={"origin": origin, "message_id": message_id},
                retcols=("message_id",),
                allow_none=True,
            )
            if already_inserted is not None:
                return

            # Add an entry for this message_id so that we know we've processed
            # it.
            self.db_pool.simple_insert_txn(
                txn,
                table="device_federation_inbox",
                values={
                    "origin": origin,
                    "message_id": message_id,
                    "received_ts": now_ms,
                },
            )

            # Add the messages to the appropriate local device inboxes so that
            # they'll be sent to the devices when they next sync.
            self._add_messages_to_local_device_inbox_txn(
                txn, stream_id, local_messages_by_user_then_device
            )

        with await self._device_inbox_id_gen.get_next() as stream_id:
            now_ms = self.clock.time_msec()
            await self.db_pool.runInteraction(
                "add_messages_from_remote_to_device_inbox",
                add_messages_txn,
                now_ms,
                stream_id,
            )
            for user_id in local_messages_by_user_then_device.keys():
                self._device_inbox_stream_cache.entity_has_changed(user_id, stream_id)

        return stream_id
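
    # Idempotency note (illustrative, hypothetical values): redelivery of the
    # same federation message is detected via device_federation_inbox, so the
    # second call below writes nothing new to the device inboxes:
    #
    #     await store.add_messages_from_remote_to_device_inbox(
    #         "remote.example.com", "msgid-123", messages
    #     )
    #     await store.add_messages_from_remote_to_device_inbox(
    #         "remote.example.com", "msgid-123", messages  # duplicate
    #     )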

    def _add_messages_to_local_device_inbox_txn(
        self, txn, stream_id, messages_by_user_then_device
    ):
        local_by_user_then_device = {}
        for user_id, messages_by_device in messages_by_user_then_device.items():
            messages_json_for_user = {}
            devices = list(messages_by_device.keys())
            if len(devices) == 1 and devices[0] == "*":
                # Handle wildcard device_ids.
                sql = "SELECT device_id FROM devices WHERE user_id = ?"
                txn.execute(sql, (user_id,))
                message_json = json_encoder.encode(messages_by_device["*"])
                for row in txn:
                    # Add the message for all devices for this user on this
                    # server.
                    device = row[0]
                    messages_json_for_user[device] = message_json
            else:
                if not devices:
                    continue

                clause, args = make_in_list_sql_clause(
                    txn.database_engine, "device_id", devices
                )
                sql = "SELECT device_id FROM devices WHERE user_id = ? AND " + clause

                # TODO: Maybe this needs to be done in batches if there are
                # too many local devices for a given user.
                txn.execute(sql, [user_id] + list(args))
                for row in txn:
                    # Only insert into the local inbox if the device exists on
                    # this server.
                    device = row[0]
                    message_json = json_encoder.encode(messages_by_device[device])
                    messages_json_for_user[device] = message_json

            if messages_json_for_user:
                local_by_user_then_device[user_id] = messages_json_for_user

        if not local_by_user_then_device:
            return

        sql = (
            "INSERT INTO device_inbox"
            " (user_id, device_id, stream_id, message_json)"
            " VALUES (?,?,?,?)"
        )
        rows = []
        for user_id, messages_by_device in local_by_user_then_device.items():
            for device_id, message_json in messages_by_device.items():
                rows.append((user_id, device_id, stream_id, message_json))

        txn.executemany(sql, rows)