# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
from typing import TYPE_CHECKING, List, Optional, Pattern, Tuple

from synapse.appservice import (
    ApplicationService,
    ApplicationServiceState,
    AppServiceTransaction,
)
from synapse.config.appservice import load_appservices
from synapse.events import EventBase
from synapse.storage._base import SQLBaseStore, db_to_json
from synapse.storage.database import DatabasePool, LoggingDatabaseConnection
from synapse.storage.databases.main.events_worker import EventsWorkerStore
from synapse.types import JsonDict
from synapse.util import json_encoder

if TYPE_CHECKING:
    from synapse.server import HomeServer

logger = logging.getLogger(__name__)


def _make_exclusive_regex(
    services_cache: List[ApplicationService],
) -> Optional[Pattern]:
    # We precompile a regex constructed from all the regexes that the AS's
    # have registered for exclusive users.
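    # For example, if two (hypothetical) services claimed "@irc_.*:example\.com"
    # and "@gitter_.*:example\.com" as exclusive user namespaces, the combined
    # pattern built below would be
    # "(@irc_.*:example\.com)|(@gitter_.*:example\.com)".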
    exclusive_user_regexes = [
        regex.pattern
        for service in services_cache
        for regex in service.get_exclusive_user_regexes()
    ]
    if exclusive_user_regexes:
        exclusive_user_regex = "|".join("(" + r + ")" for r in exclusive_user_regexes)
        exclusive_user_pattern: Optional[Pattern] = re.compile(exclusive_user_regex)
    else:
        # We handle this case specially, otherwise the constructed regex
        # would always match.
        exclusive_user_pattern = None
    return exclusive_user_pattern


class ApplicationServiceWorkerStore(SQLBaseStore):
    def __init__(
        self,
        database: DatabasePool,
        db_conn: LoggingDatabaseConnection,
        hs: "HomeServer",
    ):
        self.services_cache = load_appservices(
            hs.hostname, hs.config.appservice.app_service_config_files
        )
        self.exclusive_user_regex = _make_exclusive_regex(self.services_cache)

        super().__init__(database, db_conn, hs)

    def get_app_services(self) -> List[ApplicationService]:
        """Get all application services registered with this homeserver."""
        return self.services_cache

    def get_if_app_services_interested_in_user(self, user_id: str) -> bool:
        """Check if the user is one associated with an app service (exclusively)"""
        if self.exclusive_user_regex:
            return bool(self.exclusive_user_regex.match(user_id))
        else:
            return False

    def get_app_service_by_user_id(self, user_id: str) -> Optional[ApplicationService]:
        """Retrieve an application service from its user ID.

        All application services have associated with them a particular user ID.
        There is no distinguishing feature on the user ID which indicates it
        represents an application service. This function allows you to map from
        a user ID to an application service.

        Args:
            user_id: The user ID to map to an application service.
        Returns:
            The application service or None.
        """
        for service in self.services_cache:
            if service.sender == user_id:
                return service
        return None

    def get_app_service_by_token(self, token: str) -> Optional[ApplicationService]:
        """Get the application service with the given appservice token.

        Args:
            token: The application service token.
        Returns:
            The application service or None.
        """
        for service in self.services_cache:
            if service.token == token:
                return service
        return None

    def get_app_service_by_id(self, as_id: str) -> Optional[ApplicationService]:
        """Get the application service with the given appservice ID.

        Args:
            as_id: The application service ID.
        Returns:
            The application service or None.
        """
        for service in self.services_cache:
            if service.id == as_id:
                return service
        return None


class ApplicationServiceStore(ApplicationServiceWorkerStore):
    # This is currently empty, as there are no AS storage functions that
    # can't be run on the workers. Since this may change in future, and to
    # keep consistency with the other stores, we keep this empty class for
    # now.
    pass


class ApplicationServiceTransactionWorkerStore(
    ApplicationServiceWorkerStore, EventsWorkerStore
):
    async def get_appservices_by_state(
        self, state: ApplicationServiceState
    ) -> List[ApplicationService]:
        """Get a list of application services based on their state.

        Args:
            state: The state to filter on.
        Returns:
            A list of ApplicationServices, which may be empty.
        """
        results = await self.db_pool.simple_select_list(
            "application_services_state", {"state": state.value}, ["as_id"]
        )
        # NB: This assumes this class is linked with ApplicationServiceStore
        as_list = self.get_app_services()
        services = []

        for res in results:
            for service in as_list:
                if service.id == res["as_id"]:
                    services.append(service)
        return services

    async def get_appservice_state(
        self, service: ApplicationService
    ) -> Optional[ApplicationServiceState]:
        """Get the application service state.

        Args:
            service: The service whose state to get.
        Returns:
            An ApplicationServiceState, or None if the service has no state
            recorded.
        """
        result = await self.db_pool.simple_select_one(
            "application_services_state",
            {"as_id": service.id},
            ["state"],
            allow_none=True,
            desc="get_appservice_state",
        )
        if result:
            return ApplicationServiceState(result.get("state"))
        return None

    async def set_appservice_state(
        self, service: ApplicationService, state: ApplicationServiceState
    ) -> None:
        """Set the application service state.

        Args:
            service: The service whose state to set.
            state: The connectivity state to apply.
        """
        await self.db_pool.simple_upsert(
            "application_services_state", {"as_id": service.id}, {"state": state.value}
        )

    async def create_appservice_txn(
        self,
        service: ApplicationService,
        events: List[EventBase],
        ephemeral: List[JsonDict],
    ) -> AppServiceTransaction:
        """Atomically creates a new transaction for this application service
        with the given list of events. Ephemeral events are NOT persisted to the
        database and are not resent if a transaction is retried.

        Args:
            service: The service the transaction is for.
            events: A list of persistent events to put in the transaction.
            ephemeral: A list of ephemeral events to put in the transaction.

        Returns:
            A new transaction.
        """

        def _create_appservice_txn(txn):
            # work out new txn id (highest txn id for this service += 1)
            # The highest id may be the last one sent (in which case it is last_txn)
            # or it may be the highest in the txns list (which are waiting to be/are
            # being sent)
            last_txn_id = self._get_last_txn(txn, service.id)

            txn.execute(
                "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?",
                (service.id,),
            )
            highest_txn_id = txn.fetchone()[0]
            if highest_txn_id is None:
                highest_txn_id = 0

            new_txn_id = max(highest_txn_id, last_txn_id) + 1

            # Insert new txn into txn table
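            # Note that only the event IDs are stored (as a JSON array); the
            # full events are re-fetched from the events table if the
            # transaction has to be recovered later (see get_oldest_unsent_txn).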
            event_ids = json_encoder.encode([e.event_id for e in events])
            txn.execute(
                "INSERT INTO application_services_txns(as_id, txn_id, event_ids) "
                "VALUES(?,?,?)",
                (service.id, new_txn_id, event_ids),
            )
            return AppServiceTransaction(
                service=service, id=new_txn_id, events=events, ephemeral=ephemeral
            )

        return await self.db_pool.runInteraction(
            "create_appservice_txn", _create_appservice_txn
        )

    async def complete_appservice_txn(
        self, txn_id: int, service: ApplicationService
    ) -> None:
        """Completes an application service transaction.

        Args:
            txn_id: The transaction ID being completed.
            service: The application service which was sent this transaction.
        """
        txn_id = int(txn_id)

        def _complete_appservice_txn(txn):
            # Debugging query: Make sure the txn being completed is EXACTLY +1 from
            # what was there before. If it isn't, we've got problems (e.g. the AS
            # has probably missed some events), so whine loudly but still continue,
            # since it shouldn't fail completion of the transaction.
            last_txn_id = self._get_last_txn(txn, service.id)
            if (last_txn_id + 1) != txn_id:
                logger.error(
                    "appservice: Completing a transaction whose ID is more than "
                    "1 above the last ID sent to this AS. We've either dropped "
                    "events or sent them to the AS out of order. FIX ME. "
                    "last_txn=%s completing_txn=%s service_id=%s",
                    last_txn_id,
                    txn_id,
                    service.id,
                )

            # Set current txn_id for AS to 'txn_id'
            self.db_pool.simple_upsert_txn(
                txn,
                "application_services_state",
                {"as_id": service.id},
                {"last_txn": txn_id},
            )

            # Delete txn
            self.db_pool.simple_delete_txn(
                txn,
                "application_services_txns",
                {"txn_id": txn_id, "as_id": service.id},
            )

        await self.db_pool.runInteraction(
            "complete_appservice_txn", _complete_appservice_txn
        )

    async def get_oldest_unsent_txn(
        self, service: ApplicationService
    ) -> Optional[AppServiceTransaction]:
        """Get the oldest transaction which has not been sent for this service.

        Args:
            service: The app service whose oldest unsent transaction to fetch.
        Returns:
            An AppServiceTransaction or None.
        """

        def _get_oldest_unsent_txn(txn):
            # Monotonically increasing txn ids, so just select the smallest
            # one in the txns table (we delete them when they are sent)
            txn.execute(
                "SELECT * FROM application_services_txns WHERE as_id=?"
                " ORDER BY txn_id ASC LIMIT 1",
                (service.id,),
            )
            rows = self.db_pool.cursor_to_dict(txn)
            if not rows:
                return None

            entry = rows[0]

            return entry

        entry = await self.db_pool.runInteraction(
            "get_oldest_unsent_appservice_txn", _get_oldest_unsent_txn
        )

        if not entry:
            return None

        event_ids = db_to_json(entry["event_ids"])

        events = await self.get_events_as_list(event_ids)

        return AppServiceTransaction(
            service=service, id=entry["txn_id"], events=events, ephemeral=[]
        )

    def _get_last_txn(self, txn, service_id: Optional[str]) -> int:
        txn.execute(
            "SELECT last_txn FROM application_services_state WHERE as_id=?",
            (service_id,),
        )
        last_txn_id = txn.fetchone()
        if last_txn_id is None or last_txn_id[0] is None:  # no row exists
            return 0
        else:
            return int(last_txn_id[0])  # select 'last_txn' col

    async def set_appservice_last_pos(self, pos: int) -> None:
        def set_appservice_last_pos_txn(txn):
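            # appservice_stream_position is (assumed here to be) a single-row
            # table recording how far along the event stream the appservice
            # notifier has processed, hence the unconditional UPDATE.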
            txn.execute(
                "UPDATE appservice_stream_position SET stream_ordering = ?", (pos,)
            )

        await self.db_pool.runInteraction(
            "set_appservice_last_pos", set_appservice_last_pos_txn
        )

    async def get_new_events_for_appservice(
        self, current_id: int, limit: int
    ) -> Tuple[int, List[EventBase]]:
        """Get all new events for an appservice"""

        def get_new_events_for_appservice_txn(txn):
            sql = (
                "SELECT e.stream_ordering, e.event_id"
                " FROM events AS e"
                " WHERE"
                " (SELECT stream_ordering FROM appservice_stream_position)"
                " < e.stream_ordering"
                " AND e.stream_ordering <= ?"
                " ORDER BY e.stream_ordering ASC"
                " LIMIT ?"
            )

            txn.execute(sql, (current_id, limit))
            rows = txn.fetchall()
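
            # If we filled the limit there may be more events to come; report
            # the stream ordering of the last row we did return, so the caller
            # can pick up from there on the next call.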
            upper_bound = current_id
            if len(rows) == limit:
                upper_bound = rows[-1][0]

            return upper_bound, [row[1] for row in rows]

        upper_bound, event_ids = await self.db_pool.runInteraction(
            "get_new_events_for_appservice", get_new_events_for_appservice_txn
        )

        events = await self.get_events_as_list(event_ids)

        return upper_bound, events

    async def get_type_stream_id_for_appservice(
        self, service: ApplicationService, stream_type: str
    ) -> int:
        if stream_type not in ("read_receipt", "presence"):
            raise ValueError(
                "Expected type to be a valid application stream id type, got %s"
                % (stream_type,)
            )

        def get_type_stream_id_for_appservice_txn(txn):
            stream_id_type = "%s_stream_id" % stream_type
            txn.execute(
                # We do NOT want to escape `stream_id_type`.
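                # It is a column name validated against the allow-list above,
                # not user-supplied input, so interpolating it is safe here.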
                "SELECT %s FROM application_services_state WHERE as_id=?"
                % stream_id_type,
                (service.id,),
            )
            last_stream_id = txn.fetchone()
            if last_stream_id is None or last_stream_id[0] is None:  # no row exists
                return 0
            else:
                return int(last_stream_id[0])

        return await self.db_pool.runInteraction(
            "get_type_stream_id_for_appservice", get_type_stream_id_for_appservice_txn
        )

    async def set_type_stream_id_for_appservice(
        self, service: ApplicationService, stream_type: str, pos: Optional[int]
    ) -> None:
        if stream_type not in ("read_receipt", "presence"):
            raise ValueError(
                "Expected type to be a valid application stream id type, got %s"
                % (stream_type,)
            )

        def set_type_stream_id_for_appservice_txn(txn):
            stream_id_type = "%s_stream_id" % stream_type
            txn.execute(
                "UPDATE application_services_state SET %s = ? WHERE as_id=?"
                % stream_id_type,
                (pos, service.id),
            )

        await self.db_pool.runInteraction(
            "set_type_stream_id_for_appservice", set_type_stream_id_for_appservice_txn
        )


class ApplicationServiceTransactionStore(ApplicationServiceTransactionWorkerStore):
    # This is currently empty, as there are no AS storage functions that
    # can't be run on the workers. Since this may change in future, and to
    # keep consistency with the other stores, we keep this empty class for
    # now.
    pass