2014-08-12 16:10:52 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2019-05-17 20:37:31 +02:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
|
|
|
# Copyright 2017-2018 New Vector Ltd
|
|
|
|
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
2014-08-12 16:10:52 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2014-08-13 04:14:34 +02:00
|
|
|
|
2019-06-14 14:18:24 +02:00
|
|
|
import logging
|
2016-02-05 12:22:30 +01:00
|
|
|
import re
|
2020-06-03 18:15:57 +02:00
|
|
|
from typing import Optional
|
2016-02-05 12:22:30 +01:00
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
from twisted.internet import defer
|
2019-09-13 16:20:49 +02:00
|
|
|
from twisted.internet.defer import Deferred
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2018-12-14 19:20:59 +01:00
|
|
|
from synapse.api.constants import UserTypes
|
2019-09-20 16:21:30 +02:00
|
|
|
from synapse.api.errors import Codes, StoreError, SynapseError, ThreepidValidationError
|
2019-07-03 10:31:27 +02:00
|
|
|
from synapse.metrics.background_process_metrics import run_as_background_process
|
2018-03-01 19:19:34 +01:00
|
|
|
from synapse.storage._base import SQLBaseStore
|
2019-12-06 14:08:40 +01:00
|
|
|
from synapse.storage.database import Database
|
2018-12-07 14:44:46 +01:00
|
|
|
from synapse.types import UserID
|
2016-04-06 17:50:47 +02:00
|
|
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2019-06-06 18:34:07 +02:00
|
|
|
THIRTY_MINUTES_IN_MS = 30 * 60 * 1000
|
|
|
|
|
2019-06-14 14:18:24 +02:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2019-10-08 15:36:33 +02:00
|
|
|
class RegistrationWorkerStore(SQLBaseStore):
|
2019-12-06 14:08:40 +01:00
|
|
|
    def __init__(self, database: Database, db_conn, hs):
        """
        Args:
            database: the Database object backing this store.
            db_conn: an existing database connection, used during startup.
            hs (HomeServer): the homeserver, used for its config and clock.
        """
        super(RegistrationWorkerStore, self).__init__(database, db_conn, hs)

        # Keep local references so the methods below can read config options
        # (e.g. mau_trial_days) and the current time without going via hs.
        self.config = hs.config
        self.clock = hs.get_clock()
|
2018-08-23 20:17:08 +02:00
|
|
|
|
2018-03-01 19:19:34 +01:00
|
|
|
    @cached()
    def get_user_by_id(self, user_id):
        """Fetch the `users` table row for the given user ID.

        Args:
            user_id (str): the full user ID to look up.

        Returns:
            Deferred[dict|None]: the row as a dict of the columns listed in
            `retcols` below, or None if no such user exists.
        """
        return self.db.simple_select_one(
            table="users",
            keyvalues={"name": user_id},
            retcols=[
                "name",
                "password_hash",
                "is_guest",
                "admin",
                "consent_version",
                "consent_server_notice_sent",
                "appservice_id",
                "creation_ts",
                "user_type",
                "deactivated",
            ],
            allow_none=True,
            desc="get_user_by_id",
        )
|
|
|
|
|
2018-08-23 20:17:08 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def is_trial_user(self, user_id):
|
|
|
|
"""Checks if user is in the "trial" period, i.e. within the first
|
|
|
|
N days of registration defined by `mau_trial_days` config
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred[bool]
|
|
|
|
"""
|
|
|
|
|
|
|
|
info = yield self.get_user_by_id(user_id)
|
|
|
|
if not info:
|
2019-07-23 15:00:55 +02:00
|
|
|
return False
|
2018-08-23 20:17:08 +02:00
|
|
|
|
|
|
|
now = self.clock.time_msec()
|
|
|
|
trial_duration_ms = self.config.mau_trial_days * 24 * 60 * 60 * 1000
|
|
|
|
is_trial = (now - info["creation_ts"] * 1000) < trial_duration_ms
|
2019-07-23 15:00:55 +02:00
|
|
|
return is_trial
|
2018-08-23 20:17:08 +02:00
|
|
|
|
2018-03-01 19:19:34 +01:00
|
|
|
    @cached()
    def get_user_by_access_token(self, token):
        """Get a user from the given access token.

        Args:
            token (str): The access token of a user.

        Returns:
            defer.Deferred: None, if the token did not match, otherwise dict
            including the keys `name`, `is_guest`, `device_id`, `token_id`,
            `valid_until_ms`.
        """
        # The actual lookup joins `users` against `access_tokens`; see
        # _query_for_auth for the SQL.
        return self.db.runInteraction(
            "get_user_by_access_token", self._query_for_auth, token
        )
|
|
|
|
|
2019-04-08 18:10:55 +02:00
|
|
|
@cachedInlineCallbacks()
|
2019-04-10 18:58:47 +02:00
|
|
|
def get_expiration_ts_for_user(self, user_id):
|
2019-04-08 18:10:55 +02:00
|
|
|
"""Get the expiration timestamp for the account bearing a given user ID.
|
|
|
|
|
|
|
|
Args:
|
2019-04-10 18:58:47 +02:00
|
|
|
user_id (str): The ID of the user.
|
2019-04-08 18:10:55 +02:00
|
|
|
Returns:
|
|
|
|
defer.Deferred: None, if the account has no expiration timestamp,
|
2019-04-10 18:58:47 +02:00
|
|
|
otherwise int representation of the timestamp (as a number of
|
|
|
|
milliseconds since epoch).
|
2019-04-08 18:10:55 +02:00
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.simple_select_one_onecol(
|
2019-04-08 18:10:55 +02:00
|
|
|
table="account_validity",
|
2019-04-10 18:58:47 +02:00
|
|
|
keyvalues={"user_id": user_id},
|
2019-04-08 18:10:55 +02:00
|
|
|
retcol="expiration_ts_ms",
|
|
|
|
allow_none=True,
|
2019-04-10 18:58:47 +02:00
|
|
|
desc="get_expiration_ts_for_user",
|
2019-04-08 18:10:55 +02:00
|
|
|
)
|
2019-07-23 15:00:55 +02:00
|
|
|
return res
|
2019-04-08 18:10:55 +02:00
|
|
|
|
2019-04-10 18:58:47 +02:00
|
|
|
@defer.inlineCallbacks
|
2019-06-20 11:32:02 +02:00
|
|
|
def set_account_validity_for_user(
|
|
|
|
self, user_id, expiration_ts, email_sent, renewal_token=None
|
|
|
|
):
|
2019-04-16 21:13:59 +02:00
|
|
|
"""Updates the account validity properties of the given account, with the
|
|
|
|
given values.
|
2019-04-10 18:58:47 +02:00
|
|
|
|
|
|
|
Args:
|
2019-04-16 21:13:59 +02:00
|
|
|
user_id (str): ID of the account to update properties for.
|
|
|
|
expiration_ts (int): New expiration date, as a timestamp in milliseconds
|
2019-04-10 18:58:47 +02:00
|
|
|
since epoch.
|
2019-04-16 21:13:59 +02:00
|
|
|
email_sent (bool): True means a renewal email has been sent for this
|
|
|
|
account and there's no need to send another one for the current validity
|
|
|
|
period.
|
|
|
|
renewal_token (str): Renewal token the user can use to extend the validity
|
|
|
|
of their account. Defaults to no token.
|
2019-04-10 18:58:47 +02:00
|
|
|
"""
|
2019-06-20 11:32:02 +02:00
|
|
|
|
2019-04-16 21:13:59 +02:00
|
|
|
def set_account_validity_for_user_txn(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_update_txn(
|
2019-04-10 18:58:47 +02:00
|
|
|
txn=txn,
|
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
updatevalues={
|
2019-04-16 21:13:59 +02:00
|
|
|
"expiration_ts_ms": expiration_ts,
|
|
|
|
"email_sent": email_sent,
|
|
|
|
"renewal_token": renewal_token,
|
2019-04-10 18:58:47 +02:00
|
|
|
},
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
2019-06-20 11:32:02 +02:00
|
|
|
txn, self.get_expiration_ts_for_user, (user_id,)
|
2019-04-10 18:58:47 +02:00
|
|
|
)
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.runInteraction(
|
2019-06-20 11:32:02 +02:00
|
|
|
"set_account_validity_for_user", set_account_validity_for_user_txn
|
2019-04-10 18:58:47 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def set_renewal_token_for_user(self, user_id, renewal_token):
|
|
|
|
"""Defines a renewal token for a given user.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): ID of the user to set the renewal token for.
|
|
|
|
renewal_token (str): Random unique string that will be used to renew the
|
|
|
|
user's account.
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
StoreError: The provided token is already set for another user.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.simple_update_one(
|
2019-04-10 18:58:47 +02:00
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
updatevalues={"renewal_token": renewal_token},
|
|
|
|
desc="set_renewal_token_for_user",
|
|
|
|
)
|
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_user_from_renewal_token(self, renewal_token):
|
|
|
|
"""Get a user ID from a renewal token.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
renewal_token (str): The renewal token to perform the lookup with.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
defer.Deferred[str]: The ID of the user to which the token belongs.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.simple_select_one_onecol(
|
2019-04-10 18:58:47 +02:00
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"renewal_token": renewal_token},
|
|
|
|
retcol="user_id",
|
|
|
|
desc="get_user_from_renewal_token",
|
|
|
|
)
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return res
|
2019-04-10 18:58:47 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_renewal_token_for_user(self, user_id):
|
|
|
|
"""Get the renewal token associated with a given user ID.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The user ID to lookup a token for.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
defer.Deferred[str]: The renewal token associated with this user ID.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.simple_select_one_onecol(
|
2019-04-10 18:58:47 +02:00
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
retcol="renewal_token",
|
|
|
|
desc="get_renewal_token_for_user",
|
|
|
|
)
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return res
|
2019-04-10 18:58:47 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def get_users_expiring_soon(self):
|
|
|
|
"""Selects users whose account will expire in the [now, now + renew_at] time
|
|
|
|
window (see configuration for account_validity for information on what renew_at
|
|
|
|
refers to).
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred: Resolves to a list[dict[user_id (str), expiration_ts_ms (int)]]
|
|
|
|
"""
|
2019-06-20 11:32:02 +02:00
|
|
|
|
2019-04-10 18:58:47 +02:00
|
|
|
def select_users_txn(txn, now_ms, renew_at):
|
|
|
|
sql = (
|
|
|
|
"SELECT user_id, expiration_ts_ms FROM account_validity"
|
|
|
|
" WHERE email_sent = ? AND (expiration_ts_ms - ?) <= ?"
|
|
|
|
)
|
|
|
|
values = [False, now_ms, renew_at]
|
|
|
|
txn.execute(sql, values)
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.cursor_to_dict(txn)
|
2019-04-10 18:58:47 +02:00
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.runInteraction(
|
2019-04-10 18:58:47 +02:00
|
|
|
"get_users_expiring_soon",
|
|
|
|
select_users_txn,
|
2019-06-20 11:32:02 +02:00
|
|
|
self.clock.time_msec(),
|
|
|
|
self.config.account_validity.renew_at,
|
2019-04-10 18:58:47 +02:00
|
|
|
)
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return res
|
2019-04-10 18:58:47 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def set_renewal_mail_status(self, user_id, email_sent):
|
|
|
|
"""Sets or unsets the flag that indicates whether a renewal email has been sent
|
|
|
|
to the user (and the user hasn't renewed their account yet).
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): ID of the user to set/unset the flag for.
|
|
|
|
email_sent (bool): Flag which indicates whether a renewal email has been sent
|
|
|
|
to this user.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.simple_update_one(
|
2019-04-10 18:58:47 +02:00
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
updatevalues={"email_sent": email_sent},
|
|
|
|
desc="set_renewal_mail_status",
|
|
|
|
)
|
|
|
|
|
2019-06-07 16:30:54 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def delete_account_validity_for_user(self, user_id):
|
|
|
|
"""Deletes the entry for the given user in the account validity table, removing
|
|
|
|
their expiration date and renewal token.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): ID of the user to remove from the account validity table.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.simple_delete_one(
|
2019-06-07 16:30:54 +02:00
|
|
|
table="account_validity",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
desc="delete_account_validity_for_user",
|
|
|
|
)
|
|
|
|
|
2020-05-01 16:15:36 +02:00
|
|
|
async def is_server_admin(self, user):
|
2019-08-27 11:14:00 +02:00
|
|
|
"""Determines if a user is an admin of this homeserver.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user (UserID): user ID of the user to test
|
|
|
|
|
|
|
|
Returns (bool):
|
|
|
|
true iff the user is a server admin, false otherwise.
|
|
|
|
"""
|
2020-05-01 16:15:36 +02:00
|
|
|
res = await self.db.simple_select_one_onecol(
|
2018-03-01 19:19:34 +01:00
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user.to_string()},
|
|
|
|
retcol="admin",
|
|
|
|
allow_none=True,
|
|
|
|
desc="is_server_admin",
|
|
|
|
)
|
|
|
|
|
2020-01-22 16:09:57 +01:00
|
|
|
return bool(res) if res else False
|
2018-03-01 19:19:34 +01:00
|
|
|
|
2019-08-27 11:14:00 +02:00
|
|
|
def set_server_admin(self, user, admin):
|
|
|
|
"""Sets whether a user is an admin of this homeserver.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user (UserID): user ID of the user to test
|
|
|
|
admin (bool): true iff the user is to be a server admin,
|
|
|
|
false otherwise.
|
|
|
|
"""
|
2020-02-28 10:58:05 +01:00
|
|
|
|
|
|
|
def set_server_admin_txn(txn):
|
|
|
|
self.db.simple_update_one_txn(
|
|
|
|
txn, "users", {"name": user.to_string()}, {"admin": 1 if admin else 0}
|
|
|
|
)
|
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_user_by_id, (user.to_string(),)
|
|
|
|
)
|
|
|
|
|
|
|
|
return self.db.runInteraction("set_server_admin", set_server_admin_txn)
|
2019-08-27 11:14:00 +02:00
|
|
|
|
2018-03-01 19:19:34 +01:00
|
|
|
def _query_for_auth(self, txn, token):
|
|
|
|
sql = (
|
|
|
|
"SELECT users.name, users.is_guest, access_tokens.id as token_id,"
|
2019-07-12 18:26:02 +02:00
|
|
|
" access_tokens.device_id, access_tokens.valid_until_ms"
|
2018-03-01 19:19:34 +01:00
|
|
|
" FROM users"
|
|
|
|
" INNER JOIN access_tokens on users.name = access_tokens.user_id"
|
|
|
|
" WHERE token = ?"
|
|
|
|
)
|
|
|
|
|
|
|
|
txn.execute(sql, (token,))
|
2019-12-04 14:52:46 +01:00
|
|
|
rows = self.db.cursor_to_dict(txn)
|
2018-03-01 19:19:34 +01:00
|
|
|
if rows:
|
|
|
|
return rows[0]
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2019-09-09 13:43:51 +02:00
|
|
|
@cachedInlineCallbacks()
|
|
|
|
def is_real_user(self, user_id):
|
|
|
|
"""Determines if the user is a real user, ie does not have a 'user_type'.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): user id to test
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred[bool]: True if user 'user_type' is null or empty string
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.runInteraction(
|
|
|
|
"is_real_user", self.is_real_user_txn, user_id
|
|
|
|
)
|
2019-09-09 13:43:51 +02:00
|
|
|
return res
|
|
|
|
|
2020-06-03 18:15:57 +02:00
|
|
|
@cached()
|
2019-01-02 11:19:59 +01:00
|
|
|
def is_support_user(self, user_id):
|
|
|
|
"""Determines if the user is of type UserTypes.SUPPORT
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): user id to test
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred[bool]: True if user is of type UserTypes.SUPPORT
|
|
|
|
"""
|
2020-06-03 18:15:57 +02:00
|
|
|
return self.db.runInteraction(
|
2019-01-02 11:19:59 +01:00
|
|
|
"is_support_user", self.is_support_user_txn, user_id
|
|
|
|
)
|
|
|
|
|
2019-09-09 13:43:51 +02:00
|
|
|
def is_real_user_txn(self, txn, user_id):
|
2019-12-04 14:52:46 +01:00
|
|
|
res = self.db.simple_select_one_onecol_txn(
|
2019-09-09 13:43:51 +02:00
|
|
|
txn=txn,
|
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user_id},
|
|
|
|
retcol="user_type",
|
|
|
|
allow_none=True,
|
|
|
|
)
|
2019-09-09 15:40:40 +02:00
|
|
|
return res is None
|
2019-09-09 13:43:51 +02:00
|
|
|
|
2019-01-02 11:19:59 +01:00
|
|
|
def is_support_user_txn(self, txn, user_id):
|
2019-12-04 14:52:46 +01:00
|
|
|
res = self.db.simple_select_one_onecol_txn(
|
2019-01-02 11:19:59 +01:00
|
|
|
txn=txn,
|
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user_id},
|
|
|
|
retcol="user_type",
|
|
|
|
allow_none=True,
|
|
|
|
)
|
|
|
|
return True if res == UserTypes.SUPPORT else False
|
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
def get_users_by_id_case_insensitive(self, user_id):
|
|
|
|
"""Gets users that match user_id case insensitively.
|
|
|
|
Returns a mapping of user_id -> password_hash.
|
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
def f(txn):
|
2019-11-21 13:00:14 +01:00
|
|
|
sql = "SELECT name, password_hash FROM users WHERE lower(name) = lower(?)"
|
2019-02-18 13:12:57 +01:00
|
|
|
txn.execute(sql, (user_id,))
|
|
|
|
return dict(txn)
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction("get_users_by_id_case_insensitive", f)
|
2019-02-18 13:12:57 +01:00
|
|
|
|
2019-09-13 16:20:49 +02:00
|
|
|
async def get_user_by_external_id(
|
|
|
|
self, auth_provider: str, external_id: str
|
|
|
|
) -> str:
|
|
|
|
"""Look up a user by their external auth id
|
|
|
|
|
|
|
|
Args:
|
|
|
|
auth_provider: identifier for the remote auth provider
|
|
|
|
external_id: id on that system
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
str|None: the mxid of the user, or None if they are not known
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return await self.db.simple_select_one_onecol(
|
2019-09-13 16:20:49 +02:00
|
|
|
table="user_external_ids",
|
|
|
|
keyvalues={"auth_provider": auth_provider, "external_id": external_id},
|
|
|
|
retcol="user_id",
|
|
|
|
allow_none=True,
|
|
|
|
desc="get_user_by_external_id",
|
|
|
|
)
|
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def count_all_users(self):
|
|
|
|
"""Counts all users registered on the homeserver."""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
def _count_users(txn):
|
|
|
|
txn.execute("SELECT COUNT(*) AS users FROM users")
|
2019-12-04 14:52:46 +01:00
|
|
|
rows = self.db.cursor_to_dict(txn)
|
2019-02-18 13:12:57 +01:00
|
|
|
if rows:
|
|
|
|
return rows[0]["users"]
|
|
|
|
return 0
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
ret = yield self.db.runInteraction("count_users", _count_users)
|
2019-07-23 15:00:55 +02:00
|
|
|
return ret
|
2019-02-18 13:12:57 +01:00
|
|
|
|
|
|
|
def count_daily_user_type(self):
|
|
|
|
"""
|
|
|
|
Counts 1) native non guest users
|
|
|
|
2) native guests users
|
|
|
|
3) bridged users
|
|
|
|
who registered on the homeserver in the past 24 hours
|
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
def _count_daily_user_type(txn):
|
|
|
|
yesterday = int(self._clock.time()) - (60 * 60 * 24)
|
|
|
|
|
|
|
|
sql = """
|
|
|
|
SELECT user_type, COALESCE(count(*), 0) AS count FROM (
|
|
|
|
SELECT
|
|
|
|
CASE
|
|
|
|
WHEN is_guest=0 AND appservice_id IS NULL THEN 'native'
|
|
|
|
WHEN is_guest=1 AND appservice_id IS NULL THEN 'guest'
|
|
|
|
WHEN is_guest=0 AND appservice_id IS NOT NULL THEN 'bridged'
|
|
|
|
END AS user_type
|
|
|
|
FROM users
|
|
|
|
WHERE creation_ts > ?
|
|
|
|
) AS t GROUP BY user_type
|
|
|
|
"""
|
2019-06-20 11:32:02 +02:00
|
|
|
results = {"native": 0, "guest": 0, "bridged": 0}
|
2019-02-18 13:12:57 +01:00
|
|
|
txn.execute(sql, (yesterday,))
|
|
|
|
for row in txn:
|
|
|
|
results[row[0]] = row[1]
|
|
|
|
return results
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction("count_daily_user_type", _count_daily_user_type)
|
2019-02-18 13:12:57 +01:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def count_nonbridged_users(self):
|
|
|
|
def _count_users(txn):
|
2019-04-03 11:07:29 +02:00
|
|
|
txn.execute(
|
|
|
|
"""
|
2019-02-18 13:12:57 +01:00
|
|
|
SELECT COALESCE(COUNT(*), 0) FROM users
|
|
|
|
WHERE appservice_id IS NULL
|
2019-04-03 11:07:29 +02:00
|
|
|
"""
|
|
|
|
)
|
2019-10-31 16:43:24 +01:00
|
|
|
(count,) = txn.fetchone()
|
2019-02-18 13:12:57 +01:00
|
|
|
return count
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
ret = yield self.db.runInteraction("count_users", _count_users)
|
2019-07-23 15:00:55 +02:00
|
|
|
return ret
|
2019-02-18 13:12:57 +01:00
|
|
|
|
2019-09-09 13:43:51 +02:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def count_real_users(self):
|
|
|
|
"""Counts all users without a special user_type registered on the homeserver."""
|
|
|
|
|
|
|
|
def _count_users(txn):
|
2019-09-09 16:10:02 +02:00
|
|
|
txn.execute("SELECT COUNT(*) AS users FROM users where user_type is null")
|
2019-12-04 14:52:46 +01:00
|
|
|
rows = self.db.cursor_to_dict(txn)
|
2019-09-09 13:43:51 +02:00
|
|
|
if rows:
|
|
|
|
return rows[0]["users"]
|
|
|
|
return 0
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
ret = yield self.db.runInteraction("count_real_users", _count_users)
|
2019-09-09 13:43:51 +02:00
|
|
|
return ret
|
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
@defer.inlineCallbacks
|
|
|
|
def find_next_generated_user_id_localpart(self):
|
|
|
|
"""
|
|
|
|
Gets the localpart of the next generated user ID.
|
|
|
|
|
2019-11-26 16:50:17 +01:00
|
|
|
Generated user IDs are integers, so we find the largest integer user ID
|
|
|
|
already taken and return that plus one.
|
2019-02-18 13:12:57 +01:00
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2019-02-18 13:12:57 +01:00
|
|
|
def _find_next_generated_user_id(txn):
|
2019-11-06 12:55:00 +01:00
|
|
|
# We bound between '@0' and '@a' to avoid pulling the entire table
|
2019-10-02 12:21:52 +02:00
|
|
|
# out.
|
2019-11-06 12:55:00 +01:00
|
|
|
txn.execute("SELECT name FROM users WHERE '@0' <= name AND name < '@a'")
|
2019-02-18 13:12:57 +01:00
|
|
|
|
|
|
|
regex = re.compile(r"^@(\d+):")
|
|
|
|
|
2019-11-26 17:06:41 +01:00
|
|
|
max_found = 0
|
2019-02-18 13:12:57 +01:00
|
|
|
|
2019-04-03 11:07:29 +02:00
|
|
|
for (user_id,) in txn:
|
2019-02-18 13:12:57 +01:00
|
|
|
match = regex.search(user_id)
|
|
|
|
if match:
|
2019-11-26 17:06:41 +01:00
|
|
|
max_found = max(int(match.group(1)), max_found)
|
2019-11-26 16:50:17 +01:00
|
|
|
|
2019-11-26 17:06:41 +01:00
|
|
|
return max_found + 1
|
2019-02-18 13:12:57 +01:00
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return (
|
2019-04-03 11:07:29 +02:00
|
|
|
(
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.runInteraction(
|
2019-04-03 11:07:29 +02:00
|
|
|
"find_next_generated_user_id", _find_next_generated_user_id
|
|
|
|
)
|
|
|
|
)
|
|
|
|
)
|
2019-02-18 13:12:57 +01:00
|
|
|
|
2020-06-03 18:15:57 +02:00
|
|
|
async def get_user_id_by_threepid(self, medium: str, address: str) -> Optional[str]:
|
2019-02-18 18:19:01 +01:00
|
|
|
"""Returns user id from threepid
|
|
|
|
|
|
|
|
Args:
|
2020-06-03 18:15:57 +02:00
|
|
|
medium: threepid medium e.g. email
|
|
|
|
address: threepid address e.g. me@example.com
|
2019-02-18 18:19:01 +01:00
|
|
|
|
|
|
|
Returns:
|
2020-06-03 18:15:57 +02:00
|
|
|
The user ID or None if no user id/threepid mapping exists
|
2019-02-18 18:19:01 +01:00
|
|
|
"""
|
2020-06-03 18:15:57 +02:00
|
|
|
user_id = await self.db.runInteraction(
|
2019-04-03 11:07:29 +02:00
|
|
|
"get_user_id_by_threepid", self.get_user_id_by_threepid_txn, medium, address
|
2019-02-18 18:19:01 +01:00
|
|
|
)
|
2019-07-23 15:00:55 +02:00
|
|
|
return user_id
|
2019-02-18 18:19:01 +01:00
|
|
|
|
|
|
|
def get_user_id_by_threepid_txn(self, txn, medium, address):
|
|
|
|
"""Returns user id from threepid
|
|
|
|
|
|
|
|
Args:
|
|
|
|
txn (cursor):
|
|
|
|
medium (str): threepid medium e.g. email
|
|
|
|
address (str): threepid address e.g. me@example.com
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
str|None: user id or None if no user id/threepid mapping exists
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
ret = self.db.simple_select_one_txn(
|
2019-02-18 18:19:01 +01:00
|
|
|
txn,
|
|
|
|
"user_threepids",
|
2019-04-03 11:07:29 +02:00
|
|
|
{"medium": medium, "address": address},
|
2019-06-20 11:32:02 +02:00
|
|
|
["user_id"],
|
2019-04-03 11:07:29 +02:00
|
|
|
True,
|
2019-02-18 18:19:01 +01:00
|
|
|
)
|
|
|
|
if ret:
|
2019-06-20 11:32:02 +02:00
|
|
|
return ret["user_id"]
|
2019-02-18 18:19:01 +01:00
|
|
|
return None
|
|
|
|
|
2019-02-27 15:26:08 +01:00
|
|
|
    @defer.inlineCallbacks
    def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
        """Bind a third-party identifier to a user.

        Upserts, so re-adding an existing (medium, address) pair re-points it
        at the given user and timestamps.

        Args:
            user_id (str): the user to associate the threepid with
            medium (str): threepid medium, e.g. "email"
            address (str): threepid address, e.g. "me@example.com"
            validated_at (int): timestamp (ms) the threepid was validated at
            added_at (int): timestamp (ms) the threepid was added at
        """
        yield self.db.simple_upsert(
            "user_threepids",
            {"medium": medium, "address": address},
            {"user_id": user_id, "validated_at": validated_at, "added_at": added_at},
        )
|
2019-02-27 15:26:08 +01:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
|
|
|
|
def user_get_threepids(self, user_id):
|
2019-12-04 14:52:46 +01:00
|
|
|
ret = yield self.db.simple_select_list(
|
2019-04-03 11:07:29 +02:00
|
|
|
"user_threepids",
|
|
|
|
{"user_id": user_id},
|
2019-06-20 11:32:02 +02:00
|
|
|
["medium", "address", "validated_at", "added_at"],
|
|
|
|
"user_get_threepids",
|
2019-02-27 15:26:08 +01:00
|
|
|
)
|
2019-07-23 15:00:55 +02:00
|
|
|
return ret
|
2019-02-27 15:26:08 +01:00
|
|
|
|
|
|
|
    def user_delete_threepid(self, user_id, medium, address):
        """Delete a single threepid binding for the given user.

        Args:
            user_id (str): the user the threepid belongs to
            medium (str): threepid medium, e.g. "email"
            address (str): threepid address

        Returns:
            Deferred
        """
        return self.db.simple_delete(
            "user_threepids",
            keyvalues={"user_id": user_id, "medium": medium, "address": address},
            desc="user_delete_threepid",
        )
|
|
|
|
|
|
|
|
def user_delete_threepids(self, user_id: str):
|
|
|
|
"""Delete all threepid this user has bound
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id: The user id to delete all threepids of
|
|
|
|
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_delete(
|
2019-11-28 11:40:42 +01:00
|
|
|
"user_threepids",
|
|
|
|
keyvalues={"user_id": user_id},
|
2019-02-27 15:26:08 +01:00
|
|
|
desc="user_delete_threepids",
|
|
|
|
)
|
|
|
|
|
2019-04-01 11:16:13 +02:00
|
|
|
def add_user_bound_threepid(self, user_id, medium, address, id_server):
|
|
|
|
"""The server proxied a bind request to the given identity server on
|
|
|
|
behalf of the given user. We need to remember this in case the user
|
|
|
|
asks us to unbind the threepid.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
medium (str)
|
|
|
|
address (str)
|
|
|
|
id_server (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred
|
|
|
|
"""
|
|
|
|
# We need to use an upsert, in case they user had already bound the
|
|
|
|
# threepid
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_upsert(
|
2019-04-01 11:16:13 +02:00
|
|
|
table="user_threepid_id_server",
|
2019-02-27 15:26:08 +01:00
|
|
|
keyvalues={
|
|
|
|
"user_id": user_id,
|
|
|
|
"medium": medium,
|
|
|
|
"address": address,
|
2019-04-01 11:16:13 +02:00
|
|
|
"id_server": id_server,
|
2019-02-27 15:26:08 +01:00
|
|
|
},
|
2019-04-01 11:16:13 +02:00
|
|
|
values={},
|
|
|
|
insertion_values={},
|
|
|
|
desc="add_user_bound_threepid",
|
2019-02-27 15:26:08 +01:00
|
|
|
)
|
|
|
|
|
2019-09-23 17:50:27 +02:00
|
|
|
def user_get_bound_threepids(self, user_id):
|
|
|
|
"""Get the threepids that a user has bound to an identity server through the homeserver
|
|
|
|
The homeserver remembers where binds to an identity server occurred. Using this
|
|
|
|
method can retrieve those threepids.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The ID of the user to retrieve threepids for
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred[list[dict]]: List of dictionaries containing the following:
|
|
|
|
medium (str): The medium of the threepid (e.g "email")
|
|
|
|
address (str): The address of the threepid (e.g "bob@example.com")
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_select_list(
|
2019-09-23 17:50:27 +02:00
|
|
|
table="user_threepid_id_server",
|
|
|
|
keyvalues={"user_id": user_id},
|
|
|
|
retcols=["medium", "address"],
|
|
|
|
desc="user_get_bound_threepids",
|
|
|
|
)
|
|
|
|
|
2019-04-01 11:16:13 +02:00
|
|
|
def remove_user_bound_threepid(self, user_id, medium, address, id_server):
|
2019-04-02 12:15:19 +02:00
|
|
|
"""The server proxied an unbind request to the given identity server on
|
|
|
|
behalf of the given user, so we remove the mapping of threepid to
|
|
|
|
identity server.
|
2019-04-01 11:16:13 +02:00
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
medium (str)
|
|
|
|
address (str)
|
|
|
|
id_server (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_delete(
|
2019-04-01 11:16:13 +02:00
|
|
|
table="user_threepid_id_server",
|
|
|
|
keyvalues={
|
|
|
|
"user_id": user_id,
|
|
|
|
"medium": medium,
|
|
|
|
"address": address,
|
|
|
|
"id_server": id_server,
|
|
|
|
},
|
|
|
|
desc="remove_user_bound_threepid",
|
|
|
|
)
|
2018-03-01 19:19:34 +01:00
|
|
|
|
2019-04-01 11:16:13 +02:00
|
|
|
def get_id_servers_user_bound(self, user_id, medium, address):
|
|
|
|
"""Get the list of identity servers that the server proxied bind
|
|
|
|
requests to for given user and threepid
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2019-04-01 11:16:13 +02:00
|
|
|
Args:
|
|
|
|
user_id (str)
|
|
|
|
medium (str)
|
|
|
|
address (str)
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred[list[str]]: Resolves to a list of identity servers
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_select_onecol(
|
2019-04-01 11:16:13 +02:00
|
|
|
table="user_threepid_id_server",
|
2019-06-20 11:32:02 +02:00
|
|
|
keyvalues={"user_id": user_id, "medium": medium, "address": address},
|
2019-04-01 11:16:13 +02:00
|
|
|
retcol="id_server",
|
|
|
|
desc="get_id_servers_user_bound",
|
|
|
|
)
|
|
|
|
|
2019-10-08 15:36:33 +02:00
|
|
|
@cachedInlineCallbacks()
|
|
|
|
def get_user_deactivated_status(self, user_id):
|
|
|
|
"""Retrieve the value for the `deactivated` property for the provided user.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The ID of the user to retrieve the status for.
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
defer.Deferred(bool): The requested value.
|
|
|
|
"""
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.simple_select_one_onecol(
|
2019-10-08 15:36:33 +02:00
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user_id},
|
|
|
|
retcol="deactivated",
|
|
|
|
desc="get_user_deactivated_status",
|
|
|
|
)
|
|
|
|
|
|
|
|
# Convert the integer into a boolean.
|
|
|
|
return res == 1
|
|
|
|
|
2019-09-06 14:10:11 +02:00
|
|
|
    def get_threepid_validation_session(
        self, medium, client_secret, address=None, sid=None, validated=True
    ):
        """Gets a session_id and last_send_attempt (if available) for a
        combination of validation metadata

        Args:
            medium (str|None): The medium of the 3PID
            address (str|None): The address of the 3PID
            sid (str|None): The ID of the validation session
            client_secret (str): A unique string provided by the client to help identify this
                validation attempt
            validated (bool|None): Whether sessions should be filtered by
                whether they have been validated already or not. None to
                perform no filtering

        Returns:
            Deferred[dict|None]: A dict containing the following:
                * address - address of the 3pid
                * medium - medium of the 3pid
                * client_secret - a secret provided by the client for this validation session
                * session_id - ID of the validation session
                * send_attempt - a number serving to dedupe send attempts for this session
                * validated_at - timestamp of when this session was validated if so

                Otherwise None if a validation session is not found

        Raises:
            SynapseError: (400) if client_secret is falsy.
        """
        if not client_secret:
            raise SynapseError(
                400, "Missing parameter: client_secret", errcode=Codes.MISSING_PARAM
            )

        # Build the WHERE filter from whichever identifiers were provided.
        # Dicts preserve insertion order, so the key order used to build the
        # SQL below matches the value order passed to txn.execute.
        keyvalues = {"client_secret": client_secret}
        if medium:
            keyvalues["medium"] = medium
        if address:
            keyvalues["address"] = address
        if sid:
            keyvalues["session_id"] = sid

        # NOTE(review): `assert` is stripped under `python -O`, so this
        # precondition (callers must supply address or sid) would silently
        # vanish there — consider raising instead; confirm intent.
        assert address or sid

        def get_threepid_validation_session_txn(txn):
            sql = """
            SELECT address, session_id, medium, client_secret,
            last_send_attempt, validated_at
            FROM threepid_validation_session WHERE %s
            """ % (
                " AND ".join("%s = ?" % k for k in keyvalues.keys()),
            )

            # Optionally restrict to (un)validated sessions only.
            if validated is not None:
                sql += " AND validated_at IS " + ("NOT NULL" if validated else "NULL")

            sql += " LIMIT 1"

            txn.execute(sql, list(keyvalues.values()))
            rows = self.db.cursor_to_dict(txn)
            if not rows:
                return None

            return rows[0]

        return self.db.runInteraction(
            "get_threepid_validation_session", get_threepid_validation_session_txn
        )
|
|
|
|
|
2019-09-06 14:23:10 +02:00
|
|
|
def delete_threepid_session(self, session_id):
|
|
|
|
"""Removes a threepid validation session from the database. This can
|
|
|
|
be done after validation has been performed and whatever action was
|
|
|
|
waiting on it has been carried out
|
|
|
|
|
|
|
|
Args:
|
|
|
|
session_id (str): The ID of the session to delete
|
|
|
|
"""
|
|
|
|
|
|
|
|
def delete_threepid_session_txn(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_delete_txn(
|
2019-09-06 14:23:10 +02:00
|
|
|
txn,
|
|
|
|
table="threepid_validation_token",
|
|
|
|
keyvalues={"session_id": session_id},
|
|
|
|
)
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_delete_txn(
|
2019-09-06 14:23:10 +02:00
|
|
|
txn,
|
|
|
|
table="threepid_validation_session",
|
|
|
|
keyvalues={"session_id": session_id},
|
|
|
|
)
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction(
|
2019-09-06 14:23:10 +02:00
|
|
|
"delete_threepid_session", delete_threepid_session_txn
|
|
|
|
)
|
|
|
|
|
2018-03-01 19:19:34 +01:00
|
|
|
|
2019-12-04 16:09:36 +01:00
|
|
|
class RegistrationBackgroundUpdateStore(RegistrationWorkerStore):
    """Registration storage mixin that registers the background updates
    (index builds and data migrations) related to user accounts.
    """

    def __init__(self, database: Database, db_conn, hs):
        super(RegistrationBackgroundUpdateStore, self).__init__(database, db_conn, hs)

        self.clock = hs.get_clock()
        self.config = hs.config

        # Background index builds; these run incrementally after startup.
        self.db.updates.register_background_index_update(
            "access_tokens_device_index",
            index_name="access_tokens_device_id",
            table="access_tokens",
            columns=["user_id", "device_id"],
        )

        self.db.updates.register_background_index_update(
            "users_creation_ts",
            index_name="users_creation_ts",
            table="users",
            columns=["creation_ts"],
        )

        # we no longer use refresh tokens, but it's possible that some people
        # might have a background update queued to build this index. Just
        # clear the background update.
        self.db.updates.register_noop_background_update("refresh_tokens_device_index")

        self.db.updates.register_background_update_handler(
            "user_threepids_grandfather", self._bg_user_threepids_grandfather
        )

        self.db.updates.register_background_update_handler(
            "users_set_deactivated_flag", self._background_update_set_deactivated_flag
        )

    @defer.inlineCallbacks
    def _background_update_set_deactivated_flag(self, progress, batch_size):
        """Retrieves a list of all deactivated users and sets the 'deactivated'
        flag to 1 for each of them.

        Args:
            progress (dict): background-update progress; "user_id" is the last
                user name processed by the previous batch (empty string on the
                first run).
            batch_size (int): maximum number of users to examine per batch.

        Returns:
            Deferred[int]: the number of rows marked deactivated this batch.
        """

        last_user = progress.get("user_id", "")

        def _background_update_set_deactivated_flag_txn(txn):
            # A user with no password, no appservice, no access tokens and no
            # threepids is considered deactivated. The counts let us check the
            # token/threepid conditions in a single grouped query.
            txn.execute(
                """
                SELECT
                    users.name,
                    COUNT(access_tokens.token) AS count_tokens,
                    COUNT(user_threepids.address) AS count_threepids
                FROM users
                    LEFT JOIN access_tokens ON (access_tokens.user_id = users.name)
                    LEFT JOIN user_threepids ON (user_threepids.user_id = users.name)
                WHERE (users.password_hash IS NULL OR users.password_hash = '')
                AND (users.appservice_id IS NULL OR users.appservice_id = '')
                AND users.is_guest = 0
                AND users.name > ?
                GROUP BY users.name
                ORDER BY users.name ASC
                LIMIT ?;
                """,
                (last_user, batch_size),
            )

            rows = self.db.cursor_to_dict(txn)

            if not rows:
                # Nothing left to scan: signal completion.
                return True, 0

            rows_processed_nb = 0

            for user in rows:
                if not user["count_tokens"] and not user["count_threepids"]:
                    self.set_user_deactivated_status_txn(txn, user["name"], True)
                    rows_processed_nb += 1

            logger.info("Marked %d rows as deactivated", rows_processed_nb)

            # Persist progress so the next batch resumes after the last user
            # examined (not just the last one marked).
            self.db.updates._background_update_progress_txn(
                txn, "users_set_deactivated_flag", {"user_id": rows[-1]["name"]}
            )

            # A short batch means we've reached the end of the table.
            if batch_size > len(rows):
                return True, len(rows)
            else:
                return False, len(rows)

        end, nb_processed = yield self.db.runInteraction(
            "users_set_deactivated_flag", _background_update_set_deactivated_flag_txn
        )

        if end:
            yield self.db.updates._end_background_update("users_set_deactivated_flag")

        return nb_processed

    @defer.inlineCallbacks
    def _bg_user_threepids_grandfather(self, progress, batch_size):
        """We now track which identity servers a user binds their 3PID to, so
        we need to handle the case of existing bindings where we didn't track
        this.

        We do this by grandfathering in existing user threepids assuming that
        they used one of the server configured trusted identity servers.
        """
        id_servers = set(self.config.trusted_third_party_id_servers)

        def _bg_user_threepids_grandfather_txn(txn):
            sql = """
                INSERT INTO user_threepid_id_server
                    (user_id, medium, address, id_server)
                SELECT user_id, medium, address, ?
                FROM user_threepids
            """

            # One INSERT..SELECT per configured trusted identity server.
            txn.executemany(sql, [(id_server,) for id_server in id_servers])

        if id_servers:
            yield self.db.runInteraction(
                "_bg_user_threepids_grandfather", _bg_user_threepids_grandfather_txn
            )

        # This update runs in a single shot, so mark it done unconditionally.
        yield self.db.updates._end_background_update("user_threepids_grandfather")

        return 1
|
|
|
|
|
2019-10-08 15:48:33 +02:00
|
|
|
class RegistrationStore(RegistrationBackgroundUpdateStore):
|
2019-12-06 14:08:40 +01:00
|
|
|
    def __init__(self, database: Database, db_conn, hs):
        """Set up the registration store and schedule its periodic jobs.

        Args:
            database (Database): database abstraction layer
            db_conn: an open database connection (used by the superclass)
            hs: the homeserver instance, used for config and the clock
        """
        super(RegistrationStore, self).__init__(database, db_conn, hs)

        self._account_validity = hs.config.account_validity

        if self._account_validity.enabled:
            # Backfill expiration dates for accounts that predate account
            # validity; run once, immediately, off the startup path.
            self._clock.call_later(
                0.0,
                run_as_background_process,
                "account_validity_set_expiration_dates",
                self._set_expiration_date_when_missing,
            )

        # Create a background job for culling expired 3PID validity tokens
        def start_cull():
            # run as a background process to make sure that the database transactions
            # have a logcontext to report to
            return run_as_background_process(
                "cull_expired_threepid_validation_tokens",
                self.cull_expired_threepid_validation_tokens,
            )

        # THIRTY_MINUTES_IN_MS is defined elsewhere in this module.
        hs.get_clock().looping_call(start_cull, THIRTY_MINUTES_IN_MS)
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
@defer.inlineCallbacks
|
2019-07-12 18:26:02 +02:00
|
|
|
def add_access_token_to_user(self, user_id, token, device_id, valid_until_ms):
|
2014-08-12 16:10:52 +02:00
|
|
|
"""Adds an access token for the given user.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The user ID.
|
|
|
|
token (str): The new access token to add.
|
2016-07-15 14:19:07 +02:00
|
|
|
device_id (str): ID of the device to associate with the access
|
2019-07-12 18:26:02 +02:00
|
|
|
token
|
|
|
|
valid_until_ms (int|None): when the token is valid until. None for
|
|
|
|
no expiry.
|
2014-08-12 16:10:52 +02:00
|
|
|
Raises:
|
|
|
|
StoreError if there was a problem adding this.
|
|
|
|
"""
|
2016-03-01 15:32:56 +01:00
|
|
|
next_id = self._access_tokens_id_gen.get_next()
|
2015-04-07 13:05:36 +02:00
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
yield self.db.simple_insert(
|
2014-08-12 16:10:52 +02:00
|
|
|
"access_tokens",
|
2019-07-12 18:26:02 +02:00
|
|
|
{
|
|
|
|
"id": next_id,
|
|
|
|
"user_id": user_id,
|
|
|
|
"token": token,
|
|
|
|
"device_id": device_id,
|
|
|
|
"valid_until_ms": valid_until_ms,
|
|
|
|
},
|
2015-03-20 16:59:18 +01:00
|
|
|
desc="add_access_token_to_user",
|
2014-08-12 16:10:52 +02:00
|
|
|
)
|
|
|
|
|
2019-07-10 17:26:49 +02:00
|
|
|
def register_user(
|
2019-04-03 11:07:29 +02:00
|
|
|
self,
|
|
|
|
user_id,
|
|
|
|
password_hash=None,
|
|
|
|
was_guest=False,
|
|
|
|
make_guest=False,
|
|
|
|
appservice_id=None,
|
|
|
|
create_profile_with_displayname=None,
|
|
|
|
admin=False,
|
|
|
|
user_type=None,
|
|
|
|
):
|
2014-08-12 16:10:52 +02:00
|
|
|
"""Attempts to register an account.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): The desired user ID to register.
|
2020-06-03 18:15:57 +02:00
|
|
|
password_hash (str|None): Optional. The password hash for this user.
|
2016-01-05 19:01:18 +01:00
|
|
|
was_guest (bool): Optional. Whether this is a guest account being
|
|
|
|
upgraded to a non-guest account.
|
2016-01-06 12:38:09 +01:00
|
|
|
make_guest (boolean): True if the the new user should be guest,
|
|
|
|
false to add a regular user account.
|
2016-03-10 16:58:22 +01:00
|
|
|
appservice_id (str): The ID of the appservice registering the user.
|
2018-12-07 14:44:46 +01:00
|
|
|
create_profile_with_displayname (unicode): Optionally create a profile for
|
|
|
|
the user, setting their displayname to the given value
|
2018-12-14 19:20:59 +01:00
|
|
|
admin (boolean): is an admin user?
|
|
|
|
user_type (str|None): type of user. One of the values from
|
|
|
|
api.constants.UserTypes, or None for a normal user.
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
Raises:
|
|
|
|
StoreError if the user_id could not be registered.
|
2020-06-03 18:15:57 +02:00
|
|
|
|
|
|
|
Returns:
|
|
|
|
Deferred
|
2014-08-12 16:10:52 +02:00
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction(
|
2019-07-10 17:26:49 +02:00
|
|
|
"register_user",
|
|
|
|
self._register_user,
|
2016-03-10 16:58:22 +01:00
|
|
|
user_id,
|
|
|
|
password_hash,
|
|
|
|
was_guest,
|
|
|
|
make_guest,
|
2016-06-17 20:14:16 +02:00
|
|
|
appservice_id,
|
2018-12-07 14:44:46 +01:00
|
|
|
create_profile_with_displayname,
|
2018-12-14 19:20:59 +01:00
|
|
|
admin,
|
2019-04-03 11:07:29 +02:00
|
|
|
user_type,
|
2014-10-28 12:18:04 +01:00
|
|
|
)
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2019-07-10 17:26:49 +02:00
|
|
|
    def _register_user(
        self,
        txn,
        user_id,
        password_hash,
        was_guest,
        make_guest,
        appservice_id,
        create_profile_with_displayname,
        admin,
        user_type,
    ):
        """Transaction body for `register_user`; see that method for the
        meaning of the arguments.

        Raises:
            StoreError: 400/USER_IN_USE if the user_id is already taken.
        """
        user_id_obj = UserID.from_string(user_id)

        now = int(self.clock.time())

        try:
            if was_guest:
                # Ensure that the guest user actually exists
                # ``allow_none=False`` makes this raise an exception
                # if the row isn't in the database.
                self.db.simple_select_one_txn(
                    txn,
                    "users",
                    keyvalues={"name": user_id, "is_guest": 1},
                    retcols=("name",),
                    allow_none=False,
                )

                # Upgrade the existing guest row in place rather than
                # inserting a new one.
                self.db.simple_update_one_txn(
                    txn,
                    "users",
                    keyvalues={"name": user_id, "is_guest": 1},
                    updatevalues={
                        "password_hash": password_hash,
                        "upgrade_ts": now,
                        "is_guest": 1 if make_guest else 0,
                        "appservice_id": appservice_id,
                        "admin": 1 if admin else 0,
                        "user_type": user_type,
                    },
                )
            else:
                self.db.simple_insert_txn(
                    txn,
                    "users",
                    values={
                        "name": user_id,
                        "password_hash": password_hash,
                        "creation_ts": now,
                        "is_guest": 1 if make_guest else 0,
                        "appservice_id": appservice_id,
                        "admin": 1 if admin else 0,
                        "user_type": user_type,
                    },
                )

        except self.database_engine.module.IntegrityError:
            # Unique constraint on users.name: the id is already registered.
            raise StoreError(400, "User ID already taken.", errcode=Codes.USER_IN_USE)

        if self._account_validity.enabled:
            self.set_expiration_date_for_user_txn(txn, user_id)

        if create_profile_with_displayname:
            # set a default displayname serverside to avoid ugly race
            # between auto-joins and clients trying to set displaynames
            #
            # *obviously* the 'profiles' table uses localpart for user_id
            # while everything else uses the full mxid.
            txn.execute(
                "INSERT INTO profiles(user_id, displayname) VALUES (?,?)",
                (user_id_obj.localpart, create_profile_with_displayname),
            )

        if self.hs.config.stats_enabled:
            # we create a new completed user statistics row

            # we don't strictly need current_token since this user really can't
            # have any state deltas before now (as it is a new user), but still,
            # we include it for completeness.
            current_token = self._get_max_stream_id_in_current_state_deltas_txn(txn)
            self._update_stats_delta_txn(
                txn, now, "user", user_id, {}, complete_with_stream_id=current_token
            )

        # Evict cached lookups for this user once the txn commits.
        self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
        txn.call_after(self.is_guest.invalidate, (user_id,))
|
|
|
|
|
2019-09-13 16:20:49 +02:00
|
|
|
def record_user_external_id(
|
|
|
|
self, auth_provider: str, external_id: str, user_id: str
|
|
|
|
) -> Deferred:
|
|
|
|
"""Record a mapping from an external user id to a mxid
|
|
|
|
|
|
|
|
Args:
|
|
|
|
auth_provider: identifier for the remote auth provider
|
|
|
|
external_id: id on that system
|
|
|
|
user_id: complete mxid that it is mapped to
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_insert(
|
2019-09-13 16:20:49 +02:00
|
|
|
table="user_external_ids",
|
|
|
|
values={
|
|
|
|
"auth_provider": auth_provider,
|
|
|
|
"external_id": external_id,
|
|
|
|
"user_id": user_id,
|
|
|
|
},
|
|
|
|
desc="record_user_external_id",
|
|
|
|
)
|
|
|
|
|
2015-03-23 15:20:28 +01:00
|
|
|
def user_set_password_hash(self, user_id, password_hash):
|
|
|
|
"""
|
|
|
|
NB. This does *not* evict any cache because the one use for this
|
|
|
|
removes most of the entries subsequently anyway so it would be
|
|
|
|
pointless. Use flush_user separately.
|
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2016-08-18 15:59:55 +02:00
|
|
|
def user_set_password_hash_txn(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_update_one_txn(
|
2019-06-20 11:32:02 +02:00
|
|
|
txn, "users", {"name": user_id}, {"password_hash": password_hash}
|
2016-08-18 15:59:55 +02:00
|
|
|
)
|
2019-04-03 11:07:29 +02:00
|
|
|
self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction(
|
|
|
|
"user_set_password_hash", user_set_password_hash_txn
|
|
|
|
)
|
2015-03-23 15:20:28 +01:00
|
|
|
|
2018-05-11 01:17:11 +02:00
|
|
|
def user_set_consent_version(self, user_id, consent_version):
|
|
|
|
"""Updates the user table to record privacy policy consent
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): full mxid of the user to update
|
|
|
|
consent_version (str): version of the policy the user has consented
|
|
|
|
to
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
StoreError(404) if user not found
|
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2018-05-17 18:35:31 +02:00
|
|
|
def f(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_update_one_txn(
|
2018-05-17 18:35:31 +02:00
|
|
|
txn,
|
2019-06-20 11:32:02 +02:00
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user_id},
|
|
|
|
updatevalues={"consent_version": consent_version},
|
2018-05-17 18:35:31 +02:00
|
|
|
)
|
2019-04-03 11:07:29 +02:00
|
|
|
self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction("user_set_consent_version", f)
|
2018-05-17 18:35:31 +02:00
|
|
|
|
|
|
|
def user_set_consent_server_notice_sent(self, user_id, consent_version):
|
|
|
|
"""Updates the user table to record that we have sent the user a server
|
|
|
|
notice about privacy policy consent
|
|
|
|
|
|
|
|
Args:
|
|
|
|
user_id (str): full mxid of the user to update
|
|
|
|
consent_version (str): version of the policy we have notified the
|
|
|
|
user about
|
|
|
|
|
|
|
|
Raises:
|
|
|
|
StoreError(404) if user not found
|
|
|
|
"""
|
2019-04-03 11:07:29 +02:00
|
|
|
|
2018-05-17 18:35:31 +02:00
|
|
|
def f(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_update_one_txn(
|
2018-05-17 18:35:31 +02:00
|
|
|
txn,
|
2019-06-20 11:32:02 +02:00
|
|
|
table="users",
|
|
|
|
keyvalues={"name": user_id},
|
|
|
|
updatevalues={"consent_server_notice_sent": consent_version},
|
2018-05-17 18:35:31 +02:00
|
|
|
)
|
2019-04-03 11:07:29 +02:00
|
|
|
self._invalidate_cache_and_stream(txn, self.get_user_by_id, (user_id,))
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction("user_set_consent_server_notice_sent", f)
|
2018-05-11 01:17:11 +02:00
|
|
|
|
2019-04-03 11:07:29 +02:00
|
|
|
    def user_delete_access_tokens(self, user_id, except_token_id=None, device_id=None):
        """
        Invalidate access tokens belonging to a user

        Args:
            user_id (str): ID of user the tokens belong to
            except_token_id (str): an access_tokens ID which should
                *not* be deleted
            device_id (str|None): ID of device the tokens are associated with.
                If None, tokens associated with any device (or no device) will
                be deleted
        Returns:
            defer.Deferred[list[tuple[str, int, str|None]]]: a list of
                (token, token id, device id) for each of the deleted tokens
        """

        def f(txn):
            # Build the WHERE clause from fixed column names only; the values
            # are passed as query parameters.
            keyvalues = {"user_id": user_id}
            if device_id is not None:
                keyvalues["device_id"] = device_id

            items = keyvalues.items()
            where_clause = " AND ".join(k + " = ?" for k, _ in items)
            values = [v for _, v in items]
            if except_token_id:
                where_clause += " AND id != ?"
                values.append(except_token_id)

            # Fetch the rows first so we can both invalidate the caches and
            # return what was deleted.
            txn.execute(
                "SELECT token, id, device_id FROM access_tokens WHERE %s"
                % where_clause,
                values,
            )
            tokens_and_devices = [(r[0], r[1], r[2]) for r in txn]

            for token, _, _ in tokens_and_devices:
                self._invalidate_cache_and_stream(
                    txn, self.get_user_by_access_token, (token,)
                )

            txn.execute("DELETE FROM access_tokens WHERE %s" % where_clause, values)

            return tokens_and_devices

        return self.db.runInteraction("user_delete_access_tokens", f)
|
2016-03-11 17:27:50 +01:00
|
|
|
|
|
|
|
def delete_access_token(self, access_token):
|
|
|
|
def f(txn):
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_delete_one_txn(
|
2019-04-03 11:07:29 +02:00
|
|
|
txn, table="access_tokens", keyvalues={"token": access_token}
|
2016-03-11 14:14:18 +01:00
|
|
|
)
|
2016-03-11 17:27:50 +01:00
|
|
|
|
2016-08-15 18:04:39 +02:00
|
|
|
self._invalidate_cache_and_stream(
|
|
|
|
txn, self.get_user_by_access_token, (access_token,)
|
|
|
|
)
|
2016-03-11 17:27:50 +01:00
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction("delete_access_token", f)
|
2015-03-25 18:15:20 +01:00
|
|
|
|
2016-01-06 12:38:09 +01:00
|
|
|
@cachedInlineCallbacks()
|
2016-01-18 15:09:47 +01:00
|
|
|
def is_guest(self, user_id):
|
2019-12-04 14:52:46 +01:00
|
|
|
res = yield self.db.simple_select_one_onecol(
|
2016-01-06 12:38:09 +01:00
|
|
|
table="users",
|
2016-01-18 15:09:47 +01:00
|
|
|
keyvalues={"name": user_id},
|
2016-01-06 12:38:09 +01:00
|
|
|
retcol="is_guest",
|
|
|
|
allow_none=True,
|
|
|
|
desc="is_guest",
|
|
|
|
)
|
|
|
|
|
2019-07-23 15:00:55 +02:00
|
|
|
return res if res else False
|
2016-01-06 12:38:09 +01:00
|
|
|
|
2018-05-09 15:54:28 +02:00
|
|
|
def add_user_pending_deactivation(self, user_id):
|
2018-05-10 13:20:40 +02:00
|
|
|
"""
|
|
|
|
Adds a user to the table of users who need to be parted from all the rooms they're
|
|
|
|
in
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_insert(
|
2018-05-09 15:54:28 +02:00
|
|
|
"users_pending_deactivation",
|
2019-04-03 11:07:29 +02:00
|
|
|
values={"user_id": user_id},
|
2018-05-09 15:54:28 +02:00
|
|
|
desc="add_user_pending_deactivation",
|
|
|
|
)
|
|
|
|
|
|
|
|
def del_user_pending_deactivation(self, user_id):
|
2018-05-10 13:20:40 +02:00
|
|
|
"""
|
|
|
|
Removes the given user to the table of users who need to be parted from all the
|
|
|
|
rooms they're in, effectively marking that user as fully deactivated.
|
|
|
|
"""
|
2018-06-26 11:52:52 +02:00
|
|
|
# XXX: This should be simple_delete_one but we failed to put a unique index on
|
|
|
|
# the table, so somehow duplicate entries have ended up in it.
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_delete(
|
2018-05-09 15:54:28 +02:00
|
|
|
"users_pending_deactivation",
|
2019-04-03 11:07:29 +02:00
|
|
|
keyvalues={"user_id": user_id},
|
2018-05-09 15:54:28 +02:00
|
|
|
desc="del_user_pending_deactivation",
|
|
|
|
)
|
|
|
|
|
|
|
|
def get_user_pending_deactivation(self):
|
2018-05-10 13:20:40 +02:00
|
|
|
"""
|
|
|
|
Gets one user from the table of users waiting to be parted from all the rooms
|
|
|
|
they're in.
|
|
|
|
"""
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_select_one_onecol(
|
2018-05-09 15:54:28 +02:00
|
|
|
"users_pending_deactivation",
|
|
|
|
keyvalues={},
|
|
|
|
retcol="user_id",
|
|
|
|
allow_none=True,
|
|
|
|
desc="get_users_pending_deactivation",
|
|
|
|
)
|
2019-04-01 14:23:18 +02:00
|
|
|
|
2019-06-20 11:32:02 +02:00
|
|
|
    def validate_threepid_session(self, session_id, client_secret, token, current_ts):
        """Attempt to validate a threepid session using a token

        Args:
            session_id (str): The id of a validation session
            client_secret (str): A unique string provided by the client to
                help identify this validation attempt
            token (str): A validation token
            current_ts (int): The current unix time in milliseconds. Used for
                checking token expiry status

        Raises:
            ThreepidValidationError: if a matching validation token was not found or has
                expired

        Returns:
            deferred str|None: A str representing a link to redirect the user
            to if there is one.
        """

        # Insert everything into a transaction in order to run atomically
        def validate_threepid_session_txn(txn):
            # Step 1: the session must exist and the caller must know its
            # client_secret.
            row = self.db.simple_select_one_txn(
                txn,
                table="threepid_validation_session",
                keyvalues={"session_id": session_id},
                retcols=["client_secret", "validated_at"],
                allow_none=True,
            )

            if not row:
                raise ThreepidValidationError(400, "Unknown session_id")
            retrieved_client_secret = row["client_secret"]
            validated_at = row["validated_at"]

            if retrieved_client_secret != client_secret:
                raise ThreepidValidationError(
                    400, "This client_secret does not match the provided session_id"
                )

            # Step 2: the supplied token must belong to this session.
            row = self.db.simple_select_one_txn(
                txn,
                table="threepid_validation_token",
                keyvalues={"session_id": session_id, "token": token},
                retcols=["expires", "next_link"],
                allow_none=True,
            )

            if not row:
                raise ThreepidValidationError(
                    400, "Validation token not found or has expired"
                )
            expires = row["expires"]
            next_link = row["next_link"]

            # If the session is already validated, no need to revalidate
            if validated_at:
                return next_link

            # Step 3: reject expired tokens (checked only for not-yet-validated
            # sessions).
            if expires <= current_ts:
                raise ThreepidValidationError(
                    400, "This token has expired. Please request a new one"
                )

            # Looks good. Validate the session
            self.db.simple_update_txn(
                txn,
                table="threepid_validation_session",
                keyvalues={"session_id": session_id},
                updatevalues={"validated_at": self.clock.time_msec()},
            )

            return next_link

        # Return next_link if it exists
        return self.db.runInteraction(
            "validate_threepid_session_txn", validate_threepid_session_txn
        )
|
|
|
|
|
|
|
|
def upsert_threepid_validation_session(
|
|
|
|
self,
|
|
|
|
medium,
|
|
|
|
address,
|
|
|
|
client_secret,
|
|
|
|
send_attempt,
|
|
|
|
session_id,
|
|
|
|
validated_at=None,
|
|
|
|
):
|
|
|
|
"""Upsert a threepid validation session
|
|
|
|
Args:
|
|
|
|
medium (str): The medium of the 3PID
|
|
|
|
address (str): The address of the 3PID
|
|
|
|
client_secret (str): A unique string provided by the client to
|
|
|
|
help identify this validation attempt
|
|
|
|
send_attempt (int): The latest send_attempt on this session
|
|
|
|
session_id (str): The id of this validation session
|
|
|
|
validated_at (int|None): The unix timestamp in milliseconds of
|
|
|
|
when the session was marked as valid
|
|
|
|
"""
|
|
|
|
insertion_values = {
|
|
|
|
"medium": medium,
|
|
|
|
"address": address,
|
|
|
|
"client_secret": client_secret,
|
|
|
|
}
|
|
|
|
|
|
|
|
if validated_at:
|
|
|
|
insertion_values["validated_at"] = validated_at
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.simple_upsert(
|
2019-06-06 18:34:07 +02:00
|
|
|
table="threepid_validation_session",
|
|
|
|
keyvalues={"session_id": session_id},
|
|
|
|
values={"last_send_attempt": send_attempt},
|
|
|
|
insertion_values=insertion_values,
|
|
|
|
desc="upsert_threepid_validation_session",
|
|
|
|
)
|
|
|
|
|
|
|
|
def start_or_continue_validation_session(
|
|
|
|
self,
|
|
|
|
medium,
|
|
|
|
address,
|
|
|
|
session_id,
|
|
|
|
client_secret,
|
|
|
|
send_attempt,
|
|
|
|
next_link,
|
|
|
|
token,
|
|
|
|
token_expires,
|
|
|
|
):
|
|
|
|
"""Creates a new threepid validation session if it does not already
|
|
|
|
exist and associates a new validation token with it
|
|
|
|
|
|
|
|
Args:
|
|
|
|
medium (str): The medium of the 3PID
|
|
|
|
address (str): The address of the 3PID
|
|
|
|
session_id (str): The id of this validation session
|
|
|
|
client_secret (str): A unique string provided by the client to
|
|
|
|
help identify this validation attempt
|
|
|
|
send_attempt (int): The latest send_attempt on this session
|
|
|
|
next_link (str|None): The link to redirect the user to upon
|
|
|
|
successful validation
|
|
|
|
token (str): The validation token
|
|
|
|
token_expires (int): The timestamp for which after the token
|
|
|
|
will no longer be valid
|
|
|
|
"""
|
2019-06-20 11:32:02 +02:00
|
|
|
|
2019-06-06 18:34:07 +02:00
|
|
|
def start_or_continue_validation_session_txn(txn):
|
|
|
|
# Create or update a validation session
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_upsert_txn(
|
2019-06-06 18:34:07 +02:00
|
|
|
txn,
|
|
|
|
table="threepid_validation_session",
|
|
|
|
keyvalues={"session_id": session_id},
|
|
|
|
values={"last_send_attempt": send_attempt},
|
|
|
|
insertion_values={
|
|
|
|
"medium": medium,
|
|
|
|
"address": address,
|
|
|
|
"client_secret": client_secret,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Create a new validation token with this session ID
|
2019-12-04 14:52:46 +01:00
|
|
|
self.db.simple_insert_txn(
|
2019-06-06 18:34:07 +02:00
|
|
|
txn,
|
|
|
|
table="threepid_validation_token",
|
|
|
|
values={
|
|
|
|
"session_id": session_id,
|
|
|
|
"token": token,
|
|
|
|
"next_link": next_link,
|
|
|
|
"expires": token_expires,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
2019-12-04 14:52:46 +01:00
|
|
|
return self.db.runInteraction(
|
2019-06-06 18:34:07 +02:00
|
|
|
"start_or_continue_validation_session",
|
|
|
|
start_or_continue_validation_session_txn,
|
|
|
|
)
|
|
|
|
|
|
|
|
def cull_expired_threepid_validation_tokens(self):
    """Remove threepid validation tokens with expiry dates that have passed.

    Returns:
        Deferred: resolves (with None) once the expired tokens have been
            deleted.
    """

    def cull_expired_threepid_validation_tokens_txn(txn, ts):
        sql = """
        DELETE FROM threepid_validation_token WHERE
        expires < ?
        """
        # Don't return the cursor from txn.execute(): a DB-API cursor is
        # only valid inside the transaction, so handing it back through
        # runInteraction would leak a dead cursor to the caller.
        txn.execute(sql, (ts,))

    return self.db.runInteraction(
        "cull_expired_threepid_validation_tokens",
        cull_expired_threepid_validation_tokens_txn,
        self.clock.time_msec(),
    )
|
2019-10-08 15:36:33 +02:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def set_user_deactivated_status(self, user_id, deactivated):
    """Mark the given user as deactivated (or re-activate them).

    Args:
        user_id (str): ID of the user whose `deactivated` flag should change.
        deactivated (bool): New value for the `deactivated` flag.
    """
    # The actual update (and cache invalidation) happens in the _txn
    # companion so callers holding a transaction can reuse it directly.
    yield self.db.runInteraction(
        "set_user_deactivated_status",
        self.set_user_deactivated_status_txn,
        user_id,
        deactivated,
    )
|
|
|
|
|
|
|
|
def set_user_deactivated_status_txn(self, txn, user_id, deactivated):
    """Transaction half of `set_user_deactivated_status`: flip the
    `deactivated` column for `user_id` and invalidate the matching cache
    entry within the same transaction.
    """
    # The column stores the flag as an integer, so coerce the bool to 0/1.
    flag = 1 if deactivated else 0
    self.db.simple_update_one_txn(
        txn=txn,
        table="users",
        keyvalues={"name": user_id},
        updatevalues={"deactivated": flag},
    )
    self._invalidate_cache_and_stream(
        txn, self.get_user_deactivated_status, (user_id,)
    )
|
2019-12-03 15:00:09 +01:00
|
|
|
|
|
|
|
@defer.inlineCallbacks
def _set_expiration_date_when_missing(self):
    """Backfill account expiration dates.

    Finds every registered, non-deactivated user without an expiration
    date and assigns one to each of them.
    """

    def _assign_missing_expiration_dates_txn(txn):
        """Select users lacking an account_validity row (skipping
        deactivated accounts) and set an expiration date for each.
        """
        sql = (
            "SELECT users.name FROM users"
            " LEFT JOIN account_validity ON (users.name = account_validity.user_id)"
            " WHERE account_validity.user_id is NULL AND users.deactivated = 0;"
        )
        txn.execute(sql, [])

        # use_delta=True randomises each date within a window so the
        # backfilled accounts don't all expire at the same instant.
        for row in self.db.cursor_to_dict(txn):
            self.set_expiration_date_for_user_txn(txn, row["name"], use_delta=True)

    yield self.db.runInteraction(
        "get_users_with_no_expiration_date",
        _assign_missing_expiration_dates_txn,
    )
|
|
|
|
|
|
|
|
def set_expiration_date_for_user_txn(self, txn, user_id, use_delta=False):
    """Set (or reset) the expiration date on the given user's account.

    Args:
        txn: The database transaction to run the upsert in.
        user_id (str): ID of the user to set an expiration date for.
        use_delta (bool): When False, the expiration date is exactly
            now + validity period. When True, it is drawn uniformly at
            random from [now + period - startup_job_max_delta,
            now + period), spreading expirations out rather than bunching
            them together.
    """
    expiration_ts = self._clock.time_msec() + self._account_validity.period

    if use_delta:
        # randrange excludes its stop value, so the un-randomised
        # timestamp itself is never chosen.
        window_start = (
            expiration_ts - self._account_validity.startup_job_max_delta
        )
        expiration_ts = self.rand.randrange(window_start, expiration_ts)

    # Upsert so a pre-existing account_validity row is overwritten, and
    # reset email_sent so a fresh renewal email can go out.
    self.db.simple_upsert_txn(
        txn,
        "account_validity",
        keyvalues={"user_id": user_id},
        values={"expiration_ts_ms": expiration_ts, "email_sent": False},
    )
|