2014-08-12 16:10:52 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-05 19:01:18 +01:00
|
|
|
# Copyright 2014 - 2016 OpenMarket Ltd
|
2014-08-12 16:10:52 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2014-08-13 04:14:34 +02:00
|
|
|
|
2016-02-05 12:22:30 +01:00
|
|
|
import re
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
from twisted.internet import defer
|
|
|
|
|
2014-08-14 17:03:04 +02:00
|
|
|
from synapse.api.errors import StoreError, Codes
|
2016-07-22 15:52:53 +02:00
|
|
|
from synapse.storage import background_updates
|
2016-04-06 17:50:47 +02:00
|
|
|
from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
|
2016-07-22 15:52:53 +02:00
|
|
|
class RegistrationStore(background_updates.BackgroundUpdateStore):
    """Storage for user accounts, access tokens and third-party identifiers."""

    def __init__(self, db_conn, hs):
        super(RegistrationStore, self).__init__(db_conn, hs)

        self.clock = hs.get_clock()

        self.register_background_index_update(
            "access_tokens_device_index",
            index_name="access_tokens_device_id",
            table="access_tokens",
            columns=["user_id", "device_id"],
        )

        # we no longer use refresh tokens, but it's possible that some people
        # might have a background update queued to build this index. Just
        # clear the background update.
        @defer.inlineCallbacks
        def noop_update(progress, batch_size):
            yield self._end_background_update("refresh_tokens_device_index")
            defer.returnValue(1)
        self.register_background_update_handler(
            "refresh_tokens_device_index", noop_update)

    @defer.inlineCallbacks
    def add_access_token_to_user(self, user_id, token, device_id=None):
        """Adds an access token for the given user.

        Args:
            user_id (str): The user ID.
            token (str): The new access token to add.
            device_id (str): ID of the device to associate with the access
                token
        Raises:
            StoreError if there was a problem adding this.
        """
        next_id = self._access_tokens_id_gen.get_next()

        yield self._simple_insert(
            "access_tokens",
            {
                "id": next_id,
                "user_id": user_id,
                "token": token,
                "device_id": device_id,
            },
            desc="add_access_token_to_user",
        )

    def register(self, user_id, token=None, password_hash=None,
                 was_guest=False, make_guest=False, appservice_id=None,
                 create_profile_with_localpart=None, admin=False):
        """Attempts to register an account.

        Args:
            user_id (str): The desired user ID to register.
            token (str): The desired access token to use for this user. If this
                is not None, the given access token is associated with the user
                id.
            password_hash (str): Optional. The password hash for this user.
            was_guest (bool): Optional. Whether this is a guest account being
                upgraded to a non-guest account.
            make_guest (boolean): True if the the new user should be guest,
                false to add a regular user account.
            appservice_id (str): The ID of the appservice registering the user.
            create_profile_with_localpart (str): Optionally create a profile for
                the given localpart.
            admin (bool): Whether the user should be registered as a server
                admin.
        Raises:
            StoreError if the user_id could not be registered.
        """
        return self.runInteraction(
            "register",
            self._register,
            user_id,
            token,
            password_hash,
            was_guest,
            make_guest,
            appservice_id,
            create_profile_with_localpart,
            admin
        )

    def _register(
        self,
        txn,
        user_id,
        token,
        password_hash,
        was_guest,
        make_guest,
        appservice_id,
        create_profile_with_localpart,
        admin,
    ):
        """Transaction body for `register`: see that method for the contract."""
        now = int(self.clock.time())

        next_id = self._access_tokens_id_gen.get_next()

        try:
            if was_guest:
                # Ensure that the guest user actually exists
                # ``allow_none=False`` makes this raise an exception
                # if the row isn't in the database.
                self._simple_select_one_txn(
                    txn,
                    "users",
                    keyvalues={
                        "name": user_id,
                        "is_guest": 1,
                    },
                    retcols=("name",),
                    allow_none=False,
                )

                self._simple_update_one_txn(
                    txn,
                    "users",
                    keyvalues={
                        "name": user_id,
                        "is_guest": 1,
                    },
                    updatevalues={
                        "password_hash": password_hash,
                        "upgrade_ts": now,
                        "is_guest": 1 if make_guest else 0,
                        "appservice_id": appservice_id,
                        "admin": 1 if admin else 0,
                    }
                )
            else:
                self._simple_insert_txn(
                    txn,
                    "users",
                    values={
                        "name": user_id,
                        "password_hash": password_hash,
                        "creation_ts": now,
                        "is_guest": 1 if make_guest else 0,
                        "appservice_id": appservice_id,
                        "admin": 1 if admin else 0,
                    }
                )
        except self.database_engine.module.IntegrityError:
            raise StoreError(
                400, "User ID already taken.", errcode=Codes.USER_IN_USE
            )

        if token:
            # it's possible for this to get a conflict, but only for a single user
            # since tokens are namespaced based on their user ID
            txn.execute(
                "INSERT INTO access_tokens(id, user_id, token)"
                " VALUES (?,?,?)",
                (next_id, user_id, token,)
            )

        if create_profile_with_localpart:
            # set a default displayname serverside to avoid ugly race
            # between auto-joins and clients trying to set displaynames
            txn.execute(
                "INSERT INTO profiles(user_id, displayname) VALUES (?,?)",
                (create_profile_with_localpart, create_profile_with_localpart)
            )

        self._invalidate_cache_and_stream(
            txn, self.get_user_by_id, (user_id,)
        )
        txn.call_after(self.is_guest.invalidate, (user_id,))

    @cached()
    def get_user_by_id(self, user_id):
        """Returns the `users` row for the given user ID, or None if there
        isn't one (retcols: name, password_hash, is_guest).
        """
        return self._simple_select_one(
            table="users",
            keyvalues={
                "name": user_id,
            },
            retcols=["name", "password_hash", "is_guest"],
            allow_none=True,
            desc="get_user_by_id",
        )

    def get_users_by_id_case_insensitive(self, user_id):
        """Gets users that match user_id case insensitively.

        Returns a mapping of user_id -> password_hash.
        """
        def f(txn):
            sql = (
                "SELECT name, password_hash FROM users"
                " WHERE lower(name) = lower(?)"
            )
            txn.execute(sql, (user_id,))
            return dict(txn)

        return self.runInteraction("get_users_by_id_case_insensitive", f)

    def user_set_password_hash(self, user_id, password_hash):
        """Sets the stored password hash for a user.

        NB. This does *not* evict any cache because the one use for this
            removes most of the entries subsequently anyway so it would be
            pointless. Use flush_user separately.
        """
        def user_set_password_hash_txn(txn):
            self._simple_update_one_txn(
                txn,
                'users', {
                    'name': user_id
                },
                {
                    'password_hash': password_hash
                }
            )
            self._invalidate_cache_and_stream(
                txn, self.get_user_by_id, (user_id,)
            )
        return self.runInteraction(
            "user_set_password_hash", user_set_password_hash_txn
        )

    def user_delete_access_tokens(self, user_id, except_token_id=None,
                                  device_id=None):
        """
        Invalidate access tokens belonging to a user

        Args:
            user_id (str): ID of user the tokens belong to
            except_token_id (str): list of access_tokens IDs which should
                *not* be deleted
            device_id (str|None): ID of device the tokens are associated with.
                If None, tokens associated with any device (or no device) will
                be deleted
        Returns:
            defer.Deferred[list[tuple[str, int, str|None]]]: a list of
                (token, token id, device id) for each of the deleted tokens
        """
        def f(txn):
            keyvalues = {
                "user_id": user_id,
            }
            if device_id is not None:
                keyvalues["device_id"] = device_id

            items = keyvalues.items()
            where_clause = " AND ".join(k + " = ?" for k, _ in items)
            values = [v for _, v in items]
            if except_token_id:
                where_clause += " AND id != ?"
                values.append(except_token_id)

            # Fetch the doomed rows first so their cache entries can be
            # invalidated and the caller can be told what was deleted.
            txn.execute(
                "SELECT token, id, device_id FROM access_tokens WHERE %s" % where_clause,
                values
            )
            tokens_and_devices = [(r[0], r[1], r[2]) for r in txn]

            for token, _, _ in tokens_and_devices:
                self._invalidate_cache_and_stream(
                    txn, self.get_user_by_access_token, (token,)
                )

            txn.execute(
                "DELETE FROM access_tokens WHERE %s" % where_clause,
                values
            )

            return tokens_and_devices

        return self.runInteraction(
            "user_delete_access_tokens", f,
        )

    def delete_access_token(self, access_token):
        """Deletes a single access token and evicts its auth-cache entry."""
        def f(txn):
            self._simple_delete_one_txn(
                txn,
                table="access_tokens",
                keyvalues={
                    "token": access_token
                },
            )

            self._invalidate_cache_and_stream(
                txn, self.get_user_by_access_token, (access_token,)
            )

        return self.runInteraction("delete_access_token", f)

    @cached()
    def get_user_by_access_token(self, token):
        """Get a user from the given access token.

        Args:
            token (str): The access token of a user.
        Returns:
            defer.Deferred: None, if the token did not match, otherwise dict
                including the keys `name`, `is_guest`, `device_id`, `token_id`.
        """
        return self.runInteraction(
            "get_user_by_access_token",
            self._query_for_auth,
            token
        )

    @defer.inlineCallbacks
    def is_server_admin(self, user):
        """Returns (via Deferred) whether the given user is a server admin.

        Args:
            user: a user object with a `to_string()` method (e.g. a UserID).
        """
        res = yield self._simple_select_one_onecol(
            table="users",
            keyvalues={"name": user.to_string()},
            retcol="admin",
            allow_none=True,
            desc="is_server_admin",
        )

        # Missing row (or NULL/0 column) counts as "not an admin".
        defer.returnValue(res if res else False)

    @cachedInlineCallbacks()
    def is_guest(self, user_id):
        """Returns (via Deferred) whether the given user ID is a guest account."""
        res = yield self._simple_select_one_onecol(
            table="users",
            keyvalues={"name": user_id},
            retcol="is_guest",
            allow_none=True,
            desc="is_guest",
        )

        # Missing row (or NULL/0 column) counts as "not a guest".
        defer.returnValue(res if res else False)

    def _query_for_auth(self, txn, token):
        """Transaction body for `get_user_by_access_token`: joins the token
        against its owning user and returns the first matching row, or None.
        """
        sql = (
            "SELECT users.name, users.is_guest, access_tokens.id as token_id,"
            " access_tokens.device_id"
            " FROM users"
            " INNER JOIN access_tokens on users.name = access_tokens.user_id"
            " WHERE token = ?"
        )

        txn.execute(sql, (token,))
        rows = self.cursor_to_dict(txn)
        if rows:
            return rows[0]

        return None

    @defer.inlineCallbacks
    def user_add_threepid(self, user_id, medium, address, validated_at, added_at):
        """Upserts a third-party identifier (keyed on medium+address) for a user."""
        yield self._simple_upsert("user_threepids", {
            "medium": medium,
            "address": address,
        }, {
            "user_id": user_id,
            "validated_at": validated_at,
            "added_at": added_at,
        })

    @defer.inlineCallbacks
    def user_get_threepids(self, user_id):
        """Returns (via Deferred) the list of threepid rows for a user, each
        with keys medium, address, validated_at, added_at.
        """
        ret = yield self._simple_select_list(
            "user_threepids", {
                "user_id": user_id
            },
            ['medium', 'address', 'validated_at', 'added_at'],
            'user_get_threepids'
        )
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def get_user_id_by_threepid(self, medium, address):
        """Returns (via Deferred) the user ID bound to the given threepid,
        or None if no binding exists.
        """
        ret = yield self._simple_select_one(
            "user_threepids",
            {
                "medium": medium,
                "address": address
            },
            ['user_id'], True, 'get_user_id_by_threepid'
        )
        if ret:
            defer.returnValue(ret['user_id'])
        defer.returnValue(None)

    def user_delete_threepids(self, user_id):
        """Deletes all threepids belonging to the given user."""
        return self._simple_delete(
            "user_threepids",
            keyvalues={
                "user_id": user_id,
            },
            desc="user_delete_threepids",
        )

    def user_delete_threepid(self, user_id, medium, address):
        """Deletes a single threepid binding for the given user."""
        return self._simple_delete(
            "user_threepids",
            keyvalues={
                "user_id": user_id,
                "medium": medium,
                "address": address,
            },
            # Fixed: was "user_delete_threepids" (copy-paste from the sibling
            # method), which mislabelled this query in metrics/logging.
            desc="user_delete_threepid",
        )

    @defer.inlineCallbacks
    def count_all_users(self):
        """Counts all users registered on the homeserver."""
        def _count_users(txn):
            txn.execute("SELECT COUNT(*) AS users FROM users")
            rows = self.cursor_to_dict(txn)
            if rows:
                return rows[0]["users"]
            return 0

        ret = yield self.runInteraction("count_users", _count_users)
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def count_nonbridged_users(self):
        """Counts users not registered via an application service."""
        def _count_users(txn):
            txn.execute("""
                SELECT COALESCE(COUNT(*), 0) FROM users
                WHERE appservice_id IS NULL
            """)
            count, = txn.fetchone()
            return count

        ret = yield self.runInteraction("count_users", _count_users)
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def find_next_generated_user_id_localpart(self):
        """
        Gets the localpart of the next generated user ID.

        Generated user IDs are integers, and we aim for them to be as small as
        we can. Unfortunately, it's possible some of them are already taken by
        existing users, and there may be gaps in the already taken range. This
        function returns the start of the first allocatable gap. This is to
        avoid the case of ID 10000000 being pre-allocated, so us wasting the
        first (and shortest) many generated user IDs.
        """
        def _find_next_generated_user_id(txn):
            txn.execute("SELECT name FROM users")
            rows = self.cursor_to_dict(txn)

            # Raw string: "\d" in a plain literal is an invalid escape
            # sequence (an error in modern Python).
            regex = re.compile(r"^@(\d+):")

            found = set()

            for r in rows:
                user_id = r["name"]
                match = regex.search(user_id)
                if match:
                    found.add(int(match.group(1)))
            # The first gap must lie in [0, len(found)] by pigeonhole.
            for i in xrange(len(found) + 1):
                if i not in found:
                    return i

        defer.returnValue((yield self.runInteraction(
            "find_next_generated_user_id",
            _find_next_generated_user_id
        )))

    @defer.inlineCallbacks
    def get_3pid_guest_access_token(self, medium, address):
        """Returns (via Deferred) the guest access token previously saved for
        the given threepid, or None.
        """
        ret = yield self._simple_select_one(
            "threepid_guest_access_tokens",
            {
                "medium": medium,
                "address": address
            },
            ["guest_access_token"], True, 'get_3pid_guest_access_token'
        )
        if ret:
            defer.returnValue(ret["guest_access_token"])
        defer.returnValue(None)

    @defer.inlineCallbacks
    def save_or_get_3pid_guest_access_token(
            self, medium, address, access_token, inviter_user_id
    ):
        """
        Gets the 3pid's guest access token if exists, else saves access_token.

        Args:
            medium (str): Medium of the 3pid. Must be "email".
            address (str): 3pid address.
            access_token (str): The access token to persist if none is
                already persisted.
            inviter_user_id (str): User ID of the inviter.

        Returns:
            deferred str: Whichever access token is persisted at the end
            of this function call.
        """
        def insert(txn):
            txn.execute(
                "INSERT INTO threepid_guest_access_tokens "
                "(medium, address, guest_access_token, first_inviter) "
                "VALUES (?, ?, ?, ?)",
                (medium, address, access_token, inviter_user_id)
            )

        try:
            yield self.runInteraction("save_3pid_guest_access_token", insert)
            defer.returnValue(access_token)
        except self.database_engine.module.IntegrityError:
            # Lost the race: another writer persisted a token first; return
            # the one already stored.
            ret = yield self.get_3pid_guest_access_token(medium, address)
            defer.returnValue(ret)