2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2018-08-14 12:53:43 +02:00
|
|
|
# Copyright 2018 New Vector
|
2019-11-27 22:54:07 +01:00
|
|
|
# Copyright 2019 Matrix.org Federation C.I.C
|
2014-09-12 19:24:53 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2023-05-22 14:25:39 +02:00
|
|
|
import functools
|
2018-11-27 03:00:33 +01:00
|
|
|
import gc
|
2018-10-01 16:11:58 +02:00
|
|
|
import hashlib
|
|
|
|
import hmac
|
2023-04-18 15:50:27 +02:00
|
|
|
import json
|
2018-06-04 08:06:06 +02:00
|
|
|
import logging
|
2021-04-27 14:13:07 +02:00
|
|
|
import secrets
|
2019-06-29 09:06:55 +02:00
|
|
|
import time
|
2021-11-12 16:50:54 +01:00
|
|
|
from typing import (
|
|
|
|
Any,
|
2022-04-01 18:04:16 +02:00
|
|
|
Awaitable,
|
2021-11-12 16:50:54 +01:00
|
|
|
Callable,
|
|
|
|
ClassVar,
|
|
|
|
Dict,
|
2022-04-01 18:04:16 +02:00
|
|
|
Generic,
|
2021-11-12 16:50:54 +01:00
|
|
|
Iterable,
|
|
|
|
List,
|
2022-07-27 19:18:41 +02:00
|
|
|
NoReturn,
|
2021-11-12 16:50:54 +01:00
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
|
|
|
)
|
2021-04-09 19:44:38 +02:00
|
|
|
from unittest.mock import Mock, patch
|
2018-08-14 12:53:43 +02:00
|
|
|
|
2022-02-11 13:06:02 +01:00
|
|
|
import canonicaljson
|
|
|
|
import signedjson.key
|
|
|
|
import unpaddedbase64
|
2022-07-27 19:18:41 +02:00
|
|
|
from typing_extensions import Concatenate, ParamSpec, Protocol
|
2018-08-17 17:08:45 +02:00
|
|
|
|
2022-02-11 13:06:02 +01:00
|
|
|
from twisted.internet.defer import Deferred, ensureDeferred
|
2020-09-28 19:00:30 +02:00
|
|
|
from twisted.python.failure import Failure
|
2019-06-29 09:06:55 +02:00
|
|
|
from twisted.python.threadpool import ThreadPool
|
2023-02-17 19:19:38 +01:00
|
|
|
from twisted.test.proto_helpers import MemoryReactor, MemoryReactorClock
|
2014-09-12 19:24:53 +02:00
|
|
|
from twisted.trial import unittest
|
2020-11-16 15:45:52 +01:00
|
|
|
from twisted.web.resource import Resource
|
2021-11-16 11:41:35 +01:00
|
|
|
from twisted.web.server import Request
|
2014-09-12 19:24:53 +02:00
|
|
|
|
2021-03-17 17:51:55 +01:00
|
|
|
from synapse import events
|
2022-04-01 18:04:16 +02:00
|
|
|
from synapse.api.constants import EventTypes
|
2022-02-22 13:17:10 +01:00
|
|
|
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
|
2023-04-18 15:50:27 +02:00
|
|
|
from synapse.config._base import Config, RootConfig
|
2019-05-13 22:01:14 +02:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2022-02-22 13:17:10 +01:00
|
|
|
from synapse.config.server import DEFAULT_ROOM_VERSION
|
|
|
|
from synapse.crypto.event_signing import add_hashes_and_signatures
|
2022-02-11 13:06:02 +01:00
|
|
|
from synapse.federation.transport.server import TransportLayerServer
|
2018-08-14 12:53:43 +02:00
|
|
|
from synapse.http.server import JsonResource
|
2020-01-06 13:28:58 +01:00
|
|
|
from synapse.http.site import SynapseRequest, SynapseSite
|
2020-03-24 15:45:33 +01:00
|
|
|
from synapse.logging.context import (
|
|
|
|
SENTINEL_CONTEXT,
|
2020-03-31 18:27:56 +02:00
|
|
|
LoggingContext,
|
2020-03-24 15:45:33 +01:00
|
|
|
current_context,
|
|
|
|
set_current_context,
|
|
|
|
)
|
2021-11-12 16:50:54 +01:00
|
|
|
from synapse.rest import RegisterServletsFunc
|
2018-08-14 12:53:43 +02:00
|
|
|
from synapse.server import HomeServer
|
2022-07-27 19:18:41 +02:00
|
|
|
from synapse.types import JsonDict, Requester, UserID, create_requester
|
2021-09-30 12:04:40 +02:00
|
|
|
from synapse.util import Clock
|
2020-12-02 16:21:00 +01:00
|
|
|
from synapse.util.httpresourcetree import create_resource_tree
|
2018-06-04 08:06:06 +02:00
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
from tests.server import (
|
|
|
|
CustomHeaderType,
|
|
|
|
FakeChannel,
|
2023-01-26 11:15:50 +01:00
|
|
|
ThreadedMemoryReactorClock,
|
2022-04-01 18:04:16 +02:00
|
|
|
get_clock,
|
|
|
|
make_request,
|
|
|
|
setup_test_homeserver,
|
|
|
|
)
|
2020-10-30 12:15:07 +01:00
|
|
|
from tests.test_utils import event_injection, setup_awaitable_errors
|
2019-01-29 13:07:00 +01:00
|
|
|
from tests.test_utils.logging_setup import setup_logging
|
2023-02-17 19:19:38 +01:00
|
|
|
from tests.utils import checked_cast, default_config, setupdb
|
2018-12-04 11:30:32 +01:00
|
|
|
|
|
|
|
# Initialise the test database and logging exactly once, at import time, so
# that every test case built on this module shares a consistent environment.
setupdb()
setup_logging()

# Generic result type used by the deferred-driving helpers below.
TV = TypeVar("TV")
# The exception type carried by a _TypedFailure; covariant so that a
# _TypedFailure[SubError] can be used where a _TypedFailure[Error] is expected.
_ExcType = TypeVar("_ExcType", bound=BaseException, covariant=True)

# Parameter-spec / return / "self" type variables for the `around` machinery.
P = ParamSpec("P")
R = TypeVar("R")
S = TypeVar("S")
|
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
|
|
|
|
class _TypedFailure(Generic[_ExcType], Protocol):
    """A twisted Failure whose ``value`` attribute is known to have a
    particular exception type.

    This exists purely for typing: it lets ``get_failure`` advertise the
    exception type of the failure it returns.
    """

    @property
    def value(self) -> _ExcType:
        ...
|
|
|
|
|
2014-09-12 19:24:53 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]:
    """A CLOS-style 'around' modifier, which wraps the original method of the
    given instance with another piece of code.

    @around(self)
    def method_name(orig, *args, **kwargs):
        return orig(*args, **kwargs)
    """

    def _install(wrapper: Callable[Concatenate[S, P], R]) -> None:
        # The wrapper replaces the attribute of the same name on `target`;
        # the original implementation is handed to it as the first argument.
        attr_name = wrapper.__name__
        original = getattr(target, attr_name)

        def replacement(*args: P.args, **kwargs: P.kwargs) -> R:
            return wrapper(original, *args, **kwargs)

        setattr(target, attr_name, replacement)

    return _install
|
|
|
|
|
|
|
|
|
2023-04-18 15:50:27 +02:00
|
|
|
# Constrained TypeVar: deepcopy_config accepts either an individual Config
# section or the whole RootConfig, and returns the same kind it was given.
_TConfig = TypeVar("_TConfig", Config, RootConfig)
|
|
|
|
|
|
|
|
|
|
|
|
def deepcopy_config(config: _TConfig) -> _TConfig:
    """Return a copy of a Config/RootConfig.

    Nested Config sections are copied recursively; every other attribute value
    is shared with the original.
    """
    copied: _TConfig

    if isinstance(config, RootConfig):
        copied = config.__class__(config.config_files)  # type: ignore[arg-type]
    else:
        copied = config.__class__(config.root)

    for name, value in config.__dict__.items():
        # Dunder attributes and the back-reference to the root are set up by
        # the constructor, so leave them alone.
        if name.startswith("__") or name == "root":
            continue
        copied_value = deepcopy_config(value) if isinstance(value, Config) else value
        setattr(copied, name, copied_value)

    return copied
|
|
|
|
|
|
|
|
|
2023-05-22 14:25:39 +02:00
|
|
|
@functools.lru_cache(maxsize=8)
def _parse_config_dict(config: str) -> RootConfig:
    """Parse a JSON-serialised config into a HomeServerConfig.

    Takes a string (rather than a dict) so that lru_cache can key on it.
    Callers must not mutate the returned object: it is shared via the cache.
    """
    parsed = HomeServerConfig()
    parsed.parse_config_dict(json.loads(config), "", "")
    return parsed
|
2023-04-18 15:50:27 +02:00
|
|
|
|
|
|
|
|
|
|
|
def make_homeserver_config_obj(config: Dict[str, Any]) -> RootConfig:
    """Creates a :class:`HomeServerConfig` instance with the given configuration dict.

    This is equivalent to::

        config_obj = HomeServerConfig()
        config_obj.parse_config_dict(config, "", "")

    but it keeps a cache of `HomeServerConfig` instances and deepcopies them as needed,
    to avoid validating the whole configuration every time.
    """
    # Serialise deterministically so that equal dicts hit the same cache entry.
    cached = _parse_config_dict(json.dumps(config, sort_keys=True))
    return deepcopy_config(cached)
|
|
|
|
|
|
|
|
|
2014-09-12 19:24:53 +02:00
|
|
|
class TestCase(unittest.TestCase):
    """A subclass of twisted.trial's TestCase which looks for 'loglevel'
    attributes on both itself and its individual test methods, to override the
    root logger's logging level while that test (case|method) runs."""

    def __init__(self, methodName: str):
        super().__init__(methodName)

        method = getattr(self, methodName)

        # A per-method loglevel (set by the DEBUG/INFO decorators) wins over a
        # class-level one; None means "leave the root logger alone".
        level = getattr(method, "loglevel", getattr(self, "loglevel", None))

        @around(self)
        def setUp(orig: Callable[[], R]) -> R:
            # if we're not starting in the sentinel logcontext, then to be honest
            # all future bets are off.
            if current_context():
                self.fail(
                    "Test starting with non-sentinel logging context %s"
                    % (current_context(),)
                )

            # Disable GC for duration of test. See below for why.
            gc.disable()

            old_level = logging.getLogger().level
            if level is not None and old_level != level:
                # Arrange for the previous root log level to be restored once
                # this test finishes.

                @around(self)
                def tearDown(orig: Callable[[], R]) -> R:
                    ret = orig()
                    logging.getLogger().setLevel(old_level)
                    return ret

                logging.getLogger().setLevel(level)

            # Trial messes with the warnings configuration, thus this has to be
            # done in the context of an individual TestCase.
            self.addCleanup(setup_awaitable_errors())

            return orig()

        # We want to force a GC to workaround problems with deferreds leaking
        # logcontexts when they are GCed (see the logcontext docs).
        #
        # The easiest way to do this would be to do a full GC after each test
        # run, but that is very expensive. Instead, we disable GC (above) for
        # the duration of the test and only run a gen-0 GC, which is a lot
        # quicker. This doesn't clean up everything, since the TestCase
        # instance still holds references to objects created during the test,
        # such as HomeServers, so we do a full GC every so often.

        @around(self)
        def tearDown(orig: Callable[[], R]) -> R:
            ret = orig()
            gc.collect(0)
            # Run a full GC every 50 gen-0 GCs.
            gen0_stats = gc.get_stats()[0]
            gen0_collections = gen0_stats["collections"]
            if gen0_collections % 50 == 0:
                gc.collect()
            gc.enable()
            set_current_context(SENTINEL_CONTEXT)

            return ret

    def assertObjectHasAttributes(self, attrs: Dict[str, object], obj: object) -> None:
        """Asserts that the given object has each of the attributes given, and
        that the value of each matches according to assertEqual."""
        for key in attrs.keys():
            if not hasattr(obj, key):
                raise AssertionError("Expected obj to have a '.%s'" % key)
            try:
                self.assertEqual(attrs[key], getattr(obj, key))
            except AssertionError as e:
                # Re-raise with the offending attribute named, chaining the
                # original assertion for the full diff.
                raise (type(e))(f"Assert error for '.{key}':") from e

    def assert_dict(self, required: dict, actual: dict) -> None:
        """Does a partial assert of a dict.

        Args:
            required: The keys and value which MUST be in 'actual'.
            actual: The test result. Extra keys will not be checked.
        """
        for key in required:
            self.assertEqual(
                required[key], actual[key], msg="%s mismatch. %s" % (key, actual)
            )
|
2018-07-17 12:43:18 +02:00
|
|
|
|
2014-09-12 19:38:11 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def DEBUG(target: TV) -> TV:
    """Mark a TestCase class or individual test method to run at DEBUG log level.

    Works by setting the ``loglevel`` attribute, which ``TestCase.__init__``
    inspects when the test is constructed.
    """
    target.loglevel = logging.DEBUG  # type: ignore[attr-defined]
    return target
|
2018-08-14 12:53:43 +02:00
|
|
|
|
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def INFO(target: TV) -> TV:
    """Mark a TestCase class or individual test method to run at INFO log level.

    Works by setting the ``loglevel`` attribute, which ``TestCase.__init__``
    inspects when the test is constructed.
    """
    target.loglevel = logging.INFO  # type: ignore[attr-defined]
    return target
|
|
|
|
|
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def logcontext_clean(target: TV) -> TV:
    """A decorator which marks the TestCase or method as 'logcontext_clean'

    ... ie, any logcontext errors should cause a test failure
    """

    def logcontext_error(msg: str) -> NoReturn:
        # Note the trailing comma: `% (msg)` is not a tuple, just a
        # parenthesised string, and would misbehave if the argument ever
        # stopped being a plain str.
        raise AssertionError("logcontext error: %s" % (msg,))

    # Swap out synapse's logcontext-error reporter (normally just a warning)
    # for one which fails the test outright.
    patcher = patch("synapse.logging.context.logcontext_error", new=logcontext_error)
    return patcher(target)  # type: ignore[call-overload]
|
2020-09-28 18:58:33 +02:00
|
|
|
|
|
|
|
|
2018-08-14 12:53:43 +02:00
|
|
|
class HomeserverTestCase(TestCase):
    """
    A base TestCase that reduces boilerplate for HomeServer-using test cases.

    Defines a setUp method which creates a mock reactor, and instantiates a homeserver
    running on that reactor.

    There are various hooks for modifying the way that the homeserver is instantiated:

    * override make_homeserver, for example by making it pass different parameters into
      setup_test_homeserver.

    * override default_config, to return a modified configuration dictionary for use
      by setup_test_homeserver.

    * On a per-test basis, you can use the @override_config decorator to give a
      dictionary containing additional configuration settings to be added to the basic
      config dict.

    Attributes:
        servlets: List of servlet registration function.
        user_id (str): The user ID to assume if auth is hijacked.
        hijack_auth: Whether to hijack auth to return the user specified
            in user_id.
    """

    # Whether setUp should stub out the auth layer so that every request is
    # attributed to `user_id` (if that attribute is set).
    hijack_auth: ClassVar[bool] = True
    # Whether setUp should attach a real ThreadPool to the fake reactor.
    needs_threadpool: ClassVar[bool] = False
    # Servlet registration functions applied to the test JsonResource.
    servlets: ClassVar[List[RegisterServletsFunc]] = []
|
2018-08-14 12:53:43 +02:00
|
|
|
|
2021-11-16 11:41:35 +01:00
|
|
|
def __init__(self, methodName: str):
|
|
|
|
super().__init__(methodName)
|
2019-07-12 11:16:23 +02:00
|
|
|
|
|
|
|
# see if we have any additional config for this test
|
|
|
|
method = getattr(self, methodName)
|
|
|
|
self._extra_config = getattr(method, "_extra_config", None)
|
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
    def setUp(self) -> None:
        """
        Set up the TestCase by calling the homeserver constructor, optionally
        hijacking the authentication system to return a fixed user, and then
        calling the prepare function.
        """
        self.reactor, self.clock = get_clock()
        self._hs_args = {"clock": self.clock, "reactor": self.reactor}
        self.hs = self.make_homeserver(self.reactor, self.clock)

        # Honour the `use_frozen_dicts` config option. We have to do this
        # manually because this is taken care of in the app `start` code, which
        # we don't run. Plus we want to reset it on tearDown.
        events.USE_FROZEN_DICTS = self.hs.config.server.use_frozen_dicts

        if self.hs is None:
            raise Exception("No homeserver returned from make_homeserver.")

        if not isinstance(self.hs, HomeServer):
            raise Exception("A homeserver wasn't returned, but %r" % (self.hs,))

        # create the root resource, and a site to wrap it.
        self.resource = self.create_test_resource()
        self.site = SynapseSite(
            logger_name="synapse.access.http.fake",
            site_tag=self.hs.config.server.server_name,
            config=self.hs.config.server.listeners[0],
            resource=self.resource,
            server_version_string="1",
            max_request_body_size=4096,
            reactor=self.reactor,
            hs=self.hs,
        )

        # Imported locally — presumably to avoid a circular import at module
        # load time; TODO confirm.
        from tests.rest.client.utils import RestHelper

        self.helper = RestHelper(
            self.hs,
            checked_cast(MemoryReactorClock, self.hs.get_reactor()),
            self.site,
            getattr(self, "user_id", None),
        )

        if hasattr(self, "user_id"):
            if self.hijack_auth:
                assert self.helper.auth_user_id is not None
                token = "some_fake_token"

                # We need a valid token ID to satisfy foreign key constraints.
                token_id = self.get_success(
                    self.hs.get_datastores().main.add_access_token_to_user(
                        self.helper.auth_user_id,
                        token,
                        None,
                        None,
                    )
                )

                # This has to be a function and not just a Mock, because
                # `self.helper.auth_user_id` is temporarily reassigned in some tests
                async def get_requester(*args: Any, **kwargs: Any) -> Requester:
                    assert self.helper.auth_user_id is not None
                    return create_requester(
                        user_id=UserID.from_string(self.helper.auth_user_id),
                        access_token_id=token_id,
                    )

                # Type ignore: mypy doesn't like us assigning to methods.
                self.hs.get_auth().get_user_by_req = get_requester  # type: ignore[assignment]
                self.hs.get_auth().get_user_by_access_token = get_requester  # type: ignore[assignment]
                self.hs.get_auth().get_access_token_from_request = Mock(return_value=token)  # type: ignore[assignment]

        if self.needs_threadpool:
            # Replace the reactor's stub threadpool with a real one so tests
            # exercising actual threads (e.g. database code) can run.
            self.reactor.threadpool = ThreadPool()  # type: ignore[assignment]
            self.addCleanup(self.reactor.threadpool.stop)
            self.reactor.threadpool.start()

        if hasattr(self, "prepare"):
            self.prepare(self.reactor, self.clock, self.hs)
|
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def tearDown(self) -> None:
|
2021-03-17 17:51:55 +01:00
|
|
|
# Reset to not use frozen dicts.
|
|
|
|
events.USE_FROZEN_DICTS = False
|
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def wait_on_thread(self, deferred: Deferred, timeout: int = 10) -> None:
|
2019-06-29 09:06:55 +02:00
|
|
|
"""
|
|
|
|
Wait until a Deferred is done, where it's waiting on a real thread.
|
|
|
|
"""
|
|
|
|
start_time = time.time()
|
|
|
|
|
|
|
|
while not deferred.called:
|
|
|
|
if start_time + timeout < time.time():
|
|
|
|
raise ValueError("Timed out waiting for threadpool")
|
|
|
|
self.reactor.advance(0.01)
|
|
|
|
time.sleep(0.01)
|
|
|
|
|
2021-10-06 14:56:45 +02:00
|
|
|
def wait_for_background_updates(self) -> None:
|
2021-12-07 17:51:53 +01:00
|
|
|
"""Block until all background database updates have completed."""
|
2022-02-23 12:04:02 +01:00
|
|
|
store = self.hs.get_datastores().main
|
2021-10-06 14:56:45 +02:00
|
|
|
while not self.get_success(
|
2021-12-07 17:51:53 +01:00
|
|
|
store.db_pool.updates.has_completed_background_updates()
|
2021-10-06 14:56:45 +02:00
|
|
|
):
|
|
|
|
self.get_success(
|
2021-12-07 17:51:53 +01:00
|
|
|
store.db_pool.updates.do_next_background_update(False), by=0.1
|
2021-10-06 14:56:45 +02:00
|
|
|
)
|
|
|
|
|
2023-02-14 20:03:35 +01:00
|
|
|
def make_homeserver(
|
|
|
|
self, reactor: ThreadedMemoryReactorClock, clock: Clock
|
|
|
|
) -> HomeServer:
|
2018-08-14 12:53:43 +02:00
|
|
|
"""
|
|
|
|
Make and return a homeserver.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
reactor: A Twisted Reactor, or something that pretends to be one.
|
2022-11-16 16:25:24 +01:00
|
|
|
clock: The Clock, associated with the reactor.
|
2018-08-14 12:53:43 +02:00
|
|
|
|
|
|
|
Returns:
|
2022-07-27 19:18:41 +02:00
|
|
|
A homeserver suitable for testing.
|
2018-08-14 12:53:43 +02:00
|
|
|
|
|
|
|
Function to be overridden in subclasses.
|
|
|
|
"""
|
2018-09-20 08:28:18 +02:00
|
|
|
hs = self.setup_test_homeserver()
|
|
|
|
return hs
|
2018-08-14 12:53:43 +02:00
|
|
|
|
2020-11-16 15:45:52 +01:00
|
|
|
def create_test_resource(self) -> Resource:
|
2019-05-07 10:29:30 +02:00
|
|
|
"""
|
2020-11-16 15:45:52 +01:00
|
|
|
Create a the root resource for the test server.
|
2019-05-07 10:29:30 +02:00
|
|
|
|
2020-12-02 16:21:00 +01:00
|
|
|
The default calls `self.create_resource_dict` and builds the resultant dict
|
|
|
|
into a tree.
|
2019-05-07 10:29:30 +02:00
|
|
|
"""
|
2020-12-02 16:21:00 +01:00
|
|
|
root_resource = Resource()
|
|
|
|
create_resource_tree(self.create_resource_dict(), root_resource)
|
|
|
|
return root_resource
|
2019-05-07 10:29:30 +02:00
|
|
|
|
2020-12-02 16:21:00 +01:00
|
|
|
def create_resource_dict(self) -> Dict[str, Resource]:
|
|
|
|
"""Create a resource tree for the test server
|
|
|
|
|
|
|
|
A resource tree is a mapping from path to twisted.web.resource.
|
2019-05-07 10:29:30 +02:00
|
|
|
|
2020-12-02 16:21:00 +01:00
|
|
|
The default implementation creates a JsonResource and calls each function in
|
|
|
|
`servlets` to register servlets against it.
|
|
|
|
"""
|
|
|
|
servlet_resource = JsonResource(self.hs)
|
|
|
|
for servlet in self.servlets:
|
|
|
|
servlet(self.hs, servlet_resource)
|
|
|
|
return {
|
|
|
|
"/_matrix/client": servlet_resource,
|
|
|
|
"/_synapse/admin": servlet_resource,
|
|
|
|
}
|
2019-05-07 10:29:30 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def default_config(self) -> JsonDict:
|
2018-10-01 16:11:58 +02:00
|
|
|
"""
|
2019-05-13 22:01:14 +02:00
|
|
|
Get a default HomeServer config dict.
|
2018-10-01 16:11:58 +02:00
|
|
|
"""
|
2020-03-24 19:33:49 +01:00
|
|
|
config = default_config("test")
|
2019-07-12 11:16:23 +02:00
|
|
|
|
|
|
|
# apply any additional config which was specified via the override_config
|
|
|
|
# decorator.
|
|
|
|
if self._extra_config is not None:
|
|
|
|
config.update(self._extra_config)
|
|
|
|
|
|
|
|
return config
|
2018-10-01 16:11:58 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def prepare(
|
|
|
|
self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
|
|
|
|
) -> None:
|
2018-08-14 12:53:43 +02:00
|
|
|
"""
|
|
|
|
Prepare for the test. This involves things like mocking out parts of
|
|
|
|
the homeserver, or building test data common across the whole test
|
|
|
|
suite.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
reactor: A Twisted Reactor, or something that pretends to be one.
|
2022-11-16 16:25:24 +01:00
|
|
|
clock: The Clock, associated with the reactor.
|
|
|
|
homeserver: The HomeServer to test against.
|
2018-08-14 12:53:43 +02:00
|
|
|
|
|
|
|
Function to optionally be overridden in subclasses.
|
|
|
|
"""
|
|
|
|
|
2018-09-20 12:14:34 +02:00
|
|
|
    def make_request(
        self,
        method: Union[bytes, str],
        path: Union[bytes, str],
        content: Union[bytes, str, JsonDict] = b"",
        access_token: Optional[str] = None,
        request: Type[Request] = SynapseRequest,
        shorthand: bool = True,
        federation_auth_origin: Optional[bytes] = None,
        content_is_form: bool = False,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        This is a thin wrapper around tests.server.make_request, bound to this
        test's reactor and site.

        Args:
            method: The HTTP request method ("verb").
            path: The HTTP path, suitably URL encoded (e.g. escaped UTF-8 & spaces
                and such).
            content: The body of the request. JSON-encoded, if a dict.
            access_token: The access token to use as a Bearer token, if any.
            request: The request class to instantiate.
            shorthand: Whether to try and be helpful and prefix the given URL
                with the usual REST API path, if it doesn't contain it.
            federation_auth_origin: if set to not-None, we will add a fake
                Authorization header pretending to be the given server name.
            content_is_form: Whether the content is URL encoded form data. Adds the
                'Content-Type': 'application/x-www-form-urlencoded' header.

            await_result: whether to wait for the request to complete rendering. If
                true (the default), will pump the test reactor until the renderer
                tells the channel the request is finished.

            custom_headers: (name, value) pairs to add as request headers

            client_ip: The IP to use as the requesting IP. Useful for testing
                ratelimiting.

        Returns:
            The FakeChannel object which stores the result of the request.
        """
        # NOTE: arguments are passed positionally, so the order here must
        # match tests.server.make_request exactly.
        return make_request(
            self.reactor,
            self.site,
            method,
            path,
            content,
            access_token,
            request,
            shorthand,
            federation_auth_origin,
            content_is_form,
            await_result,
            custom_headers,
            client_ip,
        )
|
2018-08-14 12:53:43 +02:00
|
|
|
|
2023-05-05 16:06:22 +02:00
|
|
|
    def setup_test_homeserver(
        self, name: Optional[str] = None, **kwargs: Any
    ) -> HomeServer:
        """
        Set up the test homeserver, meant to be called by the overridable
        make_homeserver. It automatically passes through the test class's
        clock & reactor.

        Args:
            name: If set, the server name to use, overriding any `server_name`
                in the config.
            See tests.utils.setup_test_homeserver.

        Returns:
            synapse.server.HomeServer
        """
        kwargs = dict(kwargs)
        kwargs.update(self._hs_args)
        if "config" not in kwargs:
            config = self.default_config()
        else:
            config = kwargs["config"]

        # The server name can be specified using either the `name` argument or a config
        # override. The `name` argument takes precedence over any config overrides.
        if name is not None:
            config["server_name"] = name

        # Parse the config from a config dict into a HomeServerConfig
        config_obj = make_homeserver_config_obj(config)
        kwargs["config"] = config_obj

        # The server name in the config is now `name`, if provided, or the `server_name`
        # from a config override, or the default of "test". Whichever it is, we
        # construct a homeserver with a matching name.
        kwargs["name"] = config_obj.server.server_name

        # NB: `stor` is assigned *after* this closure is defined; that works
        # because Python closures bind names late — run_bg_updates is only
        # called once `stor` exists.
        async def run_bg_updates() -> None:
            with LoggingContext("run_bg_updates"):
                self.get_success(stor.db_pool.updates.run_background_updates(False))

        hs = setup_test_homeserver(self.addCleanup, **kwargs)
        stor = hs.get_datastores().main

        # Run the database background updates, when running against "master".
        if hs.__class__.__name__ == "TestHomeServer":
            self.get_success(run_bg_updates())

        return hs
|
2018-08-30 16:19:58 +02:00
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
def pump(self, by: float = 0.0) -> None:
|
2018-08-30 16:19:58 +02:00
|
|
|
"""
|
|
|
|
Pump the reactor enough that Deferreds will fire.
|
|
|
|
"""
|
2018-09-03 18:21:48 +02:00
|
|
|
self.reactor.pump([by] * 100)
|
2018-08-30 16:19:58 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def get_success(self, d: Awaitable[TV], by: float = 0.0) -> TV:
|
2022-04-01 18:04:16 +02:00
|
|
|
deferred: Deferred[TV] = ensureDeferred(d) # type: ignore[arg-type]
|
2019-03-18 18:50:24 +01:00
|
|
|
self.pump(by=by)
|
2022-04-01 17:10:31 +02:00
|
|
|
return self.successResultOf(deferred)
|
2018-10-01 16:11:58 +02:00
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
def get_failure(
    self, d: Awaitable[Any], exc: Type[_ExcType]
) -> _TypedFailure[_ExcType]:
    """
    Run a Deferred and get a Failure from it. The failure must be of the type `exc`.
    """
    pending: Deferred[Any] = ensureDeferred(d)  # type: ignore[arg-type]
    # Let the reactor turn so the deferred can fail.
    self.pump()
    return self.failureResultOf(pending, exc)
|
2019-03-21 16:10:21 +01:00
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
def get_success_or_raise(self, d: Awaitable[TV], by: float = 0.0) -> TV:
    """Drive deferred to completion and return result or raise exception
    on failure.
    """
    pending: Deferred[TV] = ensureDeferred(d)  # type: ignore[arg-type]

    # Capture whatever fires (success value or Failure) in a one-slot list.
    outcomes: list = []
    pending.addBoth(outcomes.append)

    self.pump(by=by)

    if not outcomes:
        # The deferred never resolved within the pumped time.
        self.fail(
            "Success result expected on {!r}, found no result instead".format(
                pending
            )
        )

    outcome = outcomes[0]

    if isinstance(outcome, Failure):
        # Re-raise the original exception (with its traceback) for the caller.
        outcome.raiseException()

    return outcome
|
|
|
|
|
2020-11-05 14:55:45 +01:00
|
|
|
def register_user(
    self,
    username: str,
    password: str,
    admin: Optional[bool] = False,
    displayname: Optional[str] = None,
) -> str:
    """
    Register a user. Requires the Admin API be registered.

    Args:
        username: The user part of the new user.
        password: The password of the new user.
        admin: Whether the user should be created as an admin or not.
        displayname: The displayname of the new user.

    Returns:
        The MXID of the new user.
    """
    self.hs.config.registration.registration_shared_secret = "shared"

    # Fetch a one-time registration nonce from the admin API.
    channel = self.make_request("GET", "/_synapse/admin/v1/register")
    self.assertEqual(channel.code, 200, msg=channel.result)
    nonce = channel.json_body["nonce"]

    # The MAC covers nonce \0 username \0 password \0 admin-flag, keyed on
    # the shared secret configured above.
    mac_payload = b"\x00".join([username.encode("utf8"), password.encode("utf8")])
    mac_payload += b"\x00admin" if admin else b"\x00notadmin"

    want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
    want_mac.update(nonce.encode("ascii") + b"\x00" + mac_payload)
    want_mac_digest = want_mac.hexdigest()

    channel = self.make_request(
        "POST",
        "/_synapse/admin/v1/register",
        {
            "nonce": nonce,
            "username": username,
            "displayname": displayname,
            "password": password,
            "admin": admin,
            "mac": want_mac_digest,
            "inhibit_login": True,
        },
    )
    self.assertEqual(channel.code, 200, channel.json_body)

    return channel.json_body["user_id"]
|
|
|
|
|
2021-10-04 13:45:51 +02:00
|
|
|
def register_appservice_user(
    self,
    username: str,
    appservice_token: str,
) -> Tuple[str, str]:
    """Register an appservice user as an application service.
    Requires the client-facing registration API be registered.

    Args:
        username: the user to be registered by an application service.
            Should NOT be a full username, i.e. just "localpart" as opposed to "@localpart:hostname"
        appservice_token: the access token for that application service.

    Raises: if the request to '/register' does not return 200 OK.

    Returns:
        The MXID of the new user, the device ID of the new user's first device.
    """
    request_body = {
        "username": username,
        "type": "m.login.application_service",
    }
    channel = self.make_request(
        "POST",
        "/_matrix/client/r0/register",
        request_body,
        access_token=appservice_token,
    )
    self.assertEqual(channel.code, 200, channel.json_body)
    return channel.json_body["user_id"], channel.json_body["device_id"]
|
2021-10-04 13:45:51 +02:00
|
|
|
|
2021-07-19 17:11:34 +02:00
|
|
|
def login(
    self,
    username: str,
    password: str,
    device_id: Optional[str] = None,
    additional_request_fields: Optional[Dict[str, str]] = None,
    custom_headers: Optional[Iterable[CustomHeaderType]] = None,
) -> str:
    """
    Log in a user, and get an access token. Requires the Login API be registered.

    Args:
        username: The localpart to assign to the new user.
        password: The password to assign to the new user.
        device_id: An optional device ID to assign to the new device created during
            login.
        additional_request_fields: A dictionary containing any additional /login
            request fields and their values.
        custom_headers: Custom HTTP headers and values to add to the /login request.

    Returns:
        The newly registered user's Matrix ID.
    """
    request_body = {"type": "m.login.password", "user": username, "password": password}
    if device_id:
        request_body["device_id"] = device_id
    if additional_request_fields:
        request_body.update(additional_request_fields)

    channel = self.make_request(
        "POST",
        "/_matrix/client/r0/login",
        request_body,
        custom_headers=custom_headers,
    )
    self.assertEqual(channel.code, 200, channel.result)

    return channel.json_body["access_token"]
|
2019-06-11 12:31:12 +02:00
|
|
|
|
2019-06-13 14:40:52 +02:00
|
|
|
def create_and_send_event(
    self,
    room_id: str,
    user: UserID,
    soft_failed: bool = False,
    prev_event_ids: Optional[List[str]] = None,
) -> str:
    """
    Create and send an event.

    Args:
        room_id: The room to send the event into.
        user: The user to send the event as.
        soft_failed: Whether to create a soft failed event or not
        prev_event_ids: Explicitly set the prev events,
            or if None just use the default

    Returns:
        The new event's ID.
    """
    creator = self.hs.get_event_creation_handler()
    requester = create_requester(user)

    # Build the event; the random hex body keeps each generated event distinct.
    event, unpersisted_context = self.get_success(
        creator.create_event(
            requester,
            {
                "type": EventTypes.Message,
                "room_id": room_id,
                "sender": user.to_string(),
                "content": {"body": secrets.token_hex(), "msgtype": "m.text"},
            },
            prev_event_ids=prev_event_ids,
        )
    )
    context = self.get_success(unpersisted_context.persist(event))
    if soft_failed:
        event.internal_metadata.soft_failed = True

    # Persist and send the event through the normal client-event path.
    self.get_success(
        creator.handle_new_client_event(
            requester, events_and_context=[(event, context)]
        )
    )

    return event.event_id
|
|
|
|
|
2022-04-01 18:04:16 +02:00
|
|
|
def inject_room_member(self, room: str, user: str, membership: str) -> None:
    """
    Inject a membership event into a room.

    Deprecated: use event_injection.inject_room_member directly

    Args:
        room: Room ID to inject the event into.
        user: MXID of the user to inject the membership for.
        membership: The membership type.
    """
    injection = event_injection.inject_member_event(self.hs, room, user, membership)
    self.get_success(injection)
|
2019-11-27 22:54:07 +01:00
|
|
|
|
|
|
|
|
|
|
|
class FederatingHomeserverTestCase(HomeserverTestCase):
    """
    A federating homeserver, set up to validate incoming federation requests
    """

    OTHER_SERVER_NAME = "other.example.com"
    OTHER_SERVER_SIGNATURE_KEY = signedjson.key.generate_signing_key("test")

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        super().prepare(reactor, clock, hs)

        # poke the other server's signing key into the key store, so that we don't
        # make requests for it
        verify_key = signedjson.key.get_verify_key(self.OTHER_SERVER_SIGNATURE_KEY)
        verify_key_id = "%s:%s" % (verify_key.alg, verify_key.version)

        key_json = {
            "verify_keys": {
                verify_key_id: {
                    "key": signedjson.key.encode_verify_key_base64(verify_key)
                }
            }
        }
        self.get_success(
            hs.get_datastores().main.store_server_keys_json(
                self.OTHER_SERVER_NAME,
                verify_key_id,
                from_server=self.OTHER_SERVER_NAME,
                ts_now_ms=clock.time_msec(),
                ts_expires_ms=clock.time_msec() + 10000,
                key_json_bytes=canonicaljson.encode_canonical_json(key_json),
            )
        )

    def create_resource_dict(self) -> Dict[str, Resource]:
        # Expose the federation transport API on top of the base resources.
        resources = super().create_resource_dict()
        resources["/_matrix/federation"] = TransportLayerServer(self.hs)
        return resources

    def make_signed_federation_request(
        self,
        method: str,
        path: str,
        content: Optional[JsonDict] = None,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """Make an inbound signed federation request to this server

        The request is signed as if it came from "other.example.com", which our HS
        already has the keys for.
        """
        headers = [] if custom_headers is None else list(custom_headers)
        headers.append(
            (
                "Authorization",
                _auth_header_for_request(
                    origin=self.OTHER_SERVER_NAME,
                    destination=self.hs.hostname,
                    signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
                    method=method,
                    path=path,
                    content=content,
                ),
            )
        )

        return make_request(
            self.reactor,
            self.site,
            method=method,
            path=path,
            content=content if content is not None else "",
            shorthand=False,
            await_result=await_result,
            custom_headers=headers,
            client_ip=client_ip,
        )

    def add_hashes_and_signatures_from_other_server(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion = KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
    ) -> JsonDict:
        """Adds hashes and signatures to the given event dict

        Returns:
            The modified event dict, for convenience
        """
        add_hashes_and_signatures(
            room_version,
            event_dict,
            signature_name=self.OTHER_SERVER_NAME,
            signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
        )
        return event_dict
|
|
|
|
|
2022-02-11 13:06:02 +01:00
|
|
|
|
|
|
|
def _auth_header_for_request(
    origin: str,
    destination: str,
    signing_key: signedjson.key.SigningKey,
    method: str,
    path: str,
    content: Optional[JsonDict],
) -> str:
    """Build a suitable Authorization header for an outgoing federation request"""
    # Describe the request in the canonical form that federation signing expects.
    request_description: JsonDict = {
        "method": method,
        "uri": path,
        "destination": destination,
        "origin": origin,
    }
    if content is not None:
        request_description["content"] = content

    signed = signing_key.sign(
        canonicaljson.encode_canonical_json(request_description)
    )
    signature_base64 = unpaddedbase64.encode_base64(signed.signature)

    return (
        f"X-Matrix origin={origin},"
        f"key={signing_key.alg}:{signing_key.version},"
        f"sig={signature_base64}"
    )
|
2019-11-27 22:54:07 +01:00
|
|
|
|
2019-07-12 11:16:23 +02:00
|
|
|
|
2022-07-27 19:18:41 +02:00
|
|
|
def override_config(extra_config: JsonDict) -> Callable[[TV], TV]:
    """A decorator which can be applied to test functions to give additional HS config

    For use on methods of a `HomeserverTestCase` subclass.

    For example:

        class MyTestCase(HomeserverTestCase):
            @override_config({"enable_registration": False, ...})
            def test_foo(self):
                ...

    Args:
        extra_config: Additional config settings to be merged into the default
            config dict before instantiating the test homeserver.
    """

    def decorator(test_func: TV) -> TV:
        # Stash the extra config on the function itself; it is picked up when
        # the test homeserver's config is assembled.
        test_func._extra_config = extra_config  # type: ignore[attr-defined]
        return test_func

    return decorator
|
2021-01-07 12:41:28 +01:00
|
|
|
|
|
|
|
|
|
|
|
def skip_unless(condition: bool, reason: str) -> Callable[[TV], TV]:
    """A test decorator which will skip the decorated test unless a condition is set

    For example:

        class MyTestCase(TestCase):
            @skip_unless(HAS_FOO, "Cannot test without foo")
            def test_foo(self):
                ...

    Args:
        condition: If true, the test will be skipped
        reason: the reason to give for skipping the test
    """

    def decorator(test_func: TV) -> TV:
        if condition:
            # Condition holds: leave the test untouched so it runs normally.
            return test_func
        # Setting `.skip` makes the test framework skip the test with `reason`.
        test_func.skip = reason  # type: ignore
        return test_func

    return decorator
|