Merge branch 'develop' of github.com:matrix-org/synapse into matrix-org-hotfixes

matrix-org-hotfixes-identity
Erik Johnston 2019-06-24 15:31:36 +01:00
commit ad2ba70959
57 changed files with 353 additions and 246 deletions

1
changelog.d/5498.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix intermittent exceptions on Apple hardware. Also fix bug that caused database activity times to be under-reported in log lines.

1
changelog.d/5500.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix logging error when a tampered event is detected.

1
changelog.d/5509.misc Normal file
View File

@@ -0,0 +1 @@
Fix "Unexpected entry in 'full_schemas'" log warning.

1
changelog.d/5510.misc Normal file
View File

@@ -0,0 +1 @@
Improve logging when generating config files.

1
changelog.d/5511.misc Normal file
View File

@@ -0,0 +1 @@
Refactor and clean up Config parser for maintainability.

1
changelog.d/5512.feature Normal file
View File

@@ -0,0 +1 @@
Improve help and cmdline option names for --generate-config options.

1
changelog.d/5514.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix bug with `jinja2` preventing Synapse from starting. Users who had this problem should now simply need to run `pip install matrix-synapse`.

1
changelog.d/5516.feature Normal file
View File

@@ -0,0 +1 @@
Allow configuration of the path used for ACME account keys.

1
changelog.d/5521.feature Normal file
View File

@@ -0,0 +1 @@
Allow configuration of the path used for ACME account keys.

1
changelog.d/5522.feature Normal file
View File

@@ -0,0 +1 @@
Allow configuration of the path used for ACME account keys.

1
changelog.d/5525.removal Normal file
View File

@@ -0,0 +1 @@
Remove support for cpu_affinity setting.

1
changelog.d/5531.feature Normal file
View File

@@ -0,0 +1 @@
Add support for handling pagination APIs on client reader worker.

View File

@@ -23,29 +23,6 @@ server_name: "SERVERNAME"
# #
pid_file: DATADIR/homeserver.pid pid_file: DATADIR/homeserver.pid
# CPU affinity mask. Setting this restricts the CPUs on which the
# process will be scheduled. It is represented as a bitmask, with the
# lowest order bit corresponding to the first logical CPU and the
# highest order bit corresponding to the last logical CPU. Not all CPUs
# may exist on a given system but a mask may specify more CPUs than are
# present.
#
# For example:
# 0x00000001 is processor #0,
# 0x00000003 is processors #0 and #1,
# 0xFFFFFFFF is all processors (#0 through #31).
#
# Pinning a Python process to a single CPU is desirable, because Python
# is inherently single-threaded due to the GIL, and can suffer a
# 30-40% slowdown due to cache blow-out and thread context switching
# if the scheduler happens to schedule the underlying threads across
# different cores. See
# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
#
# This setting requires the affinity package to be installed!
#
#cpu_affinity: 0xFFFFFFFF
# The path to the web client which will be served at /_matrix/client/ # The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration. # if 'webclient' is configured under the 'listeners' configuration.
# #
@@ -425,6 +402,13 @@ acme:
# #
#domain: matrix.example.com #domain: matrix.example.com
# file to use for the account key. This will be generated if it doesn't
# exist.
#
# If unspecified, we will use CONFDIR/client.key.
#
account_key_file: DATADIR/acme_account.key
# List of allowed TLS fingerprints for this server to publish along # List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that # with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS # make HTTPS requests to this server will check that the TLS

View File

@@ -19,7 +19,6 @@ import signal
import sys import sys
import traceback import traceback
import psutil
from daemonize import Daemonize from daemonize import Daemonize
from twisted.internet import defer, error, reactor from twisted.internet import defer, error, reactor
@@ -68,21 +67,13 @@ def start_worker_reactor(appname, config):
gc_thresholds=config.gc_thresholds, gc_thresholds=config.gc_thresholds,
pid_file=config.worker_pid_file, pid_file=config.worker_pid_file,
daemonize=config.worker_daemonize, daemonize=config.worker_daemonize,
cpu_affinity=config.worker_cpu_affinity,
print_pidfile=config.print_pidfile, print_pidfile=config.print_pidfile,
logger=logger, logger=logger,
) )
def start_reactor( def start_reactor(
appname, appname, soft_file_limit, gc_thresholds, pid_file, daemonize, print_pidfile, logger
soft_file_limit,
gc_thresholds,
pid_file,
daemonize,
cpu_affinity,
print_pidfile,
logger,
): ):
""" Run the reactor in the main process """ Run the reactor in the main process
@@ -95,7 +86,6 @@ def start_reactor(
gc_thresholds: gc_thresholds:
pid_file (str): name of pid file to write to if daemonize is True pid_file (str): name of pid file to write to if daemonize is True
daemonize (bool): true to run the reactor in a background process daemonize (bool): true to run the reactor in a background process
cpu_affinity (int|None): cpu affinity mask
print_pidfile (bool): whether to print the pid file, if daemonize is True print_pidfile (bool): whether to print the pid file, if daemonize is True
logger (logging.Logger): logger instance to pass to Daemonize logger (logging.Logger): logger instance to pass to Daemonize
""" """
@@ -109,20 +99,6 @@
# between the sentinel and `run` logcontexts. # between the sentinel and `run` logcontexts.
with PreserveLoggingContext(): with PreserveLoggingContext():
logger.info("Running") logger.info("Running")
if cpu_affinity is not None:
# Turn the bitmask into bits, reverse it so we go from 0 up
mask_to_bits = bin(cpu_affinity)[2:][::-1]
cpus = []
cpu_num = 0
for i in mask_to_bits:
if i == "1":
cpus.append(cpu_num)
cpu_num += 1
p = psutil.Process()
p.cpu_affinity(cpus)
change_resource_limit(soft_file_limit) change_resource_limit(soft_file_limit)
if gc_thresholds: if gc_thresholds:

View File

@@ -641,7 +641,6 @@ def run(hs):
gc_thresholds=hs.config.gc_thresholds, gc_thresholds=hs.config.gc_thresholds,
pid_file=hs.config.pid_file, pid_file=hs.config.pid_file,
daemonize=hs.config.daemonize, daemonize=hs.config.daemonize,
cpu_affinity=hs.config.cpu_affinity,
print_pidfile=hs.config.print_pidfile, print_pidfile=hs.config.print_pidfile,
logger=logger, logger=logger,
) )

View File

@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd # Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@@ -196,6 +198,12 @@ class Config(object):
@classmethod @classmethod
def load_config(cls, description, argv): def load_config(cls, description, argv):
"""Parse the commandline and config files
Doesn't support config-file-generation: used by the worker apps.
Returns: Config object.
"""
config_parser = argparse.ArgumentParser(description=description) config_parser = argparse.ArgumentParser(description=description)
config_parser.add_argument( config_parser.add_argument(
"-c", "-c",
@@ -210,7 +218,7 @@ class Config(object):
"--keys-directory", "--keys-directory",
metavar="DIRECTORY", metavar="DIRECTORY",
help="Where files such as certs and signing keys are stored when" help="Where files such as certs and signing keys are stored when"
" their location is given explicitly in the config." " their location is not given explicitly in the config."
" Defaults to the directory containing the last config file", " Defaults to the directory containing the last config file",
) )
@@ -222,8 +230,21 @@ class Config(object):
config_files = find_config_files(search_paths=config_args.config_path) config_files = find_config_files(search_paths=config_args.config_path)
obj.read_config_files( if not config_files:
config_files, keys_directory=config_args.keys_directory, generate_keys=False config_parser.error("Must supply a config file.")
if config_args.keys_directory:
config_dir_path = config_args.keys_directory
else:
config_dir_path = os.path.dirname(config_files[-1])
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
config_dict = obj.read_config_files(
config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.parse_config_dict(
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
) )
obj.invoke_all("read_arguments", config_args) obj.invoke_all("read_arguments", config_args)
@@ -232,6 +253,12 @@ class Config(object):
@classmethod @classmethod
def load_or_generate_config(cls, description, argv): def load_or_generate_config(cls, description, argv):
"""Parse the commandline and config files
Supports generation of config files, so is used for the main homeserver app.
Returns: Config object, or None if --generate-config or --generate-keys was set
"""
config_parser = argparse.ArgumentParser(add_help=False) config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument( config_parser.add_argument(
"-c", "-c",
@@ -241,37 +268,57 @@ class Config(object):
help="Specify config file. Can be given multiple times and" help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.", " may specify directories containing *.yaml files.",
) )
config_parser.add_argument(
generate_group = config_parser.add_argument_group("Config generation")
generate_group.add_argument(
"--generate-config", "--generate-config",
action="store_true", action="store_true",
help="Generate a config file for the server name", help="Generate a config file, then exit.",
) )
config_parser.add_argument( generate_group.add_argument(
"--report-stats", "--generate-missing-configs",
action="store",
help="Whether the generated config reports anonymized usage statistics",
choices=["yes", "no"],
)
config_parser.add_argument(
"--generate-keys", "--generate-keys",
action="store_true", action="store_true",
help="Generate any missing key files then exit", help="Generate any missing additional config files, then exit.",
) )
config_parser.add_argument( generate_group.add_argument(
"-H", "--server-name", help="The server name to generate a config file for."
)
generate_group.add_argument(
"--report-stats",
action="store",
help="Whether the generated config reports anonymized usage statistics.",
choices=["yes", "no"],
)
generate_group.add_argument(
"--config-directory",
"--keys-directory", "--keys-directory",
metavar="DIRECTORY", metavar="DIRECTORY",
help="Used with 'generate-*' options to specify where files such as" help=(
" signing keys should be stored, unless explicitly" "Specify where additional config files such as signing keys and log"
" specified in the config.", " config should be stored. Defaults to the same directory as the last"
) " config file."
config_parser.add_argument( ),
"-H", "--server-name", help="The server name to generate a config file for"
) )
config_args, remaining_args = config_parser.parse_known_args(argv) config_args, remaining_args = config_parser.parse_known_args(argv)
config_files = find_config_files(search_paths=config_args.config_path) config_files = find_config_files(search_paths=config_args.config_path)
generate_keys = config_args.generate_keys if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
)
if config_args.config_directory:
config_dir_path = config_args.config_directory
else:
config_dir_path = os.path.dirname(config_files[-1])
config_dir_path = os.path.abspath(config_dir_path)
data_dir_path = os.getcwd()
generate_missing_configs = config_args.generate_missing_configs
obj = cls() obj = cls()
@@ -281,19 +328,10 @@ class Config(object):
"Please specify either --report-stats=yes or --report-stats=no\n\n" "Please specify either --report-stats=yes or --report-stats=no\n\n"
+ MISSING_REPORT_STATS_SPIEL + MISSING_REPORT_STATS_SPIEL
) )
if not config_files:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
)
(config_path,) = config_files (config_path,) = config_files
if not cls.path_exists(config_path): if not cls.path_exists(config_path):
if config_args.keys_directory: print("Generating config file %s" % (config_path,))
config_dir_path = config_args.keys_directory
else:
config_dir_path = os.path.dirname(config_path)
config_dir_path = os.path.abspath(config_dir_path)
server_name = config_args.server_name server_name = config_args.server_name
if not server_name: if not server_name:
@@ -304,7 +342,7 @@ class Config(object):
config_str = obj.generate_config( config_str = obj.generate_config(
config_dir_path=config_dir_path, config_dir_path=config_dir_path,
data_dir_path=os.getcwd(), data_dir_path=data_dir_path,
server_name=server_name, server_name=server_name,
report_stats=(config_args.report_stats == "yes"), report_stats=(config_args.report_stats == "yes"),
generate_secrets=True, generate_secrets=True,
@@ -331,12 +369,12 @@ class Config(object):
else: else:
print( print(
( (
"Config file %r already exists. Generating any missing key" "Config file %r already exists. Generating any missing config"
" files." " files."
) )
% (config_path,) % (config_path,)
) )
generate_keys = True generate_missing_configs = True
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
parents=[config_parser], parents=[config_parser],
@@ -347,48 +385,57 @@ class Config(object):
obj.invoke_all("add_arguments", parser) obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args) args = parser.parse_args(remaining_args)
if not config_files: config_dict = obj.read_config_files(
config_parser.error( config_files, config_dir_path=config_dir_path, data_dir_path=data_dir_path
"Must supply a config file.\nA config file can be automatically"
' generated using "--generate-config -H SERVER_NAME'
' -c CONFIG-FILE"'
) )
obj.read_config_files( if generate_missing_configs:
config_files, obj.generate_missing_files(config_dict)
keys_directory=config_args.keys_directory,
generate_keys=generate_keys,
)
if generate_keys:
return None return None
obj.parse_config_dict(
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", args) obj.invoke_all("read_arguments", args)
return obj return obj
def read_config_files(self, config_files, keys_directory=None, generate_keys=False): def read_config_files(self, config_files, config_dir_path, data_dir_path):
if not keys_directory: """Read the config files into a dict
keys_directory = os.path.dirname(config_files[-1])
self.config_dir_path = os.path.abspath(keys_directory) Args:
config_files (iterable[str]): A list of the config files to read
config_dir_path (str): The path where the config files are kept. Used to
create filenames for things like the log config and the signing key.
data_dir_path (str): The path where the data files are kept. Used to create
filenames for things like the database and media store.
Returns: dict
"""
# first we read the config files into a dict
specified_config = {} specified_config = {}
for config_file in config_files: for config_file in config_files:
yaml_config = self.read_config_file(config_file) yaml_config = self.read_config_file(config_file)
specified_config.update(yaml_config) specified_config.update(yaml_config)
# not all of the options have sensible defaults in code, so we now need to
# generate a default config file suitable for the specified server name...
if "server_name" not in specified_config: if "server_name" not in specified_config:
raise ConfigError(MISSING_SERVER_NAME) raise ConfigError(MISSING_SERVER_NAME)
server_name = specified_config["server_name"] server_name = specified_config["server_name"]
config_string = self.generate_config( config_string = self.generate_config(
config_dir_path=self.config_dir_path, config_dir_path=config_dir_path,
data_dir_path=os.getcwd(), data_dir_path=data_dir_path,
server_name=server_name, server_name=server_name,
generate_secrets=False, generate_secrets=False,
) )
# ... and read it into a base config dict ...
config = yaml.safe_load(config_string) config = yaml.safe_load(config_string)
# ... and finally, overlay it with the actual configuration.
config.pop("log_config") config.pop("log_config")
config.update(specified_config) config.update(specified_config)
@@ -398,15 +445,29 @@ class Config(object):
+ "\n" + "\n"
+ MISSING_REPORT_STATS_SPIEL + MISSING_REPORT_STATS_SPIEL
) )
return config
if generate_keys: def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
self.invoke_all("generate_files", config) """Read the information from the config dict into this Config object.
return
self.parse_config_dict(config) Args:
config_dict (dict): Configuration data, as read from the yaml
def parse_config_dict(self, config_dict): config_dir_path (str): The path where the config files are kept. Used to
self.invoke_all("read_config", config_dict) create filenames for things like the log config and the signing key.
data_dir_path (str): The path where the data files are kept. Used to create
filenames for things like the database and media store.
"""
self.invoke_all(
"read_config",
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
)
def generate_missing_files(self, config_dict):
self.invoke_all("generate_files", config_dict)
def find_config_files(search_paths): def find_config_files(search_paths):

View File

@@ -18,7 +18,7 @@ from ._base import Config
class ApiConfig(Config): class ApiConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.room_invite_state_types = config.get( self.room_invite_state_types = config.get(
"room_invite_state_types", "room_invite_state_types",
[ [

View File

@@ -29,7 +29,7 @@ logger = logging.getLogger(__name__)
class AppServiceConfig(Config): class AppServiceConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.app_service_config_files = config.get("app_service_config_files", []) self.app_service_config_files = config.get("app_service_config_files", [])
self.notify_appservices = config.get("notify_appservices", True) self.notify_appservices = config.get("notify_appservices", True)
self.track_appservice_user_ips = config.get("track_appservice_user_ips", False) self.track_appservice_user_ips = config.get("track_appservice_user_ips", False)

View File

@@ -16,7 +16,7 @@ from ._base import Config
class CaptchaConfig(Config): class CaptchaConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.recaptcha_private_key = config.get("recaptcha_private_key") self.recaptcha_private_key = config.get("recaptcha_private_key")
self.recaptcha_public_key = config.get("recaptcha_public_key") self.recaptcha_public_key = config.get("recaptcha_public_key")
self.enable_registration_captcha = config.get( self.enable_registration_captcha = config.get(

View File

@@ -22,7 +22,7 @@ class CasConfig(Config):
cas_server_url: URL of CAS server cas_server_url: URL of CAS server
""" """
def read_config(self, config): def read_config(self, config, **kwargs):
cas_config = config.get("cas_config", None) cas_config = config.get("cas_config", None)
if cas_config: if cas_config:
self.cas_enabled = cas_config.get("enabled", True) self.cas_enabled = cas_config.get("enabled", True)

View File

@@ -84,7 +84,7 @@ class ConsentConfig(Config):
self.user_consent_at_registration = False self.user_consent_at_registration = False
self.user_consent_policy_name = "Privacy Policy" self.user_consent_policy_name = "Privacy Policy"
def read_config(self, config): def read_config(self, config, **kwargs):
consent_config = config.get("user_consent") consent_config = config.get("user_consent")
if consent_config is None: if consent_config is None:
return return

View File

@@ -18,7 +18,7 @@ from ._base import Config
class DatabaseConfig(Config): class DatabaseConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K")) self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))
self.database_config = config.get("database") self.database_config = config.get("database")

View File

@@ -27,7 +27,7 @@ from ._base import Config, ConfigError
class EmailConfig(Config): class EmailConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
# TODO: We should separate better the email configuration from the notification # TODO: We should separate better the email configuration from the notification
# and account validity config. # and account validity config.

View File

@@ -17,7 +17,7 @@ from ._base import Config
class GroupsConfig(Config): class GroupsConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.enable_group_creation = config.get("enable_group_creation", False) self.enable_group_creation = config.get("enable_group_creation", False)
self.group_creation_prefix = config.get("group_creation_prefix", "") self.group_creation_prefix = config.get("group_creation_prefix", "")

View File

@@ -23,7 +23,7 @@ MISSING_JWT = """Missing jwt library. This is required for jwt login.
class JWTConfig(Config): class JWTConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
jwt_config = config.get("jwt_config", None) jwt_config = config.get("jwt_config", None)
if jwt_config: if jwt_config:
self.jwt_enabled = jwt_config.get("enabled", False) self.jwt_enabled = jwt_config.get("enabled", False)

View File

@@ -65,7 +65,7 @@ class TrustedKeyServer(object):
class KeyConfig(Config): class KeyConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
# the signing key can be specified inline or in a separate file # the signing key can be specified inline or in a separate file
if "signing_key" in config: if "signing_key" in config:
self.signing_key = read_signing_keys([config["signing_key"]]) self.signing_key = read_signing_keys([config["signing_key"]])
@@ -241,6 +241,7 @@ class KeyConfig(Config):
signing_key_path = config["signing_key_path"] signing_key_path = config["signing_key_path"]
if not self.path_exists(signing_key_path): if not self.path_exists(signing_key_path):
print("Generating signing key file %s" % (signing_key_path,))
with open(signing_key_path, "w") as signing_key_file: with open(signing_key_path, "w") as signing_key_file:
key_id = "a_" + random_string(4) key_id = "a_" + random_string(4)
write_signing_keys(signing_key_file, (generate_signing_key(key_id),)) write_signing_keys(signing_key_file, (generate_signing_key(key_id),))

View File

@@ -74,7 +74,7 @@ root:
class LoggingConfig(Config): class LoggingConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.verbosity = config.get("verbose", 0) self.verbosity = config.get("verbose", 0)
self.no_redirect_stdio = config.get("no_redirect_stdio", False) self.no_redirect_stdio = config.get("no_redirect_stdio", False)
self.log_config = self.abspath(config.get("log_config")) self.log_config = self.abspath(config.get("log_config"))
@@ -137,6 +137,10 @@ class LoggingConfig(Config):
log_config = config.get("log_config") log_config = config.get("log_config")
if log_config and not os.path.exists(log_config): if log_config and not os.path.exists(log_config):
log_file = self.abspath("homeserver.log") log_file = self.abspath("homeserver.log")
print(
"Generating log config file %s which will log to %s"
% (log_config, log_file)
)
with open(log_config, "w") as log_config_file: with open(log_config, "w") as log_config_file:
log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file)) log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))

View File

@@ -21,7 +21,7 @@ MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentr
class MetricsConfig(Config): class MetricsConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.enable_metrics = config.get("enable_metrics", False) self.enable_metrics = config.get("enable_metrics", False)
self.report_stats = config.get("report_stats", None) self.report_stats = config.get("report_stats", None)
self.metrics_port = config.get("metrics_port") self.metrics_port = config.get("metrics_port")

View File

@@ -20,7 +20,7 @@ class PasswordConfig(Config):
"""Password login configuration """Password login configuration
""" """
def read_config(self, config): def read_config(self, config, **kwargs):
password_config = config.get("password_config", {}) password_config = config.get("password_config", {})
if password_config is None: if password_config is None:
password_config = {} password_config = {}

View File

@@ -21,7 +21,7 @@ LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"
class PasswordAuthProviderConfig(Config): class PasswordAuthProviderConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.password_providers = [] self.password_providers = []
providers = [] providers = []

View File

@@ -18,7 +18,7 @@ from ._base import Config
class PushConfig(Config): class PushConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
push_config = config.get("push", {}) push_config = config.get("push", {})
self.push_include_content = push_config.get("include_content", True) self.push_include_content = push_config.get("include_content", True)

View File

@@ -36,7 +36,7 @@ class FederationRateLimitConfig(object):
class RatelimitConfig(Config): class RatelimitConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
# Load the new-style messages config if it exists. Otherwise fall back # Load the new-style messages config if it exists. Otherwise fall back
# to the old method. # to the old method.

View File

@@ -46,7 +46,7 @@ class AccountValidityConfig(Config):
class RegistrationConfig(Config): class RegistrationConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.enable_registration = bool( self.enable_registration = bool(
strtobool(str(config.get("enable_registration", False))) strtobool(str(config.get("enable_registration", False)))
) )

View File

@@ -86,7 +86,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
class ContentRepositoryConfig(Config): class ContentRepositoryConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M")) self.max_upload_size = self.parse_size(config.get("max_upload_size", "10M"))
self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M")) self.max_image_pixels = self.parse_size(config.get("max_image_pixels", "32M"))
self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M")) self.max_spider_size = self.parse_size(config.get("max_spider_size", "10M"))

View File

@@ -19,7 +19,7 @@ from ._base import Config, ConfigError
class RoomDirectoryConfig(Config): class RoomDirectoryConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.enable_room_list_search = config.get("enable_room_list_search", True) self.enable_room_list_search = config.get("enable_room_list_search", True)
alias_creation_rules = config.get("alias_creation_rules") alias_creation_rules = config.get("alias_creation_rules")

View File

@@ -17,7 +17,7 @@ from ._base import Config, ConfigError
class SAML2Config(Config): class SAML2Config(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.saml2_enabled = False self.saml2_enabled = False
saml2_config = config.get("saml2_config") saml2_config = config.get("saml2_config")

View File

@@ -40,7 +40,7 @@ DEFAULT_ROOM_VERSION = "4"
class ServerConfig(Config): class ServerConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.server_name = config["server_name"] self.server_name = config["server_name"]
self.server_context = config.get("server_context", None) self.server_context = config.get("server_context", None)
@@ -57,7 +57,6 @@ class ServerConfig(Config):
self.user_agent_suffix = config.get("user_agent_suffix") self.user_agent_suffix = config.get("user_agent_suffix")
self.use_frozen_dicts = config.get("use_frozen_dicts", False) self.use_frozen_dicts = config.get("use_frozen_dicts", False)
self.public_baseurl = config.get("public_baseurl") self.public_baseurl = config.get("public_baseurl")
self.cpu_affinity = config.get("cpu_affinity")
# Whether to send federation traffic out in this process. This only # Whether to send federation traffic out in this process. This only
# applies to some federation traffic, and so shouldn't be used to # applies to some federation traffic, and so shouldn't be used to
@@ -336,29 +335,6 @@ class ServerConfig(Config):
# #
pid_file: %(pid_file)s pid_file: %(pid_file)s
# CPU affinity mask. Setting this restricts the CPUs on which the
# process will be scheduled. It is represented as a bitmask, with the
# lowest order bit corresponding to the first logical CPU and the
# highest order bit corresponding to the last logical CPU. Not all CPUs
# may exist on a given system but a mask may specify more CPUs than are
# present.
#
# For example:
# 0x00000001 is processor #0,
# 0x00000003 is processors #0 and #1,
# 0xFFFFFFFF is all processors (#0 through #31).
#
# Pinning a Python process to a single CPU is desirable, because Python
# is inherently single-threaded due to the GIL, and can suffer a
# 30-40%% slowdown due to cache blow-out and thread context switching
# if the scheduler happens to schedule the underlying threads across
# different cores. See
# https://www.mirantis.com/blog/improve-performance-python-programs-restricting-single-cpu/.
#
# This setting requires the affinity package to be installed!
#
#cpu_affinity: 0xFFFFFFFF
# The path to the web client which will be served at /_matrix/client/ # The path to the web client which will be served at /_matrix/client/
# if 'webclient' is configured under the 'listeners' configuration. # if 'webclient' is configured under the 'listeners' configuration.
# #

View File

@@ -66,7 +66,7 @@ class ServerNoticesConfig(Config):
self.server_notices_mxid_avatar_url = None self.server_notices_mxid_avatar_url = None
self.server_notices_room_name = None self.server_notices_room_name = None
def read_config(self, config): def read_config(self, config, **kwargs):
c = config.get("server_notices") c = config.get("server_notices")
if c is None: if c is None:
return return

View File

@@ -19,7 +19,7 @@ from ._base import Config
class SpamCheckerConfig(Config): class SpamCheckerConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.spam_checker = None self.spam_checker = None
provider = config.get("spam_checker", None) provider = config.get("spam_checker", None)

View File

@@ -25,7 +25,7 @@ class StatsConfig(Config):
Configuration for the behaviour of synapse's stats engine Configuration for the behaviour of synapse's stats engine
""" """
def read_config(self, config): def read_config(self, config, **kwargs):
self.stats_enabled = True self.stats_enabled = True
self.stats_bucket_size = 86400 self.stats_bucket_size = 86400
self.stats_retention = sys.maxsize self.stats_retention = sys.maxsize

View File

@@ -19,7 +19,7 @@ from ._base import Config
class ThirdPartyRulesConfig(Config): class ThirdPartyRulesConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.third_party_event_rules = None self.third_party_event_rules = None
provider = config.get("third_party_event_rules", None) provider = config.get("third_party_event_rules", None)

View File

@ -33,7 +33,7 @@ logger = logging.getLogger(__name__)
class TlsConfig(Config): class TlsConfig(Config):
def read_config(self, config): def read_config(self, config, config_dir_path, **kwargs):
acme_config = config.get("acme", None) acme_config = config.get("acme", None)
if acme_config is None: if acme_config is None:
@ -50,6 +50,10 @@ class TlsConfig(Config):
self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30) self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
self.acme_domain = acme_config.get("domain", config.get("server_name")) self.acme_domain = acme_config.get("domain", config.get("server_name"))
self.acme_account_key_file = self.abspath(
acme_config.get("account_key_file", config_dir_path + "/client.key")
)
self.tls_certificate_file = self.abspath(config.get("tls_certificate_path")) self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
self.tls_private_key_file = self.abspath(config.get("tls_private_key_path")) self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))
@ -213,11 +217,12 @@ class TlsConfig(Config):
if sha256_fingerprint not in sha256_fingerprints: if sha256_fingerprint not in sha256_fingerprints:
self.tls_fingerprints.append({"sha256": sha256_fingerprint}) self.tls_fingerprints.append({"sha256": sha256_fingerprint})
def default_config(self, config_dir_path, server_name, **kwargs): def default_config(self, config_dir_path, server_name, data_dir_path, **kwargs):
base_key_name = os.path.join(config_dir_path, server_name) base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt" tls_certificate_path = base_key_name + ".tls.crt"
tls_private_key_path = base_key_name + ".tls.key" tls_private_key_path = base_key_name + ".tls.key"
default_acme_account_file = os.path.join(data_dir_path, "acme_account.key")
# this is to avoid the max line length. Sorrynotsorry # this is to avoid the max line length. Sorrynotsorry
proxypassline = ( proxypassline = (
@ -343,6 +348,13 @@ class TlsConfig(Config):
# #
#domain: matrix.example.com #domain: matrix.example.com
# file to use for the account key. This will be generated if it doesn't
# exist.
#
# If unspecified, we will use CONFDIR/client.key.
#
account_key_file: %(default_acme_account_file)s
# List of allowed TLS fingerprints for this server to publish along # List of allowed TLS fingerprints for this server to publish along
# with the signing keys for this server. Other matrix servers that # with the signing keys for this server. Other matrix servers that
# make HTTPS requests to this server will check that the TLS # make HTTPS requests to this server will check that the TLS

View File

@ -21,7 +21,7 @@ class UserDirectoryConfig(Config):
Configuration for the behaviour of the /user_directory API Configuration for the behaviour of the /user_directory API
""" """
def read_config(self, config): def read_config(self, config, **kwargs):
self.user_directory_search_enabled = True self.user_directory_search_enabled = True
self.user_directory_search_all_users = False self.user_directory_search_all_users = False
user_directory_config = config.get("user_directory", None) user_directory_config = config.get("user_directory", None)

View File

@ -16,7 +16,7 @@ from ._base import Config
class VoipConfig(Config): class VoipConfig(Config):
def read_config(self, config): def read_config(self, config, **kwargs):
self.turn_uris = config.get("turn_uris", []) self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config.get("turn_shared_secret") self.turn_shared_secret = config.get("turn_shared_secret")
self.turn_username = config.get("turn_username") self.turn_username = config.get("turn_username")

View File

@ -21,7 +21,7 @@ class WorkerConfig(Config):
They have their own pid_file and listener configuration. They use the They have their own pid_file and listener configuration. They use the
replication_url to talk to the main synapse process.""" replication_url to talk to the main synapse process."""
def read_config(self, config): def read_config(self, config, **kwargs):
self.worker_app = config.get("worker_app") self.worker_app = config.get("worker_app")
# Canonicalise worker_app so that master always has None # Canonicalise worker_app so that master always has None
@ -46,7 +46,6 @@ class WorkerConfig(Config):
self.worker_name = config.get("worker_name", self.worker_app) self.worker_name = config.get("worker_name", self.worker_app)
self.worker_main_http_uri = config.get("worker_main_http_uri", None) self.worker_main_http_uri = config.get("worker_main_http_uri", None)
self.worker_cpu_affinity = config.get("worker_cpu_affinity")
# This option is really only here to support `--manhole` command line # This option is really only here to support `--manhole` command line
# argument. # argument.

View File

@ -163,7 +163,6 @@ class FederationBase(object):
logger.warning( logger.warning(
"Event %s content has been tampered, redacting", "Event %s content has been tampered, redacting",
pdu.event_id, pdu.event_id,
pdu.get_pdu_json(),
) )
return redacted_event return redacted_event

View File

@ -15,14 +15,9 @@
import logging import logging
import attr
from zope.interface import implementer
import twisted import twisted
import twisted.internet.error import twisted.internet.error
from twisted.internet import defer from twisted.internet import defer
from twisted.python.filepath import FilePath
from twisted.python.url import URL
from twisted.web import server, static from twisted.web import server, static
from twisted.web.resource import Resource from twisted.web.resource import Resource
@ -30,27 +25,6 @@ from synapse.app import check_bind_error
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
try:
from txacme.interfaces import ICertificateStore
@attr.s
@implementer(ICertificateStore)
class ErsatzStore(object):
"""
A store that only stores in memory.
"""
certs = attr.ib(default=attr.Factory(dict))
def store(self, server_name, pem_objects):
self.certs[server_name] = [o.as_bytes() for o in pem_objects]
return defer.succeed(None)
except ImportError:
# txacme is missing
pass
class AcmeHandler(object): class AcmeHandler(object):
def __init__(self, hs): def __init__(self, hs):
@ -60,6 +34,7 @@ class AcmeHandler(object):
@defer.inlineCallbacks @defer.inlineCallbacks
def start_listening(self): def start_listening(self):
from synapse.handlers import acme_issuing_service
# Configure logging for txacme, if you need to debug # Configure logging for txacme, if you need to debug
# from eliot import add_destinations # from eliot import add_destinations
@ -67,37 +42,18 @@ class AcmeHandler(object):
# #
# add_destinations(TwistedDestination()) # add_destinations(TwistedDestination())
from txacme.challenges import HTTP01Responder
from txacme.service import AcmeIssuingService
from txacme.endpoint import load_or_create_client_key
from txacme.client import Client
from josepy.jwa import RS256
self._store = ErsatzStore()
responder = HTTP01Responder()
self._issuer = AcmeIssuingService(
cert_store=self._store,
client_creator=(
lambda: Client.from_url(
reactor=self.reactor,
url=URL.from_text(self.hs.config.acme_url),
key=load_or_create_client_key(
FilePath(self.hs.config.config_dir_path)
),
alg=RS256,
)
),
clock=self.reactor,
responders=[responder],
)
well_known = Resource() well_known = Resource()
well_known.putChild(b"acme-challenge", responder.resource)
self._issuer = acme_issuing_service.create_issuing_service(
self.reactor,
acme_url=self.hs.config.acme_url,
account_key_file=self.hs.config.acme_account_key_file,
well_known_resource=well_known,
)
responder_resource = Resource() responder_resource = Resource()
responder_resource.putChild(b".well-known", well_known) responder_resource.putChild(b".well-known", well_known)
responder_resource.putChild(b"check", static.Data(b"OK", b"text/plain")) responder_resource.putChild(b"check", static.Data(b"OK", b"text/plain"))
srv = server.Site(responder_resource) srv = server.Site(responder_resource)
bind_addresses = self.hs.config.acme_bind_addresses bind_addresses = self.hs.config.acme_bind_addresses
@ -128,7 +84,7 @@ class AcmeHandler(object):
logger.exception("Fail!") logger.exception("Fail!")
raise raise
logger.warning("Reprovisioned %s, saving.", self._acme_domain) logger.warning("Reprovisioned %s, saving.", self._acme_domain)
cert_chain = self._store.certs[self._acme_domain] cert_chain = self._issuer.cert_store.certs[self._acme_domain]
try: try:
with open(self.hs.config.tls_private_key_file, "wb") as private_key_file: with open(self.hs.config.tls_private_key_file, "wb") as private_key_file:

View File

@ -0,0 +1,117 @@
# -*- coding: utf-8 -*-
# Copyright 2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility function to create an ACME issuing service.
This file contains the unconditional imports on the acme and cryptography bits that we
only need (and may only have available) if we are doing ACME, so is designed to be
imported conditionally.
"""
import logging
import attr
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from josepy import JWKRSA
from josepy.jwa import RS256
from txacme.challenges import HTTP01Responder
from txacme.client import Client
from txacme.interfaces import ICertificateStore
from txacme.service import AcmeIssuingService
from txacme.util import generate_private_key
from zope.interface import implementer
from twisted.internet import defer
from twisted.python.filepath import FilePath
from twisted.python.url import URL
logger = logging.getLogger(__name__)
def create_issuing_service(reactor, acme_url, account_key_file, well_known_resource):
    """Build an AcmeIssuingService wired up to a .well-known web resource.

    A HTTP01 challenge responder is created and mounted as the
    "acme-challenge" child of ``well_known_resource``, so the ACME server
    can fetch challenge responses over plain HTTP.

    Args:
        reactor: twisted reactor driving the service
        acme_url (str): URL of the ACME directory to request certificates from
        account_key_file (str): path of the account key file (created on
            demand if it does not exist)
        well_known_resource (twisted.web.IResource): web resource for
            .well-known; gains an "acme-challenge" child resource.

    Returns:
        AcmeIssuingService
    """
    challenge_responder = HTTP01Responder()
    well_known_resource.putChild(b"acme-challenge", challenge_responder.resource)

    def _make_client():
        # Invoked by AcmeIssuingService whenever it needs an ACME client;
        # the account key is loaded (or generated) at that point.
        return Client.from_url(
            reactor=reactor,
            url=URL.from_text(acme_url),
            key=load_or_create_client_key(account_key_file),
            alg=RS256,
        )

    return AcmeIssuingService(
        cert_store=ErsatzStore(),
        client_creator=_make_client,
        clock=reactor,
        responders=[challenge_responder],
    )
@attr.s
@implementer(ICertificateStore)
class ErsatzStore(object):
    """An in-memory-only certificate store.

    Implements just enough of ICertificateStore for AcmeIssuingService;
    nothing is persisted, so certificates are lost on restart.
    """

    # maps server_name -> list of PEM-encoded certificate byte strings
    certs = attr.ib(default=attr.Factory(dict))

    def store(self, server_name, pem_objects):
        """Record the PEM objects for ``server_name``; succeeds immediately."""
        as_bytes = [obj.as_bytes() for obj in pem_objects]
        self.certs[server_name] = as_bytes
        return defer.succeed(None)
def load_or_create_client_key(key_file):
    """Return the ACME account key, generating and saving it if absent.

    This is based on txacme.endpoint.load_or_create_client_key, but does
    not hardcode the 'client.key' filename.

    Args:
        key_file (str): path of the PEM file holding the account key

    Returns:
        josepy.JWKRSA: the account key
    """
    path = FilePath(key_file)

    if not path.exists():
        # No key yet: create a fresh RSA key and persist it as PEM.
        logger.info("Saving new ACME account key to '%s'", path)
        new_key = generate_private_key("rsa")
        pem = new_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
        path.setContent(pem)
        return JWKRSA(key=new_key)

    logger.info("Loading ACME account key from '%s'", path)
    loaded = serialization.load_pem_private_key(
        path.getContent(), password=None, backend=default_backend()
    )
    return JWKRSA(key=loaded)

View File

@ -180,9 +180,7 @@ class PaginationHandler(object):
room_token = pagin_config.from_token.room_key room_token = pagin_config.from_token.room_key
else: else:
pagin_config.from_token = ( pagin_config.from_token = (
yield self.hs.get_event_sources().get_current_token_for_room( yield self.hs.get_event_sources().get_current_token_for_pagination()
room_id=room_id
)
) )
room_token = pagin_config.from_token.room_key room_token = pagin_config.from_token.room_key

View File

@ -72,10 +72,11 @@ REQUIREMENTS = [
# Twisted 18.7.0 requires attrs>=17.4.0 # Twisted 18.7.0 requires attrs>=17.4.0
"attrs>=17.4.0", "attrs>=17.4.0",
"netaddr>=0.7.18", "netaddr>=0.7.18",
"Jinja2>=2.9",
"bleach>=1.4.3",
] ]
CONDITIONAL_REQUIREMENTS = { CONDITIONAL_REQUIREMENTS = {
"email": ["Jinja2>=2.9", "bleach>=1.4.3"],
"matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"], "matrix-synapse-ldap3": ["matrix-synapse-ldap3>=0.1"],
# we use execute_batch, which arrived in psycopg 2.7. # we use execute_batch, which arrived in psycopg 2.7.
"postgres": ["psycopg2>=2.7"], "postgres": ["psycopg2>=2.7"],

View File

@ -38,6 +38,14 @@ from synapse.util.caches.descriptors import Cache
from synapse.util.logcontext import LoggingContext, PreserveLoggingContext from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
from synapse.util.stringutils import exception_to_unicode from synapse.util.stringutils import exception_to_unicode
# import a function which will return a monotonic time, in seconds
try:
# on python 3, use time.monotonic, since time.clock can go backwards
from time import monotonic as monotonic_time
except ImportError:
# ... but python 2 doesn't have it
from time import clock as monotonic_time
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
try: try:
@ -350,14 +358,14 @@ class SQLBaseStore(object):
) )
def start_profiling(self): def start_profiling(self):
self._previous_loop_ts = self._clock.time_msec() self._previous_loop_ts = monotonic_time()
def loop(): def loop():
curr = self._current_txn_total_time curr = self._current_txn_total_time
prev = self._previous_txn_total_time prev = self._previous_txn_total_time
self._previous_txn_total_time = curr self._previous_txn_total_time = curr
time_now = self._clock.time_msec() time_now = monotonic_time()
time_then = self._previous_loop_ts time_then = self._previous_loop_ts
self._previous_loop_ts = time_now self._previous_loop_ts = time_now
@ -383,7 +391,7 @@ class SQLBaseStore(object):
def _new_transaction( def _new_transaction(
self, conn, desc, after_callbacks, exception_callbacks, func, *args, **kwargs self, conn, desc, after_callbacks, exception_callbacks, func, *args, **kwargs
): ):
start = time.time() start = monotonic_time()
txn_id = self._TXN_ID txn_id = self._TXN_ID
# We don't really need these to be unique, so lets stop it from # We don't really need these to be unique, so lets stop it from
@ -449,7 +457,7 @@ class SQLBaseStore(object):
logger.debug("[TXN FAIL] {%s} %s", name, e) logger.debug("[TXN FAIL] {%s} %s", name, e)
raise raise
finally: finally:
end = time.time() end = monotonic_time()
duration = end - start duration = end - start
LoggingContext.current_context().add_database_transaction(duration) LoggingContext.current_context().add_database_transaction(duration)
@ -523,11 +531,11 @@ class SQLBaseStore(object):
) )
parent_context = None parent_context = None
start_time = time.time() start_time = monotonic_time()
def inner_func(conn, *args, **kwargs): def inner_func(conn, *args, **kwargs):
with LoggingContext("runWithConnection", parent_context) as context: with LoggingContext("runWithConnection", parent_context) as context:
sched_duration_sec = time.time() - start_time sched_duration_sec = monotonic_time() - start_time
sql_scheduling_timer.observe(sched_duration_sec) sql_scheduling_timer.observe(sched_duration_sec)
context.add_database_scheduled(sched_duration_sec) context.add_database_scheduled(sched_duration_sec)

View File

@ -133,7 +133,7 @@ def _setup_new_database(cur, database_engine):
if ver <= SCHEMA_VERSION: if ver <= SCHEMA_VERSION:
valid_dirs.append((ver, abs_path)) valid_dirs.append((ver, abs_path))
else: else:
logger.warn("Unexpected entry in 'full_schemas': %s", filename) logger.debug("Ignoring entry '%s' in 'full_schemas'", filename)
if not valid_dirs: if not valid_dirs:
raise PrepareDatabaseException( raise PrepareDatabaseException(

View File

@ -59,21 +59,25 @@ class EventSources(object):
defer.returnValue(token) defer.returnValue(token)
@defer.inlineCallbacks @defer.inlineCallbacks
def get_current_token_for_room(self, room_id): def get_current_token_for_pagination(self):
push_rules_key, _ = self.store.get_push_rules_stream_token() """Get the current token for a given room to be used to paginate
to_device_key = self.store.get_to_device_stream_token() events.
device_list_key = self.store.get_device_stream_token()
groups_key = self.store.get_group_stream_token()
The returned token does not have the current values for fields other
than `room`, since they are not used during pagination.
Returns:
Deferred[StreamToken]
"""
token = StreamToken( token = StreamToken(
room_key=(yield self.sources["room"].get_current_key_for_room(room_id)), room_key=(yield self.sources["room"].get_current_key()),
presence_key=(yield self.sources["presence"].get_current_key()), presence_key=0,
typing_key=(yield self.sources["typing"].get_current_key()), typing_key=0,
receipt_key=(yield self.sources["receipt"].get_current_key()), receipt_key=0,
account_data_key=(yield self.sources["account_data"].get_current_key()), account_data_key=0,
push_rules_key=push_rules_key, push_rules_key=0,
to_device_key=to_device_key, to_device_key=0,
device_list_key=device_list_key, device_list_key=0,
groups_key=groups_key, groups_key=0,
) )
defer.returnValue(token) defer.returnValue(token)

View File

@ -65,7 +65,7 @@ s4niecZKPBizL6aucT59CsunNmmb5Glq8rlAcU+1ZTZZzGYqVYhF6axB9Qg=
} }
t = TestConfig() t = TestConfig()
t.read_config(config) t.read_config(config, config_dir_path="", data_dir_path="")
t.read_certificate_from_disk(require_cert_and_key=False) t.read_certificate_from_disk(require_cert_and_key=False)
warnings = self.flushWarnings() warnings = self.flushWarnings()

View File

@ -78,7 +78,7 @@ class MatrixFederationAgentTests(TestCase):
# config_dict["trusted_key_servers"] = [] # config_dict["trusted_key_servers"] = []
self._config = config = HomeServerConfig() self._config = config = HomeServerConfig()
config.parse_config_dict(config_dict) config.parse_config_dict(config_dict, "", "")
self.agent = MatrixFederationAgent( self.agent = MatrixFederationAgent(
reactor=self.reactor, reactor=self.reactor,

View File

@ -342,7 +342,7 @@ class HomeserverTestCase(TestCase):
# Parse the config from a config dict into a HomeServerConfig # Parse the config from a config dict into a HomeServerConfig
config_obj = HomeServerConfig() config_obj = HomeServerConfig()
config_obj.parse_config_dict(config) config_obj.parse_config_dict(config, "", "")
kwargs["config"] = config_obj kwargs["config"] = config_obj
hs = setup_test_homeserver(self.addCleanup, *args, **kwargs) hs = setup_test_homeserver(self.addCleanup, *args, **kwargs)

View File

@ -182,7 +182,7 @@ def default_config(name, parse=False):
if parse: if parse:
config = HomeServerConfig() config = HomeServerConfig()
config.parse_config_dict(config_dict) config.parse_config_dict(config_dict, "", "")
return config return config
return config_dict return config_dict