# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2017-2018 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import errno
import os
from textwrap import dedent

from six import integer_types

import yaml


class ConfigError(Exception):
    pass


# We split these messages out to allow packages to override with package
# specific instructions.
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
Please opt in or out of reporting anonymized homeserver usage statistics, by
setting the `report_stats` key in your config file to either True or False.
"""

MISSING_REPORT_STATS_SPIEL = """\
We would really appreciate it if you could help our project out by reporting
anonymized usage statistics from your homeserver. Only very basic aggregate
data (e.g. number of users) will be reported, but it helps us to track the
growth of the Matrix community, and helps us to make Matrix a success, as well
as to convince other networks that they should peer with us.

Thank you.
"""

MISSING_SERVER_NAME = """\
Missing mandatory `server_name` config option.
"""


class Config(object):
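    """Base class for the homeserver's configuration handling.

    Configuration areas are handled by subclasses of this class; it provides
    parsing helpers (sizes, durations, files and directories) plus the
    `invoke_all`/`invoke_all_static` machinery used to call hooks such as
    `read_config` and `generate_config_section` on every class in the MRO.
    """
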
    @staticmethod
    def parse_size(value):
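        """Interpret a size: integers are returned unchanged, while strings
        may carry a "K" or "M" suffix meaning 1024 or 1024 * 1024 respectively.
        """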
        if isinstance(value, integer_types):
            return value
        sizes = {"K": 1024, "M": 1024 * 1024}
        size = 1
        suffix = value[-1]
        if suffix in sizes:
            value = value[:-1]
            size = sizes[suffix]
        return int(value) * size

    @staticmethod
    def parse_duration(value):
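        """Interpret a duration: integers are returned unchanged, while
        strings may carry an "s", "m", "h", "d", "w" or "y" suffix and are
        converted to a number of milliseconds.
        """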
        if isinstance(value, integer_types):
            return value
        second = 1000
        minute = 60 * second
        hour = 60 * minute
        day = 24 * hour
        week = 7 * day
        year = 365 * day
        sizes = {"s": second, "m": minute, "h": hour, "d": day, "w": week, "y": year}
        size = 1
        suffix = value[-1]
        if suffix in sizes:
            value = value[:-1]
            size = sizes[suffix]
        return int(value) * size

    @staticmethod
    def abspath(file_path):
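        """Return the absolute version of a path, passing falsy values
        (such as None) through unchanged.
        """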
        return os.path.abspath(file_path) if file_path else file_path

    @classmethod
    def path_exists(cls, file_path):
        """Check if a file exists

        Unlike os.path.exists, this throws an exception if there is an error
        checking if the file exists (for example, if there is a perms error on
        the parent dir).

        Returns:
            bool: True if the file exists; False if not.
        """
        try:
            os.stat(file_path)
            return True
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise e
            return False

    @classmethod
    def check_file(cls, file_path, config_name):
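        """Check that the given config file is usable, raising a ConfigError
        (naming the config option concerned) if it is missing or cannot be
        accessed, and return its absolute path.
        """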
        if file_path is None:
            raise ConfigError("Missing config for %s." % (config_name,))
        try:
            os.stat(file_path)
        except OSError as e:
            raise ConfigError(
                "Error accessing file '%s' (config for %s): %s"
                % (file_path, config_name, e.strerror)
            )
        return cls.abspath(file_path)

    @classmethod
    def ensure_directory(cls, dir_path):
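        """Create the given directory if it does not already exist, raising a
        ConfigError if the path exists but is not a directory, and return the
        absolute path.
        """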
        dir_path = cls.abspath(dir_path)
        try:
            os.makedirs(dir_path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
        if not os.path.isdir(dir_path):
            raise ConfigError("%s is not a directory" % (dir_path,))
        return dir_path

    @classmethod
    def read_file(cls, file_path, config_name):
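        """Check that the given config file exists (see check_file) and return
        its contents as a string.
        """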
        cls.check_file(file_path, config_name)
        with open(file_path) as file_stream:
            return file_stream.read()

    def invoke_all(self, name, *args, **kargs):
        """Invoke all instance methods with the given name and arguments in the
        class's MRO.

        Args:
            name (str): Name of function to invoke
            *args
            **kwargs

        Returns:
            list: The list of the return values from each method called
        """
        results = []
        for cls in type(self).mro():
            if name in cls.__dict__:
                results.append(getattr(cls, name)(self, *args, **kargs))
        return results

    @classmethod
    def invoke_all_static(cls, name, *args, **kargs):
        """Invoke all static methods with the given name and arguments in the
        class's MRO.

        Args:
            name (str): Name of function to invoke
            *args
            **kwargs

        Returns:
            list: The list of the return values from each method called
        """
        results = []
        for c in cls.mro():
            if name in c.__dict__:
                results.append(getattr(c, name)(*args, **kargs))
        return results

    def generate_config(
        self,
        config_dir_path,
        data_dir_path,
        server_name,
        generate_secrets=False,
        report_stats=None,
        open_private_ports=False,
    ):
        """Build a default configuration file

        This is used when the user explicitly asks us to generate a config file
        (e.g. with --generate-config).

        Args:
            config_dir_path (str): The path where the config files are kept. Used to
                create filenames for things like the log config and the signing key.

            data_dir_path (str): The path where the data files are kept. Used to create
                filenames for things like the database and media store.

            server_name (str): The server name. Used to initialise the server_name
                config param, but also used in the names of some of the config files.

            generate_secrets (bool): True if we should generate new secrets for things
                like the macaroon_secret_key. If False, these parameters will be left
                unset.

            report_stats (bool|None): Initial setting for the report_stats setting.
                If None, report_stats will be left unset.

            open_private_ports (bool): True to leave private ports (such as the non-TLS
                HTTP listener) open to the internet.

        Returns:
            str: the yaml config file
        """
        return "\n\n".join(
            dedent(conf)
            for conf in self.invoke_all(
                "generate_config_section",
                config_dir_path=config_dir_path,
                data_dir_path=data_dir_path,
                server_name=server_name,
                generate_secrets=generate_secrets,
                report_stats=report_stats,
                open_private_ports=open_private_ports,
            )
        )

    @classmethod
    def load_config(cls, description, argv):
        """Parse the commandline and config files

        Doesn't support config-file-generation: used by the worker apps.

        Returns: Config object.
        """
        config_parser = argparse.ArgumentParser(description=description)
        cls.add_arguments_to_parser(config_parser)
        obj, _ = cls.load_config_with_parser(config_parser, argv)

        return obj

    @classmethod
    def add_arguments_to_parser(cls, config_parser):
        """Adds all the config flags to an ArgumentParser.

        Doesn't support config-file-generation: used by the worker apps.

        Used for workers where we want to add extra flags/subcommands.

        Args:
            config_parser (ArgumentParser): The parser to add the config flags to
        """
        config_parser.add_argument(
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files.",
        )

        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Where files such as certs and signing keys are stored when"
            " their location is not given explicitly in the config."
            " Defaults to the directory containing the last config file",
        )

        cls.invoke_all_static("add_arguments", config_parser)

    @classmethod
    def load_config_with_parser(cls, parser, argv):
        """Parse the commandline and config files with the given parser

        Doesn't support config-file-generation: used by the worker apps.

        Used for workers where we want to add extra flags/subcommands.

        Args:
            parser (ArgumentParser)
            argv (list[str])

        Returns:
            tuple[HomeServerConfig, argparse.Namespace]: Returns the parsed
            config object and the parsed argparse.Namespace object from
            `parser.parse_args(..)`
        """

        obj = cls()

        config_args = parser.parse_args(argv)

        config_files = find_config_files(search_paths=config_args.config_path)

        if not config_files:
            parser.error("Must supply a config file.")

        if config_args.keys_directory:
            config_dir_path = config_args.keys_directory
        else:
            config_dir_path = os.path.dirname(config_files[-1])
        config_dir_path = os.path.abspath(config_dir_path)
        data_dir_path = os.getcwd()

        config_dict = read_config_files(config_files)
        obj.parse_config_dict(
            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
        )

        obj.invoke_all("read_arguments", config_args)

        return obj, config_args

    @classmethod
    def load_or_generate_config(cls, description, argv):
        """Parse the commandline and config files

        Supports generation of config files, so is used for the main homeserver app.

        Returns: Config object, or None if --generate-config or --generate-keys was set
        """
        config_parser = argparse.ArgumentParser(add_help=False)
        config_parser.add_argument(
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files.",
        )

        generate_group = config_parser.add_argument_group("Config generation")
        generate_group.add_argument(
            "--generate-config",
            action="store_true",
            help="Generate a config file, then exit.",
        )
        generate_group.add_argument(
            "--generate-missing-configs",
            "--generate-keys",
            action="store_true",
            help="Generate any missing additional config files, then exit.",
        )
        generate_group.add_argument(
            "-H", "--server-name", help="The server name to generate a config file for."
        )
        generate_group.add_argument(
            "--report-stats",
            action="store",
            help="Whether the generated config reports anonymized usage statistics.",
            choices=["yes", "no"],
        )
        generate_group.add_argument(
            "--config-directory",
            "--keys-directory",
            metavar="DIRECTORY",
            help=(
                "Specify where additional config files such as signing keys and log"
                " config should be stored. Defaults to the same directory as the last"
                " config file."
            ),
        )
        generate_group.add_argument(
            "--data-directory",
            metavar="DIRECTORY",
            help=(
                "Specify where data such as the media store and database file should be"
                " stored. Defaults to the current working directory."
            ),
        )
        generate_group.add_argument(
            "--open-private-ports",
            action="store_true",
            help=(
                "Leave private ports (such as the non-TLS HTTP listener) open to the"
                " internet. Do not use this unless you know what you are doing."
            ),
        )

        config_args, remaining_args = config_parser.parse_known_args(argv)

        config_files = find_config_files(search_paths=config_args.config_path)

        if not config_files:
            config_parser.error(
                "Must supply a config file.\nA config file can be automatically"
                ' generated using "--generate-config -H SERVER_NAME'
                ' -c CONFIG-FILE"'
            )

        if config_args.config_directory:
            config_dir_path = config_args.config_directory
        else:
            config_dir_path = os.path.dirname(config_files[-1])
        config_dir_path = os.path.abspath(config_dir_path)
        data_dir_path = os.getcwd()

        generate_missing_configs = config_args.generate_missing_configs

        obj = cls()

        if config_args.generate_config:
            if config_args.report_stats is None:
                config_parser.error(
                    "Please specify either --report-stats=yes or --report-stats=no\n\n"
                    + MISSING_REPORT_STATS_SPIEL
                )

            (config_path,) = config_files
            if not cls.path_exists(config_path):
                print("Generating config file %s" % (config_path,))

                if config_args.data_directory:
                    data_dir_path = config_args.data_directory
                else:
                    data_dir_path = os.getcwd()
                data_dir_path = os.path.abspath(data_dir_path)

                server_name = config_args.server_name
                if not server_name:
                    raise ConfigError(
                        "Must specify a server_name to generate a config for."
                        " Pass -H server.name."
                    )

                config_str = obj.generate_config(
                    config_dir_path=config_dir_path,
                    data_dir_path=data_dir_path,
                    server_name=server_name,
                    report_stats=(config_args.report_stats == "yes"),
                    generate_secrets=True,
                    open_private_ports=config_args.open_private_ports,
                )

                if not cls.path_exists(config_dir_path):
                    os.makedirs(config_dir_path)
                with open(config_path, "w") as config_file:
                    config_file.write("# vim:ft=yaml\n\n")
                    config_file.write(config_str)

                config_dict = yaml.safe_load(config_str)
                obj.generate_missing_files(config_dict, config_dir_path)

                print(
                    (
                        "A config file has been generated in %r for server name"
                        " %r. Please review this file and customise it"
                        " to your needs."
                    )
                    % (config_path, server_name)
                )
                return
            else:
                print(
                    (
                        "Config file %r already exists. Generating any missing config"
                        " files."
                    )
                    % (config_path,)
                )
                generate_missing_configs = True

        parser = argparse.ArgumentParser(
            parents=[config_parser],
            description=description,
            formatter_class=argparse.RawDescriptionHelpFormatter,
        )

        obj.invoke_all_static("add_arguments", parser)
        args = parser.parse_args(remaining_args)

        config_dict = read_config_files(config_files)
        if generate_missing_configs:
            obj.generate_missing_files(config_dict, config_dir_path)
            return None

        obj.parse_config_dict(
            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
        )
        obj.invoke_all("read_arguments", args)

        return obj

    def parse_config_dict(self, config_dict, config_dir_path, data_dir_path):
        """Read the information from the config dict into this Config object.

        Args:
            config_dict (dict): Configuration data, as read from the yaml

            config_dir_path (str): The path where the config files are kept. Used to
                create filenames for things like the log config and the signing key.

            data_dir_path (str): The path where the data files are kept. Used to create
                filenames for things like the database and media store.
        """
        self.invoke_all(
            "read_config",
            config_dict,
            config_dir_path=config_dir_path,
            data_dir_path=data_dir_path,
        )

    def generate_missing_files(self, config_dict, config_dir_path):
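        """Run the `generate_files` hook on each config class, allowing any
        missing support files (such as signing keys) to be created.
        """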
        self.invoke_all("generate_files", config_dict, config_dir_path)


def read_config_files(config_files):
    """Read the config files into a dict

    Args:
        config_files (iterable[str]): A list of the config files to read

    Returns: dict
    """
    specified_config = {}
    for config_file in config_files:
        with open(config_file) as file_stream:
            yaml_config = yaml.safe_load(file_stream)
        specified_config.update(yaml_config)

    if "server_name" not in specified_config:
        raise ConfigError(MISSING_SERVER_NAME)

    if "report_stats" not in specified_config:
        raise ConfigError(
            MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
        )
    return specified_config


def find_config_files(search_paths):
    """Finds config files using a list of search paths. If a path is a file
    then that file path is added to the list. If a search path is a directory
    then all the "*.yaml" files in that directory are added to the list in
    sorted order.

    Args:
        search_paths(list(str)): A list of paths to search.

    Returns:
        list(str): A list of file paths.
    """

    config_files = []
    if search_paths:
        for config_path in search_paths:
            if os.path.isdir(config_path):
                # We accept specifying directories as config paths, we search
                # inside that directory for all files matching *.yaml, and then
                # we apply them in *sorted* order.
                files = []
                for entry in os.listdir(config_path):
                    entry_path = os.path.join(config_path, entry)
                    if not os.path.isfile(entry_path):
                        err = "Found subdirectory in config directory: %r. IGNORING."
                        print(err % (entry_path,))
                        continue

                    if not entry.endswith(".yaml"):
                        err = (
                            "Found file in config directory that does not end in "
                            "'.yaml': %r. IGNORING."
                        )
                        print(err % (entry_path,))
                        continue

                    files.append(entry_path)

                config_files.extend(sorted(files))
            else:
                config_files.append(config_path)
    return config_files