2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2019-06-24 12:34:45 +02:00
|
|
|
# Copyright 2017-2018 New Vector Ltd
|
|
|
|
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
2014-08-31 17:06:39 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import argparse
|
2015-11-12 12:58:48 +01:00
|
|
|
import errno
|
2022-05-11 15:43:22 +02:00
|
|
|
import logging
|
2014-08-31 17:06:39 +02:00
|
|
|
import os
|
2022-06-14 16:53:42 +02:00
|
|
|
import re
|
2019-10-10 10:39:35 +02:00
|
|
|
from collections import OrderedDict
|
2022-08-26 13:26:06 +02:00
|
|
|
from enum import Enum, auto
|
2020-07-16 15:06:28 +02:00
|
|
|
from hashlib import sha256
|
2015-04-30 05:24:44 +02:00
|
|
|
from textwrap import dedent
|
2021-11-23 16:21:19 +01:00
|
|
|
from typing import (
|
|
|
|
Any,
|
2022-05-11 15:43:22 +02:00
|
|
|
ClassVar,
|
|
|
|
Collection,
|
2021-11-23 16:21:19 +01:00
|
|
|
Dict,
|
|
|
|
Iterable,
|
2022-05-11 15:43:22 +02:00
|
|
|
Iterator,
|
2021-11-23 16:21:19 +01:00
|
|
|
List,
|
|
|
|
MutableMapping,
|
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
|
|
|
)
|
2014-08-31 17:06:39 +02:00
|
|
|
|
2020-07-16 15:06:28 +02:00
|
|
|
import attr
|
2020-08-17 18:05:00 +02:00
|
|
|
import jinja2
|
|
|
|
import pkg_resources
|
2018-07-09 08:09:20 +02:00
|
|
|
import yaml
|
|
|
|
|
2021-02-01 16:52:50 +01:00
|
|
|
from synapse.util.templates import _create_mxc_to_http_filter, _format_ts_filter
|
|
|
|
|
2022-05-11 15:43:22 +02:00
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-08-31 17:06:39 +02:00
|
|
|
|
2014-09-02 11:48:05 +02:00
|
|
|
class ConfigError(Exception):
    """Represents a problem parsing the configuration

    Args:
        msg: A textual description of the error.
        path: Where appropriate, an indication of where in the configuration
            the problem lies.
    """

    def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
        # Description of the problem, rendered by format_config_error.
        self.msg = msg
        # Sequence of config keys locating the problem (e.g. ["server", "port"]),
        # or None when the location is not known.
        self.path = path
|
2014-09-02 11:48:05 +02:00
|
|
|
|
|
|
|
|
2022-05-11 15:43:22 +02:00
|
|
|
def format_config_error(e: ConfigError) -> Iterator[str]:
    """
    Formats a config error neatly

    The idea is to format the immediate error, plus the "causes" of those errors,
    hopefully in a way that makes sense to the user. For example:

        Error in configuration at 'oidc_config.user_mapping_provider.config.display_name_template':
          Failed to parse config for module 'JinjaOidcMappingProvider':
            invalid jinja template:
              unexpected end of template, expected 'end of print statement'.

    Args:
        e: the error to be formatted

    Returns: An iterator which yields string fragments to be formatted
    """
    yield "Error in configuration"

    if e.path:
        yield " at '%s'" % (".".join(e.path),)

    yield ":\n %s" % (e.msg,)

    # Walk the chain of explicit causes (`raise ... from ...`), indenting each
    # successive cause one extra space.
    cause = e.__cause__
    depth = 1
    while cause is not None:
        depth += 1
        yield ":\n%s%s" % (" " * depth, str(cause))
        cause = cause.__cause__
|
|
|
|
|
|
|
|
|
2015-11-19 14:05:51 +01:00
|
|
|
# We split these messages out to allow packages to override with package
|
|
|
|
# specific instructions.
|
2015-11-18 19:37:03 +01:00
|
|
|
# Shown when the `report_stats` option is absent from the config entirely.
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
Please opt in or out of reporting homeserver usage statistics, by setting
the `report_stats` key in your config file to either True or False.
"""

# Appended to the instructions above, to encourage admins to opt in.
MISSING_REPORT_STATS_SPIEL = """\
We would really appreciate it if you could help our project out by reporting
homeserver usage statistics from your homeserver. Your homeserver's server name,
along with very basic aggregate data (e.g. number of users) will be reported. But
it helps us to track the growth of the Matrix community, and helps us to make Matrix
a success, as well as to convince other networks that they should peer with us.

Thank you.
"""

# Shown when the mandatory `server_name` option is missing from the config.
MISSING_SERVER_NAME = """\
Missing mandatory `server_name` config option.
"""


# Preamble written at the top of every generated configuration file.
CONFIG_FILE_HEADER = """\
# Configuration file for Synapse.
#
# This is a YAML file: see [1] for a quick introduction. Note in particular
# that *indentation is important*: all the elements of a list or dictionary
# should have the same indentation.
#
# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
#
# For more information on how to configure Synapse, including a complete accounting of
# each option, go to docs/usage/configuration/config_documentation.md or
# https://matrix-org.github.io/synapse/latest/usage/configuration/config_documentation.html
"""
|
|
|
|
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
def path_exists(file_path: str) -> bool:
    """Check if a file exists

    Unlike os.path.exists, this throws an exception if there is an error
    checking if the file exists (for example, if there is a perms error on
    the parent dir).

    Returns:
        True if the file exists; False if not.
    """
    try:
        os.stat(file_path)
    except OSError as err:
        # A missing file is the expected "does not exist" case; anything
        # else (e.g. EACCES on the parent directory) is a real error.
        if err.errno == errno.ENOENT:
            return False
        raise err
    return True
|
|
|
|
|
|
|
|
|
2020-09-04 12:54:56 +02:00
|
|
|
class Config:
    """
    A configuration section, containing configuration keys and values.

    Attributes:
        section: The section title of this config object, such as
            "tls" or "logger". This is used to refer to it on the root
            logger (for example, `config.tls.some_option`). Must be
            defined in subclasses.
    """

    # The name under which this section's options are exposed on the
    # RootConfig; must be set by each subclass.
    section: ClassVar[str]

    def __init__(self, root_config: Optional["RootConfig"] = None):
        # Back-reference to the owning RootConfig, so a section can read
        # options from sibling sections (e.g. `self.root.server`).
        self.root = root_config

        # Get the path to the default Synapse template directory
        self.default_template_dir = pkg_resources.resource_filename(
            "synapse", "res/templates"
        )

    @staticmethod
    def parse_size(value: Union[str, int]) -> int:
        """Interpret `value` as a number of bytes.

        If an integer is provided it is treated as bytes and is unchanged.

        String byte sizes can have a suffix of 'K' or `M`, representing kibibytes and
        mebibytes respectively. No suffix is understood as a plain byte count.

        Raises:
            TypeError, if given something other than an integer or a string
            ValueError: if given a string not of the form described above.
        """
        # `type(x) is int` (not isinstance) is deliberate here: bool is a
        # subclass of int and should not be accepted silently.
        if type(value) is int:
            return value
        elif type(value) is str:
            sizes = {"K": 1024, "M": 1024 * 1024}
            size = 1
            suffix = value[-1]
            if suffix in sizes:
                value = value[:-1]
                size = sizes[suffix]
            # int() raises ValueError for malformed numbers, per the docstring.
            return int(value) * size
        else:
            raise TypeError(f"Bad byte size {value!r}")

    @staticmethod
    def parse_duration(value: Union[str, int]) -> int:
        """Convert a duration as a string or integer to a number of milliseconds.

        If an integer is provided it is treated as milliseconds and is unchanged.

        String durations can have a suffix of 's', 'm', 'h', 'd', 'w', or 'y'.
        No suffix is treated as milliseconds.

        Args:
            value: The duration to parse.

        Returns:
            The number of milliseconds in the duration.

        Raises:
            TypeError, if given something other than an integer or a string
            ValueError: if given a string not of the form described above.
        """
        # See parse_size for why `type(...) is` is used instead of isinstance.
        if type(value) is int:
            return value
        elif type(value) is str:
            second = 1000
            minute = 60 * second
            hour = 60 * minute
            day = 24 * hour
            week = 7 * day
            # NOTE: a year is approximated as exactly 365 days.
            year = 365 * day
            sizes = {
                "s": second,
                "m": minute,
                "h": hour,
                "d": day,
                "w": week,
                "y": year,
            }
            size = 1
            suffix = value[-1]
            if suffix in sizes:
                value = value[:-1]
                size = sizes[suffix]
            return int(value) * size
        else:
            raise TypeError(f"Bad duration {value!r}")

    @staticmethod
    def abspath(file_path: str) -> str:
        """Return the absolute form of `file_path`; empty paths pass through unchanged."""
        return os.path.abspath(file_path) if file_path else file_path

    @classmethod
    def path_exists(cls, file_path: str) -> bool:
        # Thin wrapper around the module-level `path_exists` helper.
        return path_exists(file_path)

    @classmethod
    def check_file(cls, file_path: Optional[str], config_name: str) -> str:
        """Check that a configured file is present and accessible.

        Args:
            file_path: the configured path, or None if the option was not set.
            config_name: name of the config option, for error messages.

        Returns:
            The absolute path to the file.

        Raises:
            ConfigError: if the option is unset or the file cannot be stat'ed.
        """
        if file_path is None:
            raise ConfigError("Missing config for %s." % (config_name,))
        try:
            os.stat(file_path)
        except OSError as e:
            raise ConfigError(
                "Error accessing file '%s' (config for %s): %s"
                % (file_path, config_name, e.strerror)
            )
        return cls.abspath(file_path)

    @classmethod
    def ensure_directory(cls, dir_path: str) -> str:
        """Create `dir_path` (and any missing parents) if necessary.

        Returns:
            The absolute path of the directory.

        Raises:
            ConfigError: if the path exists but is not a directory.
        """
        dir_path = cls.abspath(dir_path)
        os.makedirs(dir_path, exist_ok=True)
        if not os.path.isdir(dir_path):
            raise ConfigError("%s is not a directory" % (dir_path,))
        return dir_path

    @classmethod
    def read_file(cls, file_path: Any, config_name: str) -> str:
        """Deprecated: call read_file directly"""
        # Delegates to the module-level `read_file` helper.
        return read_file(file_path, (config_name,))

    def read_template(self, filename: str) -> jinja2.Template:
        """Load a template file from disk.

        This function will attempt to load the given template from the default Synapse
        template directory.

        Files read are treated as Jinja templates. The template is not rendered yet
        and has autoescape enabled.

        Args:
            filename: A template filename to read.

        Raises:
            ConfigError: if the file's path is incorrect or otherwise cannot be read.

        Returns:
            A jinja2 template.
        """
        return self.read_templates([filename])[0]

    def read_templates(
        self,
        filenames: List[str],
        custom_template_directories: Optional[Iterable[str]] = None,
    ) -> List[jinja2.Template]:
        """Load a list of template files from disk using the given variables.

        This function will attempt to load the given templates from the default Synapse
        template directory. If `custom_template_directories` is supplied, any directory
        in this list is tried (in the order they appear in the list) before trying
        Synapse's default directory.

        Files read are treated as Jinja templates. The templates are not rendered yet
        and have autoescape enabled.

        Args:
            filenames: A list of template filenames to read.

            custom_template_directories: A list of directory to try to look for the
                templates before using the default Synapse template directory instead.

        Raises:
            ConfigError: if the file's path is incorrect or otherwise cannot be read.

        Returns:
            A list of jinja2 templates.
        """
        search_directories = []

        # The loader will first look in the custom template directories (if specified)
        # for the given filename. If it doesn't find it, it will use the default
        # template dir instead.
        if custom_template_directories is not None:
            for custom_template_directory in custom_template_directories:
                # Check that the given template directory exists
                if not self.path_exists(custom_template_directory):
                    raise ConfigError(
                        "Configured template directory does not exist: %s"
                        % (custom_template_directory,)
                    )

                # Search the custom template directory as well
                search_directories.append(custom_template_directory)

        # Append the default directory at the end of the list so Jinja can fallback on it
        # if a template is missing from any custom directory.
        search_directories.append(self.default_template_dir)

        # TODO: switch to synapse.util.templates.build_jinja_env
        loader = jinja2.FileSystemLoader(search_directories)
        env = jinja2.Environment(
            loader=loader,
            autoescape=jinja2.select_autoescape(),
        )

        # Update the environment with our custom filters
        env.filters.update(
            {
                "format_ts": _format_ts_filter,
                "mxc_to_http": _create_mxc_to_http_filter(
                    self.root.server.public_baseurl
                ),
            }
        )

        # Load the templates
        return [env.get_template(filename) for filename in filenames]
|
2020-08-17 18:05:00 +02:00
|
|
|
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
# Type variable bound to RootConfig, so classmethod constructors on subclasses
# (e.g. load_config) are typed as returning the subclass, not RootConfig itself.
TRootConfig = TypeVar("TRootConfig", bound="RootConfig")
|
|
|
|
|
|
|
|
|
2020-09-04 12:54:56 +02:00
|
|
|
class RootConfig:
    """
    Holder of an application's configuration.

    What configuration this object holds is defined by `config_classes`, a list
    of Config classes that will be instantiated and given the contents of a
    configuration file to read. They can then be accessed on this class by their
    section name, defined in the Config or dynamically set to be the name of the
    class, lower-cased and with "Config" removed.
    """

    # The Config subclasses this application uses; overridden by subclasses.
    config_classes: List[Type[Config]] = []

    def __init__(self, config_files: Collection[str] = ()):
        # Capture absolute paths here, so we can reload config after we daemonize.
        self.config_files = [os.path.abspath(path) for path in config_files]

        for config_class in self.config_classes:
            if config_class.section is None:
                raise ValueError("%r requires a section name" % (config_class,))

            try:
                conf = config_class(self)
            except Exception as e:
                raise Exception("Failed making %s: %r" % (config_class.section, e))
            # Expose the instantiated section as an attribute, so options can
            # be reached as e.g. `config.server.some_option`.
            setattr(self, config_class.section, conf)
|
2019-10-10 10:39:35 +02:00
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
def invoke_all(
|
|
|
|
self, func_name: str, *args: Any, **kwargs: Any
|
|
|
|
) -> MutableMapping[str, Any]:
|
2019-10-10 10:39:35 +02:00
|
|
|
"""
|
|
|
|
Invoke a function on all instantiated config objects this RootConfig is
|
|
|
|
configured to use.
|
2019-07-15 14:15:34 +02:00
|
|
|
|
|
|
|
Args:
|
2019-10-10 10:39:35 +02:00
|
|
|
func_name: Name of function to invoke
|
2019-07-15 14:15:34 +02:00
|
|
|
*args
|
|
|
|
**kwargs
|
2021-11-23 16:21:19 +01:00
|
|
|
|
2019-07-15 14:15:34 +02:00
|
|
|
Returns:
|
2019-10-10 10:39:35 +02:00
|
|
|
ordered dictionary of config section name and the result of the
|
|
|
|
function from it.
|
2019-07-15 14:15:34 +02:00
|
|
|
"""
|
2019-10-10 10:39:35 +02:00
|
|
|
res = OrderedDict()
|
|
|
|
|
2021-10-06 16:47:41 +02:00
|
|
|
for config_class in self.config_classes:
|
|
|
|
config = getattr(self, config_class.section)
|
|
|
|
|
2019-10-10 10:39:35 +02:00
|
|
|
if hasattr(config, func_name):
|
2021-10-06 16:47:41 +02:00
|
|
|
res[config_class.section] = getattr(config, func_name)(*args, **kwargs)
|
2019-10-10 10:39:35 +02:00
|
|
|
|
|
|
|
return res
|
2014-08-31 17:06:39 +02:00
|
|
|
|
2019-07-15 14:15:34 +02:00
|
|
|
@classmethod
|
2021-11-23 16:21:19 +01:00
|
|
|
def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: any) -> None:
|
2019-10-10 10:39:35 +02:00
|
|
|
"""
|
|
|
|
Invoke a static function on config objects this RootConfig is
|
|
|
|
configured to use.
|
2019-07-15 14:15:34 +02:00
|
|
|
|
|
|
|
Args:
|
2019-10-10 10:39:35 +02:00
|
|
|
func_name: Name of function to invoke
|
2019-07-15 14:15:34 +02:00
|
|
|
*args
|
|
|
|
**kwargs
|
2021-11-23 16:21:19 +01:00
|
|
|
|
2019-07-15 14:15:34 +02:00
|
|
|
Returns:
|
2019-10-10 10:39:35 +02:00
|
|
|
ordered dictionary of config section name and the result of the
|
|
|
|
function from it.
|
2019-07-15 14:15:34 +02:00
|
|
|
"""
|
2019-10-10 10:39:35 +02:00
|
|
|
for config in cls.config_classes:
|
|
|
|
if hasattr(config, func_name):
|
|
|
|
getattr(config, func_name)(*args, **kwargs)
|
2019-07-15 14:15:34 +02:00
|
|
|
|
Error if macaroon key is missing from config
Currently we store all access tokens in the DB, and fall back to that
check if we can't validate the macaroon, so our fallback works here, but
for guests, their macaroons don't get persisted, so we don't get to
find them in the database. Each restart, we generate a new ephemeral
key, so guests lose access after each server restart.
I tried to fix up the config stuff to be less insane, but gave up, so
instead I bolt on yet another piece of custom one-off insanity.
Also, add some basic tests for config generation and loading.
2016-02-05 02:58:23 +01:00
|
|
|
    def generate_config(
        self,
        config_dir_path: str,
        data_dir_path: str,
        server_name: str,
        generate_secrets: bool = False,
        report_stats: Optional[bool] = None,
        open_private_ports: bool = False,
        listeners: Optional[List[dict]] = None,
        tls_certificate_path: Optional[str] = None,
        tls_private_key_path: Optional[str] = None,
    ) -> str:
        """
        Build a default configuration file

        This is used when the user explicitly asks us to generate a config file
        (eg with --generate-config).

        Args:
            config_dir_path: The path where the config files are kept. Used to
                create filenames for things like the log config and the signing key.

            data_dir_path: The path where the data files are kept. Used to create
                filenames for things like the database and media store.

            server_name: The server name. Used to initialise the server_name
                config param, but also used in the names of some of the config files.

            generate_secrets: True if we should generate new secrets for things
                like the macaroon_secret_key. If False, these parameters will be left
                unset.

            report_stats: Initial setting for the report_stats setting.
                If None, report_stats will be left unset.

            open_private_ports: True to leave private ports (such as the non-TLS
                HTTP listener) open to the internet.

            listeners: A list of descriptions of the listeners synapse should
                start with each of which specifies a port (int), a list of
                resources (list(str)), tls (bool) and type (str). For example:
                [{
                    "port": 8448,
                    "resources": [{"names": ["federation"]}],
                    "tls": True,
                    "type": "http",
                },
                {
                    "port": 443,
                    "resources": [{"names": ["client"]}],
                    "tls": False,
                    "type": "http",
                }],

            tls_certificate_path: The path to the tls certificate.

            tls_private_key_path: The path to the tls private key.

        Returns:
            The yaml config file
        """

        # Ask each config section to generate its own portion of the file and
        # join them under the standard header.
        conf = CONFIG_FILE_HEADER + "\n".join(
            dedent(conf)
            for conf in self.invoke_all(
                "generate_config_section",
                config_dir_path=config_dir_path,
                data_dir_path=data_dir_path,
                server_name=server_name,
                generate_secrets=generate_secrets,
                report_stats=report_stats,
                open_private_ports=open_private_ports,
                listeners=listeners,
                tls_certificate_path=tls_certificate_path,
                tls_private_key_path=tls_private_key_path,
            ).values()
        )
        # Collapse runs of blank lines so the generated file is compact.
        conf = re.sub("\n{2,}", "\n", conf)
        return conf
|
2015-04-30 05:24:44 +02:00
|
|
|
|
2014-08-31 17:06:39 +02:00
|
|
|
@classmethod
|
2021-11-23 16:21:19 +01:00
|
|
|
def load_config(
|
|
|
|
cls: Type[TRootConfig], description: str, argv: List[str]
|
|
|
|
) -> TRootConfig:
|
2019-06-21 18:43:38 +02:00
|
|
|
"""Parse the commandline and config files
|
|
|
|
|
|
|
|
Doesn't support config-file-generation: used by the worker apps.
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
Returns:
|
|
|
|
Config object.
|
2019-06-21 18:43:38 +02:00
|
|
|
"""
|
2019-07-15 14:43:25 +02:00
|
|
|
config_parser = argparse.ArgumentParser(description=description)
|
|
|
|
cls.add_arguments_to_parser(config_parser)
|
2019-07-01 18:55:26 +02:00
|
|
|
obj, _ = cls.load_config_with_parser(config_parser, argv)
|
|
|
|
|
|
|
|
return obj
|
|
|
|
|
|
|
|
    @classmethod
    def add_arguments_to_parser(cls, config_parser: argparse.ArgumentParser) -> None:
        """Adds all the config flags to an ArgumentParser.

        Doesn't support config-file-generation: used by the worker apps.

        Used for workers where we want to add extra flags/subcommands.

        Args:
            config_parser: the parser to add the flags to.
        """

        config_parser.add_argument(
            "-c",
            "--config-path",
            action="append",
            metavar="CONFIG_FILE",
            help="Specify config file. Can be given multiple times and"
            " may specify directories containing *.yaml files.",
        )

        config_parser.add_argument(
            "--keys-directory",
            metavar="DIRECTORY",
            help="Where files such as certs and signing keys are stored when"
            " their location is not given explicitly in the config."
            " Defaults to the directory containing the last config file",
        )

        # Let each config class register any extra commandline flags it needs.
        cls.invoke_all_static("add_arguments", config_parser)
|
2019-03-13 18:33:54 +01:00
|
|
|
|
2019-07-01 18:55:26 +02:00
|
|
|
    @classmethod
    def load_config_with_parser(
        cls: Type[TRootConfig], parser: argparse.ArgumentParser, argv: List[str]
    ) -> Tuple[TRootConfig, argparse.Namespace]:
        """Parse the commandline and config files with the given parser

        Doesn't support config-file-generation: used by the worker apps.

        Used for workers where we want to add extra flags/subcommands.

        Args:
            parser
            argv

        Returns:
            Returns the parsed config object and the parsed argparse.Namespace
            object from parser.parse_args(..)`
        """

        config_args = parser.parse_args(argv)

        # Expand any directories given with -c into the .yaml files they contain.
        config_files = find_config_files(search_paths=config_args.config_path)
        obj = cls(config_files)
        if not config_files:
            # parser.error exits the process with a usage message.
            parser.error("Must supply a config file.")

        # The config directory (for keys etc) defaults to the directory of the
        # last config file, unless overridden with --keys-directory.
        if config_args.keys_directory:
            config_dir_path = config_args.keys_directory
        else:
            config_dir_path = os.path.dirname(config_files[-1])
        config_dir_path = os.path.abspath(config_dir_path)
        data_dir_path = os.getcwd()

        # Merge all config files into a single dict, then parse it.
        config_dict = read_config_files(config_files)
        obj.parse_config_dict(
            config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
        )

        # Give each section a chance to act on the parsed commandline arguments.
        obj.invoke_all("read_arguments", config_args)

        return obj, config_args
|
2015-04-30 05:24:44 +02:00
|
|
|
|
2016-06-09 19:50:38 +02:00
|
|
|
@classmethod
|
2021-11-23 16:21:19 +01:00
|
|
|
def load_or_generate_config(
|
|
|
|
cls: Type[TRootConfig], description: str, argv: List[str]
|
|
|
|
) -> Optional[TRootConfig]:
|
2019-06-21 18:43:38 +02:00
|
|
|
"""Parse the commandline and config files
|
|
|
|
|
|
|
|
Supports generation of config files, so is used for the main homeserver app.
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
Returns:
|
|
|
|
Config object, or None if --generate-config or --generate-keys was set
|
2019-06-21 18:43:38 +02:00
|
|
|
"""
|
2020-04-09 13:44:37 +02:00
|
|
|
parser = argparse.ArgumentParser(description=description)
|
|
|
|
parser.add_argument(
|
2018-10-20 02:16:55 +02:00
|
|
|
"-c",
|
|
|
|
"--config-path",
|
2015-04-30 14:48:15 +02:00
|
|
|
action="append",
|
2014-08-31 17:06:39 +02:00
|
|
|
metavar="CONFIG_FILE",
|
2015-08-25 17:25:54 +02:00
|
|
|
help="Specify config file. Can be given multiple times and"
|
2018-10-20 02:16:55 +02:00
|
|
|
" may specify directories containing *.yaml files.",
|
2014-08-31 17:06:39 +02:00
|
|
|
)
|
2019-06-21 19:50:43 +02:00
|
|
|
|
2022-08-26 13:26:06 +02:00
|
|
|
# we nest the mutually-exclusive group inside another group so that the help
|
|
|
|
# text shows them in their own group.
|
|
|
|
generate_mode_group = parser.add_argument_group(
|
|
|
|
"Config generation mode",
|
|
|
|
)
|
|
|
|
generate_mode_exclusive = generate_mode_group.add_mutually_exclusive_group()
|
|
|
|
generate_mode_exclusive.add_argument(
|
|
|
|
# hidden option to make the type and default work
|
|
|
|
"--generate-mode",
|
|
|
|
help=argparse.SUPPRESS,
|
|
|
|
type=_ConfigGenerateMode,
|
|
|
|
default=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
|
|
|
|
)
|
|
|
|
generate_mode_exclusive.add_argument(
|
2014-09-01 16:51:15 +02:00
|
|
|
"--generate-config",
|
2019-06-21 19:50:43 +02:00
|
|
|
help="Generate a config file, then exit.",
|
2022-08-26 13:26:06 +02:00
|
|
|
action="store_const",
|
|
|
|
const=_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
|
|
|
|
dest="generate_mode",
|
2014-09-01 16:51:15 +02:00
|
|
|
)
|
2022-08-26 13:26:06 +02:00
|
|
|
generate_mode_exclusive.add_argument(
|
2019-06-21 19:50:43 +02:00
|
|
|
"--generate-missing-configs",
|
|
|
|
"--generate-keys",
|
|
|
|
help="Generate any missing additional config files, then exit.",
|
2022-08-26 13:26:06 +02:00
|
|
|
action="store_const",
|
|
|
|
const=_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
|
|
|
|
dest="generate_mode",
|
2019-06-21 19:50:43 +02:00
|
|
|
)
|
2022-08-26 13:26:06 +02:00
|
|
|
generate_mode_exclusive.add_argument(
|
|
|
|
"--generate-missing-and-run",
|
|
|
|
help="Generate any missing additional config files, then run. This is the "
|
|
|
|
"default behaviour.",
|
|
|
|
action="store_const",
|
|
|
|
const=_ConfigGenerateMode.GENERATE_MISSING_AND_RUN,
|
|
|
|
dest="generate_mode",
|
|
|
|
)
|
|
|
|
|
|
|
|
generate_group = parser.add_argument_group("Details for --generate-config")
|
2019-06-21 19:50:43 +02:00
|
|
|
generate_group.add_argument(
|
|
|
|
"-H", "--server-name", help="The server name to generate a config file for."
|
|
|
|
)
|
|
|
|
generate_group.add_argument(
|
2015-09-22 13:57:40 +02:00
|
|
|
"--report-stats",
|
|
|
|
action="store",
|
2022-07-19 14:38:29 +02:00
|
|
|
help="Whether the generated config reports homeserver usage statistics.",
|
2018-10-20 02:16:55 +02:00
|
|
|
choices=["yes", "no"],
|
2015-09-22 13:57:40 +02:00
|
|
|
)
|
2019-06-21 19:50:43 +02:00
|
|
|
generate_group.add_argument(
|
|
|
|
"--config-directory",
|
2015-08-25 18:31:22 +02:00
|
|
|
"--keys-directory",
|
2015-08-25 17:58:01 +02:00
|
|
|
metavar="DIRECTORY",
|
2019-06-21 19:50:43 +02:00
|
|
|
help=(
|
|
|
|
"Specify where additional config files such as signing keys and log"
|
2019-06-24 12:34:45 +02:00
|
|
|
" config should be stored. Defaults to the same directory as the last"
|
2019-06-21 19:50:43 +02:00
|
|
|
" config file."
|
|
|
|
),
|
2015-04-30 14:48:15 +02:00
|
|
|
)
|
2019-06-21 11:53:49 +02:00
|
|
|
generate_group.add_argument(
|
|
|
|
"--data-directory",
|
|
|
|
metavar="DIRECTORY",
|
|
|
|
help=(
|
|
|
|
"Specify where data such as the media store and database file should be"
|
|
|
|
" stored. Defaults to the current working directory."
|
|
|
|
),
|
|
|
|
)
|
2019-06-21 14:46:39 +02:00
|
|
|
generate_group.add_argument(
|
|
|
|
"--open-private-ports",
|
|
|
|
action="store_true",
|
|
|
|
help=(
|
|
|
|
"Leave private ports (such as the non-TLS HTTP listener) open to the"
|
|
|
|
" internet. Do not use this unless you know what you are doing."
|
|
|
|
),
|
|
|
|
)
|
2019-06-21 11:53:49 +02:00
|
|
|
|
2020-04-09 13:44:37 +02:00
|
|
|
cls.invoke_all_static("add_arguments", parser)
|
|
|
|
config_args = parser.parse_args(argv)
|
2014-08-31 17:06:39 +02:00
|
|
|
|
2016-06-09 19:50:38 +02:00
|
|
|
config_files = find_config_files(search_paths=config_args.config_path)
|
|
|
|
|
2019-06-24 12:34:45 +02:00
|
|
|
if not config_files:
|
2020-04-09 13:44:37 +02:00
|
|
|
parser.error(
|
2019-06-24 12:34:45 +02:00
|
|
|
"Must supply a config file.\nA config file can be automatically"
|
|
|
|
' generated using "--generate-config -H SERVER_NAME'
|
|
|
|
' -c CONFIG-FILE"'
|
|
|
|
)
|
|
|
|
|
|
|
|
if config_args.config_directory:
|
|
|
|
config_dir_path = config_args.config_directory
|
|
|
|
else:
|
|
|
|
config_dir_path = os.path.dirname(config_files[-1])
|
|
|
|
config_dir_path = os.path.abspath(config_dir_path)
|
|
|
|
data_dir_path = os.getcwd()
|
|
|
|
|
2022-05-11 15:43:22 +02:00
|
|
|
obj = cls(config_files)
|
2015-08-25 17:25:54 +02:00
|
|
|
|
2022-08-26 13:26:06 +02:00
|
|
|
if (
|
|
|
|
config_args.generate_mode
|
|
|
|
== _ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT
|
|
|
|
):
|
2015-09-22 13:57:40 +02:00
|
|
|
if config_args.report_stats is None:
|
2020-04-09 13:44:37 +02:00
|
|
|
parser.error(
|
2018-10-20 02:16:55 +02:00
|
|
|
"Please specify either --report-stats=yes or --report-stats=no\n\n"
|
|
|
|
+ MISSING_REPORT_STATS_SPIEL
|
2015-09-22 13:57:40 +02:00
|
|
|
)
|
2019-06-24 12:34:45 +02:00
|
|
|
|
2015-08-25 17:25:54 +02:00
|
|
|
(config_path,) = config_files
|
2019-10-10 10:39:35 +02:00
|
|
|
if not path_exists(config_path):
|
2019-06-21 18:14:56 +02:00
|
|
|
print("Generating config file %s" % (config_path,))
|
2015-08-12 12:57:37 +02:00
|
|
|
|
2019-06-21 11:53:49 +02:00
|
|
|
if config_args.data_directory:
|
|
|
|
data_dir_path = config_args.data_directory
|
|
|
|
else:
|
|
|
|
data_dir_path = os.getcwd()
|
|
|
|
data_dir_path = os.path.abspath(data_dir_path)
|
|
|
|
|
2015-08-12 12:57:37 +02:00
|
|
|
server_name = config_args.server_name
|
|
|
|
if not server_name:
|
Error if macaroon key is missing from config
Currently we store all access tokens in the DB, and fall back to that
check if we can't validate the macaroon, so our fallback works here, but
for guests, their macaroons don't get persisted, so we don't get to
find them in the database. Each restart, we generate a new ephemeral
key, so guests lose access after each server restart.
I tried to fix up the config stuff to be less insane, but gave up, so
instead I bolt on yet another piece of custom one-off insanity.
Also, add some basic tests for config generation and loading.
2016-02-05 02:58:23 +01:00
|
|
|
raise ConfigError(
|
|
|
|
"Must specify a server_name to a generate config for."
|
|
|
|
" Pass -H server.name."
|
|
|
|
)
|
2019-03-04 18:14:58 +01:00
|
|
|
|
|
|
|
config_str = obj.generate_config(
|
|
|
|
config_dir_path=config_dir_path,
|
2019-06-24 12:34:45 +02:00
|
|
|
data_dir_path=data_dir_path,
|
2019-03-04 18:14:58 +01:00
|
|
|
server_name=server_name,
|
|
|
|
report_stats=(config_args.report_stats == "yes"),
|
|
|
|
generate_secrets=True,
|
2019-06-21 14:46:39 +02:00
|
|
|
open_private_ports=config_args.open_private_ports,
|
2019-03-04 18:14:58 +01:00
|
|
|
)
|
|
|
|
|
2021-09-27 12:29:23 +02:00
|
|
|
os.makedirs(config_dir_path, exist_ok=True)
|
2018-04-07 01:39:45 +02:00
|
|
|
with open(config_path, "w") as config_file:
|
|
|
|
config_file.write(config_str)
|
2020-02-14 17:22:30 +01:00
|
|
|
config_file.write("\n\n# vim:ft=yaml")
|
2019-03-04 18:14:58 +01:00
|
|
|
|
2019-06-22 00:39:08 +02:00
|
|
|
config_dict = yaml.safe_load(config_str)
|
|
|
|
obj.generate_missing_files(config_dict, config_dir_path)
|
2019-03-04 18:14:58 +01:00
|
|
|
|
2018-10-20 02:16:55 +02:00
|
|
|
print(
|
|
|
|
(
|
|
|
|
"A config file has been generated in %r for server name"
|
2019-02-12 11:37:00 +01:00
|
|
|
" %r. Please review this file and customise it"
|
2018-10-20 02:16:55 +02:00
|
|
|
" to your needs."
|
|
|
|
)
|
|
|
|
% (config_path, server_name)
|
|
|
|
)
|
Error if macaroon key is missing from config
Currently we store all access tokens in the DB, and fall back to that
check if we can't validate the macaroon, so our fallback works here, but
for guests, their macaroons don't get persisted, so we don't get to
find them in the database. Each restart, we generate a new ephemeral
key, so guests lose access after each server restart.
I tried to fix up the config stuff to be less insane, but gave up, so
instead I bolt on yet another piece of custom one-off insanity.
Also, add some basic tests for config generation and loading.
2016-02-05 02:58:23 +01:00
|
|
|
return
|
2015-08-12 12:57:37 +02:00
|
|
|
else:
|
2018-10-20 02:16:55 +02:00
|
|
|
print(
|
|
|
|
(
|
2019-06-21 18:14:56 +02:00
|
|
|
"Config file %r already exists. Generating any missing config"
|
2018-10-20 02:16:55 +02:00
|
|
|
" files."
|
|
|
|
)
|
|
|
|
% (config_path,)
|
|
|
|
)
|
2014-08-31 17:06:39 +02:00
|
|
|
|
2019-06-22 01:00:20 +02:00
|
|
|
config_dict = read_config_files(config_files)
|
2022-08-26 13:26:06 +02:00
|
|
|
obj.generate_missing_files(config_dict, config_dir_path)
|
|
|
|
|
|
|
|
if config_args.generate_mode in (
|
|
|
|
_ConfigGenerateMode.GENERATE_EVERYTHING_AND_EXIT,
|
|
|
|
_ConfigGenerateMode.GENERATE_MISSING_AND_EXIT,
|
|
|
|
):
|
2016-06-09 19:50:38 +02:00
|
|
|
return None
|
|
|
|
|
2019-06-24 12:34:45 +02:00
|
|
|
obj.parse_config_dict(
|
|
|
|
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
|
|
|
|
)
|
2020-04-09 13:44:37 +02:00
|
|
|
obj.invoke_all("read_arguments", config_args)
|
2016-06-09 19:50:38 +02:00
|
|
|
|
|
|
|
return obj
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
def parse_config_dict(
|
2022-04-11 18:07:23 +02:00
|
|
|
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
|
2021-11-23 16:21:19 +01:00
|
|
|
) -> None:
|
2019-06-24 12:34:45 +02:00
|
|
|
"""Read the information from the config dict into this Config object.
|
|
|
|
|
|
|
|
Args:
|
2021-11-23 16:21:19 +01:00
|
|
|
config_dict: Configuration data, as read from the yaml
|
2019-06-24 12:34:45 +02:00
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
config_dir_path: The path where the config files are kept. Used to
|
2019-06-24 12:34:45 +02:00
|
|
|
create filenames for things like the log config and the signing key.
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
data_dir_path: The path where the data files are kept. Used to create
|
2019-06-24 12:34:45 +02:00
|
|
|
filenames for things like the database and media store.
|
|
|
|
"""
|
|
|
|
self.invoke_all(
|
|
|
|
"read_config",
|
|
|
|
config_dict,
|
|
|
|
config_dir_path=config_dir_path,
|
|
|
|
data_dir_path=data_dir_path,
|
|
|
|
)
|
2016-06-09 19:50:38 +02:00
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
    def generate_missing_files(
        self, config_dict: Dict[str, Any], config_dir_path: str
    ) -> None:
        """Generate any missing additional config files.

        Delegates to each config section's `generate_files` hook, passing the
        parsed config dict and the directory generated files should be
        written to (e.g. signing keys and log config — see the
        --config-directory help text).
        """
        self.invoke_all("generate_files", config_dict, config_dir_path)
|
2019-06-21 18:43:38 +02:00
|
|
|
|
2022-05-11 15:43:22 +02:00
|
|
|
def reload_config_section(self, section_name: str) -> Config:
|
|
|
|
"""Reconstruct the given config section, leaving all others unchanged.
|
|
|
|
|
|
|
|
This works in three steps:
|
|
|
|
|
|
|
|
1. Create a new instance of the relevant `Config` subclass.
|
|
|
|
2. Call `read_config` on that instance to parse the new config.
|
|
|
|
3. Replace the existing config instance with the new one.
|
|
|
|
|
|
|
|
:raises ValueError: if the given `section` does not exist.
|
|
|
|
:raises ConfigError: for any other problems reloading config.
|
|
|
|
|
|
|
|
:returns: the previous config object, which no longer has a reference to this
|
|
|
|
RootConfig.
|
|
|
|
"""
|
|
|
|
existing_config: Optional[Config] = getattr(self, section_name, None)
|
|
|
|
if existing_config is None:
|
|
|
|
raise ValueError(f"Unknown config section '{section_name}'")
|
|
|
|
logger.info("Reloading config section '%s'", section_name)
|
|
|
|
|
|
|
|
new_config_data = read_config_files(self.config_files)
|
|
|
|
new_config = type(existing_config)(self)
|
|
|
|
new_config.read_config(new_config_data)
|
|
|
|
setattr(self, section_name, new_config)
|
|
|
|
|
|
|
|
existing_config.root = None
|
|
|
|
return existing_config
|
|
|
|
|
2016-06-09 19:50:38 +02:00
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
def read_config_files(config_files: Iterable[str]) -> Dict[str, Any]:
    """Load and merge the given yaml config files into one dict.

    Top-level keys from later files override earlier ones. Files which do not
    parse to a key-value map are skipped with a warning rather than failing.

    Args:
        config_files: the paths of the config files to read

    Returns:
        The merged configuration dictionary.

    Raises:
        ConfigError: if the merged config lacks `server_name` or `report_stats`.
    """
    merged_config: Dict[str, Any] = {}
    for path in config_files:
        with open(path) as stream:
            parsed = yaml.safe_load(stream)

        # Tolerate (but warn about) files that are empty or not a mapping.
        if not isinstance(parsed, dict):
            err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
            print(err % (path,))
            continue

        merged_config.update(parsed)

    if "server_name" not in merged_config:
        raise ConfigError(MISSING_SERVER_NAME)

    if "report_stats" not in merged_config:
        raise ConfigError(
            MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" + MISSING_REPORT_STATS_SPIEL
        )
    return merged_config
|
|
|
|
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
def find_config_files(search_paths: List[str]) -> List[str]:
    """Expand a list of search paths into a list of config file paths.

    A path that is a plain file is included as-is. A path that is a directory
    contributes every "*.yaml" file directly inside it, in sorted order;
    anything else found in the directory is skipped with a warning.

    Args:
        search_paths: A list of paths to search.

    Returns:
        A list of file paths.
    """
    config_files: List[str] = []
    for search_path in search_paths or []:
        if not os.path.isdir(search_path):
            config_files.append(search_path)
            continue

        # We accept specifying directories as config paths: search inside for
        # all files matching *.yaml and apply them in *sorted* order.
        yaml_files = []
        for entry in os.listdir(search_path):
            entry_path = os.path.join(search_path, entry)
            if not os.path.isfile(entry_path):
                print(
                    "Found subdirectory in config directory: %r. IGNORING."
                    % (entry_path,)
                )
            elif not entry.endswith(".yaml"):
                print(
                    "Found file in config directory that does not end in "
                    "'.yaml': %r. IGNORING." % (entry_path,)
                )
            else:
                yaml_files.append(entry_path)
        config_files.extend(sorted(yaml_files))
    return config_files
|
2019-10-10 10:39:35 +02:00
|
|
|
|
|
|
|
|
2021-11-23 16:21:19 +01:00
|
|
|
@attr.s(auto_attribs=True)
class ShardedWorkerHandlingConfig:
    """Deterministically assigns sharded work to one of a set of instances.

    The federation senders, for example, use this to decide which instance
    is responsible for sending to a given destination (the destination being
    the `key` passed to the methods below).
    """

    # Names of the worker instances sharing this work. May be empty, in which
    # case some other (unconfigured) worker is assumed to handle everything.
    instances: List[str]

    def should_handle(self, instance_name: str, key: str) -> bool:
        """Whether the named instance is responsible for handling the given key."""
        # With no instances configured, assume some other worker handles it.
        if not self.instances:
            return False

        return instance_name == self._get_instance(key)

    def _get_instance(self, key: str) -> str:
        """Return the instance responsible for handling the given key.

        Note: For federation sending and pushers the config for which instance
        is sending is known only to the sender instance, so we don't expose this
        method by default.
        """
        if not self.instances:
            raise Exception("Unknown worker")

        if len(self.instances) == 1:
            return self.instances[0]

        # Shard by hashing the key and reducing modulo the instance count.
        # (Technically the modulo introduces a tiny bias, but the hash is so
        # large that it is negligible.)
        digest = sha256(key.encode("utf8")).digest()
        as_int = int.from_bytes(digest, byteorder="little")
        return self.instances[as_int % len(self.instances)]
|
2020-07-16 15:06:28 +02:00
|
|
|
|
|
|
|
|
2021-02-24 14:23:18 +01:00
|
|
|
@attr.s
class RoutableShardedWorkerHandlingConfig(ShardedWorkerHandlingConfig):
    """A `ShardedWorkerHandlingConfig` for config options where *all* instances
    know which instances own the sharded work, so lookups can be exposed
    publicly via `get_instance`.
    """

    def __attrs_post_init__(self):
        # Unlike the base class, an empty instance list is a config error here.
        if not self.instances:
            raise Exception("Got empty list of instances for shard config")

    def get_instance(self, key: str) -> str:
        """Return the instance responsible for handling the given key."""
        return self._get_instance(key)
|
|
|
|
|
|
|
|
|
2021-03-09 16:03:37 +01:00
|
|
|
def read_file(file_path: Any, config_path: Iterable[str]) -> str:
    """Check the given file exists, and read it into a string

    If it does not, emit an error indicating the problem

    Args:
        file_path: the file to be read
        config_path: where in the configuration file_path came from, so that a useful
            error can be emitted if it does not exist.
    Returns:
        content of the file.
    Raises:
        ConfigError if there is a problem reading the file.
    """
    if not isinstance(file_path, str):
        # Fix: actually interpolate the offending value into the message. The
        # previous code raised ConfigError("%r is not a string", config_path),
        # leaving the "%r" placeholder unfilled (cf. the formatted message in
        # the except clause below).
        raise ConfigError("%r is not a string" % (file_path,), config_path)

    try:
        os.stat(file_path)
        with open(file_path) as file_stream:
            return file_stream.read()
    except OSError as e:
        raise ConfigError("Error accessing file %r" % (file_path,), config_path) from e
|
|
|
|
|
|
|
|
|
2022-08-26 13:26:06 +02:00
|
|
|
class _ConfigGenerateMode(Enum):
    """The mode `load_or_generate_config` runs in, selected by the
    --generate-config / --generate-missing-configs / --generate-missing-and-run
    commandline flags (the last being the default).
    """

    # Generate any missing additional config files, then run (the default).
    GENERATE_MISSING_AND_RUN = auto()
    # Generate any missing additional config files, then exit.
    GENERATE_MISSING_AND_EXIT = auto()
    # Generate a complete config (including the main config file), then exit.
    GENERATE_EVERYTHING_AND_EXIT = auto()
|
|
|
|
|
|
|
|
|
2021-03-09 16:03:37 +01:00
|
|
|
# The public API of this module; everything else is an implementation detail.
__all__ = [
    "Config",
    "RootConfig",
    "ShardedWorkerHandlingConfig",
    "RoutableShardedWorkerHandlingConfig",
    "read_file",
]
|