# MatrixSynapse/synapse/app/homeserver.py
#!/usr/bin/env python
2014-08-12 16:10:52 +02:00
# -*- coding: utf-8 -*-
2016-01-07 05:26:29 +01:00
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2019 New Vector Ltd
2014-08-12 16:10:52 +02:00
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
2016-06-07 16:45:56 +02:00
import gc
2016-02-02 18:18:50 +01:00
import logging
import os
2015-02-17 11:54:06 +01:00
import sys
from six import iteritems
import psutil
2018-07-31 14:16:20 +02:00
from prometheus_client import Gauge
2018-07-09 08:09:20 +02:00
from twisted.application import service
from twisted.internet import defer, reactor
from twisted.python.failure import Failure
2018-07-09 08:09:20 +02:00
from twisted.web.resource import EncodingResourceWrapper, NoResource
from twisted.web.server import GzipEncoderFactory
from twisted.web.static import File
import synapse
import synapse.config.logger
from synapse import events
2018-07-09 08:09:20 +02:00
from synapse.api.urls import (
CONTENT_REPO_PREFIX,
FEDERATION_PREFIX,
LEGACY_MEDIA_PREFIX,
MEDIA_PREFIX,
SERVER_KEY_V2_PREFIX,
STATIC_PREFIX,
WEB_CLIENT_PREFIX,
)
from synapse.app import _base
2018-07-09 08:09:20 +02:00
from synapse.app._base import listen_ssl, listen_tcp, quit_with_error
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource
from synapse.http.server import RootRedirect
from synapse.http.site import SynapseSite
2018-05-22 23:28:23 +02:00
from synapse.metrics import RegistryProxy
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
2018-07-09 08:09:20 +02:00
from synapse.module_api import ModuleApi
from synapse.python_dependencies import check_requirements
2018-07-09 08:09:20 +02:00
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.replication.tcp.resource import ReplicationStreamProtocolFactory
from synapse.rest import ClientRestResource
from synapse.rest.admin import AdminRestResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.media.v0.content_repository import ContentRepoResource
from synapse.rest.well_known import WellKnownResource
from synapse.server import HomeServer
from synapse.storage import DataStore, are_all_users_on_domain
from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
2018-04-04 17:46:58 +02:00
from synapse.util.caches import CACHE_SIZE_FACTOR
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.logcontext import LoggingContext
from synapse.util.manhole import manhole
from synapse.util.module_loader import load_module
from synapse.util.rlimit import change_resource_limit
2018-09-17 18:37:56 +02:00
from synapse.util.versionstring import get_version_string
2014-08-12 16:10:52 +02:00
logger = logging.getLogger("synapse.app.homeserver")


def gz_wrap(r):
    """Return *r* wrapped so its responses are gzip-compressed.

    Args:
        r (twisted.web.resource.Resource): the resource to wrap.

    Returns:
        EncodingResourceWrapper: *r* with a GzipEncoderFactory attached, so
        responses are compressed for clients that advertise gzip support.
    """
    wrapped = EncodingResourceWrapper(r, [GzipEncoderFactory()])
    return wrapped
2016-01-26 14:52:29 +01:00
class SynapseHomeServer(HomeServer):
    """The main (monolith) Synapse homeserver.

    Builds the HTTP resource trees for each configured listener and starts
    listening on the configured ports.
    """

    DATASTORE_CLASS = DataStore

    def _listener_http(self, config, listener_config):
        """Set up one HTTP(S) listener.

        Args:
            config (HomeServerConfig): the server configuration.
            listener_config (dict): the config for this listener, as parsed
                from the ``listeners`` section of the config file.

        Returns:
            list: the twisted listening ports that were opened.
        """
        port = listener_config["port"]
        bind_addresses = listener_config["bind_addresses"]
        tls = listener_config.get("tls", False)
        site_tag = listener_config.get("tag", port)

        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "openid" and "federation" in res["names"]:
                    # Skip loading openid resource if federation is defined
                    # since federation resource will include openid
                    continue
                resources.update(
                    self._configure_named_resource(name, res.get("compress", False))
                )

        # Load any module-provided resources configured for this listener.
        additional_resources = listener_config.get("additional_resources", {})
        logger.debug("Configuring additional resources: %r", additional_resources)
        module_api = ModuleApi(self, self.get_auth_handler())
        for path, resmodule in additional_resources.items():
            handler_cls, config = load_module(resmodule)
            handler = handler_cls(config, module_api)
            resources[path] = AdditionalResource(self, handler.handle_request)

        # try to find something useful to redirect '/' to
        if WEB_CLIENT_PREFIX in resources:
            root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        elif STATIC_PREFIX in resources:
            root_resource = RootRedirect(STATIC_PREFIX)
        else:
            root_resource = NoResource()

        root_resource = create_resource_tree(resources, root_resource)

        if tls:
            ports = listen_ssl(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                self.tls_server_context_factory,
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d (TLS)", port)
        else:
            ports = listen_tcp(
                bind_addresses,
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                    self.version_string,
                ),
                reactor=self.get_reactor(),
            )
            logger.info("Synapse now listening on TCP port %d", port)

        return ports

    def _configure_named_resource(self, name, compress=False):
        """Build a resource map for a named resource

        Args:
            name (str): named resource: one of "client", "federation", etc
            compress (bool): whether to enable gzip compression for this
                resource

        Returns:
            dict[str, Resource]: map from path to HTTP resource
        """
        resources = {}
        if name == "client":
            client_resource = ClientRestResource(self)
            if compress:
                client_resource = gz_wrap(client_resource)

            resources.update(
                {
                    "/_matrix/client/api/v1": client_resource,
                    "/_matrix/client/r0": client_resource,
                    "/_matrix/client/unstable": client_resource,
                    "/_matrix/client/v2_alpha": client_resource,
                    "/_matrix/client/versions": client_resource,
                    "/.well-known/matrix/client": WellKnownResource(self),
                    "/_synapse/admin": AdminRestResource(self),
                }
            )

            if self.get_config().saml2_enabled:
                from synapse.rest.saml2 import SAML2Resource

                resources["/_matrix/saml2"] = SAML2Resource(self)

        if name == "consent":
            from synapse.rest.consent.consent_resource import ConsentResource

            consent_resource = ConsentResource(self)
            if compress:
                consent_resource = gz_wrap(consent_resource)
            resources.update({"/_matrix/consent": consent_resource})

        if name == "federation":
            resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})

        if name == "openid":
            # Only the openid servlet group; used when federation itself is
            # not enabled on this listener.
            resources.update(
                {
                    FEDERATION_PREFIX: TransportLayerServer(
                        self, servlet_groups=["openid"]
                    )
                }
            )

        if name in ["static", "client"]:
            resources.update(
                {
                    STATIC_PREFIX: File(
                        os.path.join(os.path.dirname(synapse.__file__), "static")
                    )
                }
            )

        if name in ["media", "federation", "client"]:
            if self.get_config().enable_media_repo:
                media_repo = self.get_media_repository_resource()
                resources.update(
                    {
                        MEDIA_PREFIX: media_repo,
                        LEGACY_MEDIA_PREFIX: media_repo,
                        CONTENT_REPO_PREFIX: ContentRepoResource(
                            self, self.config.uploads_path
                        ),
                    }
                )
            elif name == "media":
                # "media" was explicitly requested but the repo is disabled:
                # that is a configuration contradiction, so fail loudly.
                raise ConfigError(
                    "'media' resource conflicts with enable_media_repo=False"
                )

        if name in ["keys", "federation"]:
            resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)

        if name == "webclient":
            webclient_path = self.get_config().web_client_location

            if webclient_path is None:
                logger.warning(
                    "Not enabling webclient resource, as web_client_location is unset."
                )
            else:
                # GZip is disabled here due to
                # https://twistedmatrix.com/trac/ticket/7678
                resources[WEB_CLIENT_PREFIX] = File(webclient_path)

        if name == "metrics" and self.get_config().enable_metrics:
            resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)

        if name == "replication":
            resources[REPLICATION_PREFIX] = ReplicationRestResource(self)

        return resources

    def start_listening(self, listeners):
        """Start listening on every configured listener.

        Args:
            listeners (list[dict]): the parsed ``listeners`` config section.
        """
        config = self.get_config()

        for listener in listeners:
            if listener["type"] == "http":
                self._listening_services.extend(self._listener_http(config, listener))
            elif listener["type"] == "manhole":
                listen_tcp(
                    listener["bind_addresses"],
                    listener["port"],
                    manhole(
                        username="matrix", password="rabbithole", globals={"hs": self}
                    ),
                )
            elif listener["type"] == "replication":
                services = listen_tcp(
                    listener["bind_addresses"],
                    listener["port"],
                    ReplicationStreamProtocolFactory(self),
                )
                for s in services:
                    reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
            elif listener["type"] == "metrics":
                if not self.get_config().enable_metrics:
                    # Fix: logger.warn is a deprecated alias of logger.warning.
                    logger.warning(
                        (
                            "Metrics listener configured, but "
                            "enable_metrics is not True!"
                        )
                    )
                else:
                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
            else:
                logger.warning("Unrecognized listener type: %s", listener["type"])

    def run_startup_checks(self, db_conn, database_engine):
        """Sanity-check the database before serving traffic.

        Exits the process (via quit_with_error) if users from a different
        server_name are found, or if the database setup is incorrect.

        Args:
            db_conn: an open database connection.
            database_engine: the configured database engine.
        """
        all_users_native = are_all_users_on_domain(
            db_conn.cursor(), database_engine, self.hostname
        )
        if not all_users_native:
            quit_with_error(
                "Found users in database not native to %s!\n"
                "You cannot change a synapse server_name after it's been configured"
                % (self.hostname,)
            )

        try:
            database_engine.check_database(db_conn.cursor())
        except IncorrectDatabaseSetup as e:
            quit_with_error(str(e))
# Gauges to expose monthly active user control metrics.
# They are set periodically by generate_monthly_active_users() in run().
current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
registered_reserved_users_mau_gauge = Gauge(
    "synapse_admin_mau:registered_reserved_users",
    "Registered users with reserved threepids",
)
2018-07-31 14:16:20 +02:00
2018-08-14 17:36:14 +02:00
2015-03-10 10:58:33 +01:00
def setup(config_options):
    """Parse config, prepare the database and build the homeserver.

    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    try:
        config = HomeServerConfig.load_or_generate_config(
            "Synapse Homeserver", config_options
        )
    except ConfigError as e:
        sys.stderr.write("\n" + str(e) + "\n")
        sys.exit(1)

    if not config:
        # If a config isn't returned, and an exception isn't raised, we're just
        # generating config files and shouldn't try to continue.
        sys.exit(0)

    synapse.config.logger.setup_logging(config, use_worker_options=False)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    database_engine = create_engine(config.database_config)
    # Run engine-specific setup on every new pooled connection.
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        config=config,
        version_string="Synapse/" + get_version_string(synapse),
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config["name"])

    try:
        with hs.get_db_conn(run_new_connection=False) as db_conn:
            prepare_database(db_conn, database_engine, config=config)
            database_engine.on_new_connection(db_conn)

            hs.run_startup_checks(db_conn, database_engine)

            db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config["name"])

    hs.setup()
    hs.setup_master()

    @defer.inlineCallbacks
    def do_acme():
        """
        Reprovision an ACME certificate, if it's required.

        Returns:
            Deferred[bool]: Whether the cert has been updated.
        """
        acme = hs.get_acme_handler()

        # Check how long the certificate is active for.
        cert_days_remaining = hs.config.is_disk_cert_valid(allow_self_signed=False)

        # We want to reprovision if cert_days_remaining is None (meaning no
        # certificate exists), or the days remaining number it returns
        # is less than our re-registration threshold.
        provision = False

        if (
            cert_days_remaining is None
            or cert_days_remaining < hs.config.acme_reprovision_threshold
        ):
            provision = True

        if provision:
            yield acme.provision_certificate()

        defer.returnValue(provision)

    @defer.inlineCallbacks
    def reprovision_acme():
        """
        Provision a certificate from ACME, if required, and reload the TLS
        certificate if it's renewed.
        """
        reprovisioned = yield do_acme()
        if reprovisioned:
            _base.refresh_certificate(hs)

    @defer.inlineCallbacks
    def start():
        # Runs once the reactor is up: ACME provisioning, listeners, pushers
        # and background DB updates.
        try:
            # Run the ACME provisioning code, if it's enabled.
            if hs.config.acme_enabled:
                acme = hs.get_acme_handler()
                # Start up the webservices which we will respond to ACME
                # challenges with, and then provision.
                yield acme.start_listening()
                yield do_acme()

                # Check if it needs to be reprovisioned every day.
                hs.get_clock().looping_call(reprovision_acme, 24 * 60 * 60 * 1000)

            _base.start(hs, config.listeners)

            hs.get_pusherpool().start()
            hs.get_datastore().start_doing_background_updates()
        except Exception:
            # Print the exception and bail out.
            print("Error during startup:", file=sys.stderr)

            # this gives better tracebacks than traceback.print_exc()
            Failure().printTraceback(file=sys.stderr)

            if reactor.running:
                reactor.stop()
            sys.exit(1)

    reactor.callWhenRunning(start)

    return hs
2014-11-20 18:26:36 +01:00
2015-01-07 14:46:37 +01:00
class SynapseService(service.Service):
    """
    A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """

    def __init__(self, config):
        # config: the options to pass to setup() (usually sys.argv[1:]).
        self.config = config

    def startService(self):
        # Build the homeserver and apply process-wide resource/GC tuning.
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)
        if hs.config.gc_thresholds:
            gc.set_threshold(*hs.config.gc_thresholds)

    def stopService(self):
        # NOTE(review): self._port is never assigned anywhere in this class,
        # so this looks like it would raise AttributeError on shutdown —
        # confirm where _port is meant to be set before relying on this.
        return self._port.stopListening()
2015-03-10 10:58:33 +01:00
def run(hs):
    """Start background tasks and run the reactor until shutdown.

    Sets up periodic stats reporting (if enabled), user-daily-visit and
    monthly-active-user housekeeping, then hands control to the reactor.

    Args:
        hs (SynapseHomeServer): the homeserver built by setup().
    """
    # Flip to True locally to profile the reactor and worker threads.
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:

        def profile(func):
            from cProfile import Profile
            from threading import current_thread

            def profiled(*args, **kargs):
                profile = Profile()
                profile.enable()
                func(*args, **kargs)
                profile.disable()
                ident = current_thread().ident
                profile.dump_stats(
                    "/tmp/%s.%s.%i.pstat" % (hs.hostname, func.__name__, ident)
                )

            return profiled

        from twisted.python.threadpool import ThreadPool

        ThreadPool._worker = profile(ThreadPool._worker)
        reactor.run = profile(reactor.run)

    clock = hs.get_clock()
    start_time = clock.time()

    stats = {}

    # Contains the list of processes we will be monitoring
    # currently either 0 or 1
    stats_process = []

    def start_phone_stats_home():
        return run_as_background_process("phone_stats_home", phone_stats_home)

    @defer.inlineCallbacks
    def phone_stats_home():
        # Gather usage stats and POST them to the matrix.org stats endpoint.
        logger.info("Gathering stats for reporting")
        now = int(hs.get_clock().time())
        uptime = int(now - start_time)
        if uptime < 0:
            uptime = 0

        stats["homeserver"] = hs.config.server_name
        stats["server_context"] = hs.config.server_context
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        version = sys.version_info
        stats["python_version"] = "{}.{}.{}".format(
            version.major, version.minor, version.micro
        )
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        total_nonbridged_users = yield hs.get_datastore().count_nonbridged_users()
        stats["total_nonbridged_users"] = total_nonbridged_users

        daily_user_type_results = yield hs.get_datastore().count_daily_user_type()
        for name, count in iteritems(daily_user_type_results):
            stats["daily_user_type_" + name] = count

        room_count = yield hs.get_datastore().get_room_count()
        stats["total_room_count"] = room_count

        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
        stats["monthly_active_users"] = yield hs.get_datastore().count_monthly_users()
        stats[
            "daily_active_rooms"
        ] = yield hs.get_datastore().count_daily_active_rooms()
        stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()

        r30_results = yield hs.get_datastore().count_r30_users()
        for name, count in iteritems(r30_results):
            stats["r30_users_" + name] = count

        daily_sent_messages = yield hs.get_datastore().count_daily_sent_messages()
        stats["daily_sent_messages"] = daily_sent_messages
        stats["cache_factor"] = CACHE_SIZE_FACTOR
        stats["event_cache_size"] = hs.config.event_cache_size

        if len(stats_process) > 0:
            stats["memory_rss"] = 0
            stats["cpu_average"] = 0
            for process in stats_process:
                stats["memory_rss"] += process.memory_info().rss
                stats["cpu_average"] += int(process.cpu_percent(interval=None))

        stats["database_engine"] = hs.get_datastore().database_engine_name
        stats["database_server_version"] = hs.get_datastore().get_server_version()
        # Fix: use lazy %-args rather than eager string interpolation.
        logger.info("Reporting stats to matrix.org: %s", stats)
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push", stats
            )
        except Exception as e:
            # Fix: logger.warn is a deprecated alias of logger.warning.
            logger.warning("Error reporting stats: %s", e)

    def performance_stats_init():
        # Record our own process handle so phone_stats_home can report
        # memory/CPU usage; disabled if psutil can't provide the data.
        try:
            process = psutil.Process()
            # Ensure we can fetch both, and make the initial request for cpu_percent
            # so the next request will use this as the initial point.
            process.memory_info().rss
            process.cpu_percent(interval=None)
            logger.info("report_stats can use psutil")
            stats_process.append(process)
        except AttributeError:
            logger.warning("Unable to read memory/cpu stats. Disabling reporting.")

    def generate_user_daily_visit_stats():
        return run_as_background_process(
            "generate_user_daily_visits", hs.get_datastore().generate_user_daily_visits
        )

    # Rather than update on per session basis, batch up the requests.
    # If you increase the loop period, the accuracy of user_daily_visits
    # table will decrease
    clock.looping_call(generate_user_daily_visit_stats, 5 * 60 * 1000)

    # monthly active user limiting functionality
    def reap_monthly_active_users():
        return run_as_background_process(
            "reap_monthly_active_users", hs.get_datastore().reap_monthly_active_users
        )

    clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
    reap_monthly_active_users()

    @defer.inlineCallbacks
    def generate_monthly_active_users():
        # Refresh the MAU prometheus gauges from the datastore.
        current_mau_count = 0
        reserved_count = 0
        store = hs.get_datastore()
        if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
            current_mau_count = yield store.get_monthly_active_count()
            reserved_count = yield store.get_registered_reserved_users_count()
        current_mau_gauge.set(float(current_mau_count))
        registered_reserved_users_mau_gauge.set(float(reserved_count))
        max_mau_gauge.set(float(hs.config.max_mau_value))

    def start_generate_monthly_active_users():
        return run_as_background_process(
            "generate_monthly_active_users", generate_monthly_active_users
        )

    start_generate_monthly_active_users()
    if hs.config.limit_usage_by_mau or hs.config.mau_stats_only:
        clock.looping_call(start_generate_monthly_active_users, 5 * 60 * 1000)
    # End of monthly active user settings

    if hs.config.report_stats:
        logger.info("Scheduling stats reporting for 3 hour intervals")
        clock.looping_call(start_phone_stats_home, 3 * 60 * 60 * 1000)

        # We need to defer this init for the cases that we daemonize
        # otherwise the process ID we get is that of the non-daemon process
        clock.call_later(0, performance_stats_init)

        # We wait 5 minutes to send the first set of stats as the server can
        # be quite busy the first few minutes
        clock.call_later(5 * 60, start_phone_stats_home)

    _base.start_reactor(
        "synapse-homeserver",
        soft_file_limit=hs.config.soft_file_limit,
        gc_thresholds=hs.config.gc_thresholds,
        pid_file=hs.config.pid_file,
        daemonize=hs.config.daemonize,
        print_pidfile=hs.config.print_pidfile,
        logger=logger,
    )
2014-10-30 02:21:33 +01:00
2014-11-20 18:26:36 +01:00
2014-11-18 16:57:00 +01:00
def main():
    """Entry point: verify dependencies, build the homeserver and run it."""
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        homeserver = setup(sys.argv[1:])
        run(homeserver)


if __name__ == "__main__":
    main()