2014-08-13 04:14:34 +02:00
|
|
|
#!/usr/bin/env python
|
2014-08-12 16:10:52 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 14:21:39 +01:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-08-12 16:10:52 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-02-17 11:54:06 +01:00
|
|
|
import sys
|
|
|
|
# Stop Python writing .pyc files for everything imported after this point.
# NOTE(review): presumably to keep the checkout clean when run from source;
# must be set before any synapse modules are imported to take full effect.
sys.dont_write_bytecode = True
|
|
|
|
|
2015-03-04 13:04:19 +01:00
|
|
|
from synapse.storage import (
|
|
|
|
prepare_database, prepare_sqlite3_database, UpgradeDatabaseException,
|
|
|
|
)
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-01-08 18:07:28 +01:00
|
|
|
from synapse.python_dependencies import check_requirements
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
from twisted.internet import reactor
|
2015-01-07 14:46:37 +01:00
|
|
|
from twisted.application import service
|
2014-08-12 16:10:52 +02:00
|
|
|
from twisted.enterprise import adbapi
|
2014-08-14 10:52:20 +02:00
|
|
|
from twisted.web.resource import Resource
|
|
|
|
from twisted.web.static import File
|
|
|
|
from twisted.web.server import Site
|
2014-09-03 12:10:29 +02:00
|
|
|
from synapse.http.server import JsonResource, RootRedirect
|
2015-01-27 15:01:51 +01:00
|
|
|
from synapse.rest.appservice.v1 import AppServiceRestResource
|
2015-01-22 17:10:07 +01:00
|
|
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
|
|
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
2014-09-23 19:40:59 +02:00
|
|
|
from synapse.http.server_key_resource import LocalKey
|
2014-11-20 18:41:56 +01:00
|
|
|
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
2014-08-18 16:01:08 +02:00
|
|
|
from synapse.api.urls import (
|
2014-09-23 19:40:59 +02:00
|
|
|
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
2015-02-23 16:35:09 +01:00
|
|
|
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, APP_SERVICE_PREFIX,
|
|
|
|
STATIC_PREFIX
|
2014-08-18 16:01:08 +02:00
|
|
|
)
|
2014-08-31 17:06:39 +02:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2014-09-01 17:30:43 +02:00
|
|
|
from synapse.crypto import context_factory
|
2014-10-30 02:21:33 +01:00
|
|
|
from synapse.util.logcontext import LoggingContext
|
2015-01-23 11:37:38 +01:00
|
|
|
from synapse.rest.client.v1 import ClientV1RestResource
|
2015-01-23 19:54:51 +01:00
|
|
|
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
2015-03-12 16:33:53 +01:00
|
|
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
from daemonize import Daemonize
|
2014-08-26 14:43:55 +02:00
|
|
|
import twisted.manhole.telnet
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-01-07 18:25:28 +01:00
|
|
|
import synapse
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
import logging
|
2014-08-14 15:07:14 +02:00
|
|
|
import os
|
2014-08-24 12:56:55 +02:00
|
|
|
import re
|
2015-02-19 12:50:49 +01:00
|
|
|
import resource
|
2015-02-18 17:21:35 +01:00
|
|
|
import subprocess
|
2014-09-10 17:23:58 +02:00
|
|
|
import sqlite3
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
# Module-level logger for this entry-point script.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class SynapseHomeServer(HomeServer):
    """Concrete HomeServer for a standalone synapse process.

    The ``build_*`` methods below are hooks for the HomeServer dependency
    machinery: each constructs one component, exposed to callers through the
    corresponding ``get_*`` accessor (e.g. ``get_resource_for_client``).
    """

    def build_http_client(self):
        # HTTP client used for outbound server-to-server (federation) requests.
        return MatrixFederationHttpClient(self)

    def build_resource_for_client(self):
        # REST resource serving the client-server v1 API.
        return ClientV1RestResource(self)

    def build_resource_for_client_v2_alpha(self):
        # REST resource serving the client-server v2 alpha API.
        return ClientV2AlphaRestResource(self)

    def build_resource_for_federation(self):
        # REST resource serving the server-server (federation) API.
        return JsonResource(self)

    def build_resource_for_app_services(self):
        # REST resource serving the application services API.
        return AppServiceRestResource(self)

    def build_resource_for_web_client(self):
        # Imported lazily: syweb is an optional dependency, only needed when
        # the bundled web client is enabled in the config.
        import syweb
        syweb_path = os.path.dirname(syweb.__file__)
        webclient_path = os.path.join(syweb_path, "webclient")
        return File(webclient_path)  # TODO configurable?

    def build_resource_for_static_content(self):
        # Serves the "static" directory relative to the process's working
        # directory.
        return File("static")

    def build_resource_for_content_repo(self):
        # Legacy (v0) content repository for uploads/downloads.
        return ContentRepoResource(
            self, self.upload_dir, self.auth, self.content_addr
        )

    def build_resource_for_media_repository(self):
        # v1 media repository (uploads, downloads, thumbnails).
        return MediaRepositoryResource(self)

    def build_resource_for_server_key(self):
        # Serves this homeserver's public signing key.
        return LocalKey(self)

    def build_resource_for_metrics(self):
        # Returns None when metrics are disabled -- callers must check.
        if self.get_config().enable_metrics:
            return MetricsResource(self)
        else:
            return None

    def build_db_pool(self):
        # A pool pinned to a single connection (cp_min == cp_max == 1): with
        # sqlite ":memory:" databases every new connection would see a fresh,
        # empty database, so all queries must share one connection.
        return adbapi.ConnectionPool(
            "sqlite3", self.get_db_name(),
            check_same_thread=False,
            cp_min=1,
            cp_max=1,
            cp_openfun=prepare_database,  # Prepare the database for each conn
            # so that :memory: sqlite works
        )

    def create_resource_tree(self, redirect_root_to_web_client):
        """Create the resource tree for this Home Server.

        This in unduly complicated because Twisted does not support putting
        child resources more than 1 level deep at a time.

        Args:
            redirect_root_to_web_client (bool): True to redirect '/' to the
            location of the web client. This does nothing if the web client
            is not enabled in the config.

        Returns:
            twisted.web.resource.Resource: the root of the assembled tree
            (also stored on ``self.root_resource``).
        """
        config = self.get_config()
        web_client = config.web_client

        # list containing (path_str, Resource) e.g:
        # [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ]
        desired_tree = [
            (CLIENT_PREFIX, self.get_resource_for_client()),
            (CLIENT_V2_ALPHA_PREFIX, self.get_resource_for_client_v2_alpha()),
            (FEDERATION_PREFIX, self.get_resource_for_federation()),
            (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
            (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
            (MEDIA_PREFIX, self.get_resource_for_media_repository()),
            (APP_SERVICE_PREFIX, self.get_resource_for_app_services()),
            (STATIC_PREFIX, self.get_resource_for_static_content()),
        ]

        if web_client:
            logger.info("Adding the web client.")
            desired_tree.append((WEB_CLIENT_PREFIX,
                                 self.get_resource_for_web_client()))

        if web_client and redirect_root_to_web_client:
            self.root_resource = RootRedirect(WEB_CLIENT_PREFIX)
        else:
            self.root_resource = Resource()

        # With no dedicated metrics port configured, expose metrics on the
        # main listener's tree instead (see also start_listening).
        metrics_resource = self.get_resource_for_metrics()
        if config.metrics_port is None and metrics_resource is not None:
            desired_tree.append((METRICS_PREFIX, metrics_resource))

        # ideally we'd just use getChild and putChild but getChild doesn't work
        # unless you give it a Request object IN ADDITION to the name :/ So
        # instead, we'll store a copy of this mapping so we can actually add
        # extra resources to existing nodes. See self._resource_id for the key.
        resource_mappings = {}
        for full_path, res in desired_tree:
            logger.info("Attaching %s to path %s", res, full_path)
            # Walk/create the intermediate path segments first.
            last_resource = self.root_resource
            for path_seg in full_path.split('/')[1:-1]:
                if path_seg not in last_resource.listNames():
                    # resource doesn't exist, so make a "dummy resource"
                    child_resource = Resource()
                    last_resource.putChild(path_seg, child_resource)
                    res_id = self._resource_id(last_resource, path_seg)
                    resource_mappings[res_id] = child_resource
                    last_resource = child_resource
                else:
                    # we have an existing Resource, use that instead.
                    res_id = self._resource_id(last_resource, path_seg)
                    last_resource = resource_mappings[res_id]

            # ===========================
            # now attach the actual desired resource
            last_path_seg = full_path.split('/')[-1]

            # if there is already a resource here, thieve its children and
            # replace it
            res_id = self._resource_id(last_resource, last_path_seg)
            if res_id in resource_mappings:
                # there is a dummy resource at this path already, which needs
                # to be replaced with the desired resource.
                existing_dummy_resource = resource_mappings[res_id]
                for child_name in existing_dummy_resource.listNames():
                    child_res_id = self._resource_id(existing_dummy_resource,
                                                     child_name)
                    child_resource = resource_mappings[child_res_id]
                    # steal the children
                    res.putChild(child_name, child_resource)

            # finally, insert the desired resource in the right place
            last_resource.putChild(last_path_seg, res)
            res_id = self._resource_id(last_resource, last_path_seg)
            resource_mappings[res_id] = res

        return self.root_resource

    def _resource_id(self, resource, path_seg):
        """Construct an arbitrary resource ID so you can retrieve the mapping
        later.

        If you want to represent resource A putChild resource B with path C,
        the mapping should looks like _resource_id(A,C) = B.

        Args:
            resource (Resource): The *parent* Resource
            path_seg (str): The name of the child Resource to be attached.
        Returns:
            str: A unique string which can be a key to the child Resource.
        """
        return "%s-%s" % (resource, path_seg)

    def start_listening(self):
        """Bind the configured network listeners on the global reactor.

        Up to three listeners: TLS on bind_port (unless no_tls), plain HTTP
        on unsecure_port, and -- when metrics_port is set -- a loopback-only
        metrics listener.
        """
        config = self.get_config()

        if not config.no_tls and config.bind_port is not None:
            reactor.listenSSL(
                config.bind_port,
                Site(self.root_resource),
                self.tls_context_factory,
                interface=config.bind_host
            )
            logger.info("Synapse now listening on port %d", config.bind_port)

        if config.unsecure_port is not None:
            reactor.listenTCP(
                config.unsecure_port,
                Site(self.root_resource),
                interface=config.bind_host
            )
            logger.info("Synapse now listening on port %d", config.unsecure_port)

        # A dedicated metrics listener is deliberately loopback-only.
        metrics_resource = self.get_resource_for_metrics()
        if metrics_resource and config.metrics_port is not None:
            reactor.listenTCP(
                config.metrics_port, Site(metrics_resource), interface="127.0.0.1",
            )
            logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port)
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-02-18 17:21:35 +01:00
|
|
|
def get_version_string():
    """Return this server's version banner as an ascii byte string.

    When running from a git checkout the banner includes branch, tag, short
    commit hash and a dirty marker, e.g.
    ``Synapse/0.8.0 (b=develop,abcdef0,dirty)``; otherwise (or on any error)
    it falls back to plain ``Synapse/<version>``.
    """
    try:
        # git chatters on stderr when e.g. we are not in a repository;
        # silence it, and close the handle when we are done (the original
        # code leaked it).
        null = open(os.devnull, 'w')
        try:
            cwd = os.path.dirname(os.path.abspath(__file__))

            def git_output(args):
                # Run one git probe in the source tree; "" if it fails.
                try:
                    return subprocess.check_output(
                        args,
                        stderr=null,
                        cwd=cwd,
                    ).strip()
                except subprocess.CalledProcessError:
                    return ""

            git_branch = git_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])
            if git_branch:
                git_branch = "b=" + git_branch

            git_tag = git_output(['git', 'describe', '--exact-match'])
            if git_tag:
                git_tag = "t=" + git_tag

            git_commit = git_output(['git', 'rev-parse', '--short', 'HEAD'])

            dirty_string = "-this_is_a_dirty_checkout"
            described = git_output(['git', 'describe', '--dirty=' + dirty_string])
            git_dirty = "dirty" if described.endswith(dirty_string) else ""

            # Only produce the decorated banner when at least one probe
            # succeeded; previously git_version was unbound in that case and
            # the resulting UnboundLocalError fell through to the fallback
            # via the broad except below, logging a spurious warning.
            if git_branch or git_tag or git_commit or git_dirty:
                git_version = ",".join(
                    s for s in
                    (git_branch, git_tag, git_commit, git_dirty,)
                    if s
                )

                return (
                    "Synapse/%s (%s)" % (
                        synapse.__version__, git_version,
                    )
                ).encode("ascii")
        finally:
            null.close()
    except Exception as e:
        # Best effort only: e.g. git not installed (OSError) or not a repo.
        logger.warn("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
|
|
|
2015-02-19 12:50:49 +01:00
|
|
|
def change_resource_limit(soft_file_no):
    """Raise this process's soft open-files limit (RLIMIT_NOFILE).

    Args:
        soft_file_no (int): desired soft limit; any falsy value means
            "raise the soft limit all the way to the current hard limit".

    Failures (bad value, insufficient privileges) are logged, not raised.
    """
    try:
        _, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
        target = soft_file_no if soft_file_no else hard_limit
        resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard_limit))
        logger.info("Set file limit to: %d", target)
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file limit: %s", e)
|
|
|
|
2015-03-10 10:58:33 +01:00
|
|
|
def setup(config_options):
    """Parse the config, build the homeserver, prepare its database and
    start it listening (the reactor itself is NOT started here -- see run()).

    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    # If server_name carries no explicit port, advertise bind_port with it.
    if re.search(":[0-9]+$", config.server_name):
        domain_with_port = config.server_name
    else:
        domain_with_port = "%s:%s" % (config.server_name, config.bind_port)

    tls_context_factory = context_factory.ServerContextFactory(config)

    hs = SynapseHomeServer(
        config.server_name,
        domain_with_port=domain_with_port,
        upload_dir=os.path.abspath("uploads"),
        db_name=config.database_path,
        tls_context_factory=tls_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
    )

    hs.create_resource_tree(
        redirect_root_to_web_client=True,
    )

    db_name = hs.get_db_name()

    logger.info("Preparing database: %s...", db_name)

    try:
        # Run schema setup / migrations on a throwaway connection before
        # anything else touches the database.
        with sqlite3.connect(db_name) as db_conn:
            prepare_sqlite3_database(db_conn)
            prepare_database(db_conn)
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", db_name)

    if config.manhole:
        # Telnet debug shell with 'hs' in its namespace; loopback only.
        f = twisted.manhole.telnet.ShellFactory()
        f.username = "matrix"
        f.password = "rabbithole"
        f.namespace['hs'] = hs
        reactor.listenTCP(config.manhole, f, interface='127.0.0.1')

    hs.start_listening()

    # Kick off the homeserver's background machinery.
    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
|
2014-11-20 18:26:36 +01:00
|
|
|
|
2015-01-07 14:46:37 +01:00
|
|
|
class SynapseService(service.Service):
    """A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """
    def __init__(self, config):
        # Raw config options, handed to setup() when the service starts.
        self.config = config

    def startService(self):
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)

    def stopService(self):
        # NOTE(review): self._port is never assigned anywhere in this class
        # (setup()/start_listening() do not store their listening ports on
        # the service), so this looks like it would raise AttributeError on
        # shutdown -- verify against how the .tac wires this up.
        return self._port.stopListening()
|
|
|
|
2015-03-10 10:58:33 +01:00
|
|
|
def run(hs):
    """Run the already-set-up homeserver until the reactor stops.

    If ``hs.config.daemonize`` is set, fork into the background with
    Daemonize (printing the pid file path for the caller first); otherwise
    run the reactor in the foreground.

    Args:
        hs (HomeServer): the server returned by setup().
    """
    def in_thread():
        # Runs inside the (possibly daemonized) process that owns the
        # reactor, so the rlimit change applies to the right process.
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:

        # Tell the invoker where the pid file lives before detaching.
        # Written as a single-argument print() call, which parses and
        # behaves identically on Python 2 and 3 (was a py2-only statement).
        print(hs.config.pid_file)

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=in_thread,  # the function itself; no lambda wrapper needed
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        in_thread()
2014-11-20 18:26:36 +01:00
|
|
|
|
2014-11-18 16:57:00 +01:00
|
|
|
def main():
    """Command-line entry point: check dependencies, build the homeserver
    from sys.argv, and run it inside a logging context."""
    with LoggingContext("main"):
        # Verify the base requirements before doing anything else; extra,
        # config-dependent requirements are checked inside setup().
        check_requirements()
        run(setup(sys.argv[1:]))
|
2014-11-20 18:26:36 +01:00
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
# Only start the homeserver when executed as a script; importing this module
# (e.g. from a .tac file using SynapseService) must have no side effects.
if __name__ == '__main__':
    main()
|