2014-08-13 04:14:34 +02:00
|
|
|
#!/usr/bin/env python
|
2014-08-12 16:10:52 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-01-06 14:21:39 +01:00
|
|
|
# Copyright 2014, 2015 OpenMarket Ltd
|
2014-08-12 16:10:52 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2015-02-17 11:54:06 +01:00
|
|
|
import sys
|
|
|
|
sys.dont_write_bytecode = True
|
2015-09-01 17:47:26 +02:00
|
|
|
from synapse.python_dependencies import (
|
|
|
|
check_requirements, DEPENDENCY_LINKS, MissingRequirementError
|
|
|
|
)
|
2015-04-29 15:52:42 +02:00
|
|
|
|
2015-04-29 15:53:23 +02:00
|
|
|
if __name__ == '__main__':
    # Verify hard dependencies are importable before doing anything else so
    # the user gets an actionable message rather than a raw ImportError.
    try:
        check_requirements()
    except MissingRequirementError as e:
        message = "\n".join([
            "Missing Requirement: %s" % (e.message,),
            "To install run:",
            "    pip install --upgrade --force \"%s\"" % (e.dependency,),
            "",
        ])
        # write() emits the whole string in one call; the previous
        # writelines(message) iterated the string character by character.
        sys.stderr.write(message)
        sys.exit(1)
|
|
|
|
|
2015-04-29 12:42:28 +02:00
|
|
|
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
2015-10-13 12:41:04 +02:00
|
|
|
from synapse.storage import are_all_users_on_domain
|
2015-10-13 14:56:22 +02:00
|
|
|
from synapse.storage.prepare_database import UpgradeDatabaseException
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
from synapse.server import HomeServer
|
|
|
|
|
2015-01-08 18:07:28 +01:00
|
|
|
|
2015-09-22 13:57:40 +02:00
|
|
|
from twisted.internet import reactor, task, defer
|
2015-01-07 14:46:37 +01:00
|
|
|
from twisted.application import service
|
2014-08-12 16:10:52 +02:00
|
|
|
from twisted.enterprise import adbapi
|
2015-05-14 17:39:19 +02:00
|
|
|
from twisted.web.resource import Resource, EncodingResourceWrapper
|
2014-08-14 10:52:20 +02:00
|
|
|
from twisted.web.static import File
|
2015-06-12 18:13:23 +02:00
|
|
|
from twisted.web.server import Site, GzipEncoderFactory, Request
|
2014-09-03 12:10:29 +02:00
|
|
|
from synapse.http.server import JsonResource, RootRedirect
|
2015-01-22 17:10:07 +01:00
|
|
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
|
|
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
2015-04-14 14:28:11 +02:00
|
|
|
from synapse.rest.key.v1.server_key_resource import LocalKey
|
2015-04-14 17:04:52 +02:00
|
|
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
2014-11-20 18:41:56 +01:00
|
|
|
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
2014-08-18 16:01:08 +02:00
|
|
|
from synapse.api.urls import (
|
2014-09-23 19:40:59 +02:00
|
|
|
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
2015-04-14 17:04:52 +02:00
|
|
|
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
|
|
|
|
SERVER_KEY_V2_PREFIX,
|
2014-08-18 16:01:08 +02:00
|
|
|
)
|
2014-08-31 17:06:39 +02:00
|
|
|
from synapse.config.homeserver import HomeServerConfig
|
2014-09-01 17:30:43 +02:00
|
|
|
from synapse.crypto import context_factory
|
2014-10-30 02:21:33 +01:00
|
|
|
from synapse.util.logcontext import LoggingContext
|
2015-01-23 11:37:38 +01:00
|
|
|
from synapse.rest.client.v1 import ClientV1RestResource
|
2015-01-23 19:54:51 +01:00
|
|
|
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
2015-03-12 16:33:53 +01:00
|
|
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-05-29 13:17:33 +02:00
|
|
|
from synapse import events
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
from daemonize import Daemonize
|
2014-08-26 14:43:55 +02:00
|
|
|
import twisted.manhole.telnet
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-01-07 18:25:28 +01:00
|
|
|
import synapse
|
|
|
|
|
2015-06-15 19:18:05 +02:00
|
|
|
import contextlib
|
2014-08-12 16:10:52 +02:00
|
|
|
import logging
|
2014-08-14 15:07:14 +02:00
|
|
|
import os
|
2015-06-15 17:36:49 +02:00
|
|
|
import re
|
2015-02-19 12:50:49 +01:00
|
|
|
import resource
|
2015-02-18 17:21:35 +01:00
|
|
|
import subprocess
|
2015-06-15 19:18:05 +02:00
|
|
|
import time
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
|
2015-04-07 13:04:02 +02:00
|
|
|
logger = logging.getLogger("synapse.app.homeserver")
|
2014-08-12 16:10:52 +02:00
|
|
|
|
|
|
|
|
2015-05-14 17:39:19 +02:00
|
|
|
def gz_wrap(r):
    """Wrap a resource so its responses may be gzip-compressed."""
    wrapped = EncodingResourceWrapper(r, [GzipEncoderFactory()])
    return wrapped
|
|
|
|
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
class SynapseHomeServer(HomeServer):
|
|
|
|
|
|
|
|
def build_http_client(self):
|
2014-11-20 14:53:34 +01:00
|
|
|
return MatrixFederationHttpClient(self)
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2014-08-14 10:52:20 +02:00
|
|
|
def build_resource_for_client(self):
|
2015-06-12 16:33:07 +02:00
|
|
|
return ClientV1RestResource(self)
|
2014-08-14 10:52:20 +02:00
|
|
|
|
2015-01-23 19:54:51 +01:00
|
|
|
def build_resource_for_client_v2_alpha(self):
|
2015-06-12 16:33:07 +02:00
|
|
|
return ClientV2AlphaRestResource(self)
|
2014-08-14 10:52:20 +02:00
|
|
|
|
|
|
|
def build_resource_for_federation(self):
|
2015-02-09 14:46:22 +01:00
|
|
|
return JsonResource(self)
|
2014-08-14 10:52:20 +02:00
|
|
|
|
|
|
|
    def build_resource_for_web_client(self):
        """Return a static File resource serving the Matrix web client.

        Serves the directory named by the `web_client_location` config
        option if set; otherwise falls back to the `webclient` directory
        shipped inside the matrix-angular-sdk (syweb) package, aborting
        the process with an explanatory message if that is not installed.
        """
        webclient_path = self.get_config().web_client_location
        if not webclient_path:
            # No explicit location configured: try the bundled syweb package.
            try:
                import syweb
            except ImportError:
                quit_with_error(
                    "Could not find a webclient.\n\n"
                    "Please either install the matrix-angular-sdk or configure\n"
                    "the location of the source to serve via the configuration\n"
                    "option `web_client_location`\n\n"
                    "To install the `matrix-angular-sdk` via pip, run:\n\n"
                    "    pip install '%(dep)s'\n"
                    "\n"
                    "You can also disable hosting of the webclient via the\n"
                    "configuration option `web_client`\n"
                    % {"dep": DEPENDENCY_LINKS["matrix-angular-sdk"]}
                )
            syweb_path = os.path.dirname(syweb.__file__)
            webclient_path = os.path.join(syweb_path, "webclient")
        # GZip is disabled here due to
        # https://twistedmatrix.com/trac/ticket/7678
        # (It can stay enabled for the API resources: they call
        # write() with the whole body and then finish() straight
        # after and so do not trigger the bug.
        # GzipFile was removed in commit 184ba09
        # return GzipFile(webclient_path)  # TODO configurable?
        return File(webclient_path)  # TODO configurable?
|
2014-08-14 10:52:20 +02:00
|
|
|
|
2015-02-23 16:35:09 +01:00
|
|
|
def build_resource_for_static_content(self):
|
2015-05-14 17:39:19 +02:00
|
|
|
# This is old and should go away: not going to bother adding gzip
|
2015-02-23 16:35:09 +01:00
|
|
|
return File("static")
|
|
|
|
|
2014-08-18 16:01:08 +02:00
|
|
|
def build_resource_for_content_repo(self):
|
2014-09-03 12:57:23 +02:00
|
|
|
return ContentRepoResource(
|
2015-06-19 06:38:20 +02:00
|
|
|
self, self.config.uploads_path, self.auth, self.content_addr
|
2014-09-03 12:57:23 +02:00
|
|
|
)
|
2014-08-18 16:01:08 +02:00
|
|
|
|
2014-12-02 20:51:47 +01:00
|
|
|
def build_resource_for_media_repository(self):
|
|
|
|
return MediaRepositoryResource(self)
|
|
|
|
|
2014-09-23 19:40:59 +02:00
|
|
|
def build_resource_for_server_key(self):
|
|
|
|
return LocalKey(self)
|
|
|
|
|
2015-04-14 17:04:52 +02:00
|
|
|
def build_resource_for_server_key_v2(self):
|
|
|
|
return KeyApiV2Resource(self)
|
|
|
|
|
2015-03-12 16:33:53 +01:00
|
|
|
def build_resource_for_metrics(self):
|
|
|
|
if self.get_config().enable_metrics:
|
|
|
|
return MetricsResource(self)
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
def build_db_pool(self):
|
2015-03-25 18:15:20 +01:00
|
|
|
name = self.db_config["name"]
|
2015-03-20 11:55:55 +01:00
|
|
|
|
2015-03-25 18:15:20 +01:00
|
|
|
return adbapi.ConnectionPool(
|
|
|
|
name,
|
|
|
|
**self.db_config.get("args", {})
|
|
|
|
)
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-06-12 16:33:07 +02:00
|
|
|
    def _listener_http(self, config, listener_config):
        """Set up one HTTP(S) listener described by a `listeners` config entry.

        Builds the resource tree requested by the listener's "resources"
        section and binds it on the configured port, using TLS if requested
        (unless TLS is globally disabled via `no_tls`, in which case the
        listener is silently skipped).
        """
        port = listener_config["port"]
        bind_address = listener_config.get("bind_address", "")
        tls = listener_config.get("tls", False)
        # Tag used to distinguish this listener in access-log lines.
        site_tag = listener_config.get("tag", port)

        if tls and config.no_tls:
            # TLS listeners are skipped entirely when TLS is disabled.
            return

        metrics_resource = self.get_resource_for_metrics()

        # Map of URL prefix -> Resource; merged into one tree below.
        resources = {}
        for res in listener_config["resources"]:
            for name in res["names"]:
                if name == "client":
                    # Client API may optionally be gzip-wrapped per listener.
                    if res["compress"]:
                        client_v1 = gz_wrap(self.get_resource_for_client())
                        client_v2 = gz_wrap(self.get_resource_for_client_v2_alpha())
                    else:
                        client_v1 = self.get_resource_for_client()
                        client_v2 = self.get_resource_for_client_v2_alpha()

                    resources.update({
                        CLIENT_PREFIX: client_v1,
                        CLIENT_V2_ALPHA_PREFIX: client_v2,
                    })

                if name == "federation":
                    resources.update({
                        FEDERATION_PREFIX: self.get_resource_for_federation(),
                    })

                if name in ["static", "client"]:
                    resources.update({
                        STATIC_PREFIX: self.get_resource_for_static_content(),
                    })

                if name in ["media", "federation", "client"]:
                    resources.update({
                        MEDIA_PREFIX: self.get_resource_for_media_repository(),
                        CONTENT_REPO_PREFIX: self.get_resource_for_content_repo(),
                    })

                if name in ["keys", "federation"]:
                    resources.update({
                        SERVER_KEY_PREFIX: self.get_resource_for_server_key(),
                        SERVER_KEY_V2_PREFIX: self.get_resource_for_server_key_v2(),
                    })

                if name == "webclient":
                    resources[WEB_CLIENT_PREFIX] = self.get_resource_for_web_client()

                if name == "metrics" and metrics_resource:
                    resources[METRICS_PREFIX] = metrics_resource

        root_resource = create_resource_tree(resources)
        if tls:
            reactor.listenSSL(
                port,
                SynapseSite(
                    "synapse.access.https.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                self.tls_server_context_factory,
                interface=bind_address
            )
        else:
            reactor.listenTCP(
                port,
                SynapseSite(
                    "synapse.access.http.%s" % (site_tag,),
                    site_tag,
                    listener_config,
                    root_resource,
                ),
                interface=bind_address
            )
        logger.info("Synapse now listening on port %d", port)
|
2014-08-14 10:52:20 +02:00
|
|
|
|
2015-06-12 16:33:07 +02:00
|
|
|
def start_listening(self):
|
|
|
|
config = self.get_config()
|
|
|
|
|
|
|
|
for listener in config.listeners:
|
|
|
|
if listener["type"] == "http":
|
|
|
|
self._listener_http(config, listener)
|
|
|
|
elif listener["type"] == "manhole":
|
|
|
|
f = twisted.manhole.telnet.ShellFactory()
|
|
|
|
f.username = "matrix"
|
|
|
|
f.password = "rabbithole"
|
|
|
|
f.namespace['hs'] = self
|
|
|
|
reactor.listenTCP(
|
|
|
|
listener["port"],
|
|
|
|
f,
|
|
|
|
interface=listener.get("bind_address", '127.0.0.1')
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
logger.warn("Unrecognized listener type: %s", listener["type"])
|
2015-03-12 17:05:46 +01:00
|
|
|
|
2015-04-28 14:39:42 +02:00
|
|
|
def run_startup_checks(self, db_conn, database_engine):
|
2015-04-27 12:46:00 +02:00
|
|
|
all_users_native = are_all_users_on_domain(
|
2015-04-28 14:39:42 +02:00
|
|
|
db_conn.cursor(), database_engine, self.hostname
|
2015-04-24 19:11:21 +02:00
|
|
|
)
|
|
|
|
if not all_users_native:
|
2015-04-29 13:12:18 +02:00
|
|
|
quit_with_error(
|
2015-04-24 19:11:21 +02:00
|
|
|
"Found users in database not native to %s!\n"
|
2015-04-29 13:12:18 +02:00
|
|
|
"You cannot changed a synapse server_name after it's been configured"
|
|
|
|
% (self.hostname,)
|
2015-04-24 19:11:21 +02:00
|
|
|
)
|
|
|
|
|
2015-04-29 12:42:28 +02:00
|
|
|
try:
|
|
|
|
database_engine.check_database(db_conn.cursor())
|
|
|
|
except IncorrectDatabaseSetup as e:
|
2015-04-29 13:12:18 +02:00
|
|
|
quit_with_error(e.message)
|
|
|
|
|
|
|
|
|
|
|
|
def quit_with_error(error_string):
    """Print `error_string` to stderr inside a banner of asterisks and exit 1.

    The banner is sized to the longest "short" (< 80 character) line of the
    message, plus padding.
    """
    message_lines = error_string.split("\n")
    # Only lines shorter than 80 chars are considered when sizing the banner.
    # Previously max() raised ValueError when *every* line was >= 80 chars;
    # fall back to an 80-char banner in that case instead of crashing while
    # trying to report an error.
    short_lengths = [len(l) for l in message_lines if len(l) < 80]
    line_length = (max(short_lengths) + 2) if short_lengths else 80
    sys.stderr.write("*" * line_length + '\n')
    for line in message_lines:
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + '\n')
    sys.exit(1)
|
2015-04-29 12:42:28 +02:00
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2015-02-18 17:21:35 +01:00
|
|
|
def get_version_string():
    """Return a version string such as "Synapse/0.x (b=develop,abcdef)".

    If the source tree is a git checkout, the branch, tag, short commit
    hash and dirty state are appended. Any failure (no git, not a repo,
    etc.) falls back to the plain "Synapse/<version>" form.
    """
    try:
        null = open(os.devnull, 'w')
        try:
            cwd = os.path.dirname(os.path.abspath(__file__))
            try:
                git_branch = subprocess.check_output(
                    ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                    stderr=null,
                    cwd=cwd,
                ).strip()
                git_branch = "b=" + git_branch
            except subprocess.CalledProcessError:
                git_branch = ""

            try:
                git_tag = subprocess.check_output(
                    ['git', 'describe', '--exact-match'],
                    stderr=null,
                    cwd=cwd,
                ).strip()
                git_tag = "t=" + git_tag
            except subprocess.CalledProcessError:
                git_tag = ""

            try:
                git_commit = subprocess.check_output(
                    ['git', 'rev-parse', '--short', 'HEAD'],
                    stderr=null,
                    cwd=cwd,
                ).strip()
            except subprocess.CalledProcessError:
                git_commit = ""

            try:
                dirty_string = "-this_is_a_dirty_checkout"
                is_dirty = subprocess.check_output(
                    ['git', 'describe', '--dirty=' + dirty_string],
                    stderr=null,
                    cwd=cwd,
                ).strip().endswith(dirty_string)

                git_dirty = "dirty" if is_dirty else ""
            except subprocess.CalledProcessError:
                git_dirty = ""
        finally:
            # The original leaked this handle; close it even if one of the
            # git invocations raises unexpectedly.
            null.close()

        # Previously git_version was only assigned inside the `if`, so a
        # checkout with no git info hit a NameError (silently swallowed by
        # the broad except below). Default it explicitly instead.
        git_version = ""
        if git_branch or git_tag or git_commit or git_dirty:
            git_version = ",".join(
                s for s in
                (git_branch, git_tag, git_commit, git_dirty,)
                if s
            )

        if git_version:
            return (
                "Synapse/%s (%s)" % (
                    synapse.__version__, git_version,
                )
            ).encode("ascii")
    except Exception as e:
        logger.info("Failed to check for git repository: %s", e)

    return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
|
|
|
|
|
|
|
|
2015-02-19 12:50:49 +01:00
|
|
|
def change_resource_limit(soft_file_no):
    """Raise the soft RLIMIT_NOFILE limit to `soft_file_no`.

    A falsy value means "raise the soft limit to the hard limit". Failures
    are logged as warnings, never raised.
    """
    try:
        current_soft, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)

        target = soft_file_no if soft_file_no else hard_limit

        resource.setrlimit(resource.RLIMIT_NOFILE, (target, hard_limit))

        logger.info("Set file limit to: %d", target)
    except (ValueError, resource.error) as e:
        logger.warn("Failed to set file limit: %s", e)
|
|
|
|
|
|
|
|
|
2015-03-10 10:58:33 +01:00
|
|
|
def setup(config_options):
    """Load config, prepare the database, then build and start a homeserver.

    Args:
        config_options: The options passed to Synapse. Usually
            `sys.argv[1:]`.

    Returns:
        HomeServer
    """
    config = HomeServerConfig.load_config(
        "Synapse Homeserver",
        config_options,
        generate_section="Homeserver"
    )

    config.setup_logging()

    # check any extra requirements we have now we have a config
    check_requirements(config)

    version_string = get_version_string()

    logger.info("Server hostname: %s", config.server_name)
    logger.info("Server version: %s", version_string)

    events.USE_FROZEN_DICTS = config.use_frozen_dicts

    tls_server_context_factory = context_factory.ServerContextFactory(config)

    database_engine = create_engine(config.database_config["name"])
    # Run engine-specific setup on every new pooled connection.
    config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection

    hs = SynapseHomeServer(
        config.server_name,
        db_config=config.database_config,
        tls_server_context_factory=tls_server_context_factory,
        config=config,
        content_addr=config.content_addr,
        version_string=version_string,
        database_engine=database_engine,
    )

    logger.info("Preparing database: %s...", config.database_config['name'])

    try:
        # Open a raw (non-pooled) connection for schema preparation; the
        # "cp_*" keys are twisted connection-pool options, not driver args.
        db_conn = database_engine.module.connect(
            **{
                k: v for k, v in config.database_config.get("args", {}).items()
                if not k.startswith("cp_")
            }
        )

        database_engine.prepare_database(db_conn)
        hs.run_startup_checks(db_conn, database_engine)

        db_conn.commit()
    except UpgradeDatabaseException:
        sys.stderr.write(
            "\nFailed to upgrade database.\n"
            "Have you checked for version specific instructions in"
            " UPGRADES.rst?\n"
        )
        sys.exit(1)

    logger.info("Database prepared in %s.", config.database_config['name'])

    hs.start_listening()

    hs.get_pusherpool().start()
    hs.get_state_handler().start_caching()
    hs.get_datastore().start_profiling()
    hs.get_replication_layer().start_get_pdu_cache()

    return hs
|
|
|
|
|
2014-11-20 18:26:36 +01:00
|
|
|
|
2015-01-07 14:46:37 +01:00
|
|
|
class SynapseService(service.Service):
    """A twisted Service class that will start synapse. Used to run synapse
    via twistd and a .tac.
    """
    def __init__(self, config):
        # Raw config options; turned into a HomeServer in startService().
        self.config = config

    def startService(self):
        hs = setup(self.config)
        change_resource_limit(hs.config.soft_file_limit)

    def stopService(self):
        # NOTE(review): `self._port` is never assigned anywhere in this
        # class, so this would raise AttributeError if invoked -- looks
        # like a leftover from an older implementation; confirm.
        return self._port.stopListening()
|
|
|
|
|
|
|
|
|
2015-06-15 17:36:49 +02:00
|
|
|
class SynapseRequest(Request):
    """A twisted Request that knows which SynapseSite it belongs to and
    logs itself with access tokens redacted from the URI.
    """
    def __init__(self, site, *args, **kw):
        Request.__init__(self, *args, **kw)
        self.site = site
        # Filled in by the auth layer once the requester is known.
        self.authenticated_entity = None
        # Millisecond timestamp recorded when processing starts.
        self.start_time = 0

    def __repr__(self):
        # We overwrite this so that we don't log ``access_token``
        return '<%s at 0x%x method=%s uri=%s clientproto=%s site=%s>' % (
            self.__class__.__name__,
            id(self),
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.site.site_tag,
        )

    def get_redacted_uri(self):
        # Replace any access_token query-string value with "<redacted>".
        return re.sub(
            r'(\?.*access_token=)[^&]*(.*)$',
            r'\1<redacted>\2',
            self.uri
        )

    def get_user_agent(self):
        # Last User-Agent header value, or None if the header is absent.
        return self.requestHeaders.getRawHeaders("User-Agent", [None])[-1]

    def started_processing(self):
        # Log receipt of the request and remember the start time so
        # finished_processing() can report the elapsed duration.
        self.site.access_logger.info(
            "%s - %s - Received request: %s %s",
            self.getClientIP(),
            self.site.site_tag,
            self.method,
            self.get_redacted_uri()
        )
        self.start_time = int(time.time() * 1000)

    def finished_processing(self):
        # Access-log style summary line, including processing time in ms.
        self.site.access_logger.info(
            "%s - %s - {%s}"
            " Processed request: %dms %sB %s \"%s %s %s\" \"%s\"",
            self.getClientIP(),
            self.site.site_tag,
            self.authenticated_entity,
            int(time.time() * 1000) - self.start_time,
            self.sentLength,
            self.code,
            self.method,
            self.get_redacted_uri(),
            self.clientproto,
            self.get_user_agent(),
        )

    @contextlib.contextmanager
    def processing(self):
        # Context manager wrapping request handling with start/finish logs.
        # NOTE(review): finished_processing() is skipped if the wrapped
        # body raises -- confirm whether that is intentional.
        self.started_processing()
        yield
        self.finished_processing()
|
|
|
|
|
2015-06-15 17:36:49 +02:00
|
|
|
|
|
|
|
class XForwardedForRequest(SynapseRequest):
    def __init__(self, *args, **kw):
        SynapseRequest.__init__(self, *args, **kw)

    # NOTE(review): the string below is a no-op expression, not the class
    # docstring (it is not the first statement in the class body).
    """
    Add a layer on top of another request that only uses the value of an
    X-Forwarded-For header as the result of C{getClientIP}.
    """
    def getClientIP(self):
        """
        @return: The client address (the first address) in the value of the
            I{X-Forwarded-For header}. If the header is not present, return
            C{b"-"}.
        """
        return self.requestHeaders.getRawHeaders(
            b"x-forwarded-for", [b"-"])[0].split(b",")[0].strip()
|
|
|
|
|
|
|
|
|
2015-06-15 17:36:49 +02:00
|
|
|
class SynapseRequestFactory(object):
    """Request factory producing SynapseRequest objects, or
    XForwardedForRequest objects when the listener sits behind a proxy.
    """
    def __init__(self, site, x_forwarded_for):
        self.site = site
        self.x_forwarded_for = x_forwarded_for

    def __call__(self, *args, **kwargs):
        request_class = (
            XForwardedForRequest if self.x_forwarded_for else SynapseRequest
        )
        return request_class(self.site, *args, **kwargs)
|
2015-06-12 18:13:23 +02:00
|
|
|
|
|
|
|
|
2015-04-30 17:17:27 +02:00
|
|
|
class SynapseSite(Site):
    """
    Subclass of a twisted http Site that does access logging with python's
    standard logging
    """
    def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs):
        Site.__init__(self, resource, *args, **kwargs)

        # Short name identifying this listener in access-log lines.
        self.site_tag = site_tag

        # Honour X-Forwarded-For only when the listener config says we sit
        # behind a trusted reverse proxy.
        proxied = config.get("x_forwarded", False)
        self.requestFactory = SynapseRequestFactory(self, proxied)
        self.access_logger = logging.getLogger(logger_name)

    def log(self, request):
        # Disable twisted's own access logging; SynapseRequest logs itself.
        pass
|
2015-04-30 17:17:27 +02:00
|
|
|
|
|
|
|
|
2015-06-12 12:52:52 +02:00
|
|
|
def create_resource_tree(desired_tree, redirect_root_to_web_client=True):
    """Create the resource tree for this Home Server.

    This in unduly complicated because Twisted does not support putting
    child resources more than 1 level deep at a time.

    Args:
        desired_tree (dict): Mapping from full URL path prefix to the
            Resource that should serve it.
        redirect_root_to_web_client (bool): True to redirect '/' to the
            location of the web client. This does nothing if the web client
            prefix is not present in desired_tree.

    Returns:
        Resource: the root of the assembled tree.
    """
    if redirect_root_to_web_client and WEB_CLIENT_PREFIX in desired_tree:
        root_resource = RootRedirect(WEB_CLIENT_PREFIX)
    else:
        root_resource = Resource()

    # ideally we'd just use getChild and putChild but getChild doesn't work
    # unless you give it a Request object IN ADDITION to the name :/ So
    # instead, we'll store a copy of this mapping so we can actually add
    # extra resources to existing nodes. See self._resource_id for the key.
    resource_mappings = {}
    for full_path, res in desired_tree.items():
        logger.info("Attaching %s to path %s", res, full_path)
        last_resource = root_resource
        # Walk/create intermediate path segments (all but the last).
        for path_seg in full_path.split('/')[1:-1]:
            if path_seg not in last_resource.listNames():
                # resource doesn't exist, so make a "dummy resource"
                child_resource = Resource()
                last_resource.putChild(path_seg, child_resource)
                res_id = _resource_id(last_resource, path_seg)
                resource_mappings[res_id] = child_resource
                last_resource = child_resource
            else:
                # we have an existing Resource, use that instead.
                res_id = _resource_id(last_resource, path_seg)
                last_resource = resource_mappings[res_id]

        # ===========================
        # now attach the actual desired resource
        last_path_seg = full_path.split('/')[-1]

        # if there is already a resource here, thieve its children and
        # replace it
        res_id = _resource_id(last_resource, last_path_seg)
        if res_id in resource_mappings:
            # there is a dummy resource at this path already, which needs
            # to be replaced with the desired resource.
            existing_dummy_resource = resource_mappings[res_id]
            for child_name in existing_dummy_resource.listNames():
                child_res_id = _resource_id(
                    existing_dummy_resource, child_name
                )
                child_resource = resource_mappings[child_res_id]
                # steal the children
                res.putChild(child_name, child_resource)

        # finally, insert the desired resource in the right place
        last_resource.putChild(last_path_seg, res)
        res_id = _resource_id(last_resource, last_path_seg)
        resource_mappings[res_id] = res

    return root_resource
|
|
|
|
|
|
|
|
|
|
|
|
def _resource_id(resource, path_seg):
|
|
|
|
"""Construct an arbitrary resource ID so you can retrieve the mapping
|
|
|
|
later.
|
|
|
|
|
|
|
|
If you want to represent resource A putChild resource B with path C,
|
|
|
|
the mapping should looks like _resource_id(A,C) = B.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
resource (Resource): The *parent* Resource
|
|
|
|
path_seg (str): The name of the child Resource to be attached.
|
|
|
|
Returns:
|
|
|
|
str: A unique string which can be a key to the child Resource.
|
|
|
|
"""
|
|
|
|
return "%s-%s" % (resource, path_seg)
|
|
|
|
|
|
|
|
|
2015-03-10 10:58:33 +01:00
|
|
|
def run(hs):
    """Run the reactor for `hs`, optionally daemonizing, profiling, and
    phoning home anonymised usage stats once a day.
    """
    # Flip to True locally to dump cProfile stats for the reactor and
    # threadpool workers to /tmp.
    PROFILE_SYNAPSE = False
    if PROFILE_SYNAPSE:
        def profile(func):
            # Wrap `func` so each invocation dumps cProfile stats to /tmp.
            from cProfile import Profile
            from threading import current_thread

            def profiled(*args, **kargs):
                profile = Profile()
                profile.enable()
                func(*args, **kargs)
                profile.disable()
                ident = current_thread().ident
                profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
                    hs.hostname, func.__name__, ident
                ))

            return profiled

        from twisted.python.threadpool import ThreadPool
        ThreadPool._worker = profile(ThreadPool._worker)
        reactor.run = profile(reactor.run)

    start_time = hs.get_clock().time()

    @defer.inlineCallbacks
    def phone_stats_home():
        # Gather and report anonymised usage statistics to matrix.org;
        # only scheduled when config.report_stats is enabled (see below).
        now = int(hs.get_clock().time())
        uptime = int(now - start_time)
        if uptime < 0:
            uptime = 0

        stats = {}
        stats["homeserver"] = hs.config.server_name
        stats["timestamp"] = now
        stats["uptime_seconds"] = uptime
        stats["total_users"] = yield hs.get_datastore().count_all_users()

        all_rooms = yield hs.get_datastore().get_rooms(False)
        stats["total_room_count"] = len(all_rooms)

        stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
        daily_messages = yield hs.get_datastore().count_daily_messages()
        if daily_messages is not None:
            stats["daily_messages"] = daily_messages

        logger.info("Reporting stats to matrix.org: %s" % (stats,))
        try:
            yield hs.get_simple_http_client().put_json(
                "https://matrix.org/report-usage-stats/push",
                stats
            )
        except Exception as e:
            # Best-effort: stats reporting must never take the server down.
            logger.warn("Error reporting stats: %s", e)

    if hs.config.report_stats:
        # Phone home once a day; don't fire immediately at startup.
        phone_home_task = task.LoopingCall(phone_stats_home)
        phone_home_task.start(60 * 60 * 24, now=False)

    def in_thread():
        # Runs (possibly in the daemonized child) with its own logging
        # context and a raised file-descriptor limit.
        with LoggingContext("run"):
            change_resource_limit(hs.config.soft_file_limit)
            reactor.run()

    if hs.config.daemonize:

        if hs.config.print_pidfile:
            print hs.config.pid_file

        daemon = Daemonize(
            app="synapse-homeserver",
            pid=hs.config.pid_file,
            action=lambda: in_thread(),
            auto_close_fds=False,
            verbose=True,
            logger=logger,
        )

        daemon.start()
    else:
        in_thread()
|
2014-10-30 02:21:33 +01:00
|
|
|
|
2014-11-20 18:26:36 +01:00
|
|
|
|
2014-11-18 16:57:00 +01:00
|
|
|
def main():
    """Entry point: verify requirements, build the homeserver, run it."""
    with LoggingContext("main"):
        # check base requirements
        check_requirements()
        homeserver = setup(sys.argv[1:])
        run(homeserver)
|
2014-08-12 16:10:52 +02:00
|
|
|
|
2014-11-20 18:26:36 +01:00
|
|
|
|
2014-08-12 16:10:52 +02:00
|
|
|
if __name__ == '__main__':
    # Script entry point when invoked directly (rather than via twistd).
    main()
|