From 7c50e3b81655b8d4236d0308cd21506ac35b593c Mon Sep 17 00:00:00 2001
From: Kegsay
Date: Wed, 22 Apr 2015 08:38:26 +0100
Subject: [PATCH 01/23] Add info on breaking AS API changes

---
 UPGRADE.rst | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/UPGRADE.rst b/UPGRADE.rst
index 87dd6e04a8..ab327a8136 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -1,3 +1,37 @@
+Upgrading to v0.x.x
+===================
+
+Application services have had a breaking API change in this version.
+
+They can no longer register themselves with a home server using the AS HTTP API. This
+decision was made because a compromised application service with free rein to register
+any regex in effect grants full read/write access to the home server if a regex of ``.*``
+is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
+big a security risk to ignore, and so the ability to register with the HS remotely has
+been removed.
+
+It has been replaced by specifying a list of application service registrations in
+``homeserver.yaml``::
+
+  app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
+
+Where ``registration-01.yaml`` looks like::
+
+  url: <String>  # e.g. "https://my.application.service.com"
+  as_token: <String>
+  hs_token: <String>
+  sender_localpart: <String>  # This is a new field which denotes the user_id localpart when using the AS token
+  namespaces:
+    users:
+    - exclusive: <Boolean>
+      regex: <String>  # e.g. "@prefix_.*"
+    aliases:
+    - exclusive: <Boolean>
+      regex: <String>
+    rooms:
+    - exclusive: <Boolean>
+      regex: <String>
+
 Upgrading to v0.8.0
 ===================
 

From f43063158afb33bc1601632583b9e6377ff76aca Mon Sep 17 00:00:00 2001
From: "Paul \"LeoNerd\" Evans"
Date: Wed, 22 Apr 2015 13:12:11 +0100
Subject: [PATCH 02/23] Appease pep8

---
 synapse/api/auth.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index 9a5058a364..bae210c579 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -291,7 +291,6 @@ class Auth(object):
 
         return level
 
-
     def _get_named_level(self, auth_events, name, default):
         power_level_event = self._get_power_level_event(auth_events)
 

From a16eaa0c337c29a932b5effddfddff78849836c9 Mon Sep 17 00:00:00 2001
From: "Paul \"LeoNerd\" Evans"
Date: Wed, 22 Apr 2015 14:20:04 +0100
Subject: [PATCH 03/23] Neater fetching of user's auth level in a room -
 squash to int() at access time (SYN-353)

---
 synapse/api/auth.py | 47 ++++++++++++---------------------------------
 1 file changed, 12 insertions(+), 35 deletions(-)

diff --git a/synapse/api/auth.py b/synapse/api/auth.py
index bae210c579..a21120b313 100644
--- a/synapse/api/auth.py
+++ b/synapse/api/auth.py
@@ -183,17 +183,7 @@ class Auth(object):
         else:
             join_rule = JoinRules.INVITE
 
-        user_level = self._get_power_level_from_event_state(
-            event,
-            event.user_id,
-            auth_events,
-        )
-
-        # TODO(paul): There's an awful lot of int()-casting in this code;
-        #   surely we should be squashing strings to integers at a higher
-        #   level, maybe when we insert?
-        if user_level is not None:
-            user_level = int(user_level)
+        user_level = self._get_user_power_level(event.user_id, auth_events)
 
         # FIXME (erikj): What should we do here as the default?
ban_level = self._get_named_level(auth_events, "ban", 50) @@ -274,22 +264,26 @@ class Auth(object): key = (EventTypes.PowerLevels, "", ) return auth_events.get(key) - def _get_power_level_from_event_state(self, event, user_id, auth_events): + def _get_user_power_level(self, user_id, auth_events): power_level_event = self._get_power_level_event(auth_events) - level = None if power_level_event: level = power_level_event.content.get("users", {}).get(user_id) if not level: level = power_level_event.content.get("users_default", 0) + + if level is None: + return 0 + else: + return int(level) else: key = (EventTypes.Create, "", ) create_event = auth_events.get(key) if (create_event is not None and create_event.content["creator"] == user_id): return 100 - - return level + else: + return 0 def _get_named_level(self, auth_events, name, default): power_level_event = self._get_power_level_event(auth_events) @@ -496,16 +490,7 @@ class Auth(object): else: send_level = 0 - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) - - if user_level: - user_level = int(user_level) - else: - user_level = 0 + user_level = self._get_user_power_level(event.user_id, auth_events) if user_level < send_level: raise AuthError( @@ -537,11 +522,7 @@ class Auth(object): return True def _check_redaction(self, event, auth_events): - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) + user_level = self._get_user_power_level(event.user_id, auth_events) redact_level = self._get_named_level(auth_events, "redact", 50) @@ -571,11 +552,7 @@ class Auth(object): if not current_state: return - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - auth_events, - ) + user_level = self._get_user_power_level(event.user_id, auth_events) # Check other levels: levels_to_check = [ From 6c994913473b70e164b13a4f551da8a8d448cc33 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Thu, 23 Apr 2015 16:07:49 +0100 Subject: [PATCH 04/23] prometheus/metrics howto from Leo --- docs/metrics-howto.rst | 49 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 docs/metrics-howto.rst diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst new file mode 100644 index 0000000000..b3e71fc770 --- /dev/null +++ b/docs/metrics-howto.rst @@ -0,0 +1,49 @@ +How to monitor Synapse metrics using Prometheus +=============================================== + +1: install prometheus: + Follow instructions at http://prometheus.io/docs/introduction/install/ + +2: enable synapse metrics: + Simply setting a (local) port number will enable it. Pick a port. + prometheus itself defaults to 9090, so starting just above that for + locally monitored services seems reasonable. E.g. 9092: + + Add to homeserver.yaml + + metrics_port: 9092 + + Restart synapse + +3: check out synapse-prometheus-config + https://github.com/matrix-org/synapse-prometheus-config + +4: arrange for synapse.html to appear in prometheus's "consoles" + directory - symlink might be easiest to ensure `git pull` keeps it + updated. + +5: arrange for synapse.rules to be invoked from the main + prometheus.conf and add a synapse target. 
This is easiest if + prometheus runs on the same machine as synapse, as it can then just + use localhost:: + + global: { + rule_file: "synapse.rules" + } + + job: { + name: "synapse" + + target_group: { + target: "http://localhost:9092/" + } + } + +6: start prometheus:: + + ./prometheus -config.file=prometheus.conf + +7: wait a few seconds for it to start and perform the first scrape, + then visit the console: + + http://server-where-prometheus-runs:9090/consoles/synapse.html From 8c784142845bf462b255374b4cbacc22fd572847 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 16:14:08 +0100 Subject: [PATCH 05/23] Formatting / wording fixes to metrics doc --- docs/metrics-howto.rst | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index b3e71fc770..99776cd504 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -1,10 +1,10 @@ How to monitor Synapse metrics using Prometheus =============================================== -1: install prometheus: +1: Install prometheus: Follow instructions at http://prometheus.io/docs/introduction/install/ -2: enable synapse metrics: +2: Enable synapse metrics: Simply setting a (local) port number will enable it. Pick a port. prometheus itself defaults to 9090, so starting just above that for locally monitored services seems reasonable. E.g. 9092: @@ -15,17 +15,18 @@ How to monitor Synapse metrics using Prometheus Restart synapse -3: check out synapse-prometheus-config +3: Check out synapse-prometheus-config https://github.com/matrix-org/synapse-prometheus-config -4: arrange for synapse.html to appear in prometheus's "consoles" - directory - symlink might be easiest to ensure `git pull` keeps it - updated. +4: Add `synapse.html` and `synapse.rules` + The `.html` file needs to appear in prometheus's "consoles" directory, and + the `.rules` file needs to be invoked somewhere in the main config file. + A symlink to each from the git checkout into the prometheus directory might be + easiest to ensure `git pull` keeps it updated. -5: arrange for synapse.rules to be invoked from the main - prometheus.conf and add a synapse target. 
This is easiest if - prometheus runs on the same machine as synapse, as it can then just - use localhost:: +5: Add a prometheus target for synapse + This is easiest if prometheus runs on the same machine as synapse, as it can + then just use localhost:: global: { rule_file: "synapse.rules" @@ -39,11 +40,11 @@ How to monitor Synapse metrics using Prometheus } } -6: start prometheus:: +6: Start prometheus:: ./prometheus -config.file=prometheus.conf -7: wait a few seconds for it to start and perform the first scrape, +7: Wait a few seconds for it to start and perform the first scrape, then visit the console: http://server-where-prometheus-runs:9090/consoles/synapse.html From 6d1540134133cfe07fbecfbf0c733aceade33a05 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 16:16:08 +0100 Subject: [PATCH 06/23] Mumble ReST mumble ``fixed-width`` mumble --- docs/metrics-howto.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/metrics-howto.rst b/docs/metrics-howto.rst index 99776cd504..c1f5ae2174 100644 --- a/docs/metrics-howto.rst +++ b/docs/metrics-howto.rst @@ -18,11 +18,11 @@ How to monitor Synapse metrics using Prometheus 3: Check out synapse-prometheus-config https://github.com/matrix-org/synapse-prometheus-config -4: Add `synapse.html` and `synapse.rules` - The `.html` file needs to appear in prometheus's "consoles" directory, and - the `.rules` file needs to be invoked somewhere in the main config file. - A symlink to each from the git checkout into the prometheus directory might be - easiest to ensure `git pull` keeps it updated. +4: Add ``synapse.html`` and ``synapse.rules`` + The ``.html`` file needs to appear in prometheus's ``consoles`` directory, + and the ``.rules`` file needs to be invoked somewhere in the main config + file. A symlink to each from the git checkout into the prometheus directory + might be easiest to ensure ``git pull`` keeps it updated. 5: Add a prometheus target for synapse This is easiest if prometheus runs on the same machine as synapse, as it can From 191f7f09cee4e148949af9e33c5c8f60184acf90 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:27:25 +0100 Subject: [PATCH 07/23] Generate presence event-stream JSON structures directly --- synapse/handlers/presence.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index bbc7a0f200..6332f50974 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -836,6 +836,8 @@ class PresenceEventSource(object): presence = self.hs.get_handlers().presence_handler cachemap = presence._user_cachemap + clock = self.clock + latest_serial = None updates = [] # TODO(paul): use a DeferredList ? How to limit concurrency. 
@@ -845,18 +847,17 @@ class PresenceEventSource(object): if cached.serial <= from_key: continue - if (yield self.is_visible(observer_user, observed_user)): - updates.append((observed_user, cached)) + if not (yield self.is_visible(observer_user, observed_user)): + continue + + if latest_serial is None or cached.serial > latest_serial: + latest_serial = cached.serial + updates.append(cached.make_event(user=observed_user, clock=clock)) # TODO(paul): limit if updates: - clock = self.clock - - latest_serial = max([x[1].serial for x in updates]) - data = [x[1].make_event(user=x[0], clock=clock) for x in updates] - - defer.returnValue((data, latest_serial)) + defer.returnValue((updates, latest_serial)) else: defer.returnValue(([], presence._user_cachemap_latest_serial)) From 8a785c3006327076245428d26e5ca1634e9caeb2 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:40:19 +0100 Subject: [PATCH 08/23] Store a list of the presence serial number at which remote users went offline, so that when we delete them from the cachemap, we can still synthesize OFFLINE events for them (SYN-261) --- synapse/handlers/presence.py | 21 ++++++++++++++++++ tests/handlers/test_presence.py | 38 +++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 6332f50974..42fb622c48 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -135,6 +135,9 @@ class PresenceHandler(BaseHandler): self._remote_sendmap = {} # map remote users to sets of local users who're interested in them self._remote_recvmap = {} + # list of (serial, set of(userids)) tuples, ordered by serial, latest + # first + self._remote_offline_serials = [] # map any user to a UserPresenceCache self._user_cachemap = {} @@ -715,6 +718,10 @@ class PresenceHandler(BaseHandler): ) if state["presence"] == PresenceState.OFFLINE: + self._remote_offline_serials.insert( + 0, + (self._user_cachemap_latest_serial, set([user.to_string()])) + ) del self._user_cachemap[user] for poll in content.get("poll", []): @@ -856,6 +863,20 @@ class PresenceEventSource(object): # TODO(paul): limit + for serial, user_ids in presence._remote_offline_serials: + if serial < from_key: + break + + for u in user_ids: + updates.append({ + "type": "m.presence", + "content": {"user_id": u, "presence": PresenceState.OFFLINE}, + }) + # TODO(paul): For the v2 API we want to tell the client their from_key + # is too old if we fell off the end of the _remote_offline_serials + # list, and get them to invalidate+resync. In v1 we have no such + # concept so this is a best-effort result. 
+ if updates: defer.returnValue((updates, latest_serial)) else: diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index 04eba4289e..bb497b6f09 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -878,6 +878,44 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase): state ) + @defer.inlineCallbacks + def test_recv_remote_offline(self): + """ Various tests relating to SYN-261 """ + potato_set = self.handler._remote_recvmap.setdefault(self.u_potato, + set()) + potato_set.add(self.u_apple) + + self.room_members = [self.u_banana, self.u_potato] + + self.assertEquals(self.event_source.get_current_key(), 0) + + yield self.mock_federation_resource.trigger("PUT", + "/_matrix/federation/v1/send/1000000/", + _make_edu_json("elsewhere", "m.presence", + content={ + "push": [ + {"user_id": "@potato:remote", + "presence": "offline"}, + ], + } + ) + ) + + self.assertEquals(self.event_source.get_current_key(), 1) + + (events, _) = yield self.event_source.get_new_events_for_user( + self.u_apple, 0, None + ) + self.assertEquals(events, + [ + {"type": "m.presence", + "content": { + "user_id": "@potato:remote", + "presence": OFFLINE, + }} + ] + ) + @defer.inlineCallbacks def test_join_room_local(self): self.room_members = [self.u_apple, self.u_banana] From b3bda8a75f9745fd351d2c2093ffc68774e8a2e2 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 18:40:47 +0100 Subject: [PATCH 09/23] Don't let the remote offline serial list grow arbitrarily large --- synapse/handlers/presence.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index 42fb622c48..f929bcf853 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -36,6 +36,9 @@ metrics = synapse.metrics.get_metrics_for(__name__) # Don't bother bumping "last active" time if it differs by less than 60 seconds LAST_ACTIVE_GRANULARITY = 60*1000 +# Keep no more than this number of offline serial revisions +MAX_OFFLINE_SERIALS = 1000 + # TODO(paul): Maybe there's one of these I can steal from somewhere def partition(l, func): @@ -722,6 +725,8 @@ class PresenceHandler(BaseHandler): 0, (self._user_cachemap_latest_serial, set([user.to_string()])) ) + while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS: + self._remote_offline_serials.pop() # remove the oldest del self._user_cachemap[user] for poll in content.get("poll", []): From e1e5e53127540fbaa4e23fbc628113983efd767b Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Thu, 23 Apr 2015 19:01:37 +0100 Subject: [PATCH 10/23] Remove users from the remote_offline_serials list (and clean up empty elements) when they go online again --- synapse/handlers/presence.py | 12 +++++++++++- tests/handlers/test_presence.py | 27 +++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/presence.py b/synapse/handlers/presence.py index f929bcf853..571eacd343 100644 --- a/synapse/handlers/presence.py +++ b/synapse/handlers/presence.py @@ -720,14 +720,24 @@ class PresenceHandler(BaseHandler): statuscache=statuscache, ) + user_id = user.to_string() + if state["presence"] == PresenceState.OFFLINE: self._remote_offline_serials.insert( 0, - (self._user_cachemap_latest_serial, set([user.to_string()])) + (self._user_cachemap_latest_serial, set([user_id])) ) while len(self._remote_offline_serials) > MAX_OFFLINE_SERIALS: self._remote_offline_serials.pop() # remove the oldest del 
self._user_cachemap[user] + else: + # Remove the user from remote_offline_serials now that they're + # no longer offline + for idx, elem in enumerate(self._remote_offline_serials): + (_, user_ids) = elem + user_ids.discard(user_id) + if not user_ids: + self._remote_offline_serials.pop(idx) for poll in content.get("poll", []): user = UserID.from_string(poll) diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index bb497b6f09..9f5580c096 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -916,6 +916,33 @@ class PresencePushTestCase(MockedDatastorePresenceTestCase): ] ) + yield self.mock_federation_resource.trigger("PUT", + "/_matrix/federation/v1/send/1000001/", + _make_edu_json("elsewhere", "m.presence", + content={ + "push": [ + {"user_id": "@potato:remote", + "presence": "online"}, + ], + } + ) + ) + + self.assertEquals(self.event_source.get_current_key(), 2) + + (events, _) = yield self.event_source.get_new_events_for_user( + self.u_apple, 0, None + ) + self.assertEquals(events, + [ + {"type": "m.presence", + "content": { + "user_id": "@potato:remote", + "presence": ONLINE, + }} + ] + ) + @defer.inlineCallbacks def test_join_room_local(self): self.room_members = [self.u_apple, self.u_banana] From 74270defdaf4070ba001713ae9f1f668790fc9a3 Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 24 Apr 2015 09:27:42 +0100 Subject: [PATCH 11/23] No commas here, otherwise our error string constants become tuples. --- synapse/api/errors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index eddd889778..72d2bd5b4c 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -35,8 +35,8 @@ class Codes(object): LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED" CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED" CAPTCHA_INVALID = "M_CAPTCHA_INVALID" - MISSING_PARAM = "M_MISSING_PARAM", - TOO_LARGE = "M_TOO_LARGE", + MISSING_PARAM = "M_MISSING_PARAM" + TOO_LARGE = "M_TOO_LARGE" EXCLUSIVE = "M_EXCLUSIVE" From 4e2f8b87221a7c2391a399b729b191ce40b91ab6 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 24 Apr 2015 10:35:29 +0100 Subject: [PATCH 12/23] Copyright notices --- synapse/push/baserules.py | 14 ++++++++++++++ synapse/push/rulekinds.py | 14 ++++++++++++++ synapse/python_dependencies.py | 14 ++++++++++++++ synapse/rest/media/v1/identicon_resource.py | 14 ++++++++++++++ .../schema/delta/14/upgrade_appservice_db.py | 14 ++++++++++++++ synapse/storage/schema/delta/14/v14.sql | 14 ++++++++++++++ 6 files changed, 84 insertions(+) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index 60fd35fbfb..f8408d6596 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ from synapse.push.rulekinds import PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP diff --git a/synapse/push/rulekinds.py b/synapse/push/rulekinds.py index 660aa4e10e..4c591aa638 100644 --- a/synapse/push/rulekinds.py +++ b/synapse/push/rulekinds.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + PRIORITY_CLASS_MAP = { 'underride': 1, 'sender': 2, diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index ee72f774b3..8b457419cf 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging from distutils.version import LooseVersion diff --git a/synapse/rest/media/v1/identicon_resource.py b/synapse/rest/media/v1/identicon_resource.py index 912856386a..603859d5d4 100644 --- a/synapse/rest/media/v1/identicon_resource.py +++ b/synapse/rest/media/v1/identicon_resource.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from pydenticon import Generator from twisted.web.resource import Resource diff --git a/synapse/storage/schema/delta/14/upgrade_appservice_db.py b/synapse/storage/schema/delta/14/upgrade_appservice_db.py index 847b1c5b89..9f3a4dd4c5 100644 --- a/synapse/storage/schema/delta/14/upgrade_appservice_db.py +++ b/synapse/storage/schema/delta/14/upgrade_appservice_db.py @@ -1,3 +1,17 @@ +# Copyright 2015 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
 import json
 import logging
 
diff --git a/synapse/storage/schema/delta/14/v14.sql b/synapse/storage/schema/delta/14/v14.sql
index 0212726448..1d09ad7a15 100644
--- a/synapse/storage/schema/delta/14/v14.sql
+++ b/synapse/storage/schema/delta/14/v14.sql
@@ -1,3 +1,17 @@
+/* Copyright 2015 OpenMarket Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 CREATE TABLE IF NOT EXISTS push_rules_enable (
   id INTEGER PRIMARY KEY AUTOINCREMENT,
   user_name TEXT NOT NULL,

From 869dc94cbb5810e50efb7b6dd8320817aca01554 Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Fri, 24 Apr 2015 11:27:56 +0100
Subject: [PATCH 13/23] Call the super classes when generating config

---
 synapse/config/registration.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index d5c8f4bf7b..ad81cc4f45 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -53,6 +53,7 @@ class RegistrationConfig(Config):
 
     @classmethod
     def generate_config(cls, args, config_dir_path):
+        super(RegistrationConfig, cls).genenerate_config(args, config_dir_path)
         if args.enable_registration is None:
             args.enable_registration = False
 

From bdcb23ca25bc2ea72ab3bc28d76c6b72d68206b3 Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Fri, 24 Apr 2015 11:29:19 +0100
Subject: [PATCH 14/23] Fix spelling

---
 synapse/config/registration.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/synapse/config/registration.py b/synapse/config/registration.py
index ad81cc4f45..f412a72f59 100644
--- a/synapse/config/registration.py
+++ b/synapse/config/registration.py
@@ -53,7 +53,7 @@ class RegistrationConfig(Config):
 
     @classmethod
     def generate_config(cls, args, config_dir_path):
-        super(RegistrationConfig, cls).genenerate_config(args, config_dir_path)
+        super(RegistrationConfig, cls).generate_config(args, config_dir_path)
         if args.enable_registration is None:
             args.enable_registration = False
 

From ed836386680acfeebe0ad2eb26985e5a88ccc3ab Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 24 Apr 2015 14:26:33 +0100
Subject: [PATCH 15/23] Make one-to-one rule an underride, otherwise bings
 don't work in one-to-one rooms. Likewise a couple of other rules.
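
Background: push rules are evaluated one priority class at a time, from
``override`` (highest) down to ``underride`` (lowest), and the first
matching rule decides the actions. A rough, self-contained sketch of that
ordering (illustrative only; ``matches()`` stands in for the real
condition evaluator):

    # Priority classes as defined in synapse/push/rulekinds.py.
    PRIORITY_CLASS_MAP = {
        'underride': 1,
        'sender': 2,
        'room': 3,
        'content': 4,
        'override': 5,
    }

    def first_matching_rule(rules, event, matches):
        # Check the highest priority class first; the first hit wins.
        ordered = sorted(
            rules,
            key=lambda rule: PRIORITY_CLASS_MAP[rule['kind']],
            reverse=True,
        )
        for rule in ordered:
            if all(matches(cond, event) for cond in rule['conditions']):
                return rule
        return None

With the one-to-one rule in ``override``, it matched messages in a
two-member room before the keyword rules could, so its plain notify
actions replaced theirs (which carry a sound) and nothing binged. As an
``underride`` it only applies when nothing more specific has matched.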
--- synapse/push/baserules.py | 42 +++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/synapse/push/baserules.py b/synapse/push/baserules.py index f8408d6596..f3d1cf5c5f 100644 --- a/synapse/push/baserules.py +++ b/synapse/push/baserules.py @@ -126,7 +126,25 @@ def make_base_prepend_override_rules(): def make_base_append_override_rules(): return [ { - 'rule_id': 'global/override/.m.rule.call', + 'rule_id': 'global/override/.m.rule.suppress_notices', + 'conditions': [ + { + 'kind': 'event_match', + 'key': 'content.msgtype', + 'pattern': 'm.notice', + } + ], + 'actions': [ + 'dont_notify', + ] + } + ] + + +def make_base_append_underride_rules(user): + return [ + { + 'rule_id': 'global/underride/.m.rule.call', 'conditions': [ { 'kind': 'event_match', @@ -145,19 +163,6 @@ def make_base_append_override_rules(): } ] }, - { - 'rule_id': 'global/override/.m.rule.suppress_notices', - 'conditions': [ - { - 'kind': 'event_match', - 'key': 'content.msgtype', - 'pattern': 'm.notice', - } - ], - 'actions': [ - 'dont_notify', - ] - }, { 'rule_id': 'global/override/.m.rule.contains_display_name', 'conditions': [ @@ -176,7 +181,7 @@ def make_base_append_override_rules(): ] }, { - 'rule_id': 'global/override/.m.rule.room_one_to_one', + 'rule_id': 'global/underride/.m.rule.room_one_to_one', 'conditions': [ { 'kind': 'room_member_count', @@ -193,12 +198,7 @@ def make_base_append_override_rules(): 'value': False } ] - } - ] - - -def make_base_append_underride_rules(user): - return [ + }, { 'rule_id': 'global/underride/.m.rule.invite_for_me', 'conditions': [ From 04d1725752aae0f4d4e9eb6d97a352dee1d8ef77 Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 24 Apr 2015 15:01:14 +0100 Subject: [PATCH 16/23] Pedant: OS X has a space --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 14ef6c5acf..249d08c48f 100644 --- a/README.rst +++ b/README.rst @@ -86,7 +86,7 @@ Homeserver Installation ======================= System requirements: -- POSIX-compliant system (tested on Linux & OSX) +- POSIX-compliant system (tested on Linux & OS X) - Python 2.7 Synapse is written in python but some of the libraries is uses are written in From 44ccfa6258e2a5d0b0b5fe0f7b9e87bc106a1f07 Mon Sep 17 00:00:00 2001 From: David Baker Date: Fri, 24 Apr 2015 15:05:56 +0100 Subject: [PATCH 17/23] Remove ancient history --- README.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.rst b/README.rst index 249d08c48f..e5edc0c765 100644 --- a/README.rst +++ b/README.rst @@ -367,10 +367,6 @@ SRV record, as that is the name other machines will expect it to have:: You may additionally want to pass one or more "-v" options, in order to increase the verbosity of logging output; at least for initial testing. -For the initial alpha release, the homeserver is not speaking TLS for -either client-server or server-server traffic for ease of debugging. We have -also not spent any time yet getting the homeserver to run behind loadbalancers. 
 Running a Demo Federation of Homeservers
 ----------------------------------------
 

From a654f3fe4988a624df0355879e1d31c1b12a6f13 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 24 Apr 2015 15:07:24 +0100
Subject: [PATCH 18/23] Matrix ID server is now HTTPS

---
 README.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.rst b/README.rst
index e5edc0c765..da0102bfaa 100644
--- a/README.rst
+++ b/README.rst
@@ -429,7 +429,7 @@ track 3PID logins and publish end-user public keys.
 
 It's currently early days for identity servers as Matrix is not yet using 3PIDs
 as the primary means of identity and E2E encryption is not complete. As such,
-we are running a single identity server (http://matrix.org:8090) at the current
+we are running a single identity server (https://matrix.org) at the current
 time.
 

From f46eee838ae041ae19b4f7dc1bf6b44fe961e7b5 Mon Sep 17 00:00:00 2001
From: David Baker
Date: Fri, 24 Apr 2015 15:25:28 +0100
Subject: [PATCH 19/23] Add note about updating your signing keys (ie. "the
 auto thing")

---
 README.rst | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/README.rst b/README.rst
index da0102bfaa..714ad6443d 100644
--- a/README.rst
+++ b/README.rst
@@ -128,6 +128,15 @@ To set up your homeserver, run (in your virtualenv, as before)::
 
 Substituting your host and domain name as appropriate.
 
+This will generate you a config file that you can then customise, but it will
+also generate a set of keys for you. These keys will allow your Home Server to
+identify itself to other Home Servers, so don't lose or delete them. It would be
+wise to back them up somewhere safe. If, for whatever reason, you do need to
+change your Home Server's keys, you may find that other Home Servers have the
+old key cached. If you update the signing key, you should change the name of the
+key in the <server name>.signing.key file (the second word, which by default is
+'auto') to something different.
+
 By default, registration of new users is disabled. You can either enable
 registration in the config by specifying ``enable_registration: true`` (it is
 then recommended to also set up CAPTCHA), or

From 1c82fbd2eb99d689d8fe835eca9f394518e25316 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 27 Apr 2015 13:59:37 +0100
Subject: [PATCH 20/23] Implement create_observer.

`create_observer` takes a deferred and creates a new deferred that
*observes* the original deferred. Any callbacks added to the observing
deferred will *not* affect the original deferred.
---
 synapse/util/async.py | 19 +++++++++++++++
 1 file changed, 19 insertions(+)

diff --git a/synapse/util/async.py b/synapse/util/async.py
index c4fe5d522f..d8febdb90c 100644
--- a/synapse/util/async.py
+++ b/synapse/util/async.py
@@ -32,3 +32,22 @@ def run_on_reactor():
     iteration of the main loop
     """
     return sleep(0)
+
+
+def create_observer(deferred):
+    """Creates a deferred that observes the result or failure of the given
+    deferred *without* affecting the given deferred.
+ """ + d = defer.Deferred() + + def callback(r): + d.callback(r) + return r + + def errback(f): + d.errback(f) + return f + + deferred.addCallbacks(callback, errback) + + return d From e701aec2d1e9a565d29bc27d2bde61032cba5fd1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:20:26 +0100 Subject: [PATCH 21/23] Implement locks using create_observer for fetching media and server keys --- synapse/crypto/keyring.py | 148 ++++++++++++++----------- synapse/rest/media/v1/base_resource.py | 4 +- 2 files changed, 87 insertions(+), 65 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index f4db7b8a05..d98341f5c2 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -24,6 +24,8 @@ from synapse.api.errors import SynapseError, Codes from synapse.util.retryutils import get_retry_limiter +from synapse.util.async import create_observer + from OpenSSL import crypto import logging @@ -38,6 +40,8 @@ class Keyring(object): self.clock = hs.get_clock() self.hs = hs + self.key_downloads = {} + @defer.inlineCallbacks def verify_json_for_server(self, server_name, json_object): logger.debug("Verifying for %s", server_name) @@ -97,76 +101,92 @@ class Keyring(object): defer.returnValue(cached[0]) return - # Try to fetch the key from the remote server. + @defer.inlineCallbacks + def fetch_keys(): + # Try to fetch the key from the remote server. - limiter = yield get_retry_limiter( - server_name, - self.clock, - self.store, - ) - - with limiter: - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, ) - # Check the response. - - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) - - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") - - tls_certificate_b64 = response["tls_certificate"] - - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") - - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key - - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] + with limiter: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory ) - # Cache the result in the datastore. + # Check the response. 
- time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) - - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate ) - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") - raise ValueError("No verification key found for given key ids") + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] + ) + + # Cache the result in the datastore. + + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) + + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) + + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return + + raise ValueError("No verification key found for given key ids") + + download = self.key_downloads.get(server_name) + + if download is None: + download = fetch_keys() + self.key_downloads[server_name] = download + + @download.addBoth + def callback(ret): + del self.key_downloads[server_name] + return ret + + r = yield create_observer(download) + defer.returnValue(r) diff --git a/synapse/rest/media/v1/base_resource.py b/synapse/rest/media/v1/base_resource.py index edd4f78024..08c8d75af4 100644 --- a/synapse/rest/media/v1/base_resource.py +++ b/synapse/rest/media/v1/base_resource.py @@ -25,6 +25,8 @@ from twisted.internet import defer from twisted.web.resource import Resource from twisted.protocols.basic import FileSender +from synapse.util.async import create_observer + import os import logging @@ -87,7 +89,7 @@ class BaseMediaResource(Resource): def callback(media_info): del self.downloads[key] return media_info - return download + return create_observer(download) @defer.inlineCallbacks def _get_remote_media_impl(self, server_name, media_id): From 0a016b0525c918927c8134d5cb11d9be520a9efc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:37:24 +0100 Subject: [PATCH 22/23] Pull inner function out. 
--- synapse/crypto/keyring.py | 155 +++++++++++++++++++------------------- 1 file changed, 78 insertions(+), 77 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index d98341f5c2..14f8f536e4 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -101,86 +101,10 @@ class Keyring(object): defer.returnValue(cached[0]) return - @defer.inlineCallbacks - def fetch_keys(): - # Try to fetch the key from the remote server. - - limiter = yield get_retry_limiter( - server_name, - self.clock, - self.store, - ) - - with limiter: - (response, tls_certificate) = yield fetch_server_key( - server_name, self.hs.tls_context_factory - ) - - # Check the response. - - x509_certificate_bytes = crypto.dump_certificate( - crypto.FILETYPE_ASN1, tls_certificate - ) - - if ("signatures" not in response - or server_name not in response["signatures"]): - raise ValueError("Key response not signed by remote server") - - if "tls_certificate" not in response: - raise ValueError("Key response missing TLS certificate") - - tls_certificate_b64 = response["tls_certificate"] - - if encode_base64(x509_certificate_bytes) != tls_certificate_b64: - raise ValueError("TLS certificate doesn't match") - - verify_keys = {} - for key_id, key_base64 in response["verify_keys"].items(): - if is_signing_algorithm_supported(key_id): - key_bytes = decode_base64(key_base64) - verify_key = decode_verify_key_bytes(key_id, key_bytes) - verify_keys[key_id] = verify_key - - for key_id in response["signatures"][server_name]: - if key_id not in response["verify_keys"]: - raise ValueError( - "Key response must include verification keys for all" - " signatures" - ) - if key_id in verify_keys: - verify_signed_json( - response, - server_name, - verify_keys[key_id] - ) - - # Cache the result in the datastore. - - time_now_ms = self.clock.time_msec() - - yield self.store.store_server_certificate( - server_name, - server_name, - time_now_ms, - tls_certificate, - ) - - for key_id, key in verify_keys.items(): - yield self.store.store_server_verify_key( - server_name, server_name, time_now_ms, key - ) - - for key_id in key_ids: - if key_id in verify_keys: - defer.returnValue(verify_keys[key_id]) - return - - raise ValueError("No verification key found for given key ids") - download = self.key_downloads.get(server_name) if download is None: - download = fetch_keys() + download = self._get_server_verify_key_impl(server_name, key_ids) self.key_downloads[server_name] = download @download.addBoth @@ -190,3 +114,80 @@ class Keyring(object): r = yield create_observer(download) defer.returnValue(r) + + + @defer.inlineCallbacks + def _get_server_verify_key_impl(self, server_name, key_ids): + # Try to fetch the key from the remote server. + + limiter = yield get_retry_limiter( + server_name, + self.clock, + self.store, + ) + + with limiter: + (response, tls_certificate) = yield fetch_server_key( + server_name, self.hs.tls_context_factory + ) + + # Check the response. 
+ + x509_certificate_bytes = crypto.dump_certificate( + crypto.FILETYPE_ASN1, tls_certificate + ) + + if ("signatures" not in response + or server_name not in response["signatures"]): + raise ValueError("Key response not signed by remote server") + + if "tls_certificate" not in response: + raise ValueError("Key response missing TLS certificate") + + tls_certificate_b64 = response["tls_certificate"] + + if encode_base64(x509_certificate_bytes) != tls_certificate_b64: + raise ValueError("TLS certificate doesn't match") + + verify_keys = {} + for key_id, key_base64 in response["verify_keys"].items(): + if is_signing_algorithm_supported(key_id): + key_bytes = decode_base64(key_base64) + verify_key = decode_verify_key_bytes(key_id, key_bytes) + verify_keys[key_id] = verify_key + + for key_id in response["signatures"][server_name]: + if key_id not in response["verify_keys"]: + raise ValueError( + "Key response must include verification keys for all" + " signatures" + ) + if key_id in verify_keys: + verify_signed_json( + response, + server_name, + verify_keys[key_id] + ) + + # Cache the result in the datastore. + + time_now_ms = self.clock.time_msec() + + yield self.store.store_server_certificate( + server_name, + server_name, + time_now_ms, + tls_certificate, + ) + + for key_id, key in verify_keys.items(): + yield self.store.store_server_verify_key( + server_name, server_name, time_now_ms, key + ) + + for key_id in key_ids: + if key_id in verify_keys: + defer.returnValue(verify_keys[key_id]) + return + + raise ValueError("No verification key found for given key ids") \ No newline at end of file From 2c70849dc32a52157217d75298c99c4cfccce639 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Apr 2015 14:38:29 +0100 Subject: [PATCH 23/23] Fix newlines --- synapse/crypto/keyring.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/crypto/keyring.py b/synapse/crypto/keyring.py index 14f8f536e4..2b4faee4c1 100644 --- a/synapse/crypto/keyring.py +++ b/synapse/crypto/keyring.py @@ -115,7 +115,6 @@ class Keyring(object): r = yield create_observer(download) defer.returnValue(r) - @defer.inlineCallbacks def _get_server_verify_key_impl(self, server_name, key_ids): # Try to fetch the key from the remote server. @@ -190,4 +189,4 @@ class Keyring(object): defer.returnValue(verify_keys[key_id]) return - raise ValueError("No verification key found for given key ids") \ No newline at end of file + raise ValueError("No verification key found for given key ids")
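
For reference, the ``create_observer`` contract that the last few patches
rely on, shown as a minimal standalone sketch (illustrative only, not part
of the patch series):

    from twisted.internet import defer

    from synapse.util.async import create_observer

    # The shared, in-flight request that several callers wait on.
    download = defer.Deferred()

    # Each caller attaches its callbacks to its own observer instead.
    observer = create_observer(download)
    observer.addCallback(lambda r: r.upper())

    download.callback("result")
    # The observer's chain produced "RESULT", but the shared deferred still
    # holds "result": callbacks added to an observer never affect the
    # deferred being observed.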