2015-02-03 16:00:42 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2015, 2016 OpenMarket Ltd
|
2015-02-03 16:00:42 +01:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2017-09-19 13:20:11 +02:00
|
|
|
import logging
|
2015-02-03 16:00:42 +01:00
|
|
|
|
2018-05-01 17:19:39 +02:00
|
|
|
import six
|
|
|
|
|
|
|
|
from synapse.api.constants import MAX_DEPTH
|
|
|
|
from synapse.api.errors import SynapseError, Codes
|
2017-09-19 13:20:11 +02:00
|
|
|
from synapse.crypto.event_signing import check_event_content_hash
|
2017-12-30 19:40:19 +01:00
|
|
|
from synapse.events import FrozenEvent
|
2017-09-19 13:20:11 +02:00
|
|
|
from synapse.events.utils import prune_event
|
2017-12-30 19:40:19 +01:00
|
|
|
from synapse.http.servlet import assert_params_in_request
|
2017-09-20 02:32:42 +02:00
|
|
|
from synapse.util import unwrapFirstError, logcontext
|
2017-09-19 13:20:11 +02:00
|
|
|
from twisted.internet import defer
|
2015-02-03 16:00:42 +01:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
|
class FederationBase(object):
    """Base class providing signature/hash checking for PDUs received over
    federation. Subclasses are expected to provide ``get_pdu`` (used as a
    fallback fetch in ``_check_sigs_and_hash_and_fetch``).
    """

    def __init__(self, hs):
        # Keep a reference to the homeserver and pull out the pieces we need.
        self.hs = hs

        self.server_name = hs.hostname
        self.keyring = hs.get_keyring()
        self.spam_checker = hs.get_spam_checker()
        self.store = hs.get_datastore()
        self._clock = hs.get_clock()

    @defer.inlineCallbacks
    def _check_sigs_and_hash_and_fetch(self, origin, pdus, outlier=False,
                                       include_none=False):
        """Takes a list of PDUs and checks the signatures and hashs of each
        one. If a PDU fails its signature check then we check if we have it in
        the database and if not then request if from the originating server of
        that PDU.

        If a PDU fails its content hash check then it is redacted.

        The given list of PDUs are not modified, instead the function returns
        a new list.

        Args:
            origin (str): the server we received these PDUs from, used to
                decide whether it is worth re-fetching from ``pdu.origin``
            pdus (list): the PDUs to check
            outlier (bool): whether any re-fetched event should be marked as
                an outlier
            include_none (bool): if True, PDUs which could not be validated
                are returned as None entries rather than being dropped

        Returns:
            Deferred : A list of PDUs that have valid signatures and hashes.
        """
        # One deferred per input PDU; each fires with the (possibly redacted)
        # event, or errbacks with SynapseError on signature failure.
        deferreds = self._check_sigs_and_hashes(pdus)

        @defer.inlineCallbacks
        def handle_check_result(pdu, deferred):
            try:
                res = yield logcontext.make_deferred_yieldable(deferred)
            except SynapseError:
                # Signature check failed: fall through to the recovery paths.
                res = None

            if not res:
                # Check local db.
                res = yield self.store.get_event(
                    pdu.event_id,
                    allow_rejected=True,
                    allow_none=True,
                )

            # Only re-fetch from the event's origin if that isn't where we
            # just got it from (no point asking the same server again).
            if not res and pdu.origin != origin:
                try:
                    res = yield self.get_pdu(
                        destinations=[pdu.origin],
                        event_id=pdu.event_id,
                        outlier=outlier,
                        timeout=10000,
                    )
                except SynapseError:
                    # Best-effort: give up on this PDU rather than failing
                    # the whole batch.
                    pass

            if not res:
                logger.warn(
                    "Failed to find copy of %s with valid signature",
                    pdu.event_id,
                )

            # res may be None here; filtered out below unless include_none.
            defer.returnValue(res)

        # preserve_fn ensures each handler runs in (a copy of) the current
        # logcontext rather than the sentinel.
        handle = logcontext.preserve_fn(handle_check_result)
        deferreds2 = [
            handle(pdu, deferred)
            for pdu, deferred in zip(pdus, deferreds)
        ]

        # consumeErrors=True so a single failure doesn't leave unhandled
        # errors in the other deferreds; unwrapFirstError re-raises the
        # underlying exception instead of a FirstError wrapper.
        valid_pdus = yield logcontext.make_deferred_yieldable(
            defer.gatherResults(
                deferreds2,
                consumeErrors=True,
            )
        ).addErrback(unwrapFirstError)

        if include_none:
            defer.returnValue(valid_pdus)
        else:
            defer.returnValue([p for p in valid_pdus if p])

    def _check_sigs_and_hash(self, pdu):
        """Check the signature and content hash of a single event.

        Args:
            pdu (FrozenEvent): the event to check

        Returns:
            Deferred: the event (possibly redacted), or a SynapseError
                failure if the signature check failed.
        """
        return logcontext.make_deferred_yieldable(
            self._check_sigs_and_hashes([pdu])[0],
        )

    def _check_sigs_and_hashes(self, pdus):
        """Checks that each of the received events is correctly signed by the
        sending server.

        Args:
            pdus (list[FrozenEvent]): the events to be checked

        Returns:
            list[Deferred]: for each input event, a deferred which:
              * returns the original event if the checks pass
              * returns a redacted version of the event (if the signature
                matched but the hash did not)
              * throws a SynapseError if the signature check failed.

            The deferreds run their callbacks in the sentinel logcontext.
        """
        # Signatures are checked against the *pruned* (redacted) form of the
        # event, since that is what is signed over federation.
        redacted_pdus = [
            prune_event(pdu)
            for pdu in pdus
        ]

        deferreds = self.keyring.verify_json_objects_for_server([
            (p.origin, p.get_pdu_json())
            for p in redacted_pdus
        ])

        # Capture the current logcontext so the callbacks (which fire in the
        # sentinel context) can restore it for logging.
        ctx = logcontext.LoggingContext.current_context()

        def callback(_, pdu, redacted):
            with logcontext.PreserveLoggingContext(ctx):
                if not check_event_content_hash(pdu):
                    # Hash mismatch: the content was modified after signing;
                    # fall back to the redacted copy, which *is* signed.
                    logger.warn(
                        "Event content has been tampered, redacting %s: %s",
                        pdu.event_id, pdu.get_pdu_json()
                    )
                    return redacted

                if self.spam_checker.check_event_for_spam(pdu):
                    logger.warn(
                        "Event contains spam, redacting %s: %s",
                        pdu.event_id, pdu.get_pdu_json()
                    )
                    return redacted

                return pdu

        def errback(failure, pdu):
            # Only handle signature failures; anything else propagates as-is.
            failure.trap(SynapseError)
            with logcontext.PreserveLoggingContext(ctx):
                logger.warn(
                    "Signature check failed for %s",
                    pdu.event_id,
                )
            return failure

        for deferred, pdu, redacted in zip(deferreds, pdus, redacted_pdus):
            deferred.addCallbacks(
                callback, errback,
                callbackArgs=[pdu, redacted],
                errbackArgs=[pdu],
            )

        return deferreds
|
2017-12-30 19:40:19 +01:00
|
|
|
|
|
|
|
|
|
|
|
def event_from_pdu_json(pdu_json, outlier=False):
    """Construct a FrozenEvent from an event json received over federation

    Args:
        pdu_json (object): pdu as received over federation
        outlier (bool): True to mark this event as an outlier

    Returns:
        FrozenEvent

    Raises:
        SynapseError: if the pdu is missing required fields or is otherwise
            not a valid matrix event
    """
    # we could probably enforce a bunch of other fields here (room_id, sender,
    # origin, etc etc)
    assert_params_in_request(pdu_json, ('event_id', 'type', 'depth'))

    # Validate the depth before constructing the event: it must be a
    # non-negative integer no larger than MAX_DEPTH.
    depth = pdu_json['depth']
    if not isinstance(depth, six.integer_types):
        # Fixed typo in the error message ("intger" -> "integer").
        raise SynapseError(400, "Depth %r not an integer" % (depth, ),
                           Codes.BAD_JSON)

    if depth < 0:
        raise SynapseError(400, "Depth too small", Codes.BAD_JSON)
    elif depth > MAX_DEPTH:
        raise SynapseError(400, "Depth too large", Codes.BAD_JSON)

    event = FrozenEvent(
        pdu_json
    )

    # Outlier events are stored but not part of the room's ordered DAG state.
    event.internal_metadata.outlier = outlier

    return event
|