2014-10-15 18:09:04 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2016-01-07 05:26:29 +01:00
|
|
|
# Copyright 2014-2016 OpenMarket Ltd
|
2014-10-15 18:09:04 +02:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
import hashlib
|
|
|
|
import logging
|
2015-08-24 17:17:38 +02:00
|
|
|
|
|
|
|
from canonicaljson import encode_canonical_json
|
|
|
|
from signedjson.sign import sign_json
|
2018-07-09 08:09:20 +02:00
|
|
|
from unpaddedbase64 import decode_base64, encode_base64
|
2014-10-15 18:09:04 +02:00
|
|
|
|
2018-07-09 08:09:20 +02:00
|
|
|
from synapse.api.errors import Codes, SynapseError
|
|
|
|
from synapse.events.utils import prune_event
|
2014-10-17 20:41:32 +02:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2014-10-15 18:09:04 +02:00
|
|
|
|
|
|
|
|
2014-11-03 18:51:42 +01:00
|
|
|
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Verify that the content hash recorded on a PDU matches a hash
    recomputed from the event's own content.

    Returns True when the recorded hash equals the recomputed one and
    False otherwise.  Raises SynapseError (400, M_UNAUTHORIZED) when the
    event has no usable 'hashes' mapping, lacks an entry for the chosen
    algorithm, or stores a value that is not valid base64.
    """
    name, expected_hash = compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(expected_hash))

    # Malformed events may omit 'hashes' entirely, or give it a weird
    # type; treat both the same as an event that was never hashed.
    hashes = event.get("hashes")
    if not isinstance(hashes, dict):
        raise SynapseError(400, "Malformed 'hashes'", Codes.UNAUTHORIZED)

    if name not in hashes:
        message = "Algorithm %s not in hashes %s" % (
            name, list(hashes),
        )
        raise SynapseError(400, message, Codes.UNAUTHORIZED)

    message_hash_base64 = hashes[name]
    try:
        message_hash_bytes = decode_base64(message_hash_base64)
    except Exception:
        raise SynapseError(
            400,
            "Invalid base64: %s" % (message_hash_base64,),
            Codes.UNAUTHORIZED,
        )
    return message_hash_bytes == expected_hash
|
2014-10-15 18:09:04 +02:00
|
|
|
|
|
|
|
|
2014-12-03 17:07:21 +01:00
|
|
|
def compute_content_hash(event, hash_algorithm):
    """Hash the event's content with ``hash_algorithm``.

    Transport and bookkeeping fields that are not part of the hashed
    content are stripped before the JSON is canonicalised.

    Returns:
        A ``(algorithm_name, digest_bytes)`` pair.
    """
    event_json = event.get_pdu_json()

    # These keys are either mutable metadata or carry the hashes and
    # signatures themselves, so they must not feed into the hash.
    for ignored_key in (
        "age_ts",
        "unsigned",
        "signatures",
        "hashes",
        "outlier",
        "destinations",
    ):
        event_json.pop(ignored_key, None)

    hashed = hash_algorithm(encode_canonical_json(event_json))
    return (hashed.name, hashed.digest())
|
2014-10-17 12:40:35 +02:00
|
|
|
|
|
|
|
|
2014-10-31 16:35:39 +01:00
|
|
|
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
    """Hash a redacted (pruned) copy of the event for use as a
    reference hash.

    Returns:
        A ``(algorithm_name, digest_bytes)`` pair.
    """
    pruned = prune_event(event)
    event_json = pruned.get_pdu_json()

    # Signatures and mutable metadata are excluded from the hash.
    for ignored_key in ("signatures", "age_ts", "unsigned"):
        event_json.pop(ignored_key, None)

    hashed = hash_algorithm(encode_canonical_json(event_json))
    return (hashed.name, hashed.digest())
|
|
|
|
|
|
|
|
|
2014-11-03 18:51:42 +01:00
|
|
|
def compute_event_signature(event, signature_name, signing_key):
    """Sign a redacted (pruned) copy of the event.

    Returns:
        The ``signatures`` mapping produced by signing the pruned event
        JSON with ``signing_key`` under ``signature_name``.
    """
    redact_json = prune_event(event).get_pdu_json()

    # Mutable metadata must not be covered by the signature.
    redact_json.pop("age_ts", None)
    redact_json.pop("unsigned", None)

    logger.debug("Signing event: %s", encode_canonical_json(redact_json))
    redact_json = sign_json(redact_json, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(redact_json))

    return redact_json["signatures"]
|
|
|
|
|
|
|
|
|
|
|
|
def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    """Add a content hash and a signature to ``event`` in place.

    Args:
        event: the event to mutate. Its ``hashes`` mapping (created if
            absent) gains an entry for the chosen algorithm, and its
            ``signatures`` attribute is replaced with the result of
            signing the pruned event.
        signature_name: name the signature is recorded under
            (presumably the signing server's name — confirm at caller).
        signing_key: key passed through to ``compute_event_signature``.
        hash_algorithm: hashlib constructor used for the content hash.
    """
    # NOTE: a long-dead commented-out "state hash" computation used to
    # live here; it has been removed rather than kept as dead code.
    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)

    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[name] = encode_base64(digest)

    event.signatures = compute_event_signature(
        event,
        signature_name=signature_name,
        signing_key=signing_key,
    )
|