From 1c445f88f64beabf0bd9bec3950a4a4c0d529e8a Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Wed, 15 Oct 2014 17:09:04 +0100
Subject: persist hashes and origin signatures for PDUs

---
 synapse/crypto/event_signing.py | 70 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 70 insertions(+)
 create mode 100644 synapse/crypto/event_signing.py

(limited to 'synapse/crypto')

diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py
new file mode 100644
index 0000000000..6557727e06
--- /dev/null
+++ b/synapse/crypto/event_signing.py
@@ -0,0 +1,70 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2014 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from synapse.api.events.utils import prune_pdu
+from syutil.jsonutil import encode_canonical_json
+from syutil.base64util import encode_base64, decode_base64
+from syutil.crypto.jsonsign import sign_json, verify_signed_json
+
+import hashlib
+
+
+def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256):
+    hashed = _compute_hash(pdu, hash_algortithm)
+    hashes[hashed.name] = encode_base64(hashed.digest())
+    pdu.hashes = hashes
+    return pdu
+
+
+def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256):
+    """Check whether the hash for this PDU matches the contents"""
+    computed_hash = _compute_hash(pdu, hash_algortithm)
+    if computed_hash.name not in pdu.hashes:
+        raise Exception("Algorithm %s not in hashes %s" % (
+            computed_hash.name, list(pdu.hashes)
+        ))
+    message_hash_base64 = hashes[computed_hash.name]
+    try:
+        message_hash_bytes = decode_base64(message_hash_base64)
+    except:
+        raise Exception("Invalid base64: %s" % (message_hash_base64,))
+    return message_hash_bytes == computed_hash.digest()
+
+
+def _compute_hash(pdu, hash_algorithm):
+    pdu_json = pdu.get_dict()
+    pdu_json.pop("meta", None)
+    pdu_json.pop("signatures", None)
+    hashes = pdu_json.pop("hashes", {})
+    pdu_json_bytes = encode_canonical_json(pdu_json)
+    return hash_algorithm(pdu_json_bytes)
+
+
+def sign_event_pdu(pdu, signature_name, signing_key):
+    tmp_pdu = Pdu(**pdu.get_dict())
+    tmp_pdu = prune_pdu(tmp_pdu)
+    pdu_json = tmp_pdu.get_dict()
+    pdu_jdon = sign_json(pdu_json, signature_name, signing_key)
+    pdu.signatures = pdu_json["signatures"]
+    return pdu
+
+
+def verify_signed_event_pdu(pdu, signature_name, verify_key):
+    tmp_pdu = Pdu(**pdu.get_dict())
+    tmp_pdu = prune_pdu(tmp_pdu)
+    pdu_json = tmp_pdu.get_dict()
+    verify_signed_json(pdu_json, signature_name, verify_key)
-- cgit 1.5.1

From 66104da10c4191aa1e048f2379190574755109e6 Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Thu, 16 Oct 2014 00:09:48 +0100
Subject: Sign outgoing PDUs.
--- synapse/crypto/event_signing.py | 4 ++-- synapse/federation/pdu_codec.py | 6 +++++- synapse/storage/__init__.py | 7 ++++--- synapse/storage/signatures.py | 6 +++--- tests/federation/test_pdu_codec.py | 13 ++++++++++--- tests/rest/test_events.py | 7 +++++-- tests/rest/test_profile.py | 8 ++++++-- tests/rest/test_rooms.py | 32 +++++++++++++++++++++++++------- tests/utils.py | 3 ++- 9 files changed, 62 insertions(+), 24 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 6557727e06..a115967c0a 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -15,6 +15,7 @@ # limitations under the License. +from synapse.federation.units import Pdu from synapse.api.events.utils import prune_pdu from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 @@ -25,8 +26,7 @@ import hashlib def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256): hashed = _compute_hash(pdu, hash_algortithm) - hashes[hashed.name] = encode_base64(hashed.digest()) - pdu.hashes = hashes + pdu.hashes[hashed.name] = encode_base64(hashed.digest()) return pdu diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index cef61108dd..bcac5f9ae8 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -14,6 +14,7 @@ # limitations under the License. from .units import Pdu +from synapse.crypto.event_signing import hash_event_pdu, sign_event_pdu import copy @@ -33,6 +34,7 @@ def encode_event_id(pdu_id, origin): class PduCodec(object): def __init__(self, hs): + self.signing_key = hs.config.signing_key[0] self.server_name = hs.hostname self.event_factory = hs.get_event_factory() self.clock = hs.get_clock() @@ -99,4 +101,6 @@ class PduCodec(object): if "ts" not in kwargs: kwargs["ts"] = int(self.clock.time_msec()) - return Pdu(**kwargs) + pdu = Pdu(**kwargs) + pdu = hash_event_pdu(pdu) + return sign_event_pdu(pdu, self.server_name, self.signing_key) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index bfeab7d1e8..b2a3f0b56c 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -42,6 +42,7 @@ from .transactions import TransactionStore from .keys import KeyStore from .signatures import SignatureStore +from syutil.base64util import decode_base64 import json import logging @@ -168,11 +169,11 @@ class DataStore(RoomMemberStore, RoomStore, txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes, ) - signatures = pdu.sigatures.get(pdu.orgin, {}) + signatures = pdu.signatures.get(pdu.origin, {}) - for key_id, signature_base64 in signatures: + for key_id, signature_base64 in signatures.items(): signature_bytes = decode_base64(signature_base64) - self.store_pdu_origin_signatures_txn( + self._store_pdu_origin_signature_txn( txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes, ) diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index bb860f09f0..1f0a680500 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -47,7 +47,7 @@ class SignatureStore(SQLBaseStore): algorithm (str): Hashing algorithm. hash_bytes (bytes): Hash function output bytes. """ - self._simple_insert_txn(self, txn, "pdu_hashes", { + self._simple_insert_txn(txn, "pdu_hashes", { "pdu_id": pdu_id, "origin": origin, "algorithm": algorithm, @@ -66,7 +66,7 @@ class SignatureStore(SQLBaseStore): query = ( "SELECT key_id, signature" " FROM pdu_origin_signatures" - " WHERE WHERE pdu_id = ? 
and origin = ?" + " WHERE pdu_id = ? and origin = ?" ) txn.execute(query, (pdu_id, origin)) return dict(txn.fetchall()) @@ -81,7 +81,7 @@ class SignatureStore(SQLBaseStore): key_id (str): Id for the signing key. signature (bytes): The signature. """ - self._simple_insert_txn(self, txn, "pdu_origin_signatures", { + self._simple_insert_txn(txn, "pdu_origin_signatures", { "pdu_id": pdu_id, "origin": origin, "key_id": key_id, diff --git a/tests/federation/test_pdu_codec.py b/tests/federation/test_pdu_codec.py index 344e1baf60..80851a4258 100644 --- a/tests/federation/test_pdu_codec.py +++ b/tests/federation/test_pdu_codec.py @@ -23,14 +23,21 @@ from synapse.federation.units import Pdu from synapse.server import HomeServer -from mock import Mock +from mock import Mock, NonCallableMock + +from ..utils import MockKey class PduCodecTestCase(unittest.TestCase): def setUp(self): - self.hs = HomeServer("blargle.net") - self.event_factory = self.hs.get_event_factory() + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + self.hs = HomeServer( + "blargle.net", + config=self.mock_config, + ) + self.event_factory = self.hs.get_event_factory() self.codec = PduCodec(self.hs) def test_decode_event_id(self): diff --git a/tests/rest/test_events.py b/tests/rest/test_events.py index 79b371c04d..362c7bc01c 100644 --- a/tests/rest/test_events.py +++ b/tests/rest/test_events.py @@ -28,7 +28,7 @@ from synapse.server import HomeServer # python imports import json -from ..utils import MockHttpResource, MemoryDataStore +from ..utils import MockHttpResource, MemoryDataStore, MockKey from .utils import RestTestCase from mock import Mock, NonCallableMock @@ -122,6 +122,9 @@ class EventStreamPermissionsTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "test", db_pool=None, @@ -139,7 +142,7 @@ class EventStreamPermissionsTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) diff --git a/tests/rest/test_profile.py b/tests/rest/test_profile.py index b0f48e7fd8..3a0d1e700a 100644 --- a/tests/rest/test_profile.py +++ b/tests/rest/test_profile.py @@ -18,9 +18,9 @@ from tests import unittest from twisted.internet import defer -from mock import Mock +from mock import Mock, NonCallableMock -from ..utils import MockHttpResource +from ..utils import MockHttpResource, MockKey from synapse.api.errors import SynapseError, AuthError from synapse.server import HomeServer @@ -41,6 +41,9 @@ class ProfileTestCase(unittest.TestCase): "set_avatar_url", ]) + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer("test", db_pool=None, http_client=None, @@ -48,6 +51,7 @@ class ProfileTestCase(unittest.TestCase): federation=Mock(), replication_layer=Mock(), datastore=None, + config=self.mock_config, ) def _get_user_by_req(request=None): diff --git a/tests/rest/test_rooms.py b/tests/rest/test_rooms.py index 1ce9b8a83d..7170193051 100644 --- a/tests/rest/test_rooms.py +++ b/tests/rest/test_rooms.py @@ -27,7 +27,7 @@ from synapse.server import HomeServer import json import urllib -from ..utils import MockHttpResource, MemoryDataStore +from ..utils import MockHttpResource, MemoryDataStore, 
MockKey from .utils import RestTestCase from mock import Mock, NonCallableMock @@ -50,6 +50,9 @@ class RoomPermissionsTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -61,7 +64,7 @@ class RoomPermissionsTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -408,6 +411,9 @@ class RoomsMemberListTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -419,7 +425,7 @@ class RoomsMemberListTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -497,6 +503,9 @@ class RoomsCreateTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -508,7 +517,7 @@ class RoomsCreateTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -598,6 +607,9 @@ class RoomTopicTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -609,7 +621,7 @@ class RoomTopicTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -712,6 +724,9 @@ class RoomMemberStateTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -723,7 +738,7 @@ class RoomMemberStateTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -853,6 +868,9 @@ class RoomMessagesTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -864,7 +882,7 @@ class RoomMessagesTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = 
hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) diff --git a/tests/utils.py b/tests/utils.py index 60fd6085ac..d8be73dba8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -118,13 +118,14 @@ class MockHttpResource(HttpServer): class MockKey(object): alg = "mock_alg" version = "mock_version" + signature = b"\x9a\x87$" @property def verify_key(self): return self def sign(self, message): - return b"\x9a\x87$" + return self def verify(self, message, sig): assert sig == b"\x9a\x87$" -- cgit 1.5.1 From c8f996e29ffd7055bc6521ea610fc12ff50502e5 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 17 Oct 2014 11:40:35 +0100 Subject: Hash the same content covered by the signature when referencing previous PDUs rather than reusing the PDU content hashes --- synapse/crypto/event_signing.py | 19 +++++++++++---- synapse/federation/pdu_codec.py | 6 +++-- synapse/storage/__init__.py | 9 ++++++- synapse/storage/pdu.py | 4 ++-- synapse/storage/schema/signatures.sql | 18 ++++++++++++-- synapse/storage/signatures.py | 44 +++++++++++++++++++++++++++++++---- 6 files changed, 84 insertions(+), 16 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index a115967c0a..32d60bd30a 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -24,15 +24,15 @@ from syutil.crypto.jsonsign import sign_json, verify_signed_json import hashlib -def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256): - hashed = _compute_hash(pdu, hash_algortithm) +def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): + hashed = _compute_content_hash(pdu, hash_algorithm) pdu.hashes[hashed.name] = encode_base64(hashed.digest()) return pdu -def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256): +def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" - computed_hash = _compute_hash(pdu, hash_algortithm) + computed_hash = _compute_content_hash(pdu, hash_algortithm) if computed_hash.name not in pdu.hashes: raise Exception("Algorithm %s not in hashes %s" % ( computed_hash.name, list(pdu.hashes) @@ -45,7 +45,7 @@ def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256): return message_hash_bytes == computed_hash.digest() -def _compute_hash(pdu, hash_algorithm): +def _compute_content_hash(pdu, hash_algorithm): pdu_json = pdu.get_dict() pdu_json.pop("meta", None) pdu_json.pop("signatures", None) @@ -54,6 +54,15 @@ def _compute_hash(pdu, hash_algorithm): return hash_algorithm(pdu_json_bytes) +def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256): + tmp_pdu = Pdu(**pdu.get_dict()) + tmp_pdu = prune_pdu(tmp_pdu) + pdu_json = tmp_pdu.get_dict() + pdu_json_bytes = encode_canonical_json(pdu_json) + hashed = hash_algorithm(pdu_json_bytes) + return (hashed.name, hashed.digest()) + + def sign_event_pdu(pdu, signature_name, signing_key): tmp_pdu = Pdu(**pdu.get_dict()) tmp_pdu = prune_pdu(tmp_pdu) diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index 11fd7264b3..7e574f451d 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -14,7 +14,9 @@ # limitations under the License. 
from .units import Pdu -from synapse.crypto.event_signing import hash_event_pdu, sign_event_pdu +from synapse.crypto.event_signing import ( + add_event_pdu_content_hash, sign_event_pdu +) import copy @@ -97,5 +99,5 @@ class PduCodec(object): kwargs["ts"] = int(self.clock.time_msec()) pdu = Pdu(**kwargs) - pdu = hash_event_pdu(pdu) + pdu = add_event_pdu_content_hash(pdu) return sign_event_pdu(pdu, self.server_name, self.signing_key) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index af05b47932..1738260cc1 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -44,6 +44,8 @@ from .signatures import SignatureStore from syutil.base64util import decode_base64 +from synapse.crypto.event_signing import compute_pdu_event_reference_hash + import json import logging import os @@ -165,7 +167,7 @@ class DataStore(RoomMemberStore, RoomStore, for hash_alg, hash_base64 in pdu.hashes.items(): hash_bytes = decode_base64(hash_base64) - self._store_pdu_hash_txn( + self._store_pdu_content_hash_txn( txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes, ) @@ -185,6 +187,11 @@ class DataStore(RoomMemberStore, RoomStore, hash_bytes ) + (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) + self._store_pdu_reference_hash_txn( + txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes + ) + if pdu.is_state: self._persist_state_txn(txn, pdu.prev_pdus, cols) else: diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py index a423b42dbd..3a90c382f0 100644 --- a/synapse/storage/pdu.py +++ b/synapse/storage/pdu.py @@ -69,7 +69,7 @@ class PduStore(SQLBaseStore): edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin) - hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin) + hashes = self._get_pdu_content_hashes_txn(txn, pdu_id, origin) signatures = self._get_pdu_origin_signatures_txn( txn, pdu_id, origin ) @@ -317,7 +317,7 @@ class PduStore(SQLBaseStore): results = [] for pdu_id, origin, depth in txn.fetchall(): - hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin) + hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin) sha256_bytes = hashes["sha256"] prev_hashes = {"sha256": encode_base64(sha256_bytes)} results.append((pdu_id, origin, prev_hashes, depth)) diff --git a/synapse/storage/schema/signatures.sql b/synapse/storage/schema/signatures.sql index a72c4dc35f..1c45a51bec 100644 --- a/synapse/storage/schema/signatures.sql +++ b/synapse/storage/schema/signatures.sql @@ -13,7 +13,7 @@ * limitations under the License. 
*/ -CREATE TABLE IF NOT EXISTS pdu_hashes ( +CREATE TABLE IF NOT EXISTS pdu_content_hashes ( pdu_id TEXT, origin TEXT, algorithm TEXT, @@ -21,7 +21,21 @@ CREATE TABLE IF NOT EXISTS pdu_hashes ( CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) ); -CREATE INDEX IF NOT EXISTS pdu_hashes_id ON pdu_hashes (pdu_id, origin); +CREATE INDEX IF NOT EXISTS pdu_content_hashes_id ON pdu_content_hashes ( + pdu_id, origin +); + +CREATE TABLE IF NOT EXISTS pdu_reference_hashes ( + pdu_id TEXT, + origin TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) +); + +CREATE INDEX IF NOT EXISTS pdu_reference_hashes_id ON pdu_reference_hashes ( + pdu_id, origin +); CREATE TABLE IF NOT EXISTS pdu_origin_signatures ( pdu_id TEXT, diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 1147102489..85eec7ffbe 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -21,7 +21,7 @@ from twisted.internet import defer class SignatureStore(SQLBaseStore): """Persistence for PDU signatures and hashes""" - def _get_pdu_hashes_txn(self, txn, pdu_id, origin): + def _get_pdu_content_hashes_txn(self, txn, pdu_id, origin): """Get all the hashes for a given PDU. Args: txn (cursor): @@ -32,13 +32,14 @@ class SignatureStore(SQLBaseStore): """ query = ( "SELECT algorithm, hash" - " FROM pdu_hashes" + " FROM pdu_content_hashes" " WHERE pdu_id = ? and origin = ?" ) txn.execute(query, (pdu_id, origin)) return dict(txn.fetchall()) - def _store_pdu_hash_txn(self, txn, pdu_id, origin, algorithm, hash_bytes): + def _store_pdu_content_hash_txn(self, txn, pdu_id, origin, algorithm, + hash_bytes): """Store a hash for a PDU Args: txn (cursor): @@ -47,13 +48,48 @@ class SignatureStore(SQLBaseStore): algorithm (str): Hashing algorithm. hash_bytes (bytes): Hash function output bytes. """ - self._simple_insert_txn(txn, "pdu_hashes", { + self._simple_insert_txn(txn, "pdu_content_hashes", { "pdu_id": pdu_id, "origin": origin, "algorithm": algorithm, "hash": buffer(hash_bytes), }) + def _get_pdu_reference_hashes_txn(self, txn, pdu_id, origin): + """Get all the hashes for a given PDU. + Args: + txn (cursor): + pdu_id (str): Id for the PDU. + origin (str): origin of the PDU. + Returns: + A dict of algorithm -> hash. + """ + query = ( + "SELECT algorithm, hash" + " FROM pdu_reference_hashes" + " WHERE pdu_id = ? and origin = ?" + ) + txn.execute(query, (pdu_id, origin)) + return dict(txn.fetchall()) + + def _store_pdu_reference_hash_txn(self, txn, pdu_id, origin, algorithm, + hash_bytes): + """Store a hash for a PDU + Args: + txn (cursor): + pdu_id (str): Id for the PDU. + origin (str): origin of the PDU. + algorithm (str): Hashing algorithm. + hash_bytes (bytes): Hash function output bytes. + """ + self._simple_insert_txn(txn, "pdu_reference_hashes", { + "pdu_id": pdu_id, + "origin": origin, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) + + def _get_pdu_origin_signatures_txn(self, txn, pdu_id, origin): """Get all the signatures for a given PDU. 
Args: -- cgit 1.5.1 From c5cec1cc77029c21f0117c318c522ab320de3923 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 17 Oct 2014 16:50:04 +0100 Subject: Rename 'meta' to 'unsigned' --- docs/server-server/signing.rst | 16 ++++++++-------- synapse/crypto/event_signing.py | 4 +++- synapse/federation/replication.py | 14 +++++++------- synapse/federation/units.py | 1 - 4 files changed, 18 insertions(+), 17 deletions(-) (limited to 'synapse/crypto') diff --git a/docs/server-server/signing.rst b/docs/server-server/signing.rst index dae10f121b..60c701ca91 100644 --- a/docs/server-server/signing.rst +++ b/docs/server-server/signing.rst @@ -1,13 +1,13 @@ Signing JSON ============ -JSON is signed by encoding the JSON object without ``signatures`` or ``meta`` +JSON is signed by encoding the JSON object without ``signatures`` or ``unsigned`` keys using a canonical encoding. The JSON bytes are then signed using the signature algorithm and the signature encoded using base64 with the padding stripped. The resulting base64 signature is added to an object under the *signing key identifier* which is added to the ``signatures`` object under the name of the server signing it which is added back to the original JSON object -along with the ``meta`` object. +along with the ``unsigned`` object. The *signing key identifier* is the concatenation of the *signing algorithm* and a *key version*. The *signing algorithm* identifies the algorithm used to @@ -15,8 +15,8 @@ sign the JSON. The currently support value for *signing algorithm* is ``ed25519`` as implemented by NACL (http://nacl.cr.yp.to/). The *key version* is used to distinguish between different signing keys used by the same entity. -The ``meta`` object and the ``signatures`` object are not covered by the -signature. Therefore intermediate servers can add metadata such as time stamps +The ``unsigned`` object and the ``signatures`` object are not covered by the +signature. Therefore intermediate servers can add unsigneddata such as time stamps and additional signatures. @@ -27,7 +27,7 @@ and additional signatures. "signing_keys": { "ed25519:1": "XSl0kuyvrXNj6A+7/tkrB9sxSbRi08Of5uRhxOqZtEQ" }, - "meta": { + "unsigned": { "retrieved_ts_ms": 922834800000 }, "signatures": { @@ -41,7 +41,7 @@ and additional signatures. def sign_json(json_object, signing_key, signing_name): signatures = json_object.pop("signatures", {}) - meta = json_object.pop("meta", None) + unsigned = json_object.pop("unsigned", None) signed = signing_key.sign(encode_canonical_json(json_object)) signature_base64 = encode_base64(signed.signature) @@ -50,8 +50,8 @@ and additional signatures. 
signatures.setdefault(sigature_name, {})[key_id] = signature_base64 json_object["signatures"] = signatures - if meta is not None: - json_object["meta"] = meta + if unsigned is not None: + json_object["unsigned"] = unsigned return json_object diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 32d60bd30a..a236f7d708 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -47,7 +47,9 @@ def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): def _compute_content_hash(pdu, hash_algorithm): pdu_json = pdu.get_dict() - pdu_json.pop("meta", None) + #TODO: Make "age_ts" key internal + pdu_json.pop("age_ts") + pdu_json.pop("unsigned", None) pdu_json.pop("signatures", None) hashes = pdu_json.pop("hashes", {}) pdu_json_bytes = encode_canonical_json(pdu_json) diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index c4993aa5ee..f2a5d4d5e2 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -295,10 +295,10 @@ class ReplicationLayer(object): transaction = Transaction(**transaction_data) for p in transaction.pdus: - if "meta" in p: - meta = p["meta"] - if "age" in meta: - p["age"] = meta["age"] + if "unsigned" in p: + unsigned = p["unsigned"] + if "age" in unsigned: + p["age"] = unsigned["age"] if "age" in p: p["age_ts"] = int(self._clock.time_msec()) - int(p["age"]) del p["age"] @@ -422,7 +422,7 @@ class ReplicationLayer(object): for p in pdus: if "age_ts" in p: age = time_now - p["age_ts"] - p.setdefault("meta", {})["age"] = int(age) + p.setdefault("unsigned", {})["age"] = int(age) del p["age_ts"] return Transaction( origin=self.server_name, @@ -620,8 +620,8 @@ class _TransactionQueue(object): if "pdus" in data: for p in data["pdus"]: if "age_ts" in p: - meta = p.setdefault("meta", {}) - meta["age"] = now - int(p["age_ts"]) + unsigned = p.setdefault("unsigned", {}) + unsigned["age"] = now - int(p["age_ts"]) del p["age_ts"] return data diff --git a/synapse/federation/units.py b/synapse/federation/units.py index c4a10a4123..c629e5793e 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -72,7 +72,6 @@ class Pdu(JsonEncodedObject): "prev_state_origin", "required_power_level", "user_id", - "meta" ] internal_keys = [ -- cgit 1.5.1 From 8afbece68319728e20c3b32c2f949fd1745d405e Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 17 Oct 2014 19:41:32 +0100 Subject: Remove signatures from pdu when computing hashes to use for prev pdus, make sure is_state is a boolean. 
--- synapse/crypto/event_signing.py | 6 +++++- synapse/federation/units.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index a236f7d708..d3b501c6e7 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -22,6 +22,9 @@ from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json, verify_signed_json import hashlib +import logging + +logger = logging.getLogger(__name__) def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): @@ -48,7 +51,7 @@ def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): def _compute_content_hash(pdu, hash_algorithm): pdu_json = pdu.get_dict() #TODO: Make "age_ts" key internal - pdu_json.pop("age_ts") + pdu_json.pop("age_ts", None) pdu_json.pop("unsigned", None) pdu_json.pop("signatures", None) hashes = pdu_json.pop("hashes", {}) @@ -60,6 +63,7 @@ def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256): tmp_pdu = Pdu(**pdu.get_dict()) tmp_pdu = prune_pdu(tmp_pdu) pdu_json = tmp_pdu.get_dict() + pdu_json.pop("signatures", None) pdu_json_bytes = encode_canonical_json(pdu_json) hashed = hash_algorithm(pdu_json_bytes) return (hashed.name, hashed.digest()) diff --git a/synapse/federation/units.py b/synapse/federation/units.py index b81e162512..b779d259bd 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -101,7 +101,7 @@ class Pdu(JsonEncodedObject): super(Pdu, self).__init__( destinations=destinations, - is_state=is_state, + is_state=bool(is_state), prev_pdus=prev_pdus, outlier=outlier, hashes=hashes, -- cgit 1.5.1 From 5e2236f9ffe3a66bbe0ff37b1793e8fa59a1c475 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Mon, 27 Oct 2014 11:19:15 +0000 Subject: fix pyflakes warnings --- synapse/crypto/event_signing.py | 8 ++++---- synapse/federation/units.py | 2 ++ synapse/storage/signatures.py | 2 -- 3 files changed, 6 insertions(+), 6 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index d3b501c6e7..61edd2c6f9 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -35,12 +35,12 @@ def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" - computed_hash = _compute_content_hash(pdu, hash_algortithm) + computed_hash = _compute_content_hash(pdu, hash_algorithm) if computed_hash.name not in pdu.hashes: raise Exception("Algorithm %s not in hashes %s" % ( computed_hash.name, list(pdu.hashes) )) - message_hash_base64 = hashes[computed_hash.name] + message_hash_base64 = pdu.hashes[computed_hash.name] try: message_hash_bytes = decode_base64(message_hash_base64) except: @@ -54,7 +54,7 @@ def _compute_content_hash(pdu, hash_algorithm): pdu_json.pop("age_ts", None) pdu_json.pop("unsigned", None) pdu_json.pop("signatures", None) - hashes = pdu_json.pop("hashes", {}) + pdu_json.pop("hashes", None) pdu_json_bytes = encode_canonical_json(pdu_json) return hash_algorithm(pdu_json_bytes) @@ -73,7 +73,7 @@ def sign_event_pdu(pdu, signature_name, signing_key): tmp_pdu = Pdu(**pdu.get_dict()) tmp_pdu = prune_pdu(tmp_pdu) pdu_json = tmp_pdu.get_dict() - pdu_jdon = sign_json(pdu_json, signature_name, signing_key) + pdu_json = sign_json(pdu_json, signature_name, signing_key) pdu.signatures = 
pdu_json["signatures"] return pdu diff --git a/synapse/federation/units.py b/synapse/federation/units.py index b779d259bd..adc3385644 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -155,6 +155,8 @@ class Pdu(JsonEncodedObject): return Pdu( prev_pdus=prev_pdus, + hashes=hashes, + signatures=signatures, **args ) else: diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 85eec7ffbe..82be946d3f 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -15,8 +15,6 @@ from _base import SQLBaseStore -from twisted.internet import defer - class SignatureStore(SQLBaseStore): """Persistence for PDU signatures and hashes""" -- cgit 1.5.1 From d30d79b5bed98c7e46852c54875c976d3ac3bc0c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 15:35:39 +0000 Subject: Make prev_event signing work again. --- synapse/crypto/event_signing.py | 13 ++++++++++++- synapse/storage/__init__.py | 11 +++++------ synapse/storage/event_federation.py | 2 +- 3 files changed, 18 insertions(+), 8 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 61edd2c6f9..07e383e221 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -16,11 +16,12 @@ from synapse.federation.units import Pdu -from synapse.api.events.utils import prune_pdu +from synapse.api.events.utils import prune_pdu, prune_event from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json, verify_signed_json +import copy import hashlib import logging @@ -69,6 +70,16 @@ def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256): return (hashed.name, hashed.digest()) +def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): + tmp_event = copy.deepcopy(event) + tmp_event = prune_event(tmp_event) + event_json = tmp_event.get_dict() + event_json.pop("signatures", None) + event_json_bytes = encode_canonical_json(event_json) + hashed = hash_algorithm(event_json_bytes) + return (hashed.name, hashed.digest()) + + def sign_event_pdu(pdu, signature_name, signing_key): tmp_pdu = Pdu(**pdu.get_dict()) tmp_pdu = prune_pdu(tmp_pdu) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index c2560f6045..31a0022d54 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -46,7 +46,7 @@ from .signatures import SignatureStore from syutil.base64util import decode_base64 -from synapse.crypto.event_signing import compute_pdu_event_reference_hash +from synapse.crypto.event_signing import compute_event_reference_hash import json @@ -271,11 +271,10 @@ class DataStore(RoomMemberStore, RoomStore, txn, event.event_id, prev_event_id, alg, hash_bytes ) - # TODO - # (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) - # self._store_event_reference_hash_txn( - # txn, event.event_id, ref_alg, ref_hash_bytes - # ) + (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event) + self._store_event_reference_hash_txn( + txn, event.event_id, ref_alg, ref_hash_bytes + ) self._update_min_depth_for_room_txn(txn, event.room_id, event.depth) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 8357071db6..dcc116bad2 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -69,7 +69,7 @@ class EventFederationStore(SQLBaseStore): results = [] for event_id, depth 
in txn.fetchall(): - hashes = self._get_prev_event_hashes_txn(txn, event_id) + hashes = self._get_event_reference_hashes_txn(txn, event_id) prev_hashes = { k: encode_base64(v) for k, v in hashes.items() if k == "sha256" -- cgit 1.5.1 From ecabff7eb49ea799d9f52fad1e05f1f9a4b31e1c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 17:08:52 +0000 Subject: Sign evnets --- synapse/crypto/event_signing.py | 20 ++++++++++++++++++++ synapse/storage/__init__.py | 6 ++++++ 2 files changed, 26 insertions(+) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 07e383e221..cb2db01c04 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -94,3 +94,23 @@ def verify_signed_event_pdu(pdu, signature_name, verify_key): tmp_pdu = prune_pdu(tmp_pdu) pdu_json = tmp_pdu.get_dict() verify_signed_json(pdu_json, signature_name, verify_key) + + +def add_hashes_and_signatures(event, signature_name, signing_key, + hash_algorithm=hashlib.sha256): + tmp_event = copy.deepcopy(event) + tmp_event = prune_event(tmp_event) + redact_json = tmp_event.get_dict() + redact_json.pop("signatures", None) + redact_json = sign_json(redact_json, signature_name, signing_key) + event.signatures = redact_json["signatures"] + + event_json = event.get_full_dict() + #TODO: We need to sign the JSON that is going out via fedaration. + event_json.pop("age_ts", None) + event_json.pop("unsigned", None) + event_json.pop("signatures", None) + event_json.pop("hashes", None) + event_json_bytes = encode_canonical_json(event_json) + hashed = hash_algorithm(event_json_bytes) + event.hashes[hashed.name] = encode_base64(hashed.digest()) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 31a0022d54..1f39a4094e 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -255,6 +255,12 @@ class DataStore(RoomMemberStore, RoomStore, } ) + for hash_alg, hash_base64 in event.hashes.items(): + hash_bytes = decode_base64(hash_base64) + self._store_event_content_hash_txn( + txn, event.event_id, hash_alg, hash_bytes, + ) + if hasattr(event, "signatures"): signatures = event.signatures.get(event.origin, {}) -- cgit 1.5.1 From 82a6b83524ed2f7cb12bd2fc43a2651558c392dd Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 3 Nov 2014 11:32:12 +0000 Subject: Don't assume event has hashes key already --- synapse/crypto/event_signing.py | 2 ++ 1 file changed, 2 insertions(+) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index cb2db01c04..0e8bc7eb6c 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -113,4 +113,6 @@ def add_hashes_and_signatures(event, signature_name, signing_key, event_json.pop("hashes", None) event_json_bytes = encode_canonical_json(event_json) hashed = hash_algorithm(event_json_bytes) + if not hasattr(event, "hashes"): + event.hashes = {} event.hashes[hashed.name] = encode_base64(hashed.digest()) -- cgit 1.5.1 From 68698e0ac8c39083f6ab7d377a48b5bead3d3598 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 3 Nov 2014 17:51:42 +0000 Subject: Fix bugs in generating event signatures and hashing --- scripts/check_event_hash.py | 12 +++-- scripts/check_signature.py | 1 - synapse/api/events/__init__.py | 1 + synapse/crypto/event_signing.py | 100 +++++++++++++++------------------------- synapse/federation/pdu_codec.py | 13 +----- synapse/federation/units.py | 11 +---- 6 files changed, 50 insertions(+), 88 
deletions(-) (limited to 'synapse/crypto') diff --git a/scripts/check_event_hash.py b/scripts/check_event_hash.py index 9fa4452ee6..7c32f8102a 100644 --- a/scripts/check_event_hash.py +++ b/scripts/check_event_hash.py @@ -6,6 +6,7 @@ import hashlib import sys import json + class dictobj(dict): def __init__(self, *args, **kargs): dict.__init__(self, *args, **kargs) @@ -14,9 +15,12 @@ class dictobj(dict): def get_dict(self): return dict(self) + def get_full_dict(self): + return dict(self) + def main(): - parser = parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser() parser.add_argument("input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin) args = parser.parse_args() @@ -29,14 +33,14 @@ def main(): } for alg_name in event_json.hashes: - if check_event_pdu_content_hash(event_json, algorithms[alg_name]): + if check_event_content_hash(event_json, algorithms[alg_name]): print "PASS content hash %s" % (alg_name,) else: print "FAIL content hash %s" % (alg_name,) for algorithm in algorithms.values(): - name, h_bytes = compute_pdu_event_reference_hash(event_json, algorithm) - print "Reference hash %s: %s" % (name, encode_base64(bytes)) + name, h_bytes = compute_event_reference_hash(event_json, algorithm) + print "Reference hash %s: %s" % (name, encode_base64(h_bytes)) if __name__=="__main__": main() diff --git a/scripts/check_signature.py b/scripts/check_signature.py index e7964e7e71..e146e18e24 100644 --- a/scripts/check_signature.py +++ b/scripts/check_signature.py @@ -1,5 +1,4 @@ -from synapse.crypto.event_signing import verify_signed_event_pdu from syutil.crypto.jsonsign import verify_signed_json from syutil.crypto.signing_key import ( decode_verify_key_bytes, write_signing_keys diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index b855811b98..168b812311 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -61,6 +61,7 @@ class SynapseEvent(JsonEncodedObject): "prev_content", "prev_state", "redacted_because", + "origin_server_ts", ] internal_keys = [ diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 0e8bc7eb6c..de5d2e7465 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -15,11 +15,11 @@ # limitations under the License. 
-from synapse.federation.units import Pdu -from synapse.api.events.utils import prune_pdu, prune_event +from synapse.api.events.utils import prune_event from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 -from syutil.crypto.jsonsign import sign_json, verify_signed_json +from syutil.crypto.jsonsign import sign_json +from synapse.api.events.room import GenericEvent import copy import hashlib @@ -28,20 +28,14 @@ import logging logger = logging.getLogger(__name__) -def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): - hashed = _compute_content_hash(pdu, hash_algorithm) - pdu.hashes[hashed.name] = encode_base64(hashed.digest()) - return pdu - - -def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): +def check_event_content_hash(event, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" - computed_hash = _compute_content_hash(pdu, hash_algorithm) - if computed_hash.name not in pdu.hashes: + computed_hash = _compute_content_hash(event, hash_algorithm) + if computed_hash.name not in event.hashes: raise Exception("Algorithm %s not in hashes %s" % ( - computed_hash.name, list(pdu.hashes) + computed_hash.name, list(event.hashes) )) - message_hash_base64 = pdu.hashes[computed_hash.name] + message_hash_base64 = event.hashes[computed_hash.name] try: message_hash_bytes = decode_base64(message_hash_base64) except: @@ -49,70 +43,52 @@ def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): return message_hash_bytes == computed_hash.digest() -def _compute_content_hash(pdu, hash_algorithm): - pdu_json = pdu.get_dict() - #TODO: Make "age_ts" key internal - pdu_json.pop("age_ts", None) - pdu_json.pop("unsigned", None) - pdu_json.pop("signatures", None) - pdu_json.pop("hashes", None) - pdu_json_bytes = encode_canonical_json(pdu_json) - return hash_algorithm(pdu_json_bytes) - - -def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256): - tmp_pdu = Pdu(**pdu.get_dict()) - tmp_pdu = prune_pdu(tmp_pdu) - pdu_json = tmp_pdu.get_dict() - pdu_json.pop("signatures", None) - pdu_json_bytes = encode_canonical_json(pdu_json) - hashed = hash_algorithm(pdu_json_bytes) - return (hashed.name, hashed.digest()) +def _compute_content_hash(event, hash_algorithm): + event_json = event.get_full_dict() + #TODO: We need to sign the JSON that is going out via fedaration. + event_json.pop("age_ts", None) + event_json.pop("unsigned", None) + event_json.pop("signatures", None) + event_json.pop("hashes", None) + event_json_bytes = encode_canonical_json(event_json) + return hash_algorithm(event_json_bytes) def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): - tmp_event = copy.deepcopy(event) + # FIXME(erikj): GenericEvent! 
+ tmp_event = GenericEvent(**event.get_full_dict()) tmp_event = prune_event(tmp_event) event_json = tmp_event.get_dict() event_json.pop("signatures", None) + event_json.pop("age_ts", None) + event_json.pop("unsigned", None) event_json_bytes = encode_canonical_json(event_json) hashed = hash_algorithm(event_json_bytes) return (hashed.name, hashed.digest()) -def sign_event_pdu(pdu, signature_name, signing_key): - tmp_pdu = Pdu(**pdu.get_dict()) - tmp_pdu = prune_pdu(tmp_pdu) - pdu_json = tmp_pdu.get_dict() - pdu_json = sign_json(pdu_json, signature_name, signing_key) - pdu.signatures = pdu_json["signatures"] - return pdu - - -def verify_signed_event_pdu(pdu, signature_name, verify_key): - tmp_pdu = Pdu(**pdu.get_dict()) - tmp_pdu = prune_pdu(tmp_pdu) - pdu_json = tmp_pdu.get_dict() - verify_signed_json(pdu_json, signature_name, verify_key) - - -def add_hashes_and_signatures(event, signature_name, signing_key, - hash_algorithm=hashlib.sha256): +def compute_event_signature(event, signature_name, signing_key): tmp_event = copy.deepcopy(event) tmp_event = prune_event(tmp_event) - redact_json = tmp_event.get_dict() + redact_json = tmp_event.get_full_dict() redact_json.pop("signatures", None) + redact_json.pop("age_ts", None) + redact_json.pop("unsigned", None) + logger.debug("Signing event: %s", redact_json) redact_json = sign_json(redact_json, signature_name, signing_key) - event.signatures = redact_json["signatures"] + return redact_json["signatures"] + + +def add_hashes_and_signatures(event, signature_name, signing_key, + hash_algorithm=hashlib.sha256): + hashed = _compute_content_hash(event, hash_algorithm=hash_algorithm) - event_json = event.get_full_dict() - #TODO: We need to sign the JSON that is going out via fedaration. - event_json.pop("age_ts", None) - event_json.pop("unsigned", None) - event_json.pop("signatures", None) - event_json.pop("hashes", None) - event_json_bytes = encode_canonical_json(event_json) - hashed = hash_algorithm(event_json_bytes) if not hasattr(event, "hashes"): event.hashes = {} event.hashes[hashed.name] = encode_base64(hashed.digest()) + + event.signatures = compute_event_signature( + event, + signature_name=signature_name, + signing_key=signing_key, + ) diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index 5ec97a698e..52c84efb5b 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -14,10 +14,6 @@ # limitations under the License. from .units import Pdu -from synapse.crypto.event_signing import ( - add_event_pdu_content_hash, sign_event_pdu -) -from synapse.types import EventID import copy @@ -49,17 +45,10 @@ class PduCodec(object): def pdu_from_event(self, event): d = event.get_full_dict() - if hasattr(event, "state_key"): - d["is_state"] = True - kwargs = copy.deepcopy(event.unrecognized_keys) kwargs.update({ k: v for k, v in d.items() }) - if "origin_server_ts" not in kwargs: - kwargs["origin_server_ts"] = int(self.clock.time_msec()) - pdu = Pdu(**kwargs) - pdu = add_event_pdu_content_hash(pdu) - return sign_event_pdu(pdu, self.server_name, self.signing_key) + return pdu diff --git a/synapse/federation/units.py b/synapse/federation/units.py index c94dcf64cf..c2d8dca8f3 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -65,8 +65,7 @@ class Pdu(JsonEncodedObject): "content", "outlier", "hashes", - "signatures", - "is_state", # Below this are keys valid only for State Pdus. + "signatures", # Below this are keys valid only for State Pdus. 
"state_key", "prev_state", "required_power_level", @@ -91,16 +90,10 @@ class Pdu(JsonEncodedObject): # TODO: We need to make this properly load content rather than # just leaving it as a dict. (OR DO WE?!) - def __init__(self, destinations=[], is_state=False, prev_events=[], + def __init__(self, destinations=[], prev_events=[], outlier=False, hashes={}, signatures={}, **kwargs): - if is_state: - for required_key in ["state_key"]: - if required_key not in kwargs: - raise RuntimeError("Key %s is required" % required_key) - super(Pdu, self).__init__( destinations=destinations, - is_state=bool(is_state), prev_events=prev_events, outlier=outlier, hashes=hashes, -- cgit 1.5.1 From 97a096b507b92d70a2e07d49122b4f5d93b7dac4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 7 Nov 2014 11:36:40 +0000 Subject: Add hash of current state to events --- synapse/api/events/__init__.py | 1 + synapse/crypto/event_signing.py | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) (limited to 'synapse/crypto') diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index e5980c4be3..8d65c29ac1 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -75,6 +75,7 @@ class SynapseEvent(JsonEncodedObject): "signatures", "prev_state", "auth_events", + "state_hash", ] required_keys = [ diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index de5d2e7465..7d800615fe 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -45,7 +45,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256): def _compute_content_hash(event, hash_algorithm): event_json = event.get_full_dict() - #TODO: We need to sign the JSON that is going out via fedaration. + # TODO: We need to sign the JSON that is going out via fedaration. event_json.pop("age_ts", None) event_json.pop("unsigned", None) event_json.pop("signatures", None) @@ -81,6 +81,15 @@ def compute_event_signature(event, signature_name, signing_key): def add_hashes_and_signatures(event, signature_name, signing_key, hash_algorithm=hashlib.sha256): + if hasattr(event, "old_state_events"): + state_json_bytes = encode_canonical_json( + [e.event_id for e in event.old_state_events.values()] + ) + hashed = hash_algorithm(state_json_bytes) + event.state_hash = { + hashed.name: encode_base64(hashed.digest()) + } + hashed = _compute_content_hash(event, hash_algorithm=hash_algorithm) if not hasattr(event, "hashes"): -- cgit 1.5.1 From 1c06806f90a6368cdc3b9fa3b9053021b7c40e94 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Nov 2014 10:21:32 +0000 Subject: Finish redaction algorithm. --- synapse/api/events/__init__.py | 4 ++-- synapse/api/events/utils.py | 39 ++++++++++++++++++++++++++------------- synapse/crypto/event_signing.py | 7 ++----- synapse/federation/units.py | 6 ++---- synapse/storage/_base.py | 2 +- 5 files changed, 33 insertions(+), 25 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index 8d65c29ac1..f1e53f23ab 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -86,8 +86,8 @@ class SynapseEvent(JsonEncodedObject): def __init__(self, raises=True, **kwargs): super(SynapseEvent, self).__init__(**kwargs) - if "content" in kwargs: - self.check_json(self.content, raises=raises) + # if "content" in kwargs: + # self.check_json(self.content, raises=raises) def get_content_template(self): """ Retrieve the JSON template for this event as a dict. 
diff --git a/synapse/api/events/utils.py b/synapse/api/events/utils.py index 5fc79105b5..802648f8f7 100644 --- a/synapse/api/events/utils.py +++ b/synapse/api/events/utils.py @@ -18,24 +18,31 @@ from .room import ( RoomAliasesEvent, RoomCreateEvent, ) + def prune_event(event): - """ Prunes the given event of all keys we don't know about or think could - potentially be dodgy. + """ Returns a pruned version of the given event, which removes all keys we + don't know about or think could potentially be dodgy. This is used when we "redact" an event. We want to remove all fields that the user has specified, but we do want to keep necessary information like type, state_key etc. """ - return _prune_event_or_pdu(event.type, event) - -def prune_pdu(pdu): - """Removes keys that contain unrestricted and non-essential data from a PDU - """ - return _prune_event_or_pdu(pdu.type, pdu) + event_type = event.type -def _prune_event_or_pdu(event_type, event): - # Remove all extraneous fields. - event.unrecognized_keys = {} + allowed_keys = [ + "event_id", + "user_id", + "room_id", + "hashes", + "signatures", + "content", + "type", + "state_key", + "depth", + "prev_events", + "prev_state", + "auth_events", + ] new_content = {} @@ -65,6 +72,12 @@ def _prune_event_or_pdu(event_type, event): elif event_type == RoomAliasesEvent.TYPE: add_fields("aliases") - event.content = new_content + allowed_fields = { + k: v + for k, v in event.get_full_dict().items() + if k in allowed_keys + } + + allowed_fields["content"] = new_content - return event + return type(event)(**allowed_fields) diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 7d800615fe..056e8f6ca4 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -55,9 +55,7 @@ def _compute_content_hash(event, hash_algorithm): def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): - # FIXME(erikj): GenericEvent! - tmp_event = GenericEvent(**event.get_full_dict()) - tmp_event = prune_event(tmp_event) + tmp_event = prune_event(event) event_json = tmp_event.get_dict() event_json.pop("signatures", None) event_json.pop("age_ts", None) @@ -68,8 +66,7 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): def compute_event_signature(event, signature_name, signing_key): - tmp_event = copy.deepcopy(event) - tmp_event = prune_event(tmp_event) + tmp_event = prune_event(event) redact_json = tmp_event.get_full_dict() redact_json.pop("signatures", None) redact_json.pop("age_ts", None) diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 2070ffe1e2..d98014cac7 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -56,17 +56,15 @@ class Pdu(JsonEncodedObject): "origin_server_ts", "type", "destinations", - "transaction_id", "prev_events", "depth", "content", - "outlier", "hashes", + "user_id", + "auth_events", "signatures", # Below this are keys valid only for State Pdus. 
"state_key", "prev_state", - "required_power_level", - "user_id", ] internal_keys = [ diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 9aa404695d..3ab81a78d5 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -509,7 +509,7 @@ class SQLBaseStore(object): ) if del_evs: - prune_event(ev) + ev = prune_event(ev) ev.redacted_because = del_evs[0] return events -- cgit 1.5.1 From 6cb6cb9e6908ad9b71ebd63ca535eb6c7c48be86 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Nov 2014 10:31:00 +0000 Subject: Tidy up some of the unused sql tables --- synapse/crypto/event_signing.py | 2 -- synapse/storage/__init__.py | 21 ++----------- synapse/storage/room.py | 27 ---------------- synapse/storage/schema/im.sql | 68 ++++------------------------------------- 4 files changed, 9 insertions(+), 109 deletions(-) (limited to 'synapse/crypto') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 056e8f6ca4..baa93b0ee4 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -19,9 +19,7 @@ from synapse.api.events.utils import prune_event from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json -from synapse.api.events.room import GenericEvent -import copy import hashlib import logging diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 96adf20c89..7d810e6a62 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -16,9 +16,7 @@ from twisted.internet import defer from synapse.api.events.room import ( - RoomMemberEvent, RoomTopicEvent, FeedbackEvent, - RoomNameEvent, - RoomJoinRulesEvent, + RoomMemberEvent, RoomTopicEvent, FeedbackEvent, RoomNameEvent, RoomRedactionEvent, ) @@ -95,8 +93,7 @@ class DataStore(RoomMemberStore, RoomStore, @defer.inlineCallbacks @log_function - def persist_event(self, event=None, backfilled=False, pdu=None, - is_new_state=True): + def persist_event(self, event, backfilled=False, is_new_state=True): stream_ordering = None if backfilled: if not self.min_token_deferred.called: @@ -107,8 +104,7 @@ class DataStore(RoomMemberStore, RoomStore, try: yield self.runInteraction( "persist_event", - self._persist_pdu_event_txn, - pdu=pdu, + self._persist_event_txn, event=event, backfilled=backfilled, stream_ordering=stream_ordering, @@ -139,15 +135,6 @@ class DataStore(RoomMemberStore, RoomStore, event = self._parse_event_from_row(events_dict) defer.returnValue(event) - def _persist_pdu_event_txn(self, txn, pdu=None, event=None, - backfilled=False, stream_ordering=None, - is_new_state=True): - if event is not None: - return self._persist_event_txn( - txn, event, backfilled, stream_ordering, - is_new_state=is_new_state, - ) - @log_function def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None, is_new_state=True): @@ -159,8 +146,6 @@ class DataStore(RoomMemberStore, RoomStore, self._store_room_name_txn(txn, event) elif event.type == RoomTopicEvent.TYPE: self._store_room_topic_txn(txn, event) - elif event.type == RoomJoinRulesEvent.TYPE: - self._store_join_rule(txn, event) elif event.type == RoomRedactionEvent.TYPE: self._store_redaction(txn, event) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 0c83c11ad3..ca70506d28 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -132,22 +132,6 @@ class RoomStore(SQLBaseStore): defer.returnValue(ret) - @defer.inlineCallbacks - def 
get_room_join_rule(self, room_id): - sql = ( - "SELECT join_rule FROM room_join_rules as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? " - ) - - rows = yield self._execute(None, sql, room_id) - - if len(rows) == 1: - defer.returnValue(rows[0][0]) - else: - defer.returnValue(None) - def _store_room_topic_txn(self, txn, event): self._simple_insert_txn( txn, @@ -170,17 +154,6 @@ class RoomStore(SQLBaseStore): } ) - def _store_join_rule(self, txn, event): - self._simple_insert_txn( - txn, - "room_join_rules", - { - "event_id": event.event_id, - "room_id": event.room_id, - "join_rule": event.content["join_rule"], - }, - ) - class RoomsTable(Table): table_name = "rooms" diff --git a/synapse/storage/schema/im.sql b/synapse/storage/schema/im.sql index 8d6f655993..8ba732a23b 100644 --- a/synapse/storage/schema/im.sql +++ b/synapse/storage/schema/im.sql @@ -85,80 +85,24 @@ CREATE TABLE IF NOT EXISTS topics( topic TEXT NOT NULL ); +CREATE INDEX IF NOT EXISTS topics_event_id ON topics(event_id); +CREATE INDEX IF NOT EXISTS topics_room_id ON topics(room_id); + CREATE TABLE IF NOT EXISTS room_names( event_id TEXT NOT NULL, room_id TEXT NOT NULL, name TEXT NOT NULL ); +CREATE INDEX IF NOT EXISTS room_names_event_id ON room_names(event_id); +CREATE INDEX IF NOT EXISTS room_names_room_id ON room_names(room_id); + CREATE TABLE IF NOT EXISTS rooms( room_id TEXT PRIMARY KEY NOT NULL, is_public INTEGER, creator TEXT ); -CREATE TABLE IF NOT EXISTS room_join_rules( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - join_rule TEXT NOT NULL -); -CREATE INDEX IF NOT EXISTS room_join_rules_event_id ON room_join_rules(event_id); -CREATE INDEX IF NOT EXISTS room_join_rules_room_id ON room_join_rules(room_id); - - -CREATE TABLE IF NOT EXISTS room_power_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - user_id TEXT NOT NULL, - level INTEGER NOT NULL -); -CREATE INDEX IF NOT EXISTS room_power_levels_event_id ON room_power_levels(event_id); -CREATE INDEX IF NOT EXISTS room_power_levels_room_id ON room_power_levels(room_id); -CREATE INDEX IF NOT EXISTS room_power_levels_room_user ON room_power_levels(room_id, user_id); - - -CREATE TABLE IF NOT EXISTS room_default_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_default_levels_event_id ON room_default_levels(event_id); -CREATE INDEX IF NOT EXISTS room_default_levels_room_id ON room_default_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_add_state_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_add_state_levels_event_id ON room_add_state_levels(event_id); -CREATE INDEX IF NOT EXISTS room_add_state_levels_room_id ON room_add_state_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_send_event_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_send_event_levels_event_id ON room_send_event_levels(event_id); -CREATE INDEX IF NOT EXISTS room_send_event_levels_room_id ON room_send_event_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_ops_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - ban_level INTEGER, - kick_level INTEGER, - redact_level INTEGER -); - -CREATE INDEX IF NOT EXISTS room_ops_levels_event_id ON room_ops_levels(event_id); -CREATE INDEX IF NOT EXISTS room_ops_levels_room_id ON room_ops_levels(room_id); - - CREATE TABLE IF NOT EXISTS room_hosts( room_id TEXT 
NOT NULL, host TEXT NOT NULL, -- cgit 1.5.1
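
The series above converges on three related artifacts per event: a content hash computed over the full event minus its signatures, hashes and unsigned data; a reference hash computed over the pruned (redacted) event, which is what prev_events entries carry; and an origin signature computed over the same pruned form. The sketch below restates that flow outside Synapse's event classes, as an illustration only: it works on plain dicts, approximates syutil's encode_canonical_json with json.dumps, and omits the per-event-type content filtering and the actual ed25519 signing step (which sign_json performs in the real code).

import copy
import hashlib
import json
from base64 import b64encode

# Stand-in for syutil.jsonutil.encode_canonical_json used in the diffs
# above: sorted keys, no insignificant whitespace.  (The real helper also
# enforces UTF-8 output and rejects floats.)
def canonical_json(value):
    return json.dumps(value, sort_keys=True, separators=(",", ":")).encode("utf-8")

# Keys kept by prune_event() in synapse/api/events/utils.py above; the
# real function also filters "content" per event type, skipped here.
KEPT_KEYS = {
    "event_id", "user_id", "room_id", "hashes", "signatures", "content",
    "type", "state_key", "depth", "prev_events", "prev_state", "auth_events",
}

def content_hash(event):
    """Hash over the full event, minus signatures/hashes/unsigned data."""
    tmp = copy.deepcopy(event)
    for key in ("age_ts", "unsigned", "signatures", "hashes"):
        tmp.pop(key, None)
    return hashlib.sha256(canonical_json(tmp)).digest()

def reference_hash(event):
    """Hash over the pruned event; prev_events entries point at this."""
    tmp = {k: v for k, v in event.items() if k in KEPT_KEYS}
    tmp.pop("signatures", None)
    return hashlib.sha256(canonical_json(tmp)).digest()

def unpadded_b64(data):
    # syutil's encode_base64 strips the trailing "=" padding.
    return b64encode(data).decode().rstrip("=")

event = {
    "type": "m.room.message",
    "room_id": "!room:example.com",
    "user_id": "@alice:example.com",
    "content": {"body": "hello"},
    "depth": 5,
    "prev_events": [],
}
event.setdefault("hashes", {})["sha256"] = unpadded_b64(content_hash(event))
# The real code then signs the pruned event with
# syutil.crypto.jsonsign.sign_json(pruned_json, server_name, signing_key)
# and stores the result under event["signatures"].
print("reference hash:", unpadded_b64(reference_hash(event)))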