From 5fefc12d1e2da56895d5652e3d7516ac59ab8824 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Tue, 14 Oct 2014 16:59:51 +0100
Subject: Begin implementing state groups.

---
 synapse/storage/__init__.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

(limited to 'synapse/storage/__init__.py')

diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index 6dadeb8cce..10456688ef 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -40,6 +40,7 @@ from .stream import StreamStore
 from .pdu import StatePduStore, PduStore, PdusTable
 from .transactions import TransactionStore
 from .keys import KeyStore
+from .state import StateStore
 
 import json
 import logging
@@ -59,6 +60,7 @@ SCHEMAS = [
     "room_aliases",
     "keys",
     "redactions",
+    "state",
 ]
 
 
@@ -76,7 +78,7 @@ class _RollbackButIsFineException(Exception):
 class DataStore(RoomMemberStore, RoomStore,
                 RegistrationStore, StreamStore, ProfileStore, FeedbackStore,
                 PresenceStore, PduStore, StatePduStore, TransactionStore,
-                DirectoryStore, KeyStore):
+                DirectoryStore, KeyStore, StateStore):
 
     def __init__(self, hs):
         super(DataStore, self).__init__(hs)
@@ -222,6 +224,8 @@ class DataStore(RoomMemberStore, RoomStore,
             )
             raise _RollbackButIsFineException("_persist_event")
 
+        self._store_state_groups_txn(txn, event)
+
         is_state = hasattr(event, "state_key") and event.state_key is not None
         if is_new_state and is_state:
             vals = {
-- 
cgit 1.5.1


From 1c445f88f64beabf0bd9bec3950a4a4c0d529e8a Mon Sep 17 00:00:00 2001
From: Mark Haines
Date: Wed, 15 Oct 2014 17:09:04 +0100
Subject: persist hashes and origin signatures for PDUs

---
 synapse/api/events/utils.py           | 23 ++++++++----
 synapse/crypto/event_signing.py       | 70 +++++++++++++++++++++++++++++++++++
 synapse/federation/units.py           | 17 ++++++++-
 synapse/storage/__init__.py           | 21 ++++++++++-
 synapse/storage/pdu.py                | 11 +++++-
 synapse/storage/schema/signatures.sql |  4 +-
 tests/federation/test_federation.py   |  4 +-
 7 files changed, 135 insertions(+), 15 deletions(-)
 create mode 100644 synapse/crypto/event_signing.py

(limited to 'synapse/storage/__init__.py')

diff --git a/synapse/api/events/utils.py b/synapse/api/events/utils.py
index c3a32be8c1..7fdf45a264 100644
--- a/synapse/api/events/utils.py
+++ b/synapse/api/events/utils.py
@@ -27,7 +27,14 @@ def prune_event(event):
     the user has specified, but we do want to keep necessary information like
     type, state_key etc.
     """
+    return _prune_event_or_pdu(event.type, event)
 
+def prune_pdu(pdu):
+    """Removes keys that contain unrestricted and non-essential data from a PDU
+    """
+    return _prune_event_or_pdu(pdu.pdu_type, pdu)
+
+def _prune_event_or_pdu(event_type, event):
     # Remove all extraneous fields.
     event.unrecognized_keys = {}
 
@@ -38,25 +45,25 @@ def prune_event(event):
         if field in event.content:
             new_content[field] = event.content[field]
 
-    if event.type == RoomMemberEvent.TYPE:
+    if event_type == RoomMemberEvent.TYPE:
         add_fields("membership")
-    elif event.type == RoomCreateEvent.TYPE:
+    elif event_type == RoomCreateEvent.TYPE:
         add_fields("creator")
-    elif event.type == RoomJoinRulesEvent.TYPE:
+    elif event_type == RoomJoinRulesEvent.TYPE:
         add_fields("join_rule")
-    elif event.type == RoomPowerLevelsEvent.TYPE:
+    elif event_type == RoomPowerLevelsEvent.TYPE:
         # TODO: Actually check these are valid user_ids etc.
add_fields("default") for k, v in event.content.items(): if k.startswith("@") and isinstance(v, (int, long)): new_content[k] = v - elif event.type == RoomAddStateLevelEvent.TYPE: + elif event_type == RoomAddStateLevelEvent.TYPE: add_fields("level") - elif event.type == RoomSendEventLevelEvent.TYPE: + elif event_type == RoomSendEventLevelEvent.TYPE: add_fields("level") - elif event.type == RoomOpsPowerLevelsEvent.TYPE: + elif event_type == RoomOpsPowerLevelsEvent.TYPE: add_fields("kick_level", "ban_level", "redact_level") - elif event.type == RoomAliasesEvent.TYPE: + elif event_type == RoomAliasesEvent.TYPE: add_fields("aliases") event.content = new_content diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py new file mode 100644 index 0000000000..6557727e06 --- /dev/null +++ b/synapse/crypto/event_signing.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from synapse.api.events.utils import prune_pdu +from syutil.jsonutil import encode_canonical_json +from syutil.base64util import encode_base64, decode_base64 +from syutil.crypto.jsonsign import sign_json, verify_signed_json + +import hashlib + + +def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256): + hashed = _compute_hash(pdu, hash_algortithm) + hashes[hashed.name] = encode_base64(hashed.digest()) + pdu.hashes = hashes + return pdu + + +def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256): + """Check whether the hash for this PDU matches the contents""" + computed_hash = _compute_hash(pdu, hash_algortithm) + if computed_hash.name not in pdu.hashes: + raise Exception("Algorithm %s not in hashes %s" % ( + computed_hash.name, list(pdu.hashes) + )) + message_hash_base64 = hashes[computed_hash.name] + try: + message_hash_bytes = decode_base64(message_hash_base64) + except: + raise Exception("Invalid base64: %s" % (message_hash_base64,)) + return message_hash_bytes == computed_hash.digest() + + +def _compute_hash(pdu, hash_algorithm): + pdu_json = pdu.get_dict() + pdu_json.pop("meta", None) + pdu_json.pop("signatures", None) + hashes = pdu_json.pop("hashes", {}) + pdu_json_bytes = encode_canonical_json(pdu_json) + return hash_algorithm(pdu_json_bytes) + + +def sign_event_pdu(pdu, signature_name, signing_key): + tmp_pdu = Pdu(**pdu.get_dict()) + tmp_pdu = prune_pdu(tmp_pdu) + pdu_json = tmp_pdu.get_dict() + pdu_jdon = sign_json(pdu_json, signature_name, signing_key) + pdu.signatures = pdu_json["signatures"] + return pdu + + +def verify_signed_event_pdu(pdu, signature_name, verify_key): + tmp_pdu = Pdu(**pdu.get_dict()) + tmp_pdu = prune_pdu(tmp_pdu) + pdu_json = tmp_pdu.get_dict() + verify_signed_json(pdu_json, signature_name, verify_key) diff --git a/synapse/federation/units.py b/synapse/federation/units.py index d97aeb698e..3518efb215 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -18,6 +18,7 @@ server protocol. 
""" from synapse.util.jsonobject import JsonEncodedObject +from syutil.base64util import encode_base64 import logging import json @@ -63,6 +64,8 @@ class Pdu(JsonEncodedObject): "depth", "content", "outlier", + "hashes", + "signatures", "is_state", # Below this are keys valid only for State Pdus. "state_key", "power_level", @@ -91,7 +94,7 @@ class Pdu(JsonEncodedObject): # just leaving it as a dict. (OR DO WE?!) def __init__(self, destinations=[], is_state=False, prev_pdus=[], - outlier=False, **kwargs): + outlier=False, hashes={}, signatures={}, **kwargs): if is_state: for required_key in ["state_key"]: if required_key not in kwargs: @@ -102,6 +105,8 @@ class Pdu(JsonEncodedObject): is_state=is_state, prev_pdus=prev_pdus, outlier=outlier, + hashes=hashes, + signatures=signatures, **kwargs ) @@ -126,6 +131,16 @@ class Pdu(JsonEncodedObject): if "unrecognized_keys" in d and d["unrecognized_keys"]: args.update(json.loads(d["unrecognized_keys"])) + hashes = { + alg: encode_base64(hsh) + for alg, hsh in pdu_tuple.hashes.items() + } + + signatures = { + kid: encode_base64(sig) + for kid, sig in pdu_tuple.signatures.items() + } + return Pdu( prev_pdus=pdu_tuple.prev_pdu_list, **args diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 6dadeb8cce..bfeab7d1e8 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -40,6 +40,8 @@ from .stream import StreamStore from .pdu import StatePduStore, PduStore, PdusTable from .transactions import TransactionStore from .keys import KeyStore +from .signatures import SignatureStore + import json import logging @@ -59,6 +61,7 @@ SCHEMAS = [ "room_aliases", "keys", "redactions", + "signatures", ] @@ -76,7 +79,7 @@ class _RollbackButIsFineException(Exception): class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, PresenceStore, PduStore, StatePduStore, TransactionStore, - DirectoryStore, KeyStore): + DirectoryStore, KeyStore, SignatureStore): def __init__(self, hs): super(DataStore, self).__init__(hs) @@ -144,6 +147,8 @@ class DataStore(RoomMemberStore, RoomStore, def _persist_event_pdu_txn(self, txn, pdu): cols = dict(pdu.__dict__) unrec_keys = dict(pdu.unrecognized_keys) + del cols["hashes"] + del cols["signatures"] del cols["content"] del cols["prev_pdus"] cols["content_json"] = json.dumps(pdu.content) @@ -157,6 +162,20 @@ class DataStore(RoomMemberStore, RoomStore, logger.debug("Persisting: %s", repr(cols)) + for hash_alg, hash_base64 in pdu.hashes.items(): + hash_bytes = decode_base64(hash_base64) + self._store_pdu_hash_txn( + txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes, + ) + + signatures = pdu.sigatures.get(pdu.orgin, {}) + + for key_id, signature_base64 in signatures: + signature_bytes = decode_base64(signature_base64) + self.store_pdu_origin_signatures_txn( + txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes, + ) + if pdu.is_state: self._persist_state_txn(txn, pdu.prev_pdus, cols) else: diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py index d70467dcd6..9d624429b7 100644 --- a/synapse/storage/pdu.py +++ b/synapse/storage/pdu.py @@ -64,6 +64,11 @@ class PduStore(SQLBaseStore): for r in PduEdgesTable.decode_results(txn.fetchall()) ] + hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin) + signatures = self._get_pdu_origin_signatures_txn( + txn, pdu_id, origin + ) + query = ( "SELECT %(fields)s FROM %(pdus)s as p " "LEFT JOIN %(state)s as s " @@ -80,7 +85,9 @@ class PduStore(SQLBaseStore): row = txn.fetchone() if row: - 
results.append(PduTuple(PduEntry(*row), edges)) + results.append(PduTuple( + PduEntry(*row), edges, hashes, signatures + )) return results @@ -908,7 +915,7 @@ This does not include a prev_pdus key. PduTuple = namedtuple( "PduTuple", - ("pdu_entry", "prev_pdu_list") + ("pdu_entry", "prev_pdu_list", "hashes", "signatures") ) """ This is a tuple of a `PduEntry` and a list of `PduIdTuple` that represent the `prev_pdus` key of a PDU. diff --git a/synapse/storage/schema/signatures.sql b/synapse/storage/schema/signatures.sql index ba3bbb5471..86ee0f2377 100644 --- a/synapse/storage/schema/signatures.sql +++ b/synapse/storage/schema/signatures.sql @@ -28,9 +28,9 @@ CREATE TABLE IF NOT EXISTS pdu_origin_signatures ( origin TEXT, key_id TEXT, signature BLOB, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) + CONSTRAINT uniqueness UNIQUE (pdu_id, origin, key_id) ); CREATE INDEX IF NOT EXISTS pdu_origin_signatures_id ON pdu_origin_signatures ( - pdu_id, origin, + pdu_id, origin ); diff --git a/tests/federation/test_federation.py b/tests/federation/test_federation.py index d86ce83b28..03b2167cf7 100644 --- a/tests/federation/test_federation.py +++ b/tests/federation/test_federation.py @@ -41,7 +41,7 @@ def make_pdu(prev_pdus=[], **kwargs): } pdu_fields.update(kwargs) - return PduTuple(PduEntry(**pdu_fields), prev_pdus) + return PduTuple(PduEntry(**pdu_fields), prev_pdus, {}, {}) class FederationTestCase(unittest.TestCase): @@ -183,6 +183,8 @@ class FederationTestCase(unittest.TestCase): "is_state": False, "content": {"testing": "content here"}, "depth": 1, + "hashes": {}, + "signatures": {}, }, ] }, -- cgit 1.5.1 From 66104da10c4191aa1e048f2379190574755109e6 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 16 Oct 2014 00:09:48 +0100 Subject: Sign outgoing PDUs. --- synapse/crypto/event_signing.py | 4 ++-- synapse/federation/pdu_codec.py | 6 +++++- synapse/storage/__init__.py | 7 ++++--- synapse/storage/signatures.py | 6 +++--- tests/federation/test_pdu_codec.py | 13 ++++++++++--- tests/rest/test_events.py | 7 +++++-- tests/rest/test_profile.py | 8 ++++++-- tests/rest/test_rooms.py | 32 +++++++++++++++++++++++++------- tests/utils.py | 3 ++- 9 files changed, 62 insertions(+), 24 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 6557727e06..a115967c0a 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -15,6 +15,7 @@ # limitations under the License. +from synapse.federation.units import Pdu from synapse.api.events.utils import prune_pdu from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 @@ -25,8 +26,7 @@ import hashlib def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256): hashed = _compute_hash(pdu, hash_algortithm) - hashes[hashed.name] = encode_base64(hashed.digest()) - pdu.hashes = hashes + pdu.hashes[hashed.name] = encode_base64(hashed.digest()) return pdu diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index cef61108dd..bcac5f9ae8 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -14,6 +14,7 @@ # limitations under the License. 
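At this point the series has both halves of the signing story in place: a PDU first gets a content hash (canonical JSON of everything except its "hashes", "signatures" and "meta" keys, hashed with SHA-256 and stored base64-encoded under pdu.hashes), and is then pruned and signed, with the signature stored under pdu.signatures keyed by server name and key id. The following is a minimal, self-contained sketch of that flow, not the Synapse code itself: canonical JSON is approximated here with sorted-key json.dumps, and an HMAC stands in for the ed25519 signing that syutil.crypto.jsonsign performs.

import base64
import hashlib
import hmac
import json

def canonical_json(obj):
    # Stand-in for syutil.jsonutil.encode_canonical_json.
    return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode("utf-8")

def add_content_hash(pdu_dict, algorithm=hashlib.sha256):
    # Hash everything except the hashes/signatures/meta fields themselves.
    scrubbed = {k: v for k, v in pdu_dict.items()
                if k not in ("hashes", "signatures", "meta")}
    digest = algorithm(canonical_json(scrubbed))
    hashes = pdu_dict.setdefault("hashes", {})
    hashes[digest.name] = base64.b64encode(digest.digest()).decode("ascii")
    return pdu_dict

def sign_pdu(pdu_dict, server_name, key_id, secret):
    # The real code prunes non-essential content first (prune_pdu) and signs
    # with an ed25519 key; an HMAC stands in for the signature here.
    scrubbed = {k: v for k, v in pdu_dict.items() if k != "signatures"}
    sig = hmac.new(secret, canonical_json(scrubbed), hashlib.sha256).digest()
    sigs = pdu_dict.setdefault("signatures", {}).setdefault(server_name, {})
    sigs[key_id] = base64.b64encode(sig).decode("ascii")
    return pdu_dict

pdu = {"pdu_id": "abc123", "origin": "blargle.net",
       "content": {"body": "hello"}}
pdu = sign_pdu(add_content_hash(pdu), "blargle.net", "ed25519:mock", b"seed")

Note that check_event_pdu_hash as committed reads the bare names hash_algortithm and hashes, neither of which is bound in its scope (its parameter is hash_algorithm, and the stored hashes live on pdu.hashes), so the verification path raises NameError if exercised as written. The pdu_codec.py hunk that wires hashing and signing into outgoing PDUs continues just below.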
from .units import Pdu +from synapse.crypto.event_signing import hash_event_pdu, sign_event_pdu import copy @@ -33,6 +34,7 @@ def encode_event_id(pdu_id, origin): class PduCodec(object): def __init__(self, hs): + self.signing_key = hs.config.signing_key[0] self.server_name = hs.hostname self.event_factory = hs.get_event_factory() self.clock = hs.get_clock() @@ -99,4 +101,6 @@ class PduCodec(object): if "ts" not in kwargs: kwargs["ts"] = int(self.clock.time_msec()) - return Pdu(**kwargs) + pdu = Pdu(**kwargs) + pdu = hash_event_pdu(pdu) + return sign_event_pdu(pdu, self.server_name, self.signing_key) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index bfeab7d1e8..b2a3f0b56c 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -42,6 +42,7 @@ from .transactions import TransactionStore from .keys import KeyStore from .signatures import SignatureStore +from syutil.base64util import decode_base64 import json import logging @@ -168,11 +169,11 @@ class DataStore(RoomMemberStore, RoomStore, txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes, ) - signatures = pdu.sigatures.get(pdu.orgin, {}) + signatures = pdu.signatures.get(pdu.origin, {}) - for key_id, signature_base64 in signatures: + for key_id, signature_base64 in signatures.items(): signature_bytes = decode_base64(signature_base64) - self.store_pdu_origin_signatures_txn( + self._store_pdu_origin_signature_txn( txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes, ) diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index bb860f09f0..1f0a680500 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -47,7 +47,7 @@ class SignatureStore(SQLBaseStore): algorithm (str): Hashing algorithm. hash_bytes (bytes): Hash function output bytes. """ - self._simple_insert_txn(self, txn, "pdu_hashes", { + self._simple_insert_txn(txn, "pdu_hashes", { "pdu_id": pdu_id, "origin": origin, "algorithm": algorithm, @@ -66,7 +66,7 @@ class SignatureStore(SQLBaseStore): query = ( "SELECT key_id, signature" " FROM pdu_origin_signatures" - " WHERE WHERE pdu_id = ? and origin = ?" + " WHERE pdu_id = ? and origin = ?" ) txn.execute(query, (pdu_id, origin)) return dict(txn.fetchall()) @@ -81,7 +81,7 @@ class SignatureStore(SQLBaseStore): key_id (str): Id for the signing key. signature (bytes): The signature. 
""" - self._simple_insert_txn(self, txn, "pdu_origin_signatures", { + self._simple_insert_txn(txn, "pdu_origin_signatures", { "pdu_id": pdu_id, "origin": origin, "key_id": key_id, diff --git a/tests/federation/test_pdu_codec.py b/tests/federation/test_pdu_codec.py index 344e1baf60..80851a4258 100644 --- a/tests/federation/test_pdu_codec.py +++ b/tests/federation/test_pdu_codec.py @@ -23,14 +23,21 @@ from synapse.federation.units import Pdu from synapse.server import HomeServer -from mock import Mock +from mock import Mock, NonCallableMock + +from ..utils import MockKey class PduCodecTestCase(unittest.TestCase): def setUp(self): - self.hs = HomeServer("blargle.net") - self.event_factory = self.hs.get_event_factory() + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + self.hs = HomeServer( + "blargle.net", + config=self.mock_config, + ) + self.event_factory = self.hs.get_event_factory() self.codec = PduCodec(self.hs) def test_decode_event_id(self): diff --git a/tests/rest/test_events.py b/tests/rest/test_events.py index 79b371c04d..362c7bc01c 100644 --- a/tests/rest/test_events.py +++ b/tests/rest/test_events.py @@ -28,7 +28,7 @@ from synapse.server import HomeServer # python imports import json -from ..utils import MockHttpResource, MemoryDataStore +from ..utils import MockHttpResource, MemoryDataStore, MockKey from .utils import RestTestCase from mock import Mock, NonCallableMock @@ -122,6 +122,9 @@ class EventStreamPermissionsTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "test", db_pool=None, @@ -139,7 +142,7 @@ class EventStreamPermissionsTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) diff --git a/tests/rest/test_profile.py b/tests/rest/test_profile.py index b0f48e7fd8..3a0d1e700a 100644 --- a/tests/rest/test_profile.py +++ b/tests/rest/test_profile.py @@ -18,9 +18,9 @@ from tests import unittest from twisted.internet import defer -from mock import Mock +from mock import Mock, NonCallableMock -from ..utils import MockHttpResource +from ..utils import MockHttpResource, MockKey from synapse.api.errors import SynapseError, AuthError from synapse.server import HomeServer @@ -41,6 +41,9 @@ class ProfileTestCase(unittest.TestCase): "set_avatar_url", ]) + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer("test", db_pool=None, http_client=None, @@ -48,6 +51,7 @@ class ProfileTestCase(unittest.TestCase): federation=Mock(), replication_layer=Mock(), datastore=None, + config=self.mock_config, ) def _get_user_by_req(request=None): diff --git a/tests/rest/test_rooms.py b/tests/rest/test_rooms.py index 1ce9b8a83d..7170193051 100644 --- a/tests/rest/test_rooms.py +++ b/tests/rest/test_rooms.py @@ -27,7 +27,7 @@ from synapse.server import HomeServer import json import urllib -from ..utils import MockHttpResource, MemoryDataStore +from ..utils import MockHttpResource, MemoryDataStore, MockKey from .utils import RestTestCase from mock import Mock, NonCallableMock @@ -50,6 +50,9 @@ class RoomPermissionsTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) 
persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -61,7 +64,7 @@ class RoomPermissionsTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -408,6 +411,9 @@ class RoomsMemberListTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -419,7 +425,7 @@ class RoomsMemberListTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -497,6 +503,9 @@ class RoomsCreateTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -508,7 +517,7 @@ class RoomsCreateTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -598,6 +607,9 @@ class RoomTopicTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -609,7 +621,7 @@ class RoomTopicTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -712,6 +724,9 @@ class RoomMemberStateTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -723,7 +738,7 @@ class RoomMemberStateTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) @@ -853,6 +868,9 @@ class RoomMessagesTestCase(RestTestCase): persistence_service = Mock(spec=["get_latest_pdus_in_context"]) persistence_service.get_latest_pdus_in_context.return_value = [] + self.mock_config = NonCallableMock() + self.mock_config.signing_key = [MockKey()] + hs = HomeServer( "red", db_pool=None, @@ -864,7 +882,7 @@ class RoomMessagesTestCase(RestTestCase): ratelimiter=NonCallableMock(spec_set=[ "send_message", ]), - config=NonCallableMock(), + config=self.mock_config, ) self.ratelimiter = hs.get_ratelimiter() self.ratelimiter.send_message.return_value = (True, 0) diff --git a/tests/utils.py b/tests/utils.py index 60fd6085ac..d8be73dba8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -118,13 +118,14 @@ 
class MockHttpResource(HttpServer): class MockKey(object): alg = "mock_alg" version = "mock_version" + signature = b"\x9a\x87$" @property def verify_key(self): return self def sign(self, message): - return b"\x9a\x87$" + return self def verify(self, message, sig): assert sig == b"\x9a\x87$" -- cgit 1.5.1 From bb04447c44036ebf3ae5dde7a4cc7a7909d50ef6 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Thu, 16 Oct 2014 23:25:12 +0100 Subject: Include hashes of previous pdus when referencing them --- synapse/api/events/__init__.py | 2 +- synapse/federation/pdu_codec.py | 13 ++++--------- synapse/federation/replication.py | 2 +- synapse/federation/units.py | 10 +++++++++- synapse/state.py | 4 ---- synapse/storage/__init__.py | 20 ++++++++++++++------ synapse/storage/pdu.py | 22 ++++++++++++++++------ synapse/storage/schema/signatures.sql | 16 ++++++++++++++++ synapse/storage/signatures.py | 31 +++++++++++++++++++++++++++++++ tests/federation/test_federation.py | 2 +- tests/federation/test_pdu_codec.py | 4 ++-- 11 files changed, 95 insertions(+), 31 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index f66fea2904..a5a55742e0 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -65,13 +65,13 @@ class SynapseEvent(JsonEncodedObject): internal_keys = [ "is_state", - "prev_events", "depth", "destinations", "origin", "outlier", "power_level", "redacted", + "prev_pdus", ] required_keys = [ diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index bcac5f9ae8..11fd7264b3 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -45,9 +45,7 @@ class PduCodec(object): kwargs["event_id"] = encode_event_id(pdu.pdu_id, pdu.origin) kwargs["room_id"] = pdu.context kwargs["etype"] = pdu.pdu_type - kwargs["prev_events"] = [ - encode_event_id(p[0], p[1]) for p in pdu.prev_pdus - ] + kwargs["prev_pdus"] = pdu.prev_pdus if hasattr(pdu, "prev_state_id") and hasattr(pdu, "prev_state_origin"): kwargs["prev_state"] = encode_event_id( @@ -78,11 +76,8 @@ class PduCodec(object): d["context"] = event.room_id d["pdu_type"] = event.type - if hasattr(event, "prev_events"): - d["prev_pdus"] = [ - decode_event_id(e, self.server_name) - for e in event.prev_events - ] + if hasattr(event, "prev_pdus"): + d["prev_pdus"] = event.prev_pdus if hasattr(event, "prev_state"): d["prev_state_id"], d["prev_state_origin"] = ( @@ -95,7 +90,7 @@ class PduCodec(object): kwargs = copy.deepcopy(event.unrecognized_keys) kwargs.update({ k: v for k, v in d.items() - if k not in ["event_id", "room_id", "type", "prev_events"] + if k not in ["event_id", "room_id", "type"] }) if "ts" not in kwargs: diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index 9363ac7300..788a49b8e8 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -443,7 +443,7 @@ class ReplicationLayer(object): min_depth = yield self.store.get_min_depth_for_context(pdu.context) if min_depth and pdu.depth > min_depth: - for pdu_id, origin in pdu.prev_pdus: + for pdu_id, origin, hashes in pdu.prev_pdus: exists = yield self._get_persisted_pdu(pdu_id, origin) if not exists: diff --git a/synapse/federation/units.py b/synapse/federation/units.py index 3518efb215..6a43007837 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -141,8 +141,16 @@ class Pdu(JsonEncodedObject): for kid, sig in pdu_tuple.signatures.items() } + prev_pdus = [] 
+ for prev_pdu in pdu_tuple.prev_pdu_list: + prev_hashes = pdu_tuple.edge_hashes.get(prev_pdu, {}) + prev_hashes = { + alg: encode_base64(hsh) for alg, hsh in prev_hashes.items() + } + prev_pdus.append((prev_pdu[0], prev_pdu[1], prev_hashes)) + return Pdu( - prev_pdus=pdu_tuple.prev_pdu_list, + prev_pdus=prev_pdus, **args ) else: diff --git a/synapse/state.py b/synapse/state.py index 9db84c9b5c..bc6b928ec7 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -72,10 +72,6 @@ class StateHandler(object): snapshot.fill_out_prev_events(event) - event.prev_events = [ - e for e in event.prev_events if e != event.event_id - ] - current_state = snapshot.prev_state_pdu if current_state: diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index b2a3f0b56c..af05b47932 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -177,6 +177,14 @@ class DataStore(RoomMemberStore, RoomStore, txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes, ) + for prev_pdu_id, prev_origin, prev_hashes in pdu.prev_pdus: + for alg, hash_base64 in prev_hashes.items(): + hash_bytes = decode_base64(hash_base64) + self._store_prev_pdu_hash_txn( + txn, pdu.pdu_id, pdu.origin, prev_pdu_id, prev_origin, alg, + hash_bytes + ) + if pdu.is_state: self._persist_state_txn(txn, pdu.prev_pdus, cols) else: @@ -352,6 +360,7 @@ class DataStore(RoomMemberStore, RoomStore, prev_pdus = self._get_latest_pdus_in_context( txn, room_id ) + if state_type is not None and state_key is not None: prev_state_pdu = self._get_current_state_pdu( txn, room_id, state_type, state_key @@ -401,17 +410,16 @@ class Snapshot(object): self.prev_state_pdu = prev_state_pdu def fill_out_prev_events(self, event): - if hasattr(event, "prev_events"): + if hasattr(event, "prev_pdus"): return - es = [ - "%s@%s" % (p_id, origin) for p_id, origin, _ in self.prev_pdus + event.prev_pdus = [ + (p_id, origin, hashes) + for p_id, origin, hashes, _ in self.prev_pdus ] - event.prev_events = [e for e in es if e != event.event_id] - if self.prev_pdus: - event.depth = max([int(v) for _, _, v in self.prev_pdus]) + 1 + event.depth = max([int(v) for _, _, _, v in self.prev_pdus]) + 1 else: event.depth = 0 diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py index 9d624429b7..a423b42dbd 100644 --- a/synapse/storage/pdu.py +++ b/synapse/storage/pdu.py @@ -20,10 +20,13 @@ from ._base import SQLBaseStore, Table, JoinHelper from synapse.federation.units import Pdu from synapse.util.logutils import log_function +from syutil.base64util import encode_base64 + from collections import namedtuple import logging + logger = logging.getLogger(__name__) @@ -64,6 +67,8 @@ class PduStore(SQLBaseStore): for r in PduEdgesTable.decode_results(txn.fetchall()) ] + edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin) + hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin) signatures = self._get_pdu_origin_signatures_txn( txn, pdu_id, origin @@ -86,7 +91,7 @@ class PduStore(SQLBaseStore): row = txn.fetchone() if row: results.append(PduTuple( - PduEntry(*row), edges, hashes, signatures + PduEntry(*row), edges, hashes, signatures, edge_hashes )) return results @@ -310,9 +315,14 @@ class PduStore(SQLBaseStore): (context, ) ) - results = txn.fetchall() + results = [] + for pdu_id, origin, depth in txn.fetchall(): + hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin) + sha256_bytes = hashes["sha256"] + prev_hashes = {"sha256": encode_base64(sha256_bytes)} + results.append((pdu_id, origin, prev_hashes, depth)) - return [(row[0], row[1], row[2]) for 
row in results] + return results @defer.inlineCallbacks def get_oldest_pdus_in_context(self, context): @@ -431,7 +441,7 @@ class PduStore(SQLBaseStore): "DELETE FROM %s WHERE pdu_id = ? AND origin = ?" % PduForwardExtremitiesTable.table_name ) - txn.executemany(query, prev_pdus) + txn.executemany(query, list(p[:2] for p in prev_pdus)) # We only insert as a forward extremety the new pdu if there are no # other pdus that reference it as a prev pdu @@ -454,7 +464,7 @@ class PduStore(SQLBaseStore): # deleted in a second if they're incorrect anyway. txn.executemany( PduBackwardExtremitiesTable.insert_statement(), - [(i, o, context) for i, o in prev_pdus] + [(i, o, context) for i, o, _ in prev_pdus] ) # Also delete from the backwards extremities table all ones that @@ -915,7 +925,7 @@ This does not include a prev_pdus key. PduTuple = namedtuple( "PduTuple", - ("pdu_entry", "prev_pdu_list", "hashes", "signatures") + ("pdu_entry", "prev_pdu_list", "hashes", "signatures", "edge_hashes") ) """ This is a tuple of a `PduEntry` and a list of `PduIdTuple` that represent the `prev_pdus` key of a PDU. diff --git a/synapse/storage/schema/signatures.sql b/synapse/storage/schema/signatures.sql index 86ee0f2377..a72c4dc35f 100644 --- a/synapse/storage/schema/signatures.sql +++ b/synapse/storage/schema/signatures.sql @@ -34,3 +34,19 @@ CREATE TABLE IF NOT EXISTS pdu_origin_signatures ( CREATE INDEX IF NOT EXISTS pdu_origin_signatures_id ON pdu_origin_signatures ( pdu_id, origin ); + +CREATE TABLE IF NOT EXISTS pdu_edge_hashes( + pdu_id TEXT, + origin TEXT, + prev_pdu_id TEXT, + prev_origin TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE ( + pdu_id, origin, prev_pdu_id, prev_origin, algorithm + ) +); + +CREATE INDEX IF NOT EXISTS pdu_edge_hashes_id ON pdu_edge_hashes( + pdu_id, origin +); diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 1f0a680500..1147102489 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -88,3 +88,34 @@ class SignatureStore(SQLBaseStore): "signature": buffer(signature_bytes), }) + def _get_prev_pdu_hashes_txn(self, txn, pdu_id, origin): + """Get all the hashes for previous PDUs of a PDU + Args: + txn (cursor): + pdu_id (str): Id of the PDU. + origin (str): Origin of the PDU. + Returns: + dict of (pdu_id, origin) -> dict of algorithm -> hash_bytes. + """ + query = ( + "SELECT prev_pdu_id, prev_origin, algorithm, hash" + " FROM pdu_edge_hashes" + " WHERE pdu_id = ? and origin = ?" 
+ ) + txn.execute(query, (pdu_id, origin)) + results = {} + for prev_pdu_id, prev_origin, algorithm, hash_bytes in txn.fetchall(): + hashes = results.setdefault((prev_pdu_id, prev_origin), {}) + hashes[algorithm] = hash_bytes + return results + + def _store_prev_pdu_hash_txn(self, txn, pdu_id, origin, prev_pdu_id, + prev_origin, algorithm, hash_bytes): + self._simple_insert_txn(txn, "pdu_edge_hashes", { + "pdu_id": pdu_id, + "origin": origin, + "prev_pdu_id": prev_pdu_id, + "prev_origin": prev_origin, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) diff --git a/tests/federation/test_federation.py b/tests/federation/test_federation.py index 03b2167cf7..eed50e6335 100644 --- a/tests/federation/test_federation.py +++ b/tests/federation/test_federation.py @@ -41,7 +41,7 @@ def make_pdu(prev_pdus=[], **kwargs): } pdu_fields.update(kwargs) - return PduTuple(PduEntry(**pdu_fields), prev_pdus, {}, {}) + return PduTuple(PduEntry(**pdu_fields), prev_pdus, {}, {}, {}) class FederationTestCase(unittest.TestCase): diff --git a/tests/federation/test_pdu_codec.py b/tests/federation/test_pdu_codec.py index 80851a4258..0ad8cf6641 100644 --- a/tests/federation/test_pdu_codec.py +++ b/tests/federation/test_pdu_codec.py @@ -88,7 +88,7 @@ class PduCodecTestCase(unittest.TestCase): self.assertEquals(pdu.context, event.room_id) self.assertEquals(pdu.is_state, event.is_state) self.assertEquals(pdu.depth, event.depth) - self.assertEquals(["alice@bob.com"], event.prev_events) + self.assertEquals(pdu.prev_pdus, event.prev_pdus) self.assertEquals(pdu.content, event.content) def test_pdu_from_event(self): @@ -144,7 +144,7 @@ class PduCodecTestCase(unittest.TestCase): self.assertEquals(pdu.context, event.room_id) self.assertEquals(pdu.is_state, event.is_state) self.assertEquals(pdu.depth, event.depth) - self.assertEquals(["alice@bob.com"], event.prev_events) + self.assertEquals(pdu.prev_pdus, event.prev_pdus) self.assertEquals(pdu.content, event.content) self.assertEquals(pdu.state_key, event.state_key) -- cgit 1.5.1 From c8f996e29ffd7055bc6521ea610fc12ff50502e5 Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 17 Oct 2014 11:40:35 +0100 Subject: Hash the same content covered by the signature when referencing previous PDUs rather than reusing the PDU content hashes --- synapse/crypto/event_signing.py | 19 +++++++++++---- synapse/federation/pdu_codec.py | 6 +++-- synapse/storage/__init__.py | 9 ++++++- synapse/storage/pdu.py | 4 ++-- synapse/storage/schema/signatures.sql | 18 ++++++++++++-- synapse/storage/signatures.py | 44 +++++++++++++++++++++++++++++++---- 6 files changed, 84 insertions(+), 16 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index a115967c0a..32d60bd30a 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -24,15 +24,15 @@ from syutil.crypto.jsonsign import sign_json, verify_signed_json import hashlib -def hash_event_pdu(pdu, hash_algortithm=hashlib.sha256): - hashed = _compute_hash(pdu, hash_algortithm) +def add_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): + hashed = _compute_content_hash(pdu, hash_algorithm) pdu.hashes[hashed.name] = encode_base64(hashed.digest()) return pdu -def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256): +def check_event_pdu_content_hash(pdu, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" - computed_hash = _compute_hash(pdu, hash_algortithm) + computed_hash = 
_compute_content_hash(pdu, hash_algorithm)
     if computed_hash.name not in pdu.hashes:
         raise Exception("Algorithm %s not in hashes %s" % (
             computed_hash.name, list(pdu.hashes)
@@ -45,7 +45,7 @@ def check_event_pdu_hash(pdu, hash_algorithm=hashlib.sha256):
     return message_hash_bytes == computed_hash.digest()
 
 
-def _compute_hash(pdu, hash_algorithm):
+def _compute_content_hash(pdu, hash_algorithm):
     pdu_json = pdu.get_dict()
     pdu_json.pop("meta", None)
     pdu_json.pop("signatures", None)
@@ -54,6 +54,15 @@ def _compute_hash(pdu, hash_algorithm):
     return hash_algorithm(pdu_json_bytes)
 
 
+def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256):
+    tmp_pdu = Pdu(**pdu.get_dict())
+    tmp_pdu = prune_pdu(tmp_pdu)
+    pdu_json = tmp_pdu.get_dict()
+    pdu_json_bytes = encode_canonical_json(pdu_json)
+    hashed = hash_algorithm(pdu_json_bytes)
+    return (hashed.name, hashed.digest())
+
+
 def sign_event_pdu(pdu, signature_name, signing_key):
     tmp_pdu = Pdu(**pdu.get_dict())
     tmp_pdu = prune_pdu(tmp_pdu)
diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py
index 11fd7264b3..7e574f451d 100644
--- a/synapse/federation/pdu_codec.py
+++ b/synapse/federation/pdu_codec.py
@@ -14,7 +14,9 @@
 # limitations under the License.
 
 from .units import Pdu
-from synapse.crypto.event_signing import hash_event_pdu, sign_event_pdu
+from synapse.crypto.event_signing import (
+    add_event_pdu_content_hash, sign_event_pdu
+)
 
 import copy
 
@@ -97,5 +99,5 @@ class PduCodec(object):
             kwargs["ts"] = int(self.clock.time_msec())
 
         pdu = Pdu(**kwargs)
-        pdu = hash_event_pdu(pdu)
+        pdu = add_event_pdu_content_hash(pdu)
         return sign_event_pdu(pdu, self.server_name, self.signing_key)
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index af05b47932..1738260cc1 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -44,6 +44,8 @@ from .signatures import SignatureStore
 
 from syutil.base64util import decode_base64
 
+from synapse.crypto.event_signing import compute_pdu_event_reference_hash
+
 import json
 import logging
 import os
@@ -165,7 +167,7 @@ class DataStore(RoomMemberStore, RoomStore,
 
         for hash_alg, hash_base64 in pdu.hashes.items():
             hash_bytes = decode_base64(hash_base64)
-            self._store_pdu_hash_txn(
+            self._store_pdu_content_hash_txn(
                 txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes,
             )
 
@@ -185,6 +187,11 @@ class DataStore(RoomMemberStore, RoomStore,
                 hash_bytes
             )
 
+        (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu)
+        self._store_pdu_reference_hash_txn(
+            txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes
+        )
+
         if pdu.is_state:
             self._persist_state_txn(txn, pdu.prev_pdus, cols)
         else:
diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py
index a423b42dbd..3a90c382f0 100644
--- a/synapse/storage/pdu.py
+++ b/synapse/storage/pdu.py
@@ -69,7 +69,7 @@ class PduStore(SQLBaseStore):
 
         edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin)
 
-        hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin)
+        hashes = self._get_pdu_content_hashes_txn(txn, pdu_id, origin)
         signatures = self._get_pdu_origin_signatures_txn(
             txn, pdu_id, origin
         )
@@ -317,7 +317,7 @@ class PduStore(SQLBaseStore):
 
         results = []
         for pdu_id, origin, depth in txn.fetchall():
-            hashes = self._get_pdu_hashes_txn(txn, pdu_id, origin)
+            hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin)
             sha256_bytes = hashes["sha256"]
             prev_hashes = {"sha256": encode_base64(sha256_bytes)}
             results.append((pdu_id, origin, prev_hashes, depth))
diff --git a/synapse/storage/schema/signatures.sql
b/synapse/storage/schema/signatures.sql index a72c4dc35f..1c45a51bec 100644 --- a/synapse/storage/schema/signatures.sql +++ b/synapse/storage/schema/signatures.sql @@ -13,7 +13,7 @@ * limitations under the License. */ -CREATE TABLE IF NOT EXISTS pdu_hashes ( +CREATE TABLE IF NOT EXISTS pdu_content_hashes ( pdu_id TEXT, origin TEXT, algorithm TEXT, @@ -21,7 +21,21 @@ CREATE TABLE IF NOT EXISTS pdu_hashes ( CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) ); -CREATE INDEX IF NOT EXISTS pdu_hashes_id ON pdu_hashes (pdu_id, origin); +CREATE INDEX IF NOT EXISTS pdu_content_hashes_id ON pdu_content_hashes ( + pdu_id, origin +); + +CREATE TABLE IF NOT EXISTS pdu_reference_hashes ( + pdu_id TEXT, + origin TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) +); + +CREATE INDEX IF NOT EXISTS pdu_reference_hashes_id ON pdu_reference_hashes ( + pdu_id, origin +); CREATE TABLE IF NOT EXISTS pdu_origin_signatures ( pdu_id TEXT, diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 1147102489..85eec7ffbe 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -21,7 +21,7 @@ from twisted.internet import defer class SignatureStore(SQLBaseStore): """Persistence for PDU signatures and hashes""" - def _get_pdu_hashes_txn(self, txn, pdu_id, origin): + def _get_pdu_content_hashes_txn(self, txn, pdu_id, origin): """Get all the hashes for a given PDU. Args: txn (cursor): @@ -32,13 +32,14 @@ class SignatureStore(SQLBaseStore): """ query = ( "SELECT algorithm, hash" - " FROM pdu_hashes" + " FROM pdu_content_hashes" " WHERE pdu_id = ? and origin = ?" ) txn.execute(query, (pdu_id, origin)) return dict(txn.fetchall()) - def _store_pdu_hash_txn(self, txn, pdu_id, origin, algorithm, hash_bytes): + def _store_pdu_content_hash_txn(self, txn, pdu_id, origin, algorithm, + hash_bytes): """Store a hash for a PDU Args: txn (cursor): @@ -47,13 +48,48 @@ class SignatureStore(SQLBaseStore): algorithm (str): Hashing algorithm. hash_bytes (bytes): Hash function output bytes. """ - self._simple_insert_txn(txn, "pdu_hashes", { + self._simple_insert_txn(txn, "pdu_content_hashes", { "pdu_id": pdu_id, "origin": origin, "algorithm": algorithm, "hash": buffer(hash_bytes), }) + def _get_pdu_reference_hashes_txn(self, txn, pdu_id, origin): + """Get all the hashes for a given PDU. + Args: + txn (cursor): + pdu_id (str): Id for the PDU. + origin (str): origin of the PDU. + Returns: + A dict of algorithm -> hash. + """ + query = ( + "SELECT algorithm, hash" + " FROM pdu_reference_hashes" + " WHERE pdu_id = ? and origin = ?" + ) + txn.execute(query, (pdu_id, origin)) + return dict(txn.fetchall()) + + def _store_pdu_reference_hash_txn(self, txn, pdu_id, origin, algorithm, + hash_bytes): + """Store a hash for a PDU + Args: + txn (cursor): + pdu_id (str): Id for the PDU. + origin (str): origin of the PDU. + algorithm (str): Hashing algorithm. + hash_bytes (bytes): Hash function output bytes. + """ + self._simple_insert_txn(txn, "pdu_reference_hashes", { + "pdu_id": pdu_id, + "origin": origin, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) + + def _get_pdu_origin_signatures_txn(self, txn, pdu_id, origin): """Get all the signatures for a given PDU. Args: -- cgit 1.5.1 From da1dda3e1d9d3272527d35c23162c4baf7339d74 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Oct 2014 11:18:04 +0000 Subject: Add transaction level logging and timing information. 
Add a _simple_delete method --- synapse/storage/__init__.py | 3 +- synapse/storage/_base.py | 74 ++++++++++++++++++++++++++++++++--------- synapse/storage/directory.py | 1 + synapse/storage/pdu.py | 13 +++++++- synapse/storage/registration.py | 7 ++-- synapse/storage/room.py | 2 ++ synapse/storage/state.py | 1 + synapse/storage/stream.py | 5 ++- synapse/storage/transactions.py | 6 ++++ 9 files changed, 91 insertions(+), 21 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 15a72d0cd7..a50e19349a 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -109,6 +109,7 @@ class DataStore(RoomMemberStore, RoomStore, try: yield self.runInteraction( + "persist_event", self._persist_pdu_event_txn, pdu=pdu, event=event, @@ -394,7 +395,7 @@ class DataStore(RoomMemberStore, RoomStore, prev_state_pdu=prev_state_pdu, ) - return self.runInteraction(_snapshot) + return self.runInteraction("snapshot_room", _snapshot) class Snapshot(object): diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index d3e8741889..1192216971 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -29,15 +29,17 @@ import time logger = logging.getLogger(__name__) sql_logger = logging.getLogger("synapse.storage.SQL") +transaction_logger = logging.getLogger("synapse.storage.txn") class LoggingTransaction(object): """An object that almost-transparently proxies for the 'txn' object passed to the constructor. Adds logging to the .execute() method.""" - __slots__ = ["txn"] + __slots__ = ["txn", "name"] - def __init__(self, txn): + def __init__(self, txn, name): object.__setattr__(self, "txn", txn) + object.__setattr__(self, "name", name) def __getattr__(self, name): return getattr(self.txn, name) @@ -47,12 +49,15 @@ class LoggingTransaction(object): def execute(self, sql, *args, **kwargs): # TODO(paul): Maybe use 'info' and 'debug' for values? 
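The shape of this commit's change, before the hunk resumes below: every runInteraction call site now passes a short description, a global counter turns that description into a unique transaction name, and the LoggingTransaction cursor proxy stamps each SQL statement and its elapsed time with that name. A self-contained sketch of the same pattern follows, with sqlite3 standing in for the Twisted adbapi connection pool Synapse uses and with simplified names; none of this is the patch's code verbatim.

import itertools
import logging
import sqlite3
import time

logging.basicConfig(level=logging.DEBUG)
sql_logger = logging.getLogger("sketch.SQL")
txn_logger = logging.getLogger("sketch.txn")
_txn_counter = itertools.count()

class NamedCursor(object):
    """Proxies a DB cursor, logging each statement with a transaction name."""
    def __init__(self, cursor, name):
        self._cursor = cursor
        self._name = name

    def __getattr__(self, attr):
        # Everything except execute() passes straight through to the cursor.
        return getattr(self._cursor, attr)

    def execute(self, sql, args=()):
        sql_logger.debug("[SQL] {%s} %s", self._name, sql)
        start = time.time()
        try:
            return self._cursor.execute(sql, args)
        finally:
            elapsed_ms = (time.time() - start) * 1000
            sql_logger.debug("[SQL time] {%s} %f", self._name, elapsed_ms)

def run_interaction(conn, desc, func, *args, **kwargs):
    # desc plus a global counter gives each transaction a unique log name.
    name = "%s-%d" % (desc, next(_txn_counter))
    txn_logger.debug("[TXN START] {%s}", name)
    try:
        return func(NamedCursor(conn.cursor(), name), *args, **kwargs)
    finally:
        txn_logger.debug("[TXN END] {%s}", name)

conn = sqlite3.connect(":memory:")
run_interaction(conn, "create_table",
                lambda txn: txn.execute("CREATE TABLE foo (x INTEGER)"))

The sketch times with time.time() (wall clock); the patch itself uses time.clock(), which measures CPU time on most platforms, so the two report different things for queries that block on I/O.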
- sql_logger.debug("[SQL] %s", sql) + sql_logger.debug("[SQL] {%s} %s", self.name, sql) try: if args and args[0]: values = args[0] - sql_logger.debug("[SQL values] " + - ", ".join(("<%s>",) * len(values)), *values) + sql_logger.debug( + "[SQL values] {%s} " + ", ".join(("<%s>",) * len(values)), + self.name, + *values + ) except: # Don't let logging failures stop SQL from working pass @@ -64,10 +69,11 @@ class LoggingTransaction(object): ) finally: end = time.clock() * 1000 - sql_logger.debug("[SQL time] %f", end - start) + sql_logger.debug("[SQL time] {%s} %f", self.name, end - start) class SQLBaseStore(object): + _TXN_ID = 0 def __init__(self, hs): self.hs = hs @@ -75,10 +81,24 @@ class SQLBaseStore(object): self.event_factory = hs.get_event_factory() self._clock = hs.get_clock() - def runInteraction(self, func, *args, **kwargs): + def runInteraction(self, desc, func, *args, **kwargs): """Wraps the .runInteraction() method on the underlying db_pool.""" def inner_func(txn, *args, **kwargs): - return func(LoggingTransaction(txn), *args, **kwargs) + start = time.clock() * 1000 + txn_id = str(SQLBaseStore._TXN_ID) + SQLBaseStore._TXN_ID += 1 + + name = "%s-%s" % (desc, txn_id, ) + + transaction_logger.debug("[TXN START] {%s}", name) + try: + return func(LoggingTransaction(txn, name), *args, **kwargs) + finally: + end = time.clock() * 1000 + transaction_logger.debug( + "[TXN END] {%s} %f", + name, end - start + ) return self._db_pool.runInteraction(inner_func, *args, **kwargs) @@ -114,7 +134,7 @@ class SQLBaseStore(object): else: return cursor.fetchall() - return self.runInteraction(interaction) + return self.runInteraction("_execute", interaction) def _execute_and_decode(self, query, *args): return self._execute(self.cursor_to_dict, query, *args) @@ -131,6 +151,7 @@ class SQLBaseStore(object): or_replace : bool; if True performs an INSERT OR REPLACE """ return self.runInteraction( + "_simple_insert", self._simple_insert_txn, table, values, or_replace=or_replace, or_ignore=or_ignore, ) @@ -168,6 +189,7 @@ class SQLBaseStore(object): statement returns no rows """ return self._simple_selectupdate_one( + "_simple_select_one", table, keyvalues, retcols=retcols, allow_none=allow_none ) @@ -217,7 +239,7 @@ class SQLBaseStore(object): txn.execute(sql, keyvalues.values()) return txn.fetchall() - res = yield self.runInteraction(func) + res = yield self.runInteraction("_simple_select_onecol", func) defer.returnValue([r[0] for r in res]) @@ -240,7 +262,7 @@ class SQLBaseStore(object): txn.execute(sql, keyvalues.values()) return self.cursor_to_dict(txn) - return self.runInteraction(func) + return self.runInteraction("_simple_select_list", func) def _simple_update_one(self, table, keyvalues, updatevalues, retcols=None): @@ -308,7 +330,7 @@ class SQLBaseStore(object): raise StoreError(500, "More than one row matched") return ret - return self.runInteraction(func) + return self.runInteraction("_simple_selectupdate_one", func) def _simple_delete_one(self, table, keyvalues): """Executes a DELETE query on the named table, expecting to delete a @@ -320,7 +342,7 @@ class SQLBaseStore(object): """ sql = "DELETE FROM %s WHERE %s" % ( table, - " AND ".join("%s = ?" % (k) for k in keyvalues) + " AND ".join("%s = ?" 
% (k, ) for k in keyvalues)
         )
 
         def func(txn):
@@ -329,7 +351,25 @@ class SQLBaseStore(object):
                 raise StoreError(404, "No row found")
             if txn.rowcount > 1:
                 raise StoreError(500, "more than one row matched")
-        return self.runInteraction(func)
+        return self.runInteraction("_simple_delete_one", func)
+
+    def _simple_delete(self, table, keyvalues):
+        """Executes a DELETE query on the named table.
+
+        Args:
+            table : string giving the table name
+            keyvalues : dict of column names and values to select the row with
+        """
+
+        return self.runInteraction(
+            "_simple_delete", self._simple_delete_txn, table, keyvalues
+        )
+
+    def _simple_delete_txn(self, txn, table, keyvalues):
+        sql = "DELETE FROM %s WHERE %s" % (
+            table,
+            " AND ".join("%s = ?" % (k, ) for k in keyvalues)
+        )
+
+        return txn.execute(sql, keyvalues.values())
 
     def _simple_max_id(self, table):
         """Executes a SELECT query on the named table, expecting to return the
@@ -347,7 +387,7 @@ class SQLBaseStore(object):
                 return 0
             return max_id
 
-        return self.runInteraction(func)
+        return self.runInteraction("_simple_max_id", func)
 
     def _parse_event_from_row(self, row_dict):
         d = copy.deepcopy({k: v for k, v in row_dict.items()})
@@ -371,7 +411,9 @@ class SQLBaseStore(object):
         )
 
     def _parse_events(self, rows):
-        return self.runInteraction(self._parse_events_txn, rows)
+        return self.runInteraction(
+            "_parse_events", self._parse_events_txn, rows
+        )
 
     def _parse_events_txn(self, txn, rows):
         events = [self._parse_event_from_row(r) for r in rows]
diff --git a/synapse/storage/directory.py b/synapse/storage/directory.py
index 52373a28a6..d6a7113b9c 100644
--- a/synapse/storage/directory.py
+++ b/synapse/storage/directory.py
@@ -95,6 +95,7 @@ class DirectoryStore(SQLBaseStore):
 
     def delete_room_alias(self, room_alias):
         return self.runInteraction(
+            "delete_room_alias",
             self._delete_room_alias_txn,
             room_alias,
         )
diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py
index 9bdc831fd8..4a4341907b 100644
--- a/synapse/storage/pdu.py
+++ b/synapse/storage/pdu.py
@@ -47,7 +47,7 @@ class PduStore(SQLBaseStore):
         """
 
         return self.runInteraction(
-            self._get_pdu_tuple, pdu_id, origin
+            "get_pdu", self._get_pdu_tuple, pdu_id, origin
         )
 
     def _get_pdu_tuple(self, txn, pdu_id, origin):
@@ -108,6 +108,7 @@ class PduStore(SQLBaseStore):
         """
 
         return self.runInteraction(
+            "get_current_state_for_context",
             self._get_current_state_for_context,
             context
         )
@@ -156,6 +157,7 @@ class PduStore(SQLBaseStore):
         """
 
         return self.runInteraction(
+            "mark_pdu_as_processed",
             self._mark_as_processed, pdu_id, pdu_origin
         )
 
@@ -165,6 +167,7 @@
     def get_all_pdus_from_context(self, context):
         """Get a list of all PDUs for a given context."""
         return self.runInteraction(
+            "get_all_pdus_from_context",
             self._get_all_pdus_from_context, context,
         )
 
@@ -192,6 +195,7 @@
             list: A list of PduTuples
         """
         return self.runInteraction(
+            "get_backfill",
             self._get_backfill, context, pdu_list, limit
         )
 
@@ -253,6 +257,7 @@
             context (str)
         """
         return self.runInteraction(
+            "get_min_depth_for_context",
             self._get_min_depth_for_context, context
         )
 
@@ -291,6 +296,7 @@
     def get_latest_pdus_in_context(self, context):
         return self.runInteraction(
+            "get_latest_pdus_in_context",
             self._get_latest_pdus_in_context,
             context
         )
@@ -370,6 +376,7 @@
         """
 
         return self.runInteraction(
+            "is_pdu_new",
             self._is_pdu_new,
             pdu_id=pdu_id,
             origin=origin,
@@ -523,6 +530,7 @@ class StatePduStore(SQLBaseStore):
 
     def
get_unresolved_state_tree(self, new_state_pdu): return self.runInteraction( + "get_unresolved_state_tree", self._get_unresolved_state_tree, new_state_pdu ) @@ -562,6 +570,7 @@ class StatePduStore(SQLBaseStore): def update_current_state(self, pdu_id, origin, context, pdu_type, state_key): return self.runInteraction( + "update_current_state", self._update_current_state, pdu_id, origin, context, pdu_type, state_key ) @@ -601,6 +610,7 @@ class StatePduStore(SQLBaseStore): """ return self.runInteraction( + "get_current_state_pdu", self._get_current_state_pdu, context, pdu_type, state_key ) @@ -660,6 +670,7 @@ class StatePduStore(SQLBaseStore): bool: True if the new_pdu clobbered the current state, False if not """ return self.runInteraction( + "handle_new_state", self._handle_new_state, new_pdu ) diff --git a/synapse/storage/registration.py b/synapse/storage/registration.py index 719806f82b..a2ca6f9a69 100644 --- a/synapse/storage/registration.py +++ b/synapse/storage/registration.py @@ -62,8 +62,10 @@ class RegistrationStore(SQLBaseStore): Raises: StoreError if the user_id could not be registered. """ - yield self.runInteraction(self._register, user_id, token, - password_hash) + yield self.runInteraction( + "register", + self._register, user_id, token, password_hash + ) def _register(self, txn, user_id, token, password_hash): now = int(self.clock.time()) @@ -100,6 +102,7 @@ class RegistrationStore(SQLBaseStore): StoreError if no user was found. """ return self.runInteraction( + "get_user_by_token", self._query_for_auth, token ) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 8cd46334cf..7e48ce9cc3 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -150,6 +150,7 @@ class RoomStore(SQLBaseStore): def get_power_level(self, room_id, user_id): return self.runInteraction( + "get_power_level", self._get_power_level, room_id, user_id, ) @@ -183,6 +184,7 @@ class RoomStore(SQLBaseStore): def get_ops_levels(self, room_id): return self.runInteraction( + "get_ops_levels", self._get_ops_levels, room_id, ) diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 0aa979c9f0..e08acd6404 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -59,6 +59,7 @@ class StateStore(SQLBaseStore): def store_state_groups(self, event): return self.runInteraction( + "store_state_groups", self._store_state_groups_txn, event ) diff --git a/synapse/storage/stream.py b/synapse/storage/stream.py index d61f909939..8f7f61d29d 100644 --- a/synapse/storage/stream.py +++ b/synapse/storage/stream.py @@ -309,7 +309,10 @@ class StreamStore(SQLBaseStore): defer.returnValue(ret) def get_room_events_max_id(self): - return self.runInteraction(self._get_room_events_max_id_txn) + return self.runInteraction( + "get_room_events_max_id", + self._get_room_events_max_id_txn + ) def _get_room_events_max_id_txn(self, txn): txn.execute( diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index 2ba8e30efe..908014d38b 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -42,6 +42,7 @@ class TransactionStore(SQLBaseStore): """ return self.runInteraction( + "get_received_txn_response", self._get_received_txn_response, transaction_id, origin ) @@ -73,6 +74,7 @@ class TransactionStore(SQLBaseStore): """ return self.runInteraction( + "set_received_txn_response", self._set_received_txn_response, transaction_id, origin, code, response_dict ) @@ -106,6 +108,7 @@ class TransactionStore(SQLBaseStore): """ return self.runInteraction( 
+ "prep_send_transaction", self._prep_send_transaction, transaction_id, destination, origin_server_ts, pdu_list ) @@ -161,6 +164,7 @@ class TransactionStore(SQLBaseStore): response_json (str) """ return self.runInteraction( + "delivered_txn", self._delivered_txn, transaction_id, destination, code, response_dict ) @@ -186,6 +190,7 @@ class TransactionStore(SQLBaseStore): list: A list of `ReceivedTransactionsTable.EntryType` """ return self.runInteraction( + "get_transactions_after", self._get_transactions_after, transaction_id, destination ) @@ -216,6 +221,7 @@ class TransactionStore(SQLBaseStore): list: A list of PduTuple """ return self.runInteraction( + "get_pdus_after_transaction", self._get_pdus_after_transaction, transaction_id, destination ) -- cgit 1.5.1 From 2d1dfb3b34583a4de7e1e53f685c2564a7fc731f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Oct 2014 16:42:35 +0000 Subject: Begin implementing all the PDU storage stuff in Events land --- synapse/api/events/__init__.py | 4 +- synapse/federation/pdu_codec.py | 11 ++- synapse/storage/__init__.py | 72 ++++++++++---- synapse/storage/_base.py | 53 +++++++---- synapse/storage/event_federation.py | 143 ++++++++++++++++++++++++++++ synapse/storage/schema/event_edges.sql | 51 ++++++++++ synapse/storage/schema/event_signatures.sql | 65 +++++++++++++ synapse/storage/schema/im.sql | 1 + synapse/storage/signatures.py | 127 ++++++++++++++++++++++++ 9 files changed, 485 insertions(+), 42 deletions(-) create mode 100644 synapse/storage/event_federation.py create mode 100644 synapse/storage/schema/event_edges.sql create mode 100644 synapse/storage/schema/event_signatures.sql (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index a5a55742e0..b855811b98 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -71,7 +71,9 @@ class SynapseEvent(JsonEncodedObject): "outlier", "power_level", "redacted", - "prev_pdus", + "prev_events", + "hashes", + "signatures", ] required_keys = [ diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index 991aae2a56..2cd591410b 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -47,7 +47,10 @@ class PduCodec(object): kwargs["event_id"] = encode_event_id(pdu.pdu_id, pdu.origin) kwargs["room_id"] = pdu.context kwargs["etype"] = pdu.pdu_type - kwargs["prev_pdus"] = pdu.prev_pdus + kwargs["prev_events"] = [ + encode_event_id(i, o) + for i, o in pdu.prev_pdus + ] if hasattr(pdu, "prev_state_id") and hasattr(pdu, "prev_state_origin"): kwargs["prev_state"] = encode_event_id( @@ -78,8 +81,8 @@ class PduCodec(object): d["context"] = event.room_id d["pdu_type"] = event.type - if hasattr(event, "prev_pdus"): - d["prev_pdus"] = event.prev_pdus + if hasattr(event, "prev_events"): + d["prev_pdus"] = [decode_event_id(e) for e in event.prev_events] if hasattr(event, "prev_state"): d["prev_state_id"], d["prev_state_origin"] = ( @@ -92,7 +95,7 @@ class PduCodec(object): kwargs = copy.deepcopy(event.unrecognized_keys) kwargs.update({ k: v for k, v in d.items() - if k not in ["event_id", "room_id", "type"] + if k not in ["event_id", "room_id", "type", "prev_events"] }) if "origin_server_ts" not in kwargs: diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index a50e19349a..678de0cf50 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -40,6 +40,7 @@ from .stream import StreamStore from .pdu import StatePduStore, PduStore, 
PdusTable from .transactions import TransactionStore from .keys import KeyStore +from .event_federation import EventFederationStore from .state import StateStore from .signatures import SignatureStore @@ -69,6 +70,7 @@ SCHEMAS = [ "redactions", "state", "signatures", + "event_edges", ] @@ -83,10 +85,12 @@ class _RollbackButIsFineException(Exception): """ pass + class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, PresenceStore, PduStore, StatePduStore, TransactionStore, - DirectoryStore, KeyStore, StateStore, SignatureStore): + DirectoryStore, KeyStore, StateStore, SignatureStore, + EventFederationStore, ): def __init__(self, hs): super(DataStore, self).__init__(hs) @@ -230,6 +234,10 @@ class DataStore(RoomMemberStore, RoomStore, elif event.type == RoomRedactionEvent.TYPE: self._store_redaction(txn, event) + outlier = False + if hasattr(event, "outlier"): + outlier = event.outlier + vals = { "topological_ordering": event.depth, "event_id": event.event_id, @@ -237,20 +245,20 @@ class DataStore(RoomMemberStore, RoomStore, "room_id": event.room_id, "content": json.dumps(event.content), "processed": True, + "outlier": outlier, + "depth": event.depth, } if stream_ordering is not None: vals["stream_ordering"] = stream_ordering - if hasattr(event, "outlier"): - vals["outlier"] = event.outlier - else: - vals["outlier"] = False - unrec = { k: v for k, v in event.get_full_dict().items() - if k not in vals.keys() and k not in ["redacted", "redacted_because"] + if k not in vals.keys() and k not in [ + "redacted", "redacted_because", "signatures", "hashes", + "prev_events", + ] } vals["unrecognized_keys"] = json.dumps(unrec) @@ -264,6 +272,14 @@ class DataStore(RoomMemberStore, RoomStore, ) raise _RollbackButIsFineException("_persist_event") + self._handle_prev_events( + txn, + outlier=outlier, + event_id=event.event_id, + prev_events=event.prev_events, + room_id=event.room_id, + ) + self._store_state_groups_txn(txn, event) is_state = hasattr(event, "state_key") and event.state_key is not None @@ -291,6 +307,28 @@ class DataStore(RoomMemberStore, RoomStore, } ) + signatures = event.signatures.get(event.origin, {}) + + for key_id, signature_base64 in signatures.items(): + signature_bytes = decode_base64(signature_base64) + self._store_event_origin_signature_txn( + txn, event.event_id, key_id, signature_bytes, + ) + + for prev_event_id, prev_hashes in event.prev_events: + for alg, hash_base64 in prev_hashes.items(): + hash_bytes = decode_base64(hash_base64) + self._store_prev_event_hash_txn( + txn, event.event_id, prev_event_id, alg, hash_bytes + ) + + (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) + self._store_pdu_reference_hash_txn( + txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes + ) + + self._update_min_depth_for_room_txn(txn, event.room_id, event.depth) + def _store_redaction(self, txn, event): txn.execute( "INSERT OR IGNORE INTO redactions " @@ -373,7 +411,7 @@ class DataStore(RoomMemberStore, RoomStore, """ def _snapshot(txn): membership_state = self._get_room_member(txn, user_id, room_id) - prev_pdus = self._get_latest_pdus_in_context( + prev_events = self._get_latest_events_in_room( txn, room_id ) @@ -388,7 +426,7 @@ class DataStore(RoomMemberStore, RoomStore, store=self, room_id=room_id, user_id=user_id, - prev_pdus=prev_pdus, + prev_events=prev_events, membership_state=membership_state, state_type=state_type, state_key=state_key, @@ -404,7 +442,7 @@ class Snapshot(object): store (DataStore): The datastore. 
room_id (RoomId): The room of the snapshot. user_id (UserId): The user this snapshot is for. - prev_pdus (list): The list of PDU ids this snapshot is after. + prev_events (list): The list of event ids this snapshot is after. membership_state (RoomMemberEvent): The current state of the user in the room. state_type (str, optional): State type captured by the snapshot @@ -413,29 +451,29 @@ class Snapshot(object): the previous value of the state type and key in the room. """ - def __init__(self, store, room_id, user_id, prev_pdus, + def __init__(self, store, room_id, user_id, prev_events, membership_state, state_type=None, state_key=None, prev_state_pdu=None): self.store = store self.room_id = room_id self.user_id = user_id - self.prev_pdus = prev_pdus + self.prev_events = prev_events self.membership_state = membership_state self.state_type = state_type self.state_key = state_key self.prev_state_pdu = prev_state_pdu def fill_out_prev_events(self, event): - if hasattr(event, "prev_pdus"): + if hasattr(event, "prev_events"): return - event.prev_pdus = [ + event.prev_events = [ (p_id, origin, hashes) - for p_id, origin, hashes, _ in self.prev_pdus + for p_id, origin, hashes, _ in self.prev_events ] - if self.prev_pdus: - event.depth = max([int(v) for _, _, _, v in self.prev_pdus]) + 1 + if self.prev_events: + event.depth = max([int(v) for _, _, _, v in self.prev_events]) + 1 else: event.depth = 0 diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 1192216971..30732caa83 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -193,7 +193,6 @@ class SQLBaseStore(object): table, keyvalues, retcols=retcols, allow_none=allow_none ) - @defer.inlineCallbacks def _simple_select_one_onecol(self, table, keyvalues, retcol, allow_none=False): """Executes a SELECT query on the named table, which is expected to @@ -204,19 +203,41 @@ class SQLBaseStore(object): keyvalues : dict of column names and values to select the row with retcol : string giving the name of the column to return """ - ret = yield self._simple_select_one( + return self.runInteraction( + "_simple_select_one_onecol_txn", + self._simple_select_one_onecol_txn, + table, keyvalues, retcol, allow_none=allow_none, + ) + + def _simple_select_one_onecol_txn(self, txn, table, keyvalues, retcol, + allow_none=False): + ret = self._simple_select_onecol_txn( + txn, table=table, keyvalues=keyvalues, - retcols=[retcol], - allow_none=allow_none + retcols=retcol, ) if ret: - defer.returnValue(ret[retcol]) + return ret[retcol] else: - defer.returnValue(None) + if allow_none: + return None + else: + raise StoreError(404, "No row found") + + def _simple_select_onecol_txn(self, txn, table, keyvalues, retcol): + sql = "SELECT %(retcol)s FROM %(table)s WHERE %(where)s" % { + "retcol": retcol, + "table": table, + "where": " AND ".join("%s = ?" % k for k in keyvalues.keys()), + } + + txn.execute(sql, keyvalues.values()) + + return [r[0] for r in txn.fetchall()] + - @defer.inlineCallbacks def _simple_select_onecol(self, table, keyvalues, retcol): """Executes a SELECT query on the named table, which returns a list comprising of the values of the named column from the selected rows. @@ -229,19 +250,11 @@ class SQLBaseStore(object): Returns: Deferred: Results in a list """ - sql = "SELECT %(retcol)s FROM %(table)s WHERE %(where)s" % { - "retcol": retcol, - "table": table, - "where": " AND ".join("%s = ?" 
% k for k in keyvalues.keys()), - } - - def func(txn): - txn.execute(sql, keyvalues.values()) - return txn.fetchall() - - res = yield self.runInteraction("_simple_select_onecol", func) - - defer.returnValue([r[0] for r in res]) + return self.runInteraction( + "_simple_select_onecol", + self._simple_select_onecol_txn, + table, keyvalues, retcol + ) def _simple_select_list(self, table, keyvalues, retcols): """Executes a SELECT query on the named table, which may return zero or diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py new file mode 100644 index 0000000000..27ad9aea4d --- /dev/null +++ b/synapse/storage/event_federation.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# Copyright 2014 OpenMarket Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ._base import SQLBaseStore +from twisted.internet import defer + +import logging + + +logger = logging.getLogger(__name__) + + +class EventFederationStore(SQLBaseStore): + + def _get_latest_events_in_room(self, txn, room_id): + self._simple_select_onecol_txn( + txn, + table="event_forward_extremities", + keyvalues={ + "room_id": room_id, + }, + retcol="event_id", + ) + + results = [] + for pdu_id, origin, depth in txn.fetchall(): + hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin) + sha256_bytes = hashes["sha256"] + prev_hashes = {"sha256": encode_base64(sha256_bytes)} + results.append((pdu_id, origin, prev_hashes, depth)) + + def _get_min_depth_interaction(self, txn, room_id): + min_depth = self._simple_select_one_onecol_txn( + txn, + table="room_depth", + keyvalues={"room_id": room_id,}, + retcol="min_depth", + allow_none=True, + ) + + return int(min_depth) if min_depth is not None else None + + def _update_min_depth_for_room_txn(self, txn, room_id, depth): + min_depth = self._get_min_depth_interaction(txn, room_id) + + do_insert = depth < min_depth if min_depth else True + + if do_insert: + self._simple_insert_txn( + txn, + table="room_depth", + values={ + "room_id": room_id, + "min_depth": depth, + }, + or_replace=True, + ) + + def _handle_prev_events(self, txn, outlier, event_id, prev_events, + room_id): + for e_id in prev_events: + # TODO (erikj): This could be done as a bulk insert + self._simple_insert_txn( + txn, + table="event_edges", + values={ + "event_id": event_id, + "prev_event": e_id, + "room_id": room_id, + } + ) + + # Update the extremities table if this is not an outlier. + if not outlier: + for e_id in prev_events: + # TODO (erikj): This could be done as a bulk insert + self._simple_delete_txn( + txn, + table="event_forward_extremities", + keyvalues={ + "event_id": e_id, + "room_id": room_id, + } + ) + + + + # We only insert as a forward extremity the new pdu if there are no + # other pdus that reference it as a prev pdu + query = ( + "INSERT INTO %(table)s (event_id, room_id) " + "SELECT ?, ? WHERE NOT EXISTS (" + "SELECT 1 FROM %(event_edges)s WHERE " + "prev_event_id = ? 
" + ")" + ) % { + "table": "event_forward_extremities", + "event_edges": "event_edges", + } + + logger.debug("query: %s", query) + + txn.execute(query, (event_id, room_id, event_id)) + + # Insert all the prev_pdus as a backwards thing, they'll get + # deleted in a second if they're incorrect anyway. + for e_id in prev_events: + # TODO (erikj): This could be done as a bulk insert + self._simple_insert_txn( + txn, + table="event_backward_extremities", + values={ + "event_id": e_id, + "room_id": room_id, + } + ) + + # Also delete from the backwards extremities table all ones that + # reference pdus that we have already seen + query = ( + "DELETE FROM %(event_back)s as b WHERE EXISTS (" + "SELECT 1 FROM %(events)s AS events " + "WHERE " + "b.event_id = events.event_id " + "AND not events.outlier " + ")" + ) % { + "event_back": "event_backward_extremities", + "events": "events", + } + txn.execute(query) \ No newline at end of file diff --git a/synapse/storage/schema/event_edges.sql b/synapse/storage/schema/event_edges.sql new file mode 100644 index 0000000000..6a28314ece --- /dev/null +++ b/synapse/storage/schema/event_edges.sql @@ -0,0 +1,51 @@ + +CREATE TABLE IF NOT EXISTS event_forward_extremities( + event_id TEXT, + room_id TEXT, + CONSTRAINT uniqueness UNIQUE (event_id, room_id) ON CONFLICT REPLACE +); + +CREATE INDEX IF NOT EXISTS ev_extrem_room ON event_forward_extremities(room_id); +CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id); +-- + +CREATE TABLE IF NOT EXISTS event_backward_extremities( + event_id TEXT, + room_id TEXT, + CONSTRAINT uniqueness UNIQUE (event_id, room_id) ON CONFLICT REPLACE +); + +CREATE INDEX IF NOT EXISTS ev_b_extrem_room ON event_backward_extremities(room_id); +CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id); +-- + +CREATE TABLE IF NOT EXISTS event_edges( + event_id TEXT, + prev_event_id TEXT, + room_id TEXT, + CONSTRAINT uniqueness UNIQUE (event_id, prev_event_id, room_id) +); + +CREATE INDEX IF NOT EXISTS ev_edges_id ON event_edges(event_id); +CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id); +-- + + +CREATE TABLE IF NOT EXISTS room_depth( + room_id TEXT, + min_depth INTEGER, + CONSTRAINT uniqueness UNIQUE (room_id) +); + +CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id); +-- + +create TABLE IF NOT EXISTS event_destinations( + event_id TEXT, + destination TEXT, + delivered_ts INTEGER DEFAULT 0, -- or 0 if not delivered + CONSTRAINT uniqueness UNIQUE (event_id, destination) ON CONFLICT REPLACE +); + +CREATE INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id); +-- \ No newline at end of file diff --git a/synapse/storage/schema/event_signatures.sql b/synapse/storage/schema/event_signatures.sql new file mode 100644 index 0000000000..5491c7ecec --- /dev/null +++ b/synapse/storage/schema/event_signatures.sql @@ -0,0 +1,65 @@ +/* Copyright 2014 OpenMarket Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +CREATE TABLE IF NOT EXISTS event_content_hashes ( + event_id TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE (event_id, algorithm) +); + +CREATE INDEX IF NOT EXISTS event_content_hashes_id ON event_content_hashes( + event_id +); + + +CREATE TABLE IF NOT EXISTS event_reference_hashes ( + event_id TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE (event_id, algorithm) +); + +CREATE INDEX IF NOT EXISTS event_reference_hashes_id ON event_reference_hashes ( + event_id +); + + +CREATE TABLE IF NOT EXISTS event_origin_signatures ( + event_id TEXT, + origin TEXT, + key_id TEXT, + signature BLOB, + CONSTRAINT uniqueness UNIQUE (event_id, key_id) +); + +CREATE INDEX IF NOT EXISTS event_origin_signatures_id ON event_origin_signatures ( + event_id +); + + +CREATE TABLE IF NOT EXISTS event_edge_hashes( + event_id TEXT, + prev_event_id TEXT, + algorithm TEXT, + hash BLOB, + CONSTRAINT uniqueness UNIQUE ( + event_id, prev_event_id, algorithm + ) +); + +CREATE INDEX IF NOT EXISTS event_edge_hashes_id ON event_edge_hashes( + event_id +); diff --git a/synapse/storage/schema/im.sql b/synapse/storage/schema/im.sql index 3aa83f5c8c..8d6f655993 100644 --- a/synapse/storage/schema/im.sql +++ b/synapse/storage/schema/im.sql @@ -23,6 +23,7 @@ CREATE TABLE IF NOT EXISTS events( unrecognized_keys TEXT, processed BOOL NOT NULL, outlier BOOL NOT NULL, + depth INTEGER DEFAULT 0 NOT NULL, CONSTRAINT ev_uniq UNIQUE (event_id) ); diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 82be946d3f..b8f8fd44cb 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -153,3 +153,130 @@ class SignatureStore(SQLBaseStore): "algorithm": algorithm, "hash": buffer(hash_bytes), }) + + ## Events ## + + def _get_event_content_hashes_txn(self, txn, event_id): + """Get all the hashes for a given Event. + Args: + txn (cursor): + event_id (str): Id for the Event. + Returns: + A dict of algorithm -> hash. + """ + query = ( + "SELECT algorithm, hash" + " FROM event_content_hashes" + " WHERE event_id = ?" + ) + txn.execute(query, (event_id, )) + return dict(txn.fetchall()) + + def _store_event_content_hash_txn(self, txn, event_id, algorithm, + hash_bytes): + """Store a hash for a Event + Args: + txn (cursor): + event_id (str): Id for the Event. + algorithm (str): Hashing algorithm. + hash_bytes (bytes): Hash function output bytes. + """ + self._simple_insert_txn(txn, "event_content_hashes", { + "event_id": event_id, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) + + def _get_event_reference_hashes_txn(self, txn, event_id): + """Get all the hashes for a given PDU. + Args: + txn (cursor): + event_id (str): Id for the Event. + Returns: + A dict of algorithm -> hash. + """ + query = ( + "SELECT algorithm, hash" + " FROM event_reference_hashes" + " WHERE event_id = ?" + ) + txn.execute(query, (event_id, )) + return dict(txn.fetchall()) + + def _store_event_reference_hash_txn(self, txn, event_id, algorithm, + hash_bytes): + """Store a hash for a PDU + Args: + txn (cursor): + event_id (str): Id for the Event. + algorithm (str): Hashing algorithm. + hash_bytes (bytes): Hash function output bytes. + """ + self._simple_insert_txn(txn, "event_reference_hashes", { + "event_id": event_id, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) + + + def _get_event_origin_signatures_txn(self, txn, event_id): + """Get all the signatures for a given PDU. + Args: + txn (cursor): + event_id (str): Id for the Event. 
+ Returns: + A dict of key_id -> signature_bytes. + """ + query = ( + "SELECT key_id, signature" + " FROM event_origin_signatures" + " WHERE event_id = ? " + ) + txn.execute(query, (event_id, )) + return dict(txn.fetchall()) + + def _store_event_origin_signature_txn(self, txn, event_id, origin, key_id, + signature_bytes): + """Store a signature from the origin server for a PDU. + Args: + txn (cursor): + event_id (str): Id for the Event. + origin (str): origin of the Event. + key_id (str): Id for the signing key. + signature (bytes): The signature. + """ + self._simple_insert_txn(txn, "event_origin_signatures", { + "event_id": event_id, + "origin": origin, + "key_id": key_id, + "signature": buffer(signature_bytes), + }) + + def _get_prev_event_hashes_txn(self, txn, event_id): + """Get all the hashes for previous PDUs of a PDU + Args: + txn (cursor): + event_id (str): Id for the Event. + Returns: + dict of (pdu_id, origin) -> dict of algorithm -> hash_bytes. + """ + query = ( + "SELECT prev_event_id, algorithm, hash" + " FROM event_edge_hashes" + " WHERE event_id = ?" + ) + txn.execute(query, (event_id, )) + results = {} + for prev_event_id, algorithm, hash_bytes in txn.fetchall(): + hashes = results.setdefault(prev_event_id, {}) + hashes[algorithm] = hash_bytes + return results + + def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id, + algorithm, hash_bytes): + self._simple_insert_txn(txn, "event_edge_hashes", { + "event_id": event_id, + "prev_event_id": prev_event_id, + "algorithm": algorithm, + "hash": buffer(hash_bytes), + }) \ No newline at end of file -- cgit 1.5.1 From a10c2ec88d98abe035a60ab0027c1914d4ad7d77 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Oct 2014 17:15:32 +0000 Subject: Don't reference PDU when persisting event --- synapse/storage/__init__.py | 5 +++-- synapse/storage/event_federation.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 678de0cf50..f89e518690 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -322,9 +322,10 @@ class DataStore(RoomMemberStore, RoomStore, txn, event.event_id, prev_event_id, alg, hash_bytes ) + # TODO (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) - self._store_pdu_reference_hash_txn( - txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes + self._store_event_reference_hash_txn( + txn, event.event_id, ref_alg, ref_hash_bytes ) self._update_min_depth_for_room_txn(txn, event.room_id, event.depth) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 27ad9aea4d..7688fc550f 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -14,7 +14,7 @@ # limitations under the License. 
from ._base import SQLBaseStore -from twisted.internet import defer +from syutil.base64util import encode_base64 import logging @@ -36,7 +36,7 @@ class EventFederationStore(SQLBaseStore): results = [] for pdu_id, origin, depth in txn.fetchall(): - hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin) + hashes = self._get_prev_event_hashes_txn(txn, pdu_id, origin) sha256_bytes = hashes["sha256"] prev_hashes = {"sha256": encode_base64(sha256_bytes)} results.append((pdu_id, origin, prev_hashes, depth)) -- cgit 1.5.1 From e7858b6d7ef37849a3d2d5004743cdd21ec330a8 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Oct 2014 16:59:24 +0000 Subject: Start filling out and using new events tables --- synapse/federation/pdu_codec.py | 12 +++-- synapse/handlers/_base.py | 4 ++ synapse/handlers/federation.py | 90 +++++++++++++++++++--------------- synapse/state.py | 11 +++-- synapse/storage/__init__.py | 45 ++++++++++------- synapse/storage/_base.py | 33 ++++++++++--- synapse/storage/event_federation.py | 49 ++++++++++++------ synapse/storage/schema/event_edges.sql | 8 ++- 8 files changed, 159 insertions(+), 93 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index 2cd591410b..dccbccb85b 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -48,8 +48,8 @@ class PduCodec(object): kwargs["room_id"] = pdu.context kwargs["etype"] = pdu.pdu_type kwargs["prev_events"] = [ - encode_event_id(i, o) - for i, o in pdu.prev_pdus + (encode_event_id(i, o), s) + for i, o, s in pdu.prev_pdus ] if hasattr(pdu, "prev_state_id") and hasattr(pdu, "prev_state_origin"): @@ -82,7 +82,13 @@ class PduCodec(object): d["pdu_type"] = event.type if hasattr(event, "prev_events"): - d["prev_pdus"] = [decode_event_id(e) for e in event.prev_events] + def f(e, s): + i, o = decode_event_id(e, self.server_name) + return i, o, s + d["prev_pdus"] = [ + f(e, s) + for e, s in event.prev_events + ] if hasattr(event, "prev_state"): d["prev_state_id"], d["prev_state_origin"] = ( diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index cd6c35f194..787a01efc5 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -16,6 +16,8 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError +from synapse.util.async import run_on_reactor + class BaseHandler(object): def __init__(self, hs): @@ -45,6 +47,8 @@ class BaseHandler(object): @defer.inlineCallbacks def _on_new_room_event(self, event, snapshot, extra_destinations=[], extra_users=[], suppress_auth=False): + yield run_on_reactor() + snapshot.fill_out_prev_events(event) yield self.state_handler.annotate_state_groups(event) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index b575986fc3..5f86ed03fa 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -22,6 +22,7 @@ from synapse.api.constants import Membership from synapse.util.logutils import log_function from synapse.federation.pdu_codec import PduCodec, encode_event_id from synapse.api.errors import SynapseError +from synapse.util.async import run_on_reactor from twisted.internet import defer, reactor @@ -81,6 +82,8 @@ class FederationHandler(BaseHandler): processing. 
""" + yield run_on_reactor() + pdu = self.pdu_codec.pdu_from_event(event) if not hasattr(pdu, "destinations") or not pdu.destinations: @@ -102,6 +105,8 @@ class FederationHandler(BaseHandler): self.room_queues[event.room_id].append(pdu) return + logger.debug("Processing event: %s", event.event_id) + if state: state = [self.pdu_codec.event_from_pdu(p) for p in state] @@ -216,58 +221,65 @@ class FederationHandler(BaseHandler): assert(event.state_key == joinee) assert(event.room_id == room_id) - self.room_queues[room_id] = [] - - event.event_id = self.event_factory.create_event_id() - event.content = content + event.outlier = False - state = yield self.replication_layer.send_join( - target_host, - self.pdu_codec.pdu_from_event(event) - ) + self.room_queues[room_id] = [] - state = [self.pdu_codec.event_from_pdu(p) for p in state] + try: + event.event_id = self.event_factory.create_event_id() + event.content = content - logger.debug("do_invite_join state: %s", state) + state = yield self.replication_layer.send_join( + target_host, + self.pdu_codec.pdu_from_event(event) + ) - is_new_state = yield self.state_handler.annotate_state_groups( - event, - state=state - ) + state = [self.pdu_codec.event_from_pdu(p) for p in state] - try: - yield self.store.store_room( - room_id=room_id, - room_creator_user_id="", - is_public=False - ) - except: - # FIXME - pass + logger.debug("do_invite_join state: %s", state) - for e in state: - # FIXME: Auth these. is_new_state = yield self.state_handler.annotate_state_groups( - e, + event, + state=state ) + logger.debug("do_invite_join event: %s", event) + + try: + yield self.store.store_room( + room_id=room_id, + room_creator_user_id="", + is_public=False + ) + except: + # FIXME + pass + + for e in state: + # FIXME: Auth these. + e.outlier = True + + yield self.state_handler.annotate_state_groups( + e, + ) + + yield self.store.persist_event( + e, + backfilled=False, + is_new_state=False + ) + yield self.store.persist_event( - e, + event, backfilled=False, - is_new_state=False + is_new_state=is_new_state ) + finally: + room_queue = self.room_queues[room_id] + del self.room_queues[room_id] - yield self.store.persist_event( - event, - backfilled=False, - is_new_state=is_new_state - ) - - room_queue = self.room_queues[room_id] - del self.room_queues[room_id] - - for p in room_queue: - yield self.on_receive_pdu(p, backfilled=False) + for p in room_queue: + yield self.on_receive_pdu(p, backfilled=False) defer.returnValue(True) diff --git a/synapse/state.py b/synapse/state.py index cc6a7db96b..993c4f18d3 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -143,7 +143,9 @@ class StateHandler(object): defer.returnValue(False) return - new_state = yield self.resolve_state_groups(event.prev_events) + new_state = yield self.resolve_state_groups( + [e for e, _ in event.prev_events] + ) event.old_state_events = copy.deepcopy(new_state) @@ -157,12 +159,11 @@ class StateHandler(object): @defer.inlineCallbacks def get_current_state(self, room_id, event_type=None, state_key=""): - # FIXME: HACK! 
- pdus = yield self.store.get_latest_pdus_in_context(room_id) + events = yield self.store.get_latest_events_in_room(room_id) event_ids = [ - encode_event_id(pdu_id, origin) - for pdu_id, origin, _ in pdus + e_id + for e_id, _ in events ] res = yield self.resolve_state_groups(event_ids) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index f89e518690..d75c366834 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -71,6 +71,7 @@ SCHEMAS = [ "state", "signatures", "event_edges", + "event_signatures", ] @@ -134,7 +135,8 @@ class DataStore(RoomMemberStore, RoomStore, "type", "room_id", "content", - "unrecognized_keys" + "unrecognized_keys", + "depth", ], allow_none=allow_none, ) @@ -263,7 +265,12 @@ class DataStore(RoomMemberStore, RoomStore, vals["unrecognized_keys"] = json.dumps(unrec) try: - self._simple_insert_txn(txn, "events", vals) + self._simple_insert_txn( + txn, + "events", + vals, + or_replace=(not outlier), + ) except: logger.warn( "Failed to persist, probably duplicate: %s", @@ -307,13 +314,14 @@ class DataStore(RoomMemberStore, RoomStore, } ) - signatures = event.signatures.get(event.origin, {}) + if hasattr(event, "signatures"): + signatures = event.signatures.get(event.origin, {}) - for key_id, signature_base64 in signatures.items(): - signature_bytes = decode_base64(signature_base64) - self._store_event_origin_signature_txn( - txn, event.event_id, key_id, signature_bytes, - ) + for key_id, signature_base64 in signatures.items(): + signature_bytes = decode_base64(signature_base64) + self._store_event_origin_signature_txn( + txn, event.event_id, event.origin, key_id, signature_bytes, + ) for prev_event_id, prev_hashes in event.prev_events: for alg, hash_base64 in prev_hashes.items(): @@ -323,10 +331,10 @@ class DataStore(RoomMemberStore, RoomStore, ) # TODO - (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) - self._store_event_reference_hash_txn( - txn, event.event_id, ref_alg, ref_hash_bytes - ) + # (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) + # self._store_event_reference_hash_txn( + # txn, event.event_id, ref_alg, ref_hash_bytes + # ) self._update_min_depth_for_room_txn(txn, event.room_id, event.depth) @@ -412,9 +420,7 @@ class DataStore(RoomMemberStore, RoomStore, """ def _snapshot(txn): membership_state = self._get_room_member(txn, user_id, room_id) - prev_events = self._get_latest_events_in_room( - txn, room_id - ) + prev_events = self._get_latest_events_in_room(txn, room_id) if state_type is not None and state_key is not None: prev_state_pdu = self._get_current_state_pdu( @@ -469,12 +475,12 @@ class Snapshot(object): return event.prev_events = [ - (p_id, origin, hashes) - for p_id, origin, hashes, _ in self.prev_events + (event_id, hashes) + for event_id, hashes, _ in self.prev_events ] if self.prev_events: - event.depth = max([int(v) for _, _, _, v in self.prev_events]) + 1 + event.depth = max([int(v) for _, _, v in self.prev_events]) + 1 else: event.depth = 0 @@ -533,9 +539,10 @@ def prepare_database(db_conn): db_conn.commit() else: - sql_script = "BEGIN TRANSACTION;" + sql_script = "BEGIN TRANSACTION;\n" for sql_loc in SCHEMAS: sql_script += read_schema(sql_loc) + sql_script += "\n" sql_script += "COMMIT TRANSACTION;" c.executescript(sql_script) db_conn.commit() diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 30732caa83..464b12f032 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -19,10 +19,12 @@ from twisted.internet import defer from 
synapse.api.errors import StoreError from synapse.api.events.utils import prune_event from synapse.util.logutils import log_function +from syutil.base64util import encode_base64 import collections import copy import json +import sys import time @@ -67,6 +69,9 @@ class LoggingTransaction(object): return self.txn.execute( sql, *args, **kwargs ) + except: + logger.exception("[SQL FAIL] {%s}", self.name) + raise finally: end = time.clock() * 1000 sql_logger.debug("[SQL time] {%s} %f", self.name, end - start) @@ -85,14 +90,20 @@ class SQLBaseStore(object): """Wraps the .runInteraction() method on the underlying db_pool.""" def inner_func(txn, *args, **kwargs): start = time.clock() * 1000 - txn_id = str(SQLBaseStore._TXN_ID) - SQLBaseStore._TXN_ID += 1 + txn_id = SQLBaseStore._TXN_ID + + # We don't really need these to be unique, so lets stop it from + # growing really large. + self._TXN_ID = (self._TXN_ID + 1) % (sys.maxint - 1) - name = "%s-%s" % (desc, txn_id, ) + name = "%s-%x" % (desc, txn_id, ) transaction_logger.debug("[TXN START] {%s}", name) try: return func(LoggingTransaction(txn, name), *args, **kwargs) + except: + logger.exception("[TXN FAIL] {%s}", name) + raise finally: end = time.clock() * 1000 transaction_logger.debug( @@ -189,7 +200,6 @@ class SQLBaseStore(object): statement returns no rows """ return self._simple_selectupdate_one( - "_simple_select_one", table, keyvalues, retcols=retcols, allow_none=allow_none ) @@ -215,11 +225,11 @@ class SQLBaseStore(object): txn, table=table, keyvalues=keyvalues, - retcols=retcol, + retcol=retcol, ) if ret: - return ret[retcol] + return ret[0] else: if allow_none: return None @@ -434,6 +444,17 @@ class SQLBaseStore(object): sql = "SELECT * FROM events WHERE event_id = ?" for ev in events: + signatures = self._get_event_origin_signatures_txn( + txn, ev.event_id, + ) + + ev.signatures = { + k: encode_base64(v) for k, v in signatures.items() + } + + prev_events = self._get_latest_events_in_room(txn, ev.room_id) + ev.prev_events = [(e_id, s,) for e_id, s, _ in prev_events] + if hasattr(ev, "prev_state"): # Load previous state_content. # TODO: Should we be pulling this out above? diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 7688fc550f..5f94c31818 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -24,6 +24,13 @@ logger = logging.getLogger(__name__) class EventFederationStore(SQLBaseStore): + def get_latest_events_in_room(self, room_id): + return self.runInteraction( + "get_latest_events_in_room", + self._get_latest_events_in_room, + room_id, + ) + def _get_latest_events_in_room(self, txn, room_id): self._simple_select_onecol_txn( txn, @@ -34,12 +41,25 @@ class EventFederationStore(SQLBaseStore): retcol="event_id", ) + sql = ( + "SELECT e.event_id, e.depth FROM events as e " + "INNER JOIN event_forward_extremities as f " + "ON e.event_id = f.event_id " + "WHERE f.room_id = ?" 
+ ) + + txn.execute(sql, (room_id, )) + results = [] - for pdu_id, origin, depth in txn.fetchall(): - hashes = self._get_prev_event_hashes_txn(txn, pdu_id, origin) - sha256_bytes = hashes["sha256"] - prev_hashes = {"sha256": encode_base64(sha256_bytes)} - results.append((pdu_id, origin, prev_hashes, depth)) + for event_id, depth in txn.fetchall(): + hashes = self._get_prev_event_hashes_txn(txn, event_id) + prev_hashes = { + k: encode_base64(v) for k, v in hashes.items() + if k == "sha256" + } + results.append((event_id, prev_hashes, depth)) + + return results def _get_min_depth_interaction(self, txn, room_id): min_depth = self._simple_select_one_onecol_txn( @@ -70,21 +90,21 @@ class EventFederationStore(SQLBaseStore): def _handle_prev_events(self, txn, outlier, event_id, prev_events, room_id): - for e_id in prev_events: + for e_id, _ in prev_events: # TODO (erikj): This could be done as a bulk insert self._simple_insert_txn( txn, table="event_edges", values={ "event_id": event_id, - "prev_event": e_id, + "prev_event_id": e_id, "room_id": room_id, } ) # Update the extremities table if this is not an outlier. if not outlier: - for e_id in prev_events: + for e_id, _ in prev_events: # TODO (erikj): This could be done as a bulk insert self._simple_delete_txn( txn, @@ -116,7 +136,7 @@ class EventFederationStore(SQLBaseStore): # Insert all the prev_pdus as a backwards thing, they'll get # deleted in a second if they're incorrect anyway. - for e_id in prev_events: + for e_id, _ in prev_events: # TODO (erikj): This could be done as a bulk insert self._simple_insert_txn( txn, @@ -130,14 +150,11 @@ class EventFederationStore(SQLBaseStore): # Also delete from the backwards extremities table all ones that # reference pdus that we have already seen query = ( - "DELETE FROM %(event_back)s as b WHERE EXISTS (" - "SELECT 1 FROM %(events)s AS events " + "DELETE FROM event_backward_extremities WHERE EXISTS (" + "SELECT 1 FROM events " "WHERE " - "b.event_id = events.event_id " + "event_backward_extremities.event_id = events.event_id " "AND not events.outlier " ")" - ) % { - "event_back": "event_backward_extremities", - "events": "events", - } + ) txn.execute(query) \ No newline at end of file diff --git a/synapse/storage/schema/event_edges.sql b/synapse/storage/schema/event_edges.sql index 6a28314ece..e5f768c705 100644 --- a/synapse/storage/schema/event_edges.sql +++ b/synapse/storage/schema/event_edges.sql @@ -7,7 +7,7 @@ CREATE TABLE IF NOT EXISTS event_forward_extremities( CREATE INDEX IF NOT EXISTS ev_extrem_room ON event_forward_extremities(room_id); CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id); --- + CREATE TABLE IF NOT EXISTS event_backward_extremities( event_id TEXT, @@ -17,7 +17,7 @@ CREATE TABLE IF NOT EXISTS event_backward_extremities( CREATE INDEX IF NOT EXISTS ev_b_extrem_room ON event_backward_extremities(room_id); CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id); --- + CREATE TABLE IF NOT EXISTS event_edges( event_id TEXT, @@ -28,7 +28,6 @@ CREATE TABLE IF NOT EXISTS event_edges( CREATE INDEX IF NOT EXISTS ev_edges_id ON event_edges(event_id); CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id); --- CREATE TABLE IF NOT EXISTS room_depth( @@ -38,7 +37,7 @@ CREATE TABLE IF NOT EXISTS room_depth( ); CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id); --- + create TABLE IF NOT EXISTS event_destinations( event_id TEXT, @@ -48,4 +47,3 @@ create TABLE IF NOT EXISTS event_destinations( ); CREATE 
INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id); --- \ No newline at end of file -- cgit 1.5.1 From bfa36a72b9a852130cc42fb9322f6596e89725a7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 14:00:14 +0000 Subject: Remove PDU tables. --- synapse/federation/persistence.py | 70 --- synapse/federation/replication.py | 2 +- synapse/storage/__init__.py | 60 +-- synapse/storage/pdu.py | 949 -------------------------------------- synapse/storage/schema/pdu.sql | 106 ----- synapse/storage/transactions.py | 45 -- 6 files changed, 2 insertions(+), 1230 deletions(-) delete mode 100644 synapse/storage/pdu.py delete mode 100644 synapse/storage/schema/pdu.sql (limited to 'synapse/storage/__init__.py') diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index 7043fcc504..a565375e68 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -32,76 +32,6 @@ import logging logger = logging.getLogger(__name__) -class PduActions(object): - """ Defines persistence actions that relate to handling PDUs. - """ - - def __init__(self, datastore): - self.store = datastore - - @log_function - def mark_as_processed(self, pdu): - """ Persist the fact that we have fully processed the given `Pdu` - - Returns: - Deferred - """ - return self.store.mark_pdu_as_processed(pdu.pdu_id, pdu.origin) - - @defer.inlineCallbacks - @log_function - def after_transaction(self, transaction_id, destination, origin): - """ Returns all `Pdu`s that we sent to the given remote home server - after a given transaction id. - - Returns: - Deferred: Results in a list of `Pdu`s - """ - results = yield self.store.get_pdus_after_transaction( - transaction_id, - destination - ) - - defer.returnValue([Pdu.from_pdu_tuple(p) for p in results]) - - @defer.inlineCallbacks - @log_function - def get_all_pdus_from_context(self, context): - results = yield self.store.get_all_pdus_from_context(context) - defer.returnValue([Pdu.from_pdu_tuple(p) for p in results]) - - @defer.inlineCallbacks - @log_function - def backfill(self, context, pdu_list, limit): - """ For a given list of PDU id and origins return the proceeding - `limit` `Pdu`s in the given `context`. - - Returns: - Deferred: Results in a list of `Pdu`s. - """ - results = yield self.store.get_backfill( - context, pdu_list, limit - ) - - defer.returnValue([Pdu.from_pdu_tuple(p) for p in results]) - - @log_function - def is_new(self, pdu): - """ When we receive a `Pdu` from a remote home server, we want to - figure out whether it is `new`, i.e. it is not some historic PDU that - we haven't seen simply because we haven't backfilled back that far. - - Returns: - Deferred: Results in a `bool` - """ - return self.store.is_pdu_new( - pdu_id=pdu.pdu_id, - origin=pdu.origin, - context=pdu.context, - depth=pdu.depth - ) - - class TransactionActions(object): """ Defines persistence actions that relate to handling Transactions. 
""" diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index a0bd2e0572..159af4eed7 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -21,7 +21,7 @@ from twisted.internet import defer from .units import Transaction, Pdu, Edu -from .persistence import PduActions, TransactionActions +from .persistence import TransactionActions from synapse.util.logutils import log_function diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index d75c366834..3faa571dd9 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -37,7 +37,6 @@ from .registration import RegistrationStore from .room import RoomStore from .roommember import RoomMemberStore from .stream import StreamStore -from .pdu import StatePduStore, PduStore, PdusTable from .transactions import TransactionStore from .keys import KeyStore from .event_federation import EventFederationStore @@ -60,7 +59,6 @@ logger = logging.getLogger(__name__) SCHEMAS = [ "transactions", - "pdu", "users", "profiles", "presence", @@ -89,7 +87,7 @@ class _RollbackButIsFineException(Exception): class DataStore(RoomMemberStore, RoomStore, RegistrationStore, StreamStore, ProfileStore, FeedbackStore, - PresenceStore, PduStore, StatePduStore, TransactionStore, + PresenceStore, TransactionStore, DirectoryStore, KeyStore, StateStore, SignatureStore, EventFederationStore, ): @@ -150,68 +148,12 @@ class DataStore(RoomMemberStore, RoomStore, def _persist_pdu_event_txn(self, txn, pdu=None, event=None, backfilled=False, stream_ordering=None, is_new_state=True): - if pdu is not None: - self._persist_event_pdu_txn(txn, pdu) if event is not None: return self._persist_event_txn( txn, event, backfilled, stream_ordering, is_new_state=is_new_state, ) - def _persist_event_pdu_txn(self, txn, pdu): - cols = dict(pdu.__dict__) - unrec_keys = dict(pdu.unrecognized_keys) - del cols["hashes"] - del cols["signatures"] - del cols["content"] - del cols["prev_pdus"] - cols["content_json"] = json.dumps(pdu.content) - - unrec_keys.update({ - k: v for k, v in cols.items() - if k not in PdusTable.fields - }) - - cols["unrecognized_keys"] = json.dumps(unrec_keys) - - cols["ts"] = cols.pop("origin_server_ts") - - logger.debug("Persisting: %s", repr(cols)) - - for hash_alg, hash_base64 in pdu.hashes.items(): - hash_bytes = decode_base64(hash_base64) - self._store_pdu_content_hash_txn( - txn, pdu.pdu_id, pdu.origin, hash_alg, hash_bytes, - ) - - signatures = pdu.signatures.get(pdu.origin, {}) - - for key_id, signature_base64 in signatures.items(): - signature_bytes = decode_base64(signature_base64) - self._store_pdu_origin_signature_txn( - txn, pdu.pdu_id, pdu.origin, key_id, signature_bytes, - ) - - for prev_pdu_id, prev_origin, prev_hashes in pdu.prev_pdus: - for alg, hash_base64 in prev_hashes.items(): - hash_bytes = decode_base64(hash_base64) - self._store_prev_pdu_hash_txn( - txn, pdu.pdu_id, pdu.origin, prev_pdu_id, prev_origin, alg, - hash_bytes - ) - - (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) - self._store_pdu_reference_hash_txn( - txn, pdu.pdu_id, pdu.origin, ref_alg, ref_hash_bytes - ) - - if pdu.is_state: - self._persist_state_txn(txn, pdu.prev_pdus, cols) - else: - self._persist_pdu_txn(txn, pdu.prev_pdus, cols) - - self._update_min_depth_for_context_txn(txn, pdu.context, pdu.depth) - @log_function def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None, is_new_state=True): diff --git a/synapse/storage/pdu.py b/synapse/storage/pdu.py 
deleted file mode 100644 index 4a4341907b..0000000000 --- a/synapse/storage/pdu.py +++ /dev/null @@ -1,949 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2014 OpenMarket Ltd -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from twisted.internet import defer - -from ._base import SQLBaseStore, Table, JoinHelper - -from synapse.federation.units import Pdu -from synapse.util.logutils import log_function - -from syutil.base64util import encode_base64 - -from collections import namedtuple - -import logging - - -logger = logging.getLogger(__name__) - - -class PduStore(SQLBaseStore): - """A collection of queries for handling PDUs. - """ - - def get_pdu(self, pdu_id, origin): - """Given a pdu_id and origin, get a PDU. - - Args: - txn - pdu_id (str) - origin (str) - - Returns: - PduTuple: If the pdu does not exist in the database, returns None - """ - - return self.runInteraction( - "get_pdu", self._get_pdu_tuple, pdu_id, origin - ) - - def _get_pdu_tuple(self, txn, pdu_id, origin): - res = self._get_pdu_tuples(txn, [(pdu_id, origin)]) - return res[0] if res else None - - def _get_pdu_tuples(self, txn, pdu_id_tuples): - results = [] - for pdu_id, origin in pdu_id_tuples: - txn.execute( - PduEdgesTable.select_statement("pdu_id = ? AND origin = ?"), - (pdu_id, origin) - ) - - edges = [ - (r.prev_pdu_id, r.prev_origin) - for r in PduEdgesTable.decode_results(txn.fetchall()) - ] - - edge_hashes = self._get_prev_pdu_hashes_txn(txn, pdu_id, origin) - - hashes = self._get_pdu_content_hashes_txn(txn, pdu_id, origin) - signatures = self._get_pdu_origin_signatures_txn( - txn, pdu_id, origin - ) - - query = ( - "SELECT %(fields)s FROM %(pdus)s as p " - "LEFT JOIN %(state)s as s " - "ON p.pdu_id = s.pdu_id AND p.origin = s.origin " - "WHERE p.pdu_id = ? AND p.origin = ? " - ) % { - "fields": _pdu_state_joiner.get_fields( - PdusTable="p", StatePdusTable="s"), - "pdus": PdusTable.table_name, - "state": StatePdusTable.table_name, - } - - txn.execute(query, (pdu_id, origin)) - - row = txn.fetchone() - if row: - results.append(PduTuple( - PduEntry(*row), edges, hashes, signatures, edge_hashes - )) - - return results - - def get_current_state_for_context(self, context): - """Get a list of PDUs that represent the current state for a given - context - - Args: - context (str) - - Returns: - list: A list of PduTuples - """ - - return self.runInteraction( - "get_current_state_for_context", - self._get_current_state_for_context, - context - ) - - def _get_current_state_for_context(self, txn, context): - query = ( - "SELECT pdu_id, origin FROM %s WHERE context = ?" - % CurrentStateTable.table_name - ) - - logger.debug("get_current_state %s, Args=%s", query, context) - txn.execute(query, (context,)) - - res = txn.fetchall() - - logger.debug("get_current_state %d results", len(res)) - - return self._get_pdu_tuples(txn, res) - - def _persist_pdu_txn(self, txn, prev_pdus, cols): - """Inserts a (non-state) PDU into the database. - - Args: - txn, - prev_pdus (list) - **cols: The columns to insert into the PdusTable. 
- """ - entry = PdusTable.EntryType( - **{k: cols.get(k, None) for k in PdusTable.fields} - ) - - txn.execute(PdusTable.insert_statement(), entry) - - self._handle_prev_pdus( - txn, entry.outlier, entry.pdu_id, entry.origin, - prev_pdus, entry.context - ) - - def mark_pdu_as_processed(self, pdu_id, pdu_origin): - """Mark a received PDU as processed. - - Args: - txn - pdu_id (str) - pdu_origin (str) - """ - - return self.runInteraction( - "mark_pdu_as_processed", - self._mark_as_processed, pdu_id, pdu_origin - ) - - def _mark_as_processed(self, txn, pdu_id, pdu_origin): - txn.execute("UPDATE %s SET have_processed = 1" % PdusTable.table_name) - - def get_all_pdus_from_context(self, context): - """Get a list of all PDUs for a given context.""" - return self.runInteraction( - "get_all_pdus_from_context", - self._get_all_pdus_from_context, context, - ) - - def _get_all_pdus_from_context(self, txn, context): - query = ( - "SELECT pdu_id, origin FROM %s " - "WHERE context = ?" - ) % PdusTable.table_name - - txn.execute(query, (context,)) - - return self._get_pdu_tuples(txn, txn.fetchall()) - - def get_backfill(self, context, pdu_list, limit): - """Get a list of Pdus for a given topic that occured before (and - including) the pdus in pdu_list. Return a list of max size `limit`. - - Args: - txn - context (str) - pdu_list (list) - limit (int) - - Return: - list: A list of PduTuples - """ - return self.runInteraction( - "get_backfill", - self._get_backfill, context, pdu_list, limit - ) - - def _get_backfill(self, txn, context, pdu_list, limit): - logger.debug( - "backfill: %s, %s, %s", - context, repr(pdu_list), limit - ) - - # We seed the pdu_results with the things from the pdu_list. - pdu_results = pdu_list - - front = pdu_list - - query = ( - "SELECT prev_pdu_id, prev_origin FROM %(edges_table)s " - "WHERE context = ? AND pdu_id = ? AND origin = ? " - "LIMIT ?" - ) % { - "edges_table": PduEdgesTable.table_name, - } - - # We iterate through all pdu_ids in `front` to select their previous - # pdus. These are dumped in `new_front`. We continue until we reach the - # limit *or* new_front is empty (i.e., we've run out of things to - # select - while front and len(pdu_results) < limit: - - new_front = [] - for pdu_id, origin in front: - logger.debug( - "_backfill_interaction: i=%s, o=%s", - pdu_id, origin - ) - - txn.execute( - query, - (context, pdu_id, origin, limit - len(pdu_results)) - ) - - for row in txn.fetchall(): - logger.debug( - "_backfill_interaction: got i=%s, o=%s", - *row - ) - new_front.append(row) - - front = new_front - pdu_results += new_front - - # We also want to update the `prev_pdus` attributes before returning. - return self._get_pdu_tuples(txn, pdu_results) - - def get_min_depth_for_context(self, context): - """Get the current minimum depth for a context - - Args: - txn - context (str) - """ - return self.runInteraction( - "get_min_depth_for_context", - self._get_min_depth_for_context, context - ) - - def _get_min_depth_for_context(self, txn, context): - return self._get_min_depth_interaction(txn, context) - - def _get_min_depth_interaction(self, txn, context): - txn.execute( - "SELECT min_depth FROM %s WHERE context = ?" - % ContextDepthTable.table_name, - (context,) - ) - - row = txn.fetchone() - - return row[0] if row else None - - def _update_min_depth_for_context_txn(self, txn, context, depth): - """Update the minimum `depth` of the given context, which is the line - on which we stop backfilling backwards. 
- - Args: - context (str) - depth (int) - """ - min_depth = self._get_min_depth_interaction(txn, context) - - do_insert = depth < min_depth if min_depth else True - - if do_insert: - txn.execute( - "INSERT OR REPLACE INTO %s (context, min_depth) " - "VALUES (?,?)" % ContextDepthTable.table_name, - (context, depth) - ) - - def get_latest_pdus_in_context(self, context): - return self.runInteraction( - "get_latest_pdus_in_context", - self._get_latest_pdus_in_context, - context - ) - - def _get_latest_pdus_in_context(self, txn, context): - """Get's a list of the most current pdus for a given context. This is - used when we are sending a Pdu and need to fill out the `prev_pdus` - key - - Args: - txn - context - """ - query = ( - "SELECT p.pdu_id, p.origin, p.depth FROM %(pdus)s as p " - "INNER JOIN %(forward)s as f ON p.pdu_id = f.pdu_id " - "AND f.origin = p.origin " - "WHERE f.context = ?" - ) % { - "pdus": PdusTable.table_name, - "forward": PduForwardExtremitiesTable.table_name, - } - - logger.debug("get_prev query: %s", query) - - txn.execute( - query, - (context, ) - ) - - results = [] - for pdu_id, origin, depth in txn.fetchall(): - hashes = self._get_pdu_reference_hashes_txn(txn, pdu_id, origin) - sha256_bytes = hashes["sha256"] - prev_hashes = {"sha256": encode_base64(sha256_bytes)} - results.append((pdu_id, origin, prev_hashes, depth)) - - return results - - @defer.inlineCallbacks - def get_oldest_pdus_in_context(self, context): - """Get a list of Pdus that we haven't backfilled beyond yet (and havent - seen). This list is used when we want to backfill backwards and is the - list we send to the remote server. - - Args: - txn - context (str) - - Returns: - list: A list of PduIdTuple. - """ - results = yield self._execute( - None, - "SELECT pdu_id, origin FROM %(back)s WHERE context = ?" - % {"back": PduBackwardExtremitiesTable.table_name, }, - context - ) - - defer.returnValue([PduIdTuple(i, o) for i, o in results]) - - def is_pdu_new(self, pdu_id, origin, context, depth): - """For a given Pdu, try and figure out if it's 'new', i.e., if it's - not something we got randomly from the past, for example when we - request the current state of the room that will probably return a bunch - of pdus from before we joined. - - Args: - txn - pdu_id (str) - origin (str) - context (str) - depth (int) - - Returns: - bool - """ - - return self.runInteraction( - "is_pdu_new", - self._is_pdu_new, - pdu_id=pdu_id, - origin=origin, - context=context, - depth=depth - ) - - def _is_pdu_new(self, txn, pdu_id, origin, context, depth): - # If depth > min depth in back table, then we classify it as new. - # OR if there is nothing in the back table, then it kinda needs to - # be a new thing. - query = ( - "SELECT min(p.depth) FROM %(edges)s as e " - "INNER JOIN %(back)s as b " - "ON e.prev_pdu_id = b.pdu_id AND e.prev_origin = b.origin " - "INNER JOIN %(pdus)s as p " - "ON e.pdu_id = p.pdu_id AND p.origin = e.origin " - "WHERE p.context = ?" - ) % { - "pdus": PdusTable.table_name, - "edges": PduEdgesTable.table_name, - "back": PduBackwardExtremitiesTable.table_name, - } - - txn.execute(query, (context,)) - - min_depth, = txn.fetchone() - - if not min_depth or depth > int(min_depth): - logger.debug( - "is_new true: id=%s, o=%s, d=%s min_depth=%s", - pdu_id, origin, depth, min_depth - ) - return True - - # If this pdu is in the forwards table, then it also is a new one - query = ( - "SELECT * FROM %(forward)s WHERE pdu_id = ? AND origin = ?" 
- ) % { - "forward": PduForwardExtremitiesTable.table_name, - } - - txn.execute(query, (pdu_id, origin)) - - # Did we get anything? - if txn.fetchall(): - logger.debug( - "is_new true: id=%s, o=%s, d=%s was forward", - pdu_id, origin, depth - ) - return True - - logger.debug( - "is_new false: id=%s, o=%s, d=%s", - pdu_id, origin, depth - ) - - # FINE THEN. It's probably old. - return False - - @staticmethod - @log_function - def _handle_prev_pdus(txn, outlier, pdu_id, origin, prev_pdus, - context): - txn.executemany( - PduEdgesTable.insert_statement(), - [(pdu_id, origin, p[0], p[1], context) for p in prev_pdus] - ) - - # Update the extremities table if this is not an outlier. - if not outlier: - - # First, we delete the new one from the forwards extremities table. - query = ( - "DELETE FROM %s WHERE pdu_id = ? AND origin = ?" - % PduForwardExtremitiesTable.table_name - ) - txn.executemany(query, list(p[:2] for p in prev_pdus)) - - # We only insert as a forward extremety the new pdu if there are no - # other pdus that reference it as a prev pdu - query = ( - "INSERT INTO %(table)s (pdu_id, origin, context) " - "SELECT ?, ?, ? WHERE NOT EXISTS (" - "SELECT 1 FROM %(pdu_edges)s WHERE " - "prev_pdu_id = ? AND prev_origin = ?" - ")" - ) % { - "table": PduForwardExtremitiesTable.table_name, - "pdu_edges": PduEdgesTable.table_name - } - - logger.debug("query: %s", query) - - txn.execute(query, (pdu_id, origin, context, pdu_id, origin)) - - # Insert all the prev_pdus as a backwards thing, they'll get - # deleted in a second if they're incorrect anyway. - txn.executemany( - PduBackwardExtremitiesTable.insert_statement(), - [(i, o, context) for i, o, _ in prev_pdus] - ) - - # Also delete from the backwards extremities table all ones that - # reference pdus that we have already seen - query = ( - "DELETE FROM %(pdu_back)s WHERE EXISTS (" - "SELECT 1 FROM %(pdus)s AS pdus " - "WHERE " - "%(pdu_back)s.pdu_id = pdus.pdu_id " - "AND %(pdu_back)s.origin = pdus.origin " - "AND not pdus.outlier " - ")" - ) % { - "pdu_back": PduBackwardExtremitiesTable.table_name, - "pdus": PdusTable.table_name, - } - txn.execute(query) - - -class StatePduStore(SQLBaseStore): - """A collection of queries for handling state PDUs. 
- """ - - def _persist_state_txn(self, txn, prev_pdus, cols): - """Inserts a state PDU into the database - - Args: - txn, - prev_pdus (list) - **cols: The columns to insert into the PdusTable and StatePdusTable - """ - pdu_entry = PdusTable.EntryType( - **{k: cols.get(k, None) for k in PdusTable.fields} - ) - state_entry = StatePdusTable.EntryType( - **{k: cols.get(k, None) for k in StatePdusTable.fields} - ) - - logger.debug("Inserting pdu: %s", repr(pdu_entry)) - logger.debug("Inserting state: %s", repr(state_entry)) - - txn.execute(PdusTable.insert_statement(), pdu_entry) - txn.execute(StatePdusTable.insert_statement(), state_entry) - - self._handle_prev_pdus( - txn, - pdu_entry.outlier, pdu_entry.pdu_id, pdu_entry.origin, prev_pdus, - pdu_entry.context - ) - - def get_unresolved_state_tree(self, new_state_pdu): - return self.runInteraction( - "get_unresolved_state_tree", - self._get_unresolved_state_tree, new_state_pdu - ) - - @log_function - def _get_unresolved_state_tree(self, txn, new_pdu): - current = self._get_current_interaction( - txn, - new_pdu.context, new_pdu.pdu_type, new_pdu.state_key - ) - - ReturnType = namedtuple( - "StateReturnType", ["new_branch", "current_branch"] - ) - return_value = ReturnType([new_pdu], []) - - if not current: - logger.debug("get_unresolved_state_tree No current state.") - return (return_value, None) - - return_value.current_branch.append(current) - - enum_branches = self._enumerate_state_branches( - txn, new_pdu, current - ) - - missing_branch = None - for branch, prev_state, state in enum_branches: - if state: - return_value[branch].append(state) - else: - # We don't have prev_state :( - missing_branch = branch - break - - return (return_value, missing_branch) - - def update_current_state(self, pdu_id, origin, context, pdu_type, - state_key): - return self.runInteraction( - "update_current_state", - self._update_current_state, - pdu_id, origin, context, pdu_type, state_key - ) - - def _update_current_state(self, txn, pdu_id, origin, context, pdu_type, - state_key): - query = ( - "INSERT OR REPLACE INTO %(curr)s (%(fields)s) VALUES (%(qs)s)" - ) % { - "curr": CurrentStateTable.table_name, - "fields": CurrentStateTable.get_fields_string(), - "qs": ", ".join(["?"] * len(CurrentStateTable.fields)) - } - - query_args = CurrentStateTable.EntryType( - pdu_id=pdu_id, - origin=origin, - context=context, - pdu_type=pdu_type, - state_key=state_key - ) - - txn.execute(query, query_args) - - def get_current_state_pdu(self, context, pdu_type, state_key): - """For a given context, pdu_type, state_key 3-tuple, return what is - currently considered the current state. - - Args: - txn - context (str) - pdu_type (str) - state_key (str) - - Returns: - PduEntry - """ - - return self.runInteraction( - "get_current_state_pdu", - self._get_current_state_pdu, context, pdu_type, state_key - ) - - def _get_current_state_pdu(self, txn, context, pdu_type, state_key): - return self._get_current_interaction(txn, context, pdu_type, state_key) - - def _get_current_interaction(self, txn, context, pdu_type, state_key): - logger.debug( - "_get_current_interaction %s %s %s", - context, pdu_type, state_key - ) - - fields = _pdu_state_joiner.get_fields( - PdusTable="p", StatePdusTable="s") - - current_query = ( - "SELECT %(fields)s FROM %(state)s as s " - "INNER JOIN %(pdus)s as p " - "ON s.pdu_id = p.pdu_id AND s.origin = p.origin " - "INNER JOIN %(curr)s as c " - "ON s.pdu_id = c.pdu_id AND s.origin = c.origin " - "WHERE s.context = ? AND s.pdu_type = ? AND s.state_key = ? 
" - ) % { - "fields": fields, - "curr": CurrentStateTable.table_name, - "state": StatePdusTable.table_name, - "pdus": PdusTable.table_name, - } - - txn.execute( - current_query, - (context, pdu_type, state_key) - ) - - row = txn.fetchone() - - result = PduEntry(*row) if row else None - - if not result: - logger.debug("_get_current_interaction not found") - else: - logger.debug( - "_get_current_interaction found %s %s", - result.pdu_id, result.origin - ) - - return result - - def handle_new_state(self, new_pdu): - """Actually perform conflict resolution on the new_pdu on the - assumption we have all the pdus required to perform it. - - Args: - new_pdu - - Returns: - bool: True if the new_pdu clobbered the current state, False if not - """ - return self.runInteraction( - "handle_new_state", - self._handle_new_state, new_pdu - ) - - def _handle_new_state(self, txn, new_pdu): - logger.debug( - "handle_new_state %s %s", - new_pdu.pdu_id, new_pdu.origin - ) - - current = self._get_current_interaction( - txn, - new_pdu.context, new_pdu.pdu_type, new_pdu.state_key - ) - - is_current = False - - if (not current or not current.prev_state_id - or not current.prev_state_origin): - # Oh, we don't have any state for this yet. - is_current = True - elif (current.pdu_id == new_pdu.prev_state_id - and current.origin == new_pdu.prev_state_origin): - # Oh! A direct clobber. Just do it. - is_current = True - else: - ## - # Ok, now loop through until we get to a common ancestor. - max_new = int(new_pdu.power_level) - max_current = int(current.power_level) - - enum_branches = self._enumerate_state_branches( - txn, new_pdu, current - ) - for branch, prev_state, state in enum_branches: - if not state: - raise RuntimeError( - "Could not find state_pdu %s %s" % - ( - prev_state.prev_state_id, - prev_state.prev_state_origin - ) - ) - - if branch == 0: - max_new = max(int(state.depth), max_new) - else: - max_current = max(int(state.depth), max_current) - - is_current = max_new > max_current - - if is_current: - logger.debug("handle_new_state make current") - - # Right, this is a new thing, so woo, just insert it. - txn.execute( - "INSERT OR REPLACE INTO %(curr)s (%(fields)s) VALUES (%(qs)s)" - % { - "curr": CurrentStateTable.table_name, - "fields": CurrentStateTable.get_fields_string(), - "qs": ", ".join(["?"] * len(CurrentStateTable.fields)) - }, - CurrentStateTable.EntryType( - *(new_pdu.__dict__[k] for k in CurrentStateTable.fields) - ) - ) - else: - logger.debug("handle_new_state not current") - - logger.debug("handle_new_state done") - - return is_current - - @log_function - def _enumerate_state_branches(self, txn, pdu_a, pdu_b): - branch_a = pdu_a - branch_b = pdu_b - - while True: - if (branch_a.pdu_id == branch_b.pdu_id - and branch_a.origin == branch_b.origin): - # Woo! 
We found a common ancestor - logger.debug("_enumerate_state_branches Found common ancestor") - break - - do_branch_a = ( - hasattr(branch_a, "prev_state_id") and - branch_a.prev_state_id - ) - - do_branch_b = ( - hasattr(branch_b, "prev_state_id") and - branch_b.prev_state_id - ) - - logger.debug( - "do_branch_a=%s, do_branch_b=%s", - do_branch_a, do_branch_b - ) - - if do_branch_a and do_branch_b: - do_branch_a = int(branch_a.depth) > int(branch_b.depth) - - if do_branch_a: - pdu_tuple = PduIdTuple( - branch_a.prev_state_id, - branch_a.prev_state_origin - ) - - prev_branch = branch_a - - logger.debug("getting branch_a prev %s", pdu_tuple) - branch_a = self._get_pdu_tuple(txn, *pdu_tuple) - if branch_a: - branch_a = Pdu.from_pdu_tuple(branch_a) - - logger.debug("branch_a=%s", branch_a) - - yield (0, prev_branch, branch_a) - - if not branch_a: - break - elif do_branch_b: - pdu_tuple = PduIdTuple( - branch_b.prev_state_id, - branch_b.prev_state_origin - ) - - prev_branch = branch_b - - logger.debug("getting branch_b prev %s", pdu_tuple) - branch_b = self._get_pdu_tuple(txn, *pdu_tuple) - if branch_b: - branch_b = Pdu.from_pdu_tuple(branch_b) - - logger.debug("branch_b=%s", branch_b) - - yield (1, prev_branch, branch_b) - - if not branch_b: - break - else: - break - - -class PdusTable(Table): - table_name = "pdus" - - fields = [ - "pdu_id", - "origin", - "context", - "pdu_type", - "ts", - "depth", - "is_state", - "content_json", - "unrecognized_keys", - "outlier", - "have_processed", - ] - - EntryType = namedtuple("PdusEntry", fields) - - -class PduDestinationsTable(Table): - table_name = "pdu_destinations" - - fields = [ - "pdu_id", - "origin", - "destination", - "delivered_ts", - ] - - EntryType = namedtuple("PduDestinationsEntry", fields) - - -class PduEdgesTable(Table): - table_name = "pdu_edges" - - fields = [ - "pdu_id", - "origin", - "prev_pdu_id", - "prev_origin", - "context" - ] - - EntryType = namedtuple("PduEdgesEntry", fields) - - -class PduForwardExtremitiesTable(Table): - table_name = "pdu_forward_extremities" - - fields = [ - "pdu_id", - "origin", - "context", - ] - - EntryType = namedtuple("PduForwardExtremitiesEntry", fields) - - -class PduBackwardExtremitiesTable(Table): - table_name = "pdu_backward_extremities" - - fields = [ - "pdu_id", - "origin", - "context", - ] - - EntryType = namedtuple("PduBackwardExtremitiesEntry", fields) - - -class ContextDepthTable(Table): - table_name = "context_depth" - - fields = [ - "context", - "min_depth", - ] - - EntryType = namedtuple("ContextDepthEntry", fields) - - -class StatePdusTable(Table): - table_name = "state_pdus" - - fields = [ - "pdu_id", - "origin", - "context", - "pdu_type", - "state_key", - "power_level", - "prev_state_id", - "prev_state_origin", - ] - - EntryType = namedtuple("StatePdusEntry", fields) - - -class CurrentStateTable(Table): - table_name = "current_state" - - fields = [ - "pdu_id", - "origin", - "context", - "pdu_type", - "state_key", - ] - - EntryType = namedtuple("CurrentStateEntry", fields) - -_pdu_state_joiner = JoinHelper(PdusTable, StatePdusTable) - - -# TODO: These should probably be put somewhere more sensible -PduIdTuple = namedtuple("PduIdTuple", ("pdu_id", "origin")) - -PduEntry = _pdu_state_joiner.EntryType -""" We are always interested in the join of the PdusTable and StatePdusTable, -rather than just the PdusTable. - -This does not include a prev_pdus key. 
-""" - -PduTuple = namedtuple( - "PduTuple", - ("pdu_entry", "prev_pdu_list", "hashes", "signatures", "edge_hashes") -) -""" This is a tuple of a `PduEntry` and a list of `PduIdTuple` that represent -the `prev_pdus` key of a PDU. -""" diff --git a/synapse/storage/schema/pdu.sql b/synapse/storage/schema/pdu.sql deleted file mode 100644 index 16e111a56c..0000000000 --- a/synapse/storage/schema/pdu.sql +++ /dev/null @@ -1,106 +0,0 @@ -/* Copyright 2014 OpenMarket Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ --- Stores pdus and their content -CREATE TABLE IF NOT EXISTS pdus( - pdu_id TEXT, - origin TEXT, - context TEXT, - pdu_type TEXT, - ts INTEGER, - depth INTEGER DEFAULT 0 NOT NULL, - is_state BOOL, - content_json TEXT, - unrecognized_keys TEXT, - outlier BOOL NOT NULL, - have_processed BOOL, - CONSTRAINT pdu_id_origin UNIQUE (pdu_id, origin) -); - --- Stores what the current state pdu is for a given (context, pdu_type, key) tuple -CREATE TABLE IF NOT EXISTS state_pdus( - pdu_id TEXT, - origin TEXT, - context TEXT, - pdu_type TEXT, - state_key TEXT, - power_level TEXT, - prev_state_id TEXT, - prev_state_origin TEXT, - CONSTRAINT pdu_id_origin UNIQUE (pdu_id, origin) - CONSTRAINT prev_pdu_id_origin UNIQUE (prev_state_id, prev_state_origin) -); - -CREATE TABLE IF NOT EXISTS current_state( - pdu_id TEXT, - origin TEXT, - context TEXT, - pdu_type TEXT, - state_key TEXT, - CONSTRAINT pdu_id_origin UNIQUE (pdu_id, origin) - CONSTRAINT uniqueness UNIQUE (context, pdu_type, state_key) ON CONFLICT REPLACE -); - --- Stores where each pdu we want to send should be sent and the delivery status. 
-create TABLE IF NOT EXISTS pdu_destinations( - pdu_id TEXT, - origin TEXT, - destination TEXT, - delivered_ts INTEGER DEFAULT 0, -- or 0 if not delivered - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, destination) ON CONFLICT REPLACE -); - -CREATE TABLE IF NOT EXISTS pdu_forward_extremities( - pdu_id TEXT, - origin TEXT, - context TEXT, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, context) ON CONFLICT REPLACE -); - -CREATE TABLE IF NOT EXISTS pdu_backward_extremities( - pdu_id TEXT, - origin TEXT, - context TEXT, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, context) ON CONFLICT REPLACE -); - -CREATE TABLE IF NOT EXISTS pdu_edges( - pdu_id TEXT, - origin TEXT, - prev_pdu_id TEXT, - prev_origin TEXT, - context TEXT, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, prev_pdu_id, prev_origin, context) -); - -CREATE TABLE IF NOT EXISTS context_depth( - context TEXT, - min_depth INTEGER, - CONSTRAINT uniqueness UNIQUE (context) -); - -CREATE INDEX IF NOT EXISTS context_depth_context ON context_depth(context); - - -CREATE INDEX IF NOT EXISTS pdu_id ON pdus(pdu_id, origin); - -CREATE INDEX IF NOT EXISTS dests_id ON pdu_destinations (pdu_id, origin); --- CREATE INDEX IF NOT EXISTS dests ON pdu_destinations (destination); - -CREATE INDEX IF NOT EXISTS pdu_extrem_context ON pdu_forward_extremities(context); -CREATE INDEX IF NOT EXISTS pdu_extrem_id ON pdu_forward_extremities(pdu_id, origin); - -CREATE INDEX IF NOT EXISTS pdu_edges_id ON pdu_edges(pdu_id, origin); - -CREATE INDEX IF NOT EXISTS pdu_b_extrem_context ON pdu_backward_extremities(context); diff --git a/synapse/storage/transactions.py b/synapse/storage/transactions.py index 6624348fd0..ea67900788 100644 --- a/synapse/storage/transactions.py +++ b/synapse/storage/transactions.py @@ -14,7 +14,6 @@ # limitations under the License. from ._base import SQLBaseStore, Table -from .pdu import PdusTable from collections import namedtuple @@ -207,50 +206,6 @@ class TransactionStore(SQLBaseStore): return ReceivedTransactionsTable.decode_results(txn.fetchall()) - def get_pdus_after_transaction(self, transaction_id, destination): - """For a given local transaction_id that we sent to a given destination - home server, return a list of PDUs that were sent to that destination - after it. - - Args: - txn - transaction_id (str) - destination (str) - - Returns - list: A list of PduTuple - """ - return self.runInteraction( - "get_pdus_after_transaction", - self._get_pdus_after_transaction, - transaction_id, destination - ) - - def _get_pdus_after_transaction(self, txn, transaction_id, destination): - - # Query that first get's all transaction_ids with an id greater than - # the one given from the `sent_transactions` table. Then JOIN on this - # from the `tx->pdu` table to get a list of (pdu_id, origin) that - # specify the pdus that were sent in those transactions. - query = ( - "SELECT pdu_id, pdu_origin FROM %(tx_pdu)s as tp " - "INNER JOIN %(sent_tx)s as st " - "ON tp.transaction_id = st.transaction_id " - "AND tp.destination = st.destination " - "WHERE st.id > (" - "SELECT id FROM %(sent_tx)s " - "WHERE transaction_id = ? AND destination = ?" 
- ) % { - "tx_pdu": TransactionsToPduTable.table_name, - "sent_tx": SentTransactions.table_name, - } - - txn.execute(query, (transaction_id, destination)) - - pdus = PdusTable.decode_results(txn.fetchall()) - - return self._get_pdu_tuples(txn, pdus) - class ReceivedTransactionsTable(Table): table_name = "received_transactions" -- cgit 1.5.1 From 2f39dc19a26cca25305d10654916d7413a56a23a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 14:27:14 +0000 Subject: Remove more references to dead PDU tables --- synapse/federation/pdu_codec.py | 4 +-- synapse/state.py | 23 ++++-------- synapse/storage/__init__.py | 9 ----- synapse/storage/schema/signatures.sql | 66 ----------------------------------- 4 files changed, 8 insertions(+), 94 deletions(-) delete mode 100644 synapse/storage/schema/signatures.sql (limited to 'synapse/storage/__init__.py') diff --git a/synapse/federation/pdu_codec.py b/synapse/federation/pdu_codec.py index 6d31286290..d4c896e163 100644 --- a/synapse/federation/pdu_codec.py +++ b/synapse/federation/pdu_codec.py @@ -32,11 +32,11 @@ class PduCodec(object): self.hs = hs def encode_event_id(self, local, domain): - return EventID.create(local, domain, self.hs).to_string() + return local def decode_event_id(self, event_id): e_id = self.hs.parse_eventid(event_id) - return e_id.localpart, e_id.domain + return event_id, e_id.domain def event_from_pdu(self, pdu): kwargs = {} diff --git a/synapse/state.py b/synapse/state.py index f7249705ce..2548deed28 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -77,29 +77,18 @@ class StateHandler(object): snapshot.fill_out_prev_events(event) yield self.annotate_state_groups(event) - current_state = snapshot.prev_state_pdu + if event.old_state_events: + current_state = event.old_state_events.get( + (event.type, event.state_key) + ) - if current_state: - event.prev_state = EventID.create( - current_state.pdu_id, current_state.origin, self.hs - ).to_string() + if current_state: + event.prev_state = current_state.event_id # TODO check current_state to see if the min power level is less # than the power level of the user # power_level = self._get_power_level_for_event(event) - e_id = self.hs.parse_eventid(event.event_id) - pdu_id = e_id.localpart - origin = e_id.domain - - yield self.store.update_current_state( - pdu_id=pdu_id, - origin=origin, - context=key.context, - pdu_type=key.type, - state_key=key.state_key - ) - defer.returnValue(True) @defer.inlineCallbacks diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 3faa571dd9..c2560f6045 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -67,7 +67,6 @@ SCHEMAS = [ "keys", "redactions", "state", - "signatures", "event_edges", "event_signatures", ] @@ -364,13 +363,6 @@ class DataStore(RoomMemberStore, RoomStore, membership_state = self._get_room_member(txn, user_id, room_id) prev_events = self._get_latest_events_in_room(txn, room_id) - if state_type is not None and state_key is not None: - prev_state_pdu = self._get_current_state_pdu( - txn, room_id, state_type, state_key - ) - else: - prev_state_pdu = None - return Snapshot( store=self, room_id=room_id, @@ -379,7 +371,6 @@ class DataStore(RoomMemberStore, RoomStore, membership_state=membership_state, state_type=state_type, state_key=state_key, - prev_state_pdu=prev_state_pdu, ) return self.runInteraction("snapshot_room", _snapshot) diff --git a/synapse/storage/schema/signatures.sql b/synapse/storage/schema/signatures.sql deleted file mode 100644 index 1c45a51bec..0000000000 
--- a/synapse/storage/schema/signatures.sql +++ /dev/null @@ -1,66 +0,0 @@ -/* Copyright 2014 OpenMarket Ltd - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -CREATE TABLE IF NOT EXISTS pdu_content_hashes ( - pdu_id TEXT, - origin TEXT, - algorithm TEXT, - hash BLOB, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) -); - -CREATE INDEX IF NOT EXISTS pdu_content_hashes_id ON pdu_content_hashes ( - pdu_id, origin -); - -CREATE TABLE IF NOT EXISTS pdu_reference_hashes ( - pdu_id TEXT, - origin TEXT, - algorithm TEXT, - hash BLOB, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, algorithm) -); - -CREATE INDEX IF NOT EXISTS pdu_reference_hashes_id ON pdu_reference_hashes ( - pdu_id, origin -); - -CREATE TABLE IF NOT EXISTS pdu_origin_signatures ( - pdu_id TEXT, - origin TEXT, - key_id TEXT, - signature BLOB, - CONSTRAINT uniqueness UNIQUE (pdu_id, origin, key_id) -); - -CREATE INDEX IF NOT EXISTS pdu_origin_signatures_id ON pdu_origin_signatures ( - pdu_id, origin -); - -CREATE TABLE IF NOT EXISTS pdu_edge_hashes( - pdu_id TEXT, - origin TEXT, - prev_pdu_id TEXT, - prev_origin TEXT, - algorithm TEXT, - hash BLOB, - CONSTRAINT uniqueness UNIQUE ( - pdu_id, origin, prev_pdu_id, prev_origin, algorithm - ) -); - -CREATE INDEX IF NOT EXISTS pdu_edge_hashes_id ON pdu_edge_hashes( - pdu_id, origin -); -- cgit 1.5.1 From d30d79b5bed98c7e46852c54875c976d3ac3bc0c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 15:35:39 +0000 Subject: Make prev_event signing work again. 
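The patch below switches reference hashing from PDUs to events: the event is redacted, its signatures are stripped, and the remainder is canonically encoded and hashed. A minimal standalone sketch of that computation (not the code in this patch; plain json.dumps stands in for syutil's encode_canonical_json, which additionally pins down escaping and encoding):

import hashlib
import json


def reference_hash_sketch(redacted_event_dict, hash_algorithm=hashlib.sha256):
    """Illustrative only: hash the redacted event minus its signatures.

    Assumes `redacted_event_dict` is already the pruned/redacted form of
    the event, as a plain JSON-serialisable dict.
    """
    stripped = dict(redacted_event_dict)
    stripped.pop("signatures", None)
    # Approximate canonical JSON: sorted keys, compact separators.
    canonical = json.dumps(stripped, sort_keys=True, separators=(",", ":"))
    hashed = hash_algorithm(canonical.encode("utf-8"))
    return hashed.name, hashed.digest()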
--- synapse/crypto/event_signing.py | 13 ++++++++++++- synapse/storage/__init__.py | 11 +++++------ synapse/storage/event_federation.py | 2 +- 3 files changed, 18 insertions(+), 8 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 61edd2c6f9..07e383e221 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -16,11 +16,12 @@ from synapse.federation.units import Pdu -from synapse.api.events.utils import prune_pdu +from synapse.api.events.utils import prune_pdu, prune_event from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json, verify_signed_json +import copy import hashlib import logging @@ -69,6 +70,16 @@ def compute_pdu_event_reference_hash(pdu, hash_algorithm=hashlib.sha256): return (hashed.name, hashed.digest()) +def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): + tmp_event = copy.deepcopy(event) + tmp_event = prune_event(tmp_event) + event_json = tmp_event.get_dict() + event_json.pop("signatures", None) + event_json_bytes = encode_canonical_json(event_json) + hashed = hash_algorithm(event_json_bytes) + return (hashed.name, hashed.digest()) + + def sign_event_pdu(pdu, signature_name, signing_key): tmp_pdu = Pdu(**pdu.get_dict()) tmp_pdu = prune_pdu(tmp_pdu) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index c2560f6045..31a0022d54 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -46,7 +46,7 @@ from .signatures import SignatureStore from syutil.base64util import decode_base64 -from synapse.crypto.event_signing import compute_pdu_event_reference_hash +from synapse.crypto.event_signing import compute_event_reference_hash import json @@ -271,11 +271,10 @@ class DataStore(RoomMemberStore, RoomStore, txn, event.event_id, prev_event_id, alg, hash_bytes ) - # TODO - # (ref_alg, ref_hash_bytes) = compute_pdu_event_reference_hash(pdu) - # self._store_event_reference_hash_txn( - # txn, event.event_id, ref_alg, ref_hash_bytes - # ) + (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event) + self._store_event_reference_hash_txn( + txn, event.event_id, ref_alg, ref_hash_bytes + ) self._update_min_depth_for_room_txn(txn, event.room_id, event.depth) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 8357071db6..dcc116bad2 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -69,7 +69,7 @@ class EventFederationStore(SQLBaseStore): results = [] for event_id, depth in txn.fetchall(): - hashes = self._get_prev_event_hashes_txn(txn, event_id) + hashes = self._get_event_reference_hashes_txn(txn, event_id) prev_hashes = { k: encode_base64(v) for k, v in hashes.items() if k == "sha256" -- cgit 1.5.1 From ecabff7eb49ea799d9f52fad1e05f1f9a4b31e1c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Oct 2014 17:08:52 +0000 Subject: Sign events --- synapse/crypto/event_signing.py | 20 ++++++++++++++++++++ synapse/storage/__init__.py | 6 ++++++ 2 files changed, 26 insertions(+) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 07e383e221..cb2db01c04 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -94,3 +94,23 @@ def verify_signed_event_pdu(pdu, signature_name, verify_key): tmp_pdu = prune_pdu(tmp_pdu) pdu_json =
tmp_pdu.get_dict() verify_signed_json(pdu_json, signature_name, verify_key) + + +def add_hashes_and_signatures(event, signature_name, signing_key, + hash_algorithm=hashlib.sha256): + tmp_event = copy.deepcopy(event) + tmp_event = prune_event(tmp_event) + redact_json = tmp_event.get_dict() + redact_json.pop("signatures", None) + redact_json = sign_json(redact_json, signature_name, signing_key) + event.signatures = redact_json["signatures"] + + event_json = event.get_full_dict() + # TODO: We need to sign the JSON that is going out via federation. + event_json.pop("age_ts", None) + event_json.pop("unsigned", None) + event_json.pop("signatures", None) + event_json.pop("hashes", None) + event_json_bytes = encode_canonical_json(event_json) + hashed = hash_algorithm(event_json_bytes) + event.hashes[hashed.name] = encode_base64(hashed.digest()) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 31a0022d54..1f39a4094e 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -255,6 +255,12 @@ class DataStore(RoomMemberStore, RoomStore, } ) + for hash_alg, hash_base64 in event.hashes.items(): + hash_bytes = decode_base64(hash_base64) + self._store_event_content_hash_txn( + txn, event.event_id, hash_alg, hash_bytes, + ) + if hasattr(event, "signatures"): signatures = event.signatures.get(event.origin, {}) -- cgit 1.5.1 From f139c02e95e55f793c86ae6b7ad079d93aae0754 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 3 Nov 2014 11:34:49 +0000 Subject: Formatting --- synapse/storage/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 1f39a4094e..6b8fed4502 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -199,7 +199,10 @@ class DataStore(RoomMemberStore, RoomStore, k: v for k, v in event.get_full_dict().items() if k not in vals.keys() and k not in [ - "redacted", "redacted_because", "signatures", "hashes", + "redacted", + "redacted_because", + "signatures", + "hashes", "prev_events", ] } -- cgit 1.5.1 From 4317c8e5835f0c15bf882f737d3e3c2a5b85f73f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Nov 2014 15:10:55 +0000 Subject: Implement new replace_state and changed prev_state `prev_state` is now a list of previous state ids, similar to prev_events. `replaces_state` now points to what we think was replaced.
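In outline, the state bookkeeping this commit introduces can be sketched as follows (a simplified rendering of the annotate_state_groups hunk further down in this patch; the helper name is illustrative): when a state event lands on a (type, state_key) pair that already has an entry, the old entry's event ID is recorded as replaces_state before it is overwritten.

def apply_state_event(new_state, event):
    """Illustrative sketch: update a {(type, state_key): event} map.

    `new_state` maps (type, state_key) tuples to the current state event;
    `event` is assumed to expose .type, .state_key and .event_id.
    """
    key = (event.type, event.state_key)
    if key in new_state:
        # Remember which state event this one displaces.
        event.replaces_state = new_state[key].event_id
    new_state[key] = event
    return new_state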
--- synapse/api/events/__init__.py | 1 + synapse/handlers/directory.py | 5 +- synapse/handlers/federation.py | 4 +- synapse/handlers/message.py | 11 ++-- synapse/handlers/profile.py | 6 +-- synapse/handlers/room.py | 16 ++---- synapse/rest/room.py | 2 +- synapse/state.py | 39 ++------------ synapse/storage/__init__.py | 92 +++++++++++++++++++++++++--------- synapse/storage/_base.py | 66 +++++++++++++++++------- synapse/storage/event_federation.py | 64 ++++++++++++++++++++--- synapse/storage/schema/event_edges.sql | 40 ++++++++++----- synapse/util/jsonobject.py | 2 +- 13 files changed, 220 insertions(+), 128 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index 168b812311..fc3f350570 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -60,6 +60,7 @@ class SynapseEvent(JsonEncodedObject): "age_ts", "prev_content", "prev_state", + "replaces_state", "redacted_because", "origin_server_ts", ] diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index 6e897e915d..164363cdc5 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -147,10 +147,7 @@ class DirectoryHandler(BaseHandler): content={"aliases": aliases}, ) - snapshot = yield self.store.snapshot_room( - room_id=room_id, - user_id=user_id, - ) + snapshot = yield self.store.snapshot_room(event) yield self._on_new_room_event( event, snapshot, extra_users=[user_id], suppress_auth=True diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 1464a60937..513ec9a5e3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -313,9 +313,7 @@ class FederationHandler(BaseHandler): state_key=user_id, ) - snapshot = yield self.store.snapshot_room( - event.room_id, event.user_id, - ) + snapshot = yield self.store.snapshot_room(event) snapshot.fill_out_prev_events(event) yield self.state_handler.annotate_state_groups(event) diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index c6f6ab14d1..8394013df3 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -81,7 +81,7 @@ class MessageHandler(BaseHandler): user = self.hs.parse_userid(event.user_id) assert user.is_mine, "User must be our own: %s" % (user,) - snapshot = yield self.store.snapshot_room(event.room_id, event.user_id) + snapshot = yield self.store.snapshot_room(event) yield self._on_new_room_event( event, snapshot, suppress_auth=suppress_auth @@ -141,12 +141,7 @@ class MessageHandler(BaseHandler): SynapseError if something went wrong. 
""" - snapshot = yield self.store.snapshot_room( - event.room_id, - event.user_id, - state_type=event.type, - state_key=event.state_key, - ) + snapshot = yield self.store.snapshot_room(event) yield self._on_new_room_event(event, snapshot) @@ -214,7 +209,7 @@ class MessageHandler(BaseHandler): @defer.inlineCallbacks def send_feedback(self, event): - snapshot = yield self.store.snapshot_room(event.room_id, event.user_id) + snapshot = yield self.store.snapshot_room(event) # store message in db yield self._on_new_room_event(event, snapshot) diff --git a/synapse/handlers/profile.py b/synapse/handlers/profile.py index 4cd0a06093..e47814483a 100644 --- a/synapse/handlers/profile.py +++ b/synapse/handlers/profile.py @@ -17,7 +17,6 @@ from twisted.internet import defer from synapse.api.errors import SynapseError, AuthError, CodeMessageException from synapse.api.constants import Membership -from synapse.api.events.room import RoomMemberEvent from ._base import BaseHandler @@ -196,10 +195,7 @@ class ProfileHandler(BaseHandler): ) for j in joins: - snapshot = yield self.store.snapshot_room( - j.room_id, j.state_key, RoomMemberEvent.TYPE, - j.state_key - ) + snapshot = yield self.store.snapshot_room(j) content = { "membership": j.content["membership"], diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index f176ad39bf..55c893eb58 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -122,10 +122,7 @@ class RoomCreationHandler(BaseHandler): @defer.inlineCallbacks def handle_event(event): - snapshot = yield self.store.snapshot_room( - room_id=room_id, - user_id=user_id, - ) + snapshot = yield self.store.snapshot_room(event) logger.debug("Event: %s", event) @@ -364,10 +361,8 @@ class RoomMemberHandler(BaseHandler): """ target_user_id = event.state_key - snapshot = yield self.store.snapshot_room( - event.room_id, event.user_id, - RoomMemberEvent.TYPE, target_user_id - ) + snapshot = yield self.store.snapshot_room(event) + ## TODO(markjh): get prev state from snapshot. prev_state = yield self.store.get_room_member( target_user_id, event.room_id @@ -442,10 +437,7 @@ class RoomMemberHandler(BaseHandler): content=content, ) - snapshot = yield self.store.snapshot_room( - room_id, joinee.to_string(), RoomMemberEvent.TYPE, - joinee.to_string() - ) + snapshot = yield self.store.snapshot_room(new_event) yield self._do_join(new_event, snapshot, room_host=host, do_auth=True) diff --git a/synapse/rest/room.py b/synapse/rest/room.py index ec0ce78fda..997895dab0 100644 --- a/synapse/rest/room.py +++ b/synapse/rest/room.py @@ -138,7 +138,7 @@ class RoomStateEventRestServlet(RestServlet): raise SynapseError( 404, "Event not found.", errcode=Codes.NOT_FOUND ) - defer.returnValue((200, data[0].get_dict()["content"])) + defer.returnValue((200, data.get_dict()["content"])) @defer.inlineCallbacks def on_PUT(self, request, room_id, event_type, state_key): diff --git a/synapse/state.py b/synapse/state.py index 32744e047c..97a8160a33 100644 --- a/synapse/state.py +++ b/synapse/state.py @@ -45,40 +45,6 @@ class StateHandler(object): self.server_name = hs.hostname self.hs = hs - @defer.inlineCallbacks - @log_function - def handle_new_event(self, event, snapshot): - """ Given an event this works out if a) we have sufficient power level - to update the state and b) works out what the prev_state should be. - - Returns: - Deferred: Resolved with a boolean indicating if we successfully - updated the state. - - Raised: - AuthError - """ - # This needs to be done in a transaction. 
- - if not hasattr(event, "state_key"): - return - - # Now I need to fill out the prev state and work out if it has auth - # (w.r.t. to power levels) - - snapshot.fill_out_prev_events(event) - yield self.annotate_state_groups(event) - - if event.old_state_events: - current_state = event.old_state_events.get( - (event.type, event.state_key) - ) - - if current_state: - event.prev_state = current_state.event_id - - defer.returnValue(True) - @defer.inlineCallbacks @log_function def annotate_state_groups(self, event, old_state=None): @@ -111,7 +77,10 @@ class StateHandler(object): event.old_state_events = copy.deepcopy(new_state) if hasattr(event, "state_key"): - new_state[(event.type, event.state_key)] = event + key = (event.type, event.state_key) + if key in new_state: + event.replaces_state = new_state[key].event_id + new_state[key] = event event.state_group = None event.state_events = new_state diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 6b8fed4502..2d62fc2ed0 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -242,8 +242,8 @@ class DataStore(RoomMemberStore, RoomStore, "state_key": event.state_key, } - if hasattr(event, "prev_state"): - vals["prev_state"] = event.prev_state + if hasattr(event, "replaces_state"): + vals["prev_state"] = event.replaces_state self._simple_insert_txn(txn, "state_events", vals) @@ -258,6 +258,40 @@ class DataStore(RoomMemberStore, RoomStore, } ) + for e_id, h in event.prev_state: + self._simple_insert_txn( + txn, + table="event_edges", + values={ + "event_id": event.event_id, + "prev_event_id": e_id, + "room_id": event.room_id, + "is_state": 1, + }, + or_ignore=True, + ) + + if not backfilled: + self._simple_insert_txn( + txn, + table="state_forward_extremities", + values={ + "event_id": event.event_id, + "room_id": event.room_id, + "type": event.type, + "state_key": event.state_key, + } + ) + + for prev_state_id, _ in event.prev_state: + self._simple_delete_txn( + txn, + table="state_forward_extremities", + keyvalues={ + "event_id": prev_state_id, + } + ) + for hash_alg, hash_base64 in event.hashes.items(): hash_bytes = decode_base64(hash_base64) self._store_event_content_hash_txn( @@ -357,7 +391,7 @@ class DataStore(RoomMemberStore, RoomStore, ], ) - def snapshot_room(self, room_id, user_id, state_type=None, state_key=None): + def snapshot_room(self, event): """Snapshot the room for an update by a user Args: room_id (synapse.types.RoomId): The room to snapshot. @@ -368,16 +402,29 @@ class DataStore(RoomMemberStore, RoomStore, synapse.storage.Snapshot: A snapshot of the state of the room. 
""" def _snapshot(txn): - membership_state = self._get_room_member(txn, user_id, room_id) - prev_events = self._get_latest_events_in_room(txn, room_id) + prev_events = self._get_latest_events_in_room( + txn, + event.room_id + ) + + prev_state = None + state_key = None + if hasattr(event, "state_key"): + state_key = event.state_key + prev_state = self._get_latest_state_in_room( + txn, + event.room_id, + type=event.type, + state_key=state_key, + ) return Snapshot( store=self, - room_id=room_id, - user_id=user_id, + room_id=event.room_id, + user_id=event.user_id, prev_events=prev_events, - membership_state=membership_state, - state_type=state_type, + prev_state=prev_state, + state_type=event.type, state_key=state_key, ) @@ -400,30 +447,29 @@ class Snapshot(object): """ def __init__(self, store, room_id, user_id, prev_events, - membership_state, state_type=None, state_key=None, - prev_state_pdu=None): + prev_state, state_type=None, state_key=None): self.store = store self.room_id = room_id self.user_id = user_id self.prev_events = prev_events - self.membership_state = membership_state + self.prev_state = prev_state self.state_type = state_type self.state_key = state_key - self.prev_state_pdu = prev_state_pdu def fill_out_prev_events(self, event): - if hasattr(event, "prev_events"): - return + if not hasattr(event, "prev_events"): + event.prev_events = [ + (event_id, hashes) + for event_id, hashes, _ in self.prev_events + ] - event.prev_events = [ - (event_id, hashes) - for event_id, hashes, _ in self.prev_events - ] + if self.prev_events: + event.depth = max([int(v) for _, _, v in self.prev_events]) + 1 + else: + event.depth = 0 - if self.prev_events: - event.depth = max([int(v) for _, _, v in self.prev_events]) + 1 - else: - event.depth = 0 + if not hasattr(event, "prev_state") and self.prev_state is not None: + event.prev_state = self.prev_state def schema_path(schema): diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 7d445b4633..7821fc4726 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -245,7 +245,6 @@ class SQLBaseStore(object): return [r[0] for r in txn.fetchall()] - def _simple_select_onecol(self, table, keyvalues, retcol): """Executes a SELECT query on the named table, which returns a list comprising of the values of the named column from the selected rows. @@ -273,17 +272,30 @@ class SQLBaseStore(object): keyvalues : dict of column names and values to select the rows with retcols : list of strings giving the names of the columns to return """ + return self.runInteraction( + "_simple_select_list", + self._simple_select_list_txn, + table, keyvalues, retcols + ) + + def _simple_select_list_txn(self, txn, table, keyvalues, retcols): + """Executes a SELECT query on the named table, which may return zero or + more rows, returning the result as a list of dicts. + + Args: + txn : Transaction object + table : string giving the table name + keyvalues : dict of column names and values to select the rows with + retcols : list of strings giving the names of the columns to return + """ sql = "SELECT %s FROM %s WHERE %s" % ( ", ".join(retcols), table, - " AND ".join("%s = ?" % (k) for k in keyvalues) + " AND ".join("%s = ?" 
% (k, ) for k in keyvalues) ) - def func(txn): - txn.execute(sql, keyvalues.values()) - return self.cursor_to_dict(txn) - - return self.runInteraction("_simple_select_list", func) + txn.execute(sql, keyvalues.values()) + return self.cursor_to_dict(txn) def _simple_update_one(self, table, keyvalues, updatevalues, retcols=None): @@ -417,6 +429,10 @@ class SQLBaseStore(object): d.pop("topological_ordering", None) d.pop("processed", None) d["origin_server_ts"] = d.pop("ts", 0) + replaces_state = d.pop("prev_state", None) + + if replaces_state: + d["replaces_state"] = replaces_state d.update(json.loads(row_dict["unrecognized_keys"])) d["content"] = json.loads(d["content"]) @@ -450,16 +466,32 @@ class SQLBaseStore(object): k: encode_base64(v) for k, v in signatures.items() } - ev.prev_events = self._get_prev_events(txn, ev.event_id) - - if hasattr(ev, "prev_state"): - # Load previous state_content. - # TODO: Should we be pulling this out above? - cursor = txn.execute(select_event_sql, (ev.prev_state,)) - prevs = self.cursor_to_dict(cursor) - if prevs: - prev = self._parse_event_from_row(prevs[0]) - ev.prev_content = prev.content + prevs = self._get_prev_events_and_state(txn, ev.event_id) + + ev.prev_events = [ + (e_id, h) + for e_id, h, is_state in prevs + if is_state == 0 + ] + + if hasattr(ev, "state_key"): + ev.prev_state = [ + (e_id, h) + for e_id, h, is_state in prevs + if is_state == 1 + ] + + if hasattr(ev, "replaces_state"): + # Load previous state_content. + # FIXME (erikj): Handle multiple prev_states. + cursor = txn.execute( + select_event_sql, + (ev.replaces_state,) + ) + prevs = self.cursor_to_dict(cursor) + if prevs: + prev = self._parse_event_from_row(prevs[0]) + ev.prev_content = prev.content if not hasattr(ev, "redacted"): logger.debug("Doesn't have redacted key: %s", ev) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index f427aba879..180a764134 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -69,19 +69,21 @@ class EventFederationStore(SQLBaseStore): return results - def _get_prev_events(self, txn, event_id): - prev_ids = self._simple_select_onecol_txn( + def _get_latest_state_in_room(self, txn, room_id, type, state_key): + event_ids = self._simple_select_onecol_txn( txn, - table="event_edges", + table="state_forward_extremities", keyvalues={ - "event_id": event_id, + "room_id": room_id, + "type": type, + "state_key": state_key, }, - retcol="prev_event_id", + retcol="event_id", ) results = [] - for prev_event_id in prev_ids: - hashes = self._get_event_reference_hashes_txn(txn, prev_event_id) + for event_id in event_ids: + hashes = self._get_event_reference_hashes_txn(txn, event_id) prev_hashes = { k: encode_base64(v) for k, v in hashes.items() if k == "sha256" @@ -90,6 +92,53 @@ class EventFederationStore(SQLBaseStore): return results + def _get_prev_events(self, txn, event_id): + results = self._get_prev_events_and_state( + txn, + event_id, + is_state=0, + ) + + return [(e_id, h, ) for e_id, h, _ in results] + + def _get_prev_state(self, txn, event_id): + results = self._get_prev_events_and_state( + txn, + event_id, + is_state=1, + ) + + return [(e_id, h, ) for e_id, h, _ in results] + + def _get_prev_events_and_state(self, txn, event_id, is_state=None): + keyvalues = { + "event_id": event_id, + } + + if is_state is not None: + keyvalues["is_state"] = is_state + + res = self._simple_select_list_txn( + txn, + table="event_edges", + keyvalues=keyvalues, + retcols=["prev_event_id", "is_state"], 
+ ) + + results = [] + for d in res: + hashes = self._get_event_reference_hashes_txn( + txn, + d["prev_event_id"] + ) + prev_hashes = { + k: encode_base64(v) for k, v in hashes.items() + if k == "sha256" + } + results.append((d["prev_event_id"], prev_hashes, d["is_state"])) + + return results + def get_min_depth(self, room_id): return self.runInteraction( "get_min_depth", @@ -135,6 +184,7 @@ class EventFederationStore(SQLBaseStore): "event_id": event_id, "prev_event_id": e_id, "room_id": room_id, + "is_state": 0, }, or_ignore=True, ) diff --git a/synapse/storage/schema/event_edges.sql b/synapse/storage/schema/event_edges.sql index e5f768c705..51695826a8 100644 --- a/synapse/storage/schema/event_edges.sql +++ b/synapse/storage/schema/event_edges.sql @@ -1,7 +1,7 @@ CREATE TABLE IF NOT EXISTS event_forward_extremities( - event_id TEXT, - room_id TEXT, + event_id TEXT NOT NULL, + room_id TEXT NOT NULL, CONSTRAINT uniqueness UNIQUE (event_id, room_id) ON CONFLICT REPLACE ); @@ -10,8 +10,8 @@ CREATE INDEX IF NOT EXISTS ev_extrem_id ON event_forward_extremities(event_id); CREATE TABLE IF NOT EXISTS event_backward_extremities( - event_id TEXT, - room_id TEXT, + event_id TEXT NOT NULL, + room_id TEXT NOT NULL, CONSTRAINT uniqueness UNIQUE (event_id, room_id) ON CONFLICT REPLACE ); @@ -20,10 +20,11 @@ CREATE INDEX IF NOT EXISTS ev_b_extrem_id ON event_backward_extremities(event_id CREATE TABLE IF NOT EXISTS event_edges( - event_id TEXT, - prev_event_id TEXT, - room_id TEXT, - CONSTRAINT uniqueness UNIQUE (event_id, prev_event_id, room_id) + event_id TEXT NOT NULL, + prev_event_id TEXT NOT NULL, + room_id TEXT NOT NULL, + is_state INTEGER NOT NULL, + CONSTRAINT uniqueness UNIQUE (event_id, prev_event_id, room_id, is_state) ); CREATE INDEX IF NOT EXISTS ev_edges_id ON event_edges(event_id); @@ -31,8 +32,8 @@ CREATE INDEX IF NOT EXISTS ev_edges_prev_id ON event_edges(prev_event_id); CREATE TABLE IF NOT EXISTS room_depth( - room_id TEXT, - min_depth INTEGER, + room_id TEXT NOT NULL, + min_depth INTEGER NOT NULL, CONSTRAINT uniqueness UNIQUE (room_id) ); @@ -40,10 +41,25 @@ CREATE INDEX IF NOT EXISTS room_depth_room ON room_depth(room_id); create TABLE IF NOT EXISTS event_destinations( - event_id TEXT, - destination TEXT, + event_id TEXT NOT NULL, + destination TEXT NOT NULL, delivered_ts INTEGER DEFAULT 0, -- or 0 if not delivered CONSTRAINT uniqueness UNIQUE (event_id, destination) ON CONFLICT REPLACE ); CREATE INDEX IF NOT EXISTS event_destinations_id ON event_destinations(event_id); + + +CREATE TABLE IF NOT EXISTS state_forward_extremities( + event_id TEXT NOT NULL, + room_id TEXT NOT NULL, + type TEXT NOT NULL, + state_key TEXT NOT NULL, + CONSTRAINT uniqueness UNIQUE (event_id, room_id) ON CONFLICT REPLACE +); + +CREATE INDEX IF NOT EXISTS st_extrem_keys ON state_forward_extremities( + room_id, type, state_key +); +CREATE INDEX IF NOT EXISTS st_extrem_id ON state_forward_extremities(event_id); + diff --git a/synapse/util/jsonobject.py b/synapse/util/jsonobject.py index c91eb897a8..e79b68f661 100644 --- a/synapse/util/jsonobject.py +++ b/synapse/util/jsonobject.py @@ -80,7 +80,7 @@ class JsonEncodedObject(object): def get_full_dict(self): d = { - k: v for (k, v) in self.__dict__.items() + k: _encode(v) for (k, v) in self.__dict__.items() if k in self.valid_keys or k in self.internal_keys } d.update(self.unrecognized_keys) -- cgit 1.5.1 From 351c64e99e5677096f9a2ae2cd7e84dbc1887878 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Nov 2014 16:59:13 +0000 Subject: Amalgamate all power levels. 
Remove concept of reqired power levels, something similiar can be done using the new power level event. --- synapse/api/auth.py | 221 ++++++++++++++--------------------------- synapse/api/events/__init__.py | 3 +- synapse/api/events/factory.py | 7 +- synapse/api/events/room.py | 22 ---- synapse/api/events/utils.py | 23 ++--- synapse/handlers/room.py | 52 +++------- synapse/storage/__init__.py | 12 --- synapse/storage/room.py | 157 ----------------------------- 8 files changed, 102 insertions(+), 395 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 9eb0491c97..462e97bd90 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -21,8 +21,8 @@ from synapse.api.constants import Membership, JoinRules from synapse.api.errors import AuthError, StoreError, Codes, SynapseError from synapse.api.events.room import ( RoomMemberEvent, RoomPowerLevelsEvent, RoomRedactionEvent, - RoomJoinRulesEvent, RoomOpsPowerLevelsEvent, InviteJoinEvent, - RoomCreateEvent, RoomSendEventLevelEvent, RoomAddStateLevelEvent, + RoomJoinRulesEvent, InviteJoinEvent, + RoomCreateEvent, ) from synapse.util.logutils import log_function @@ -51,6 +51,7 @@ class Auth(object): if event.old_state_events is None: # Oh, we don't know what the state of the room was, so we # are trusting that this is allowed (at least for now) + logger.warn("Trusting event: %s", event.event_id) return True if hasattr(event, "outlier") and event.outlier is True: @@ -64,7 +65,7 @@ class Auth(object): return True if event.type == RoomMemberEvent.TYPE: - self._can_replace_state(event) + self._can_send_event(event) allowed = self.is_membership_change_allowed(event) if allowed: logger.debug("Allowing! %s", event) @@ -72,16 +73,7 @@ class Auth(object): logger.debug("Denying! 
%s", event) return allowed - if not event.type == InviteJoinEvent.TYPE: - self.check_event_sender_in_room(event) - - if is_state: - # TODO (erikj): This really only should be called for *new* - # state - self._can_add_state(event) - self._can_replace_state(event) - else: - self._can_send_event(event) + self._can_send_event(event) if event.type == RoomPowerLevelsEvent.TYPE: self._check_power_levels(event) @@ -239,21 +231,21 @@ class Auth(object): power_level_event = event.old_state_events.get(key) level = None if power_level_event: - level = power_level_event.content.get(user_id) + level = power_level_event.content.get("users", {}).get(user_id) if not level: - level = power_level_event.content.get("default", 0) + level = power_level_event.content.get("users_default", 0) return level def _get_ops_level_from_event_state(self, event): - key = (RoomOpsPowerLevelsEvent.TYPE, "", ) - ops_event = event.old_state_events.get(key) + key = (RoomPowerLevelsEvent.TYPE, "", ) + power_level_event = event.old_state_events.get(key) - if ops_event: + if power_level_event: return ( - ops_event.content.get("ban_level"), - ops_event.content.get("kick_level"), - ops_event.content.get("redact_level"), + power_level_event.content.get("ban", 50), + power_level_event.content.get("kick", 50), + power_level_event.content.get("redact", 50), ) return None, None, None, @@ -325,13 +317,22 @@ class Auth(object): @log_function def _can_send_event(self, event): - key = (RoomSendEventLevelEvent.TYPE, "", ) + key = (RoomPowerLevelsEvent.TYPE, "", ) send_level_event = event.old_state_events.get(key) send_level = None if send_level_event: - send_level = send_level_event.content.get(event.user_id) + send_level = send_level_event.content.get("events", {}).get( + event.type + ) if not send_level: - send_level = send_level_event.content.get("level", 0) + if hasattr(event, "state_key"): + send_level = send_level_event.content.get( + "state_default", 50 + ) + else: + send_level = send_level_event.content.get( + "events_default", 0 + ) if send_level: send_level = int(send_level) @@ -350,85 +351,21 @@ class Auth(object): if user_level < send_level: raise AuthError( - 403, "You don't have permission to post to the room" - ) - - return True - - def _can_add_state(self, event): - key = (RoomAddStateLevelEvent.TYPE, "", ) - add_level_event = event.old_state_events.get(key) - add_level = None - if add_level_event: - add_level = add_level_event.content.get(event.user_id) - if not add_level: - add_level = add_level_event.content.get("level", 0) - - if add_level: - add_level = int(add_level) - else: - add_level = 0 - - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - ) - - user_level = int(user_level) - - if user_level < add_level: - raise AuthError( - 403, "You don't have permission to add state to the room" + 403, "You don't have permission to post that to the room" ) return True - def _can_replace_state(self, event): - user_level = self._get_power_level_from_event_state( - event, - event.user_id, - ) - - if user_level: - user_level = int(user_level) - else: - user_level = 0 - - logger.debug( - "Checking power level for %s, %s", event.user_id, user_level - ) - - key = (event.type, event.state_key, ) - current_state = event.old_state_events.get(key) - - if current_state and hasattr(current_state, "required_power_level"): - req = current_state.required_power_level - - logger.debug("Checked power level for %s, %s", event.user_id, req) - if user_level < req: - raise AuthError( - 403, - "You don't have permission to 
change that state" - ) - def _check_redaction(self, event): user_level = self._get_power_level_from_event_state( event, event.user_id, ) - if user_level: - user_level = int(user_level) - else: - user_level = 0 - _, _, redact_level = self._get_ops_level_from_event_state( event ) - if not redact_level: - redact_level = 50 - if user_level < redact_level: raise AuthError( 403, @@ -436,14 +373,9 @@ class Auth(object): ) def _check_power_levels(self, event): - for k, v in event.content.items(): - if k == "default": - continue - - # FIXME (erikj): We don't want hsob_Ts in content. - if k == "hsob_ts": - continue - + user_list = event.content.get("users", {}) + # Validate users + for k, v in user_list.items(): try: self.hs.parse_userid(k) except: @@ -459,72 +391,63 @@ class Auth(object): if not current_state: return - else: - current_state = current_state[0] user_level = self._get_power_level_from_event_state( event, event.user_id, ) - if user_level: - user_level = int(user_level) - else: - user_level = 0 + # Check other levels: + levels_to_check = [ + ("users_default", []), + ("events_default", []), + ("ban", []), + ("redact", []), + ("kick", []), + ] + + old_list = current_state.content.get("users") + for user in set(old_list.keys() + user_list.keys()): + levels_to_check.append( + (user, ["users"]) + ) - old_list = current_state.content + old_list = current_state.content.get("events") + new_list = event.content.get("events") + for ev_id in set(old_list.keys() + new_list.keys()): + levels_to_check.append( + (ev_id, ["events"]) + ) - # FIXME (erikj) - old_people = {k: v for k, v in old_list.items() if k.startswith("@")} - new_people = { - k: v for k, v in event.content.items() - if k.startswith("@") - } + old_state = current_state.content + new_state = event.content - removed = set(old_people.keys()) - set(new_people.keys()) - added = set(new_people.keys()) - set(old_people.keys()) - same = set(old_people.keys()) & set(new_people.keys()) + for level_to_check, dir in levels_to_check: + old_loc = old_state + for d in dir: + old_loc = old_loc.get(d, {}) - for r in removed: - if int(old_list[r]) > user_level: - raise AuthError( - 403, - "You don't have permission to remove user: %s" % (r, ) - ) + new_loc = new_state + for d in dir: + new_loc = new_loc.get(d, {}) - for n in added: - if int(event.content[n]) > user_level: - raise AuthError( - 403, - "You don't have permission to add ops level greater " - "than your own" - ) + if level_to_check in old_loc: + old_level = int(old_loc[level_to_check]) + else: + old_level = None - for s in same: - if int(event.content[s]) != int(old_list[s]): - if int(event.content[s]) > user_level: - raise AuthError( - 403, - "You don't have permission to add ops level greater " - "than your own" - ) + if level_to_check in new_loc: + new_level = int(new_loc[level_to_check]) + else: + new_level = None - if "default" in old_list: - old_default = int(old_list["default"]) + if new_level is not None and old_level is not None: + if new_level == old_level: + continue - if old_default > user_level: + if old_level > user_level or new_level > user_level: raise AuthError( 403, - "You don't have permission to add ops level greater than " - "your own" + "You don't have permission to add ops level greater " + "than your own" ) - - if "default" in event.content: - new_default = int(event.content["default"]) - - if new_default > user_level: - raise AuthError( - 403, - "You don't have permission to add ops level greater " - "than your own" - ) diff --git a/synapse/api/events/__init__.py 
b/synapse/api/events/__init__.py index 98a66144e7..84d3a98365 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -56,12 +56,12 @@ class SynapseEvent(JsonEncodedObject): "user_id", # sender/initiator "content", # HTTP body, JSON "state_key", - "required_power_level", "age_ts", "prev_content", "replaces_state", "redacted_because", "origin_server_ts", + "auth_chains", ] internal_keys = [ @@ -70,7 +70,6 @@ class SynapseEvent(JsonEncodedObject): "destinations", "origin", "outlier", - "power_level", "redacted", "prev_events", "hashes", diff --git a/synapse/api/events/factory.py b/synapse/api/events/factory.py index 9134c82eff..a1ec708a81 100644 --- a/synapse/api/events/factory.py +++ b/synapse/api/events/factory.py @@ -16,8 +16,8 @@ from synapse.api.events.room import ( RoomTopicEvent, MessageEvent, RoomMemberEvent, FeedbackEvent, InviteJoinEvent, RoomConfigEvent, RoomNameEvent, GenericEvent, - RoomPowerLevelsEvent, RoomJoinRulesEvent, RoomOpsPowerLevelsEvent, - RoomCreateEvent, RoomAddStateLevelEvent, RoomSendEventLevelEvent, + RoomPowerLevelsEvent, RoomJoinRulesEvent, + RoomCreateEvent, RoomRedactionEvent, ) @@ -39,9 +39,6 @@ class EventFactory(object): RoomPowerLevelsEvent, RoomJoinRulesEvent, RoomCreateEvent, - RoomAddStateLevelEvent, - RoomSendEventLevelEvent, - RoomOpsPowerLevelsEvent, RoomRedactionEvent, ] diff --git a/synapse/api/events/room.py b/synapse/api/events/room.py index cd936074fc..25bc883706 100644 --- a/synapse/api/events/room.py +++ b/synapse/api/events/room.py @@ -153,28 +153,6 @@ class RoomPowerLevelsEvent(SynapseStateEvent): def get_content_template(self): return {} - -class RoomAddStateLevelEvent(SynapseStateEvent): - TYPE = "m.room.add_state_level" - - def get_content_template(self): - return {} - - -class RoomSendEventLevelEvent(SynapseStateEvent): - TYPE = "m.room.send_event_level" - - def get_content_template(self): - return {} - - -class RoomOpsPowerLevelsEvent(SynapseStateEvent): - TYPE = "m.room.ops_levels" - - def get_content_template(self): - return {} - - class RoomAliasesEvent(SynapseStateEvent): TYPE = "m.room.aliases" diff --git a/synapse/api/events/utils.py b/synapse/api/events/utils.py index 31601fd3a9..5fc79105b5 100644 --- a/synapse/api/events/utils.py +++ b/synapse/api/events/utils.py @@ -15,7 +15,6 @@ from .room import ( RoomMemberEvent, RoomJoinRulesEvent, RoomPowerLevelsEvent, - RoomAddStateLevelEvent, RoomSendEventLevelEvent, RoomOpsPowerLevelsEvent, RoomAliasesEvent, RoomCreateEvent, ) @@ -52,17 +51,17 @@ def _prune_event_or_pdu(event_type, event): elif event_type == RoomJoinRulesEvent.TYPE: add_fields("join_rule") elif event_type == RoomPowerLevelsEvent.TYPE: - # TODO: Actually check these are valid user_ids etc. 
- add_fields("default") - for k, v in event.content.items(): - if k.startswith("@") and isinstance(v, (int, long)): - new_content[k] = v - elif event_type == RoomAddStateLevelEvent.TYPE: - add_fields("level") - elif event_type == RoomSendEventLevelEvent.TYPE: - add_fields("level") - elif event_type == RoomOpsPowerLevelsEvent.TYPE: - add_fields("kick_level", "ban_level", "redact_level") + add_fields( + "users", + "users_default", + "events", + "events_default", + "events_default", + "state_default", + "ban", + "kick", + "redact", + ) elif event_type == RoomAliasesEvent.TYPE: add_fields("aliases") diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 55c893eb58..42a6c9f9bf 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -21,8 +21,7 @@ from synapse.api.constants import Membership, JoinRules from synapse.api.errors import StoreError, SynapseError from synapse.api.events.room import ( RoomMemberEvent, RoomCreateEvent, RoomPowerLevelsEvent, - RoomJoinRulesEvent, RoomAddStateLevelEvent, RoomTopicEvent, - RoomSendEventLevelEvent, RoomOpsPowerLevelsEvent, RoomNameEvent, + RoomTopicEvent, RoomNameEvent, RoomJoinRulesEvent, ) from synapse.util import stringutils from ._base import BaseHandler @@ -139,7 +138,6 @@ class RoomCreationHandler(BaseHandler): etype=RoomNameEvent.TYPE, room_id=room_id, user_id=user_id, - required_power_level=50, content={"name": name}, ) @@ -151,7 +149,6 @@ class RoomCreationHandler(BaseHandler): etype=RoomTopicEvent.TYPE, room_id=room_id, user_id=user_id, - required_power_level=50, content={"topic": topic}, ) @@ -196,7 +193,6 @@ class RoomCreationHandler(BaseHandler): event_keys = { "room_id": room_id, "user_id": creator.to_string(), - "required_power_level": 100, } def create(etype, **content): @@ -213,7 +209,21 @@ class RoomCreationHandler(BaseHandler): power_levels_event = self.event_factory.create_event( etype=RoomPowerLevelsEvent.TYPE, - content={creator.to_string(): 100, "default": 0}, + content={ + "users": { + creator.to_string(): 100, + }, + "users_default": 0, + "events": { + RoomNameEvent.TYPE: 100, + RoomPowerLevelsEvent.TYPE: 100, + }, + "events_default": 0, + "state_default": 50, + "ban": 50, + "kick": 50, + "redact": 50 + }, **event_keys ) @@ -223,30 +233,10 @@ class RoomCreationHandler(BaseHandler): join_rule=join_rule, ) - add_state_event = create( - etype=RoomAddStateLevelEvent.TYPE, - level=100, - ) - - send_event = create( - etype=RoomSendEventLevelEvent.TYPE, - level=0, - ) - - ops = create( - etype=RoomOpsPowerLevelsEvent.TYPE, - ban_level=50, - kick_level=50, - redact_level=50, - ) - return [ creation_event, power_levels_event, join_rules_event, - add_state_event, - send_event, - ops, ] @@ -388,16 +378,6 @@ class RoomMemberHandler(BaseHandler): else: # This is not a JOIN, so we can handle it normally. - # If we're banning someone, set a req power level - if event.membership == Membership.BAN: - if not hasattr(event, "required_power_level") or event.required_power_level is None: - # Add some default required_power_level - user_level = yield self.store.get_power_level( - event.room_id, - event.user_id, - ) - event.required_power_level = user_level - if prev_state and prev_state.membership == event.membership: # double same action, treat this event as a NOOP. 
defer.returnValue({}) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 2d62fc2ed0..2a1970914f 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -17,13 +17,9 @@ from twisted.internet import defer from synapse.api.events.room import ( RoomMemberEvent, RoomTopicEvent, FeedbackEvent, -# RoomConfigEvent, RoomNameEvent, RoomJoinRulesEvent, RoomPowerLevelsEvent, - RoomAddStateLevelEvent, - RoomSendEventLevelEvent, - RoomOpsPowerLevelsEvent, RoomRedactionEvent, ) @@ -166,14 +162,6 @@ class DataStore(RoomMemberStore, RoomStore, self._store_room_topic_txn(txn, event) elif event.type == RoomJoinRulesEvent.TYPE: self._store_join_rule(txn, event) - elif event.type == RoomPowerLevelsEvent.TYPE: - self._store_power_levels(txn, event) - elif event.type == RoomAddStateLevelEvent.TYPE: - self._store_add_state_level(txn, event) - elif event.type == RoomSendEventLevelEvent.TYPE: - self._store_send_event_level(txn, event) - elif event.type == RoomOpsPowerLevelsEvent.TYPE: - self._store_ops_level(txn, event) elif event.type == RoomRedactionEvent.TYPE: self._store_redaction(txn, event) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 7e48ce9cc3..0c83c11ad3 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -148,85 +148,6 @@ class RoomStore(SQLBaseStore): else: defer.returnValue(None) - def get_power_level(self, room_id, user_id): - return self.runInteraction( - "get_power_level", - self._get_power_level, - room_id, user_id, - ) - - def _get_power_level(self, txn, room_id, user_id): - sql = ( - "SELECT level FROM room_power_levels as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? AND r.user_id = ? " - ) - - rows = txn.execute(sql, (room_id, user_id,)).fetchall() - - if len(rows) == 1: - return rows[0][0] - - sql = ( - "SELECT level FROM room_default_levels as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? " - ) - - rows = txn.execute(sql, (room_id,)).fetchall() - - if len(rows) == 1: - return rows[0][0] - else: - return None - - def get_ops_levels(self, room_id): - return self.runInteraction( - "get_ops_levels", - self._get_ops_levels, - room_id, - ) - - def _get_ops_levels(self, txn, room_id): - sql = ( - "SELECT ban_level, kick_level, redact_level " - "FROM room_ops_levels as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? " - ) - - rows = txn.execute(sql, (room_id,)).fetchall() - - if len(rows) == 1: - return OpsLevel(rows[0][0], rows[0][1], rows[0][2]) - else: - return OpsLevel(None, None) - - def get_add_state_level(self, room_id): - return self._get_level_from_table("room_add_state_levels", room_id) - - def get_send_event_level(self, room_id): - return self._get_level_from_table("room_send_event_levels", room_id) - - @defer.inlineCallbacks - def _get_level_from_table(self, table, room_id): - sql = ( - "SELECT level FROM %(table)s as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? 
" - ) % {"table": table} - - rows = yield self._execute(None, sql, room_id) - - if len(rows) == 1: - defer.returnValue(rows[0][0]) - else: - defer.returnValue(None) - def _store_room_topic_txn(self, txn, event): self._simple_insert_txn( txn, @@ -260,84 +181,6 @@ class RoomStore(SQLBaseStore): }, ) - def _store_power_levels(self, txn, event): - for user_id, level in event.content.items(): - if user_id == "default": - self._simple_insert_txn( - txn, - "room_default_levels", - { - "event_id": event.event_id, - "room_id": event.room_id, - "level": level, - }, - ) - else: - self._simple_insert_txn( - txn, - "room_power_levels", - { - "event_id": event.event_id, - "room_id": event.room_id, - "user_id": user_id, - "level": level - }, - ) - - def _store_default_level(self, txn, event): - self._simple_insert_txn( - txn, - "room_default_levels", - { - "event_id": event.event_id, - "room_id": event.room_id, - "level": event.content["default_level"], - }, - ) - - def _store_add_state_level(self, txn, event): - self._simple_insert_txn( - txn, - "room_add_state_levels", - { - "event_id": event.event_id, - "room_id": event.room_id, - "level": event.content["level"], - }, - ) - - def _store_send_event_level(self, txn, event): - self._simple_insert_txn( - txn, - "room_send_event_levels", - { - "event_id": event.event_id, - "room_id": event.room_id, - "level": event.content["level"], - }, - ) - - def _store_ops_level(self, txn, event): - content = { - "event_id": event.event_id, - "room_id": event.room_id, - } - - if "kick_level" in event.content: - content["kick_level"] = event.content["kick_level"] - - if "ban_level" in event.content: - content["ban_level"] = event.content["ban_level"] - - if "redact_level" in event.content: - content["redact_level"] = event.content["redact_level"] - - self._simple_insert_txn( - txn, - "room_ops_levels", - content, - ) - class RoomsTable(Table): table_name = "rooms" -- cgit 1.5.1 From bf6b72eb558cca94e209a541188079750bfefea0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Nov 2014 18:42:18 +0000 Subject: Start implementing auth chains --- synapse/api/auth.py | 3 +- synapse/api/events/__init__.py | 2 +- synapse/handlers/_base.py | 59 ++++++++++++++++++++++++++++++++-- synapse/storage/__init__.py | 12 ++++++- synapse/storage/_base.py | 2 ++ synapse/storage/event_federation.py | 21 ++++++++++++ synapse/storage/schema/event_edges.sql | 10 ++++++ synapse/storage/signatures.py | 12 +++++++ 8 files changed, 115 insertions(+), 6 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index bb25c4ec55..e1302553d7 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -21,8 +21,7 @@ from synapse.api.constants import Membership, JoinRules from synapse.api.errors import AuthError, StoreError, Codes, SynapseError from synapse.api.events.room import ( RoomMemberEvent, RoomPowerLevelsEvent, RoomRedactionEvent, - RoomJoinRulesEvent, InviteJoinEvent, - RoomCreateEvent, + RoomJoinRulesEvent, RoomCreateEvent, ) from synapse.util.logutils import log_function diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index 84d3a98365..513a48f568 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -61,7 +61,7 @@ class SynapseEvent(JsonEncodedObject): "replaces_state", "redacted_because", "origin_server_ts", - "auth_chains", + "auth_events", ] internal_keys = [ diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 509f7b550c..2613fa7fce 100644 --- 
a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -14,11 +14,15 @@ # limitations under the License. from twisted.internet import defer -from synapse.api.errors import LimitExceededError +from synapse.api.errors import LimitExceededError from synapse.util.async import run_on_reactor - from synapse.crypto.event_signing import add_hashes_and_signatures +from synapse.api.events.room import ( + RoomCreateEvent, RoomMemberEvent, RoomPowerLevelsEvent, RoomJoinRulesEvent, +) +from synapse.api.constants import Membership, JoinRules +from syutil.base64util import encode_base64 import logging @@ -55,6 +59,53 @@ class BaseHandler(object): retry_after_ms=int(1000*(time_allowed - time_now)), ) + @defer.inlineCallbacks + def _add_auth(self, event): + if event.type == RoomCreateEvent.TYPE: + event.auth_events = [] + return + + auth_events = [] + + key = (RoomPowerLevelsEvent.TYPE, "", ) + power_level_event = event.old_state_events.get(key) + + if power_level_event: + auth_events.append(power_level_event.event_id) + + key = (RoomJoinRulesEvent.TYPE, "", ) + join_rule_event = event.old_state_events.get(key) + + key = (RoomMemberEvent.TYPE, event.user_id, ) + member_event = event.old_state_events.get(key) + + if join_rule_event: + join_rule = join_rule_event.content.get("join_rule") + is_public = join_rule == JoinRules.PUBLIC if join_rule else False + + if event.type == RoomMemberEvent.TYPE: + if event.content["membership"] == Membership.JOIN: + if is_public: + auth_events.append(join_rule_event.event_id) + elif member_event: + auth_events.append(member_event.event_id) + + if member_event: + if member_event.content["membership"] == Membership.JOIN: + auth_events.append(member_event.event_id) + + hashes = yield self.store.get_event_reference_hashes( + auth_events + ) + hashes = [ + { + k: encode_base64(v) for k, v in h.items() + if k == "sha256" + } + for h in hashes + ] + event.auth_events = zip(auth_events, hashes) + @defer.inlineCallbacks def _on_new_room_event(self, event, snapshot, extra_destinations=[], extra_users=[], suppress_auth=False): @@ -64,6 +115,8 @@ class BaseHandler(object): yield self.state_handler.annotate_state_groups(event) + yield self._add_auth(event) + logger.debug("Signing event...") add_hashes_and_signatures( @@ -76,6 +129,8 @@ class BaseHandler(object): logger.debug("Authing...") self.auth.check(event, raises=True) logger.debug("Authed") + else: + logger.debug("Suppressed auth.") yield self.store.persist_event(event) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 2a1970914f..48ad4d864f 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -19,7 +19,6 @@ from synapse.api.events.room import ( RoomMemberEvent, RoomTopicEvent, FeedbackEvent, RoomNameEvent, RoomJoinRulesEvent, - RoomPowerLevelsEvent, RoomRedactionEvent, ) @@ -302,6 +301,17 @@ class DataStore(RoomMemberStore, RoomStore, txn, event.event_id, prev_event_id, alg, hash_bytes ) + for auth_id, _ in event.auth_events: + self._simple_insert_txn( + txn, + table="event_auth", + values={ + "event_id": event.event_id, + "room_id": event.room_id, + "auth_id": auth_id, + }, + ) + (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event) self._store_event_reference_hash_txn( txn, event.event_id, ref_alg, ref_hash_bytes diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 7821fc4726..9aa404695d 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -474,6 +474,8 @@ class SQLBaseStore(object): if is_state == 0 ] + ev.auth_events = 
self._get_auth_events(txn, ev.event_id) + if hasattr(ev, "state_key"): ev.prev_state = [ (e_id, h) diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index 180a764134..86c68ebf87 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -139,6 +139,27 @@ class EventFederationStore(SQLBaseStore): return results + def _get_auth_events(self, txn, event_id): + auth_ids = self._simple_select_onecol_txn( + txn, + table="event_auth", + keyvalues={ + "event_id": event_id, + }, + retcol="auth_id", + ) + + results = [] + for auth_id in auth_ids: + hashes = self._get_event_reference_hashes_txn(txn, auth_id) + prev_hashes = { + k: encode_base64(v) for k, v in hashes.items() + if k == "sha256" + } + results.append((auth_id, prev_hashes)) + + return results + def get_min_depth(self, room_id): return self.runInteraction( "get_min_depth", diff --git a/synapse/storage/schema/event_edges.sql b/synapse/storage/schema/event_edges.sql index 51695826a8..be1c72a775 100644 --- a/synapse/storage/schema/event_edges.sql +++ b/synapse/storage/schema/event_edges.sql @@ -63,3 +63,13 @@ CREATE INDEX IF NOT EXISTS st_extrem_keys ON state_forward_extremities( ); CREATE INDEX IF NOT EXISTS st_extrem_id ON state_forward_extremities(event_id); + +CREATE TABLE IF NOT EXISTS event_auth( + event_id TEXT NOT NULL, + auth_id TEXT NOT NULL, + room_id TEXT NOT NULL, + CONSTRAINT uniqueness UNIQUE (event_id, auth_id, room_id) +); + +CREATE INDEX IF NOT EXISTS evauth_edges_id ON event_auth(event_id); +CREATE INDEX IF NOT EXISTS evauth_edges_auth_id ON event_auth(auth_id); \ No newline at end of file diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index b4b3d5d7ea..84a49088a2 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -55,6 +55,18 @@ class SignatureStore(SQLBaseStore): or_ignore=True, ) + def get_event_reference_hashes(self, event_ids): + def f(txn): + return [ + self._get_event_reference_hashes_txn(txn, ev) + for ev in event_ids + ] + + return self.runInteraction( + "get_event_reference_hashes", + f + ) + def _get_event_reference_hashes_txn(self, txn, event_id): """Get all the hashes for a given PDU. Args: -- cgit 1.5.1 From 49948d72f3c76f0ca32b844955d3add7922180ea Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 7 Nov 2014 10:42:44 +0000 Subject: Fix joining over federation --- synapse/api/auth.py | 53 +++++++++++++++++++++++++++++++++++++++-- synapse/api/events/__init__.py | 2 +- synapse/handlers/_base.py | 54 +----------------------------------------- synapse/handlers/federation.py | 1 + synapse/storage/__init__.py | 1 + 5 files changed, 55 insertions(+), 56 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index e1302553d7..d4f284bd60 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -24,6 +24,7 @@ from synapse.api.events.room import ( RoomJoinRulesEvent, RoomCreateEvent, ) from synapse.util.logutils import log_function +from syutil.base64util import encode_base64 import logging @@ -61,8 +62,6 @@ class Auth(object): # FIXME return True - self._can_send_event(event) - if event.type == RoomMemberEvent.TYPE: allowed = self.is_membership_change_allowed(event) if allowed: @@ -71,6 +70,8 @@ class Auth(object): logger.debug("Denying! 
%s", event) return allowed + self._can_send_event(event) + if event.type == RoomPowerLevelsEvent.TYPE: self._check_power_levels(event) @@ -311,6 +312,54 @@ class Auth(object): def is_server_admin(self, user): return self.store.is_server_admin(user) + @defer.inlineCallbacks + def add_auth_events(self, event): + if event.type == RoomCreateEvent.TYPE: + event.auth_events = [] + return + + auth_events = [] + + key = (RoomPowerLevelsEvent.TYPE, "", ) + power_level_event = event.old_state_events.get(key) + + if power_level_event: + auth_events.append(power_level_event.event_id) + + key = (RoomJoinRulesEvent.TYPE, "", ) + join_rule_event = event.old_state_events.get(key) + + key = (RoomMemberEvent.TYPE, event.user_id, ) + member_event = event.old_state_events.get(key) + + if join_rule_event: + join_rule = join_rule_event.content.get("join_rule") + is_public = join_rule == JoinRules.PUBLIC if join_rule else False + + if event.type == RoomMemberEvent.TYPE: + if event.content["membership"] == Membership.JOIN: + if is_public: + auth_events.append(join_rule_event.event_id) + elif member_event: + auth_events.append(member_event.event_id) + + if member_event: + if member_event.content["membership"] == Membership.JOIN: + auth_events.append(member_event.event_id) + + hashes = yield self.store.get_event_reference_hashes( + auth_events + ) + hashes = [ + { + k: encode_base64(v) for k, v in h.items() + if k == "sha256" + } + for h in hashes + ] + event.auth_events = zip(auth_events, hashes) + + @log_function def _can_send_event(self, event): key = (RoomPowerLevelsEvent.TYPE, "", ) diff --git a/synapse/api/events/__init__.py b/synapse/api/events/__init__.py index 513a48f568..e5980c4be3 100644 --- a/synapse/api/events/__init__.py +++ b/synapse/api/events/__init__.py @@ -61,7 +61,6 @@ class SynapseEvent(JsonEncodedObject): "replaces_state", "redacted_because", "origin_server_ts", - "auth_events", ] internal_keys = [ @@ -75,6 +74,7 @@ class SynapseEvent(JsonEncodedObject): "hashes", "signatures", "prev_state", + "auth_events", ] required_keys = [ diff --git a/synapse/handlers/_base.py b/synapse/handlers/_base.py index 2613fa7fce..f630280031 100644 --- a/synapse/handlers/_base.py +++ b/synapse/handlers/_base.py @@ -18,11 +18,6 @@ from twisted.internet import defer from synapse.api.errors import LimitExceededError from synapse.util.async import run_on_reactor from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.api.events.room import ( - RoomCreateEvent, RoomMemberEvent, RoomPowerLevelsEvent, RoomJoinRulesEvent, -) -from synapse.api.constants import Membership, JoinRules -from syutil.base64util import encode_base64 import logging @@ -59,53 +54,6 @@ class BaseHandler(object): retry_after_ms=int(1000*(time_allowed - time_now)), ) - @defer.inlineCallbacks - def _add_auth(self, event): - if event.type == RoomCreateEvent.TYPE: - event.auth_events = [] - return - - auth_events = [] - - key = (RoomPowerLevelsEvent.TYPE, "", ) - power_level_event = event.old_state_events.get(key) - - if power_level_event: - auth_events.append(power_level_event.event_id) - - key = (RoomJoinRulesEvent.TYPE, "", ) - join_rule_event = event.old_state_events.get(key) - - key = (RoomMemberEvent.TYPE, event.user_id, ) - member_event = event.old_state_events.get(key) - - if join_rule_event: - join_rule = join_rule_event.content.get("join_rule") - is_public = join_rule == JoinRules.PUBLIC if join_rule else False - - if event.type == RoomMemberEvent.TYPE: - if event.content["membership"] == Membership.JOIN: - if 
is_public: - auth_events.append(join_rule_event.event_id) - elif member_event: - auth_events.append(member_event.event_id) - - if member_event: - if member_event.content["membership"] == Membership.JOIN: - auth_events.append(member_event.event_id) - - hashes = yield self.store.get_event_reference_hashes( - auth_events - ) - hashes = [ - { - k: encode_base64(v) for k, v in h.items() - if k == "sha256" - } - for h in hashes - ] - event.auth_events = zip(auth_events, hashes) - @defer.inlineCallbacks def _on_new_room_event(self, event, snapshot, extra_destinations=[], extra_users=[], suppress_auth=False): @@ -115,7 +63,7 @@ class BaseHandler(object): yield self.state_handler.annotate_state_groups(event) - yield self._add_auth(event) + yield self.auth.add_auth_events(event) logger.debug("Signing event...") diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index f0448a05d8..09593303a4 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -317,6 +317,7 @@ class FederationHandler(BaseHandler): snapshot.fill_out_prev_events(event) yield self.state_handler.annotate_state_groups(event) + yield self.auth.add_auth_events(event) self.auth.check(event, raises=True) pdu = self.pdu_codec.pdu_from_event(event) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 48ad4d864f..96adf20c89 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -310,6 +310,7 @@ class DataStore(RoomMemberStore, RoomStore, "room_id": event.room_id, "auth_id": auth_id, }, + or_ignore=True, ) (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event) -- cgit 1.5.1 From 6cb6cb9e6908ad9b71ebd63ca535eb6c7c48be86 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Nov 2014 10:31:00 +0000 Subject: Tidy up some of the unused sql tables --- synapse/crypto/event_signing.py | 2 -- synapse/storage/__init__.py | 21 ++----------- synapse/storage/room.py | 27 ---------------- synapse/storage/schema/im.sql | 68 ++++------------------------------------- 4 files changed, 9 insertions(+), 109 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 056e8f6ca4..baa93b0ee4 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -19,9 +19,7 @@ from synapse.api.events.utils import prune_event from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json -from synapse.api.events.room import GenericEvent -import copy import hashlib import logging diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 96adf20c89..7d810e6a62 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -16,9 +16,7 @@ from twisted.internet import defer from synapse.api.events.room import ( - RoomMemberEvent, RoomTopicEvent, FeedbackEvent, - RoomNameEvent, - RoomJoinRulesEvent, + RoomMemberEvent, RoomTopicEvent, FeedbackEvent, RoomNameEvent, RoomRedactionEvent, ) @@ -95,8 +93,7 @@ class DataStore(RoomMemberStore, RoomStore, @defer.inlineCallbacks @log_function - def persist_event(self, event=None, backfilled=False, pdu=None, - is_new_state=True): + def persist_event(self, event, backfilled=False, is_new_state=True): stream_ordering = None if backfilled: if not self.min_token_deferred.called: @@ -107,8 +104,7 @@ class DataStore(RoomMemberStore, RoomStore, try: yield self.runInteraction( "persist_event", - 
self._persist_pdu_event_txn, - pdu=pdu, + self._persist_event_txn, event=event, backfilled=backfilled, stream_ordering=stream_ordering, @@ -139,15 +135,6 @@ class DataStore(RoomMemberStore, RoomStore, event = self._parse_event_from_row(events_dict) defer.returnValue(event) - def _persist_pdu_event_txn(self, txn, pdu=None, event=None, - backfilled=False, stream_ordering=None, - is_new_state=True): - if event is not None: - return self._persist_event_txn( - txn, event, backfilled, stream_ordering, - is_new_state=is_new_state, - ) - @log_function def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None, is_new_state=True): @@ -159,8 +146,6 @@ class DataStore(RoomMemberStore, RoomStore, self._store_room_name_txn(txn, event) elif event.type == RoomTopicEvent.TYPE: self._store_room_topic_txn(txn, event) - elif event.type == RoomJoinRulesEvent.TYPE: - self._store_join_rule(txn, event) elif event.type == RoomRedactionEvent.TYPE: self._store_redaction(txn, event) diff --git a/synapse/storage/room.py b/synapse/storage/room.py index 0c83c11ad3..ca70506d28 100644 --- a/synapse/storage/room.py +++ b/synapse/storage/room.py @@ -132,22 +132,6 @@ class RoomStore(SQLBaseStore): defer.returnValue(ret) - @defer.inlineCallbacks - def get_room_join_rule(self, room_id): - sql = ( - "SELECT join_rule FROM room_join_rules as r " - "INNER JOIN current_state_events as c " - "ON r.event_id = c.event_id " - "WHERE c.room_id = ? " - ) - - rows = yield self._execute(None, sql, room_id) - - if len(rows) == 1: - defer.returnValue(rows[0][0]) - else: - defer.returnValue(None) - def _store_room_topic_txn(self, txn, event): self._simple_insert_txn( txn, @@ -170,17 +154,6 @@ class RoomStore(SQLBaseStore): } ) - def _store_join_rule(self, txn, event): - self._simple_insert_txn( - txn, - "room_join_rules", - { - "event_id": event.event_id, - "room_id": event.room_id, - "join_rule": event.content["join_rule"], - }, - ) - class RoomsTable(Table): table_name = "rooms" diff --git a/synapse/storage/schema/im.sql b/synapse/storage/schema/im.sql index 8d6f655993..8ba732a23b 100644 --- a/synapse/storage/schema/im.sql +++ b/synapse/storage/schema/im.sql @@ -85,80 +85,24 @@ CREATE TABLE IF NOT EXISTS topics( topic TEXT NOT NULL ); +CREATE INDEX IF NOT EXISTS topics_event_id ON topics(event_id); +CREATE INDEX IF NOT EXISTS topics_room_id ON topics(room_id); + CREATE TABLE IF NOT EXISTS room_names( event_id TEXT NOT NULL, room_id TEXT NOT NULL, name TEXT NOT NULL ); +CREATE INDEX IF NOT EXISTS room_names_event_id ON room_names(event_id); +CREATE INDEX IF NOT EXISTS room_names_room_id ON room_names(room_id); + CREATE TABLE IF NOT EXISTS rooms( room_id TEXT PRIMARY KEY NOT NULL, is_public INTEGER, creator TEXT ); -CREATE TABLE IF NOT EXISTS room_join_rules( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - join_rule TEXT NOT NULL -); -CREATE INDEX IF NOT EXISTS room_join_rules_event_id ON room_join_rules(event_id); -CREATE INDEX IF NOT EXISTS room_join_rules_room_id ON room_join_rules(room_id); - - -CREATE TABLE IF NOT EXISTS room_power_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - user_id TEXT NOT NULL, - level INTEGER NOT NULL -); -CREATE INDEX IF NOT EXISTS room_power_levels_event_id ON room_power_levels(event_id); -CREATE INDEX IF NOT EXISTS room_power_levels_room_id ON room_power_levels(room_id); -CREATE INDEX IF NOT EXISTS room_power_levels_room_user ON room_power_levels(room_id, user_id); - - -CREATE TABLE IF NOT EXISTS room_default_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level 
INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_default_levels_event_id ON room_default_levels(event_id); -CREATE INDEX IF NOT EXISTS room_default_levels_room_id ON room_default_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_add_state_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_add_state_levels_event_id ON room_add_state_levels(event_id); -CREATE INDEX IF NOT EXISTS room_add_state_levels_room_id ON room_add_state_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_send_event_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - level INTEGER NOT NULL -); - -CREATE INDEX IF NOT EXISTS room_send_event_levels_event_id ON room_send_event_levels(event_id); -CREATE INDEX IF NOT EXISTS room_send_event_levels_room_id ON room_send_event_levels(room_id); - - -CREATE TABLE IF NOT EXISTS room_ops_levels( - event_id TEXT NOT NULL, - room_id TEXT NOT NULL, - ban_level INTEGER, - kick_level INTEGER, - redact_level INTEGER -); - -CREATE INDEX IF NOT EXISTS room_ops_levels_event_id ON room_ops_levels(event_id); -CREATE INDEX IF NOT EXISTS room_ops_levels_room_id ON room_ops_levels(room_id); - - CREATE TABLE IF NOT EXISTS room_hosts( room_id TEXT NOT NULL, host TEXT NOT NULL, -- cgit 1.5.1 From 5d439b127ba34b951dfd09a7d3c684c2d50df702 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 10 Nov 2014 13:46:44 +0000 Subject: PEP8 --- synapse/api/auth.py | 3 +-- synapse/api/events/room.py | 1 + synapse/federation/replication.py | 1 - synapse/federation/transport.py | 9 ++++++--- synapse/federation/units.py | 7 +++---- synapse/handlers/federation.py | 5 ++++- synapse/storage/__init__.py | 7 ++++--- synapse/storage/event_federation.py | 9 +++------ 8 files changed, 22 insertions(+), 20 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 48f9d460a3..a5c6964707 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -369,7 +369,6 @@ class Auth(object): ] event.auth_events = zip(auth_events, hashes) - @log_function def _can_send_event(self, event): key = (RoomPowerLevelsEvent.TYPE, "", ) @@ -452,7 +451,7 @@ class Auth(object): event.user_id, ) - # Check other levels: + # Check other levels: levels_to_check = [ ("users_default", []), ("events_default", []), diff --git a/synapse/api/events/room.py b/synapse/api/events/room.py index 25bc883706..8c4ac45d02 100644 --- a/synapse/api/events/room.py +++ b/synapse/api/events/room.py @@ -153,6 +153,7 @@ class RoomPowerLevelsEvent(SynapseStateEvent): def get_content_template(self): return {} + class RoomAliasesEvent(SynapseStateEvent): TYPE = "m.room.aliases" diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index e798304353..bacba36755 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -549,7 +549,6 @@ class ReplicationLayer(object): origin, pdu.room_id, pdu.event_id, ) - if not backfilled: ret = yield self.handler.on_receive_pdu( pdu, diff --git a/synapse/federation/transport.py b/synapse/federation/transport.py index d84a44c211..95c40c6c1b 100644 --- a/synapse/federation/transport.py +++ b/synapse/federation/transport.py @@ -284,7 +284,7 @@ class TransportLayer(object): origin = None if request.method == "PUT": - #TODO: Handle other method types? other content types? + # TODO: Handle other method types? other content types? 
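
# A consolidated, standalone sketch of the X-Matrix Authorization header
# parsing the transport hunk here performs: the header carries origin,
# key id and signature as comma-separated key="value" pairs. As in the
# hunk, values are assumed not to contain "=" themselves (ed25519
# signatures are unpadded base64).

def parse_x_matrix_auth(header):
    # header e.g.: 'X-Matrix origin=example.org,key="ed25519:1",sig="<b64>"'
    params = header.split(" ")[1].split(",")
    param_dict = dict(kv.split("=") for kv in params)

    def strip_quotes(value):
        if value.startswith("\""):
            return value[1:-1]
        else:
            return value

    return (
        strip_quotes(param_dict["origin"]),
        strip_quotes(param_dict["key"]),
        strip_quotes(param_dict["sig"]),
    )
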
try: content_bytes = request.content.read() content = json.loads(content_bytes) @@ -296,11 +296,13 @@ class TransportLayer(object): try: params = auth.split(" ")[1].split(",") param_dict = dict(kv.split("=") for kv in params) + def strip_quotes(value): if value.startswith("\""): return value[1:-1] else: return value + origin = strip_quotes(param_dict["origin"]) key = strip_quotes(param_dict["key"]) sig = strip_quotes(param_dict["sig"]) @@ -321,7 +323,7 @@ class TransportLayer(object): if auth.startswith("X-Matrix"): (origin, key, sig) = parse_auth_header(auth) json_request["origin"] = origin - json_request["signatures"].setdefault(origin,{})[key] = sig + json_request["signatures"].setdefault(origin, {})[key] = sig if not json_request["signatures"]: raise SynapseError( @@ -515,7 +517,8 @@ class TransportLayer(object): return try: - code, response = yield self.received_handler.on_incoming_transaction( + handler = self.received_handler + code, response = yield handler.on_incoming_transaction( transaction_data ) except: diff --git a/synapse/federation/units.py b/synapse/federation/units.py index d98014cac7..f4e7b62bd9 100644 --- a/synapse/federation/units.py +++ b/synapse/federation/units.py @@ -192,7 +192,9 @@ class Transaction(JsonEncodedObject): transaction_id and origin_server_ts keys. """ if "origin_server_ts" not in kwargs: - raise KeyError("Require 'origin_server_ts' to construct a Transaction") + raise KeyError( + "Require 'origin_server_ts' to construct a Transaction" + ) if "transaction_id" not in kwargs: raise KeyError( "Require 'transaction_id' to construct a Transaction" @@ -204,6 +206,3 @@ class Transaction(JsonEncodedObject): kwargs["pdus"] = [p.get_dict() for p in pdus] return Transaction(**kwargs) - - - diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 587fa308c8..e909af6bd8 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -521,6 +521,9 @@ class FederationHandler(BaseHandler): @log_function def _on_user_joined(self, user, room_id): - waiters = self.waiting_for_join_list.get((user.to_string(), room_id), []) + waiters = self.waiting_for_join_list.get( + (user.to_string(), room_id), + [] + ) while waiters: waiters.pop().callback(None) diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 7d810e6a62..4034437f6b 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -494,11 +494,13 @@ def prepare_database(db_conn): user_version = row[0] if user_version > SCHEMA_VERSION: - raise ValueError("Cannot use this database as it is too " + + raise ValueError( + "Cannot use this database as it is too " + "new for the server to understand" ) elif user_version < SCHEMA_VERSION: - logging.info("Upgrading database from version %d", + logging.info( + "Upgrading database from version %d", user_version ) @@ -520,4 +522,3 @@ def prepare_database(db_conn): c.execute("PRAGMA user_version = %d" % SCHEMA_VERSION) c.close() - diff --git a/synapse/storage/event_federation.py b/synapse/storage/event_federation.py index a707030145..a027db3868 100644 --- a/synapse/storage/event_federation.py +++ b/synapse/storage/event_federation.py @@ -215,7 +215,7 @@ class EventFederationStore(SQLBaseStore): min_depth = self._simple_select_one_onecol_txn( txn, table="room_depth", - keyvalues={"room_id": room_id,}, + keyvalues={"room_id": room_id}, retcol="min_depth", allow_none=True, ) @@ -267,10 +267,8 @@ class EventFederationStore(SQLBaseStore): } ) - - - # We only insert as a forward extremity the new pdu if 
there are no - # other pdus that reference it as a prev pdu + # We only insert as a forward extremity the new pdu if there are + # no other pdus that reference it as a prev pdu query = ( "INSERT OR IGNORE INTO %(table)s (event_id, room_id) " "SELECT ?, ? WHERE NOT EXISTS (" @@ -312,7 +310,6 @@ class EventFederationStore(SQLBaseStore): ) txn.execute(query) - def get_backfill_events(self, room_id, event_list, limit): """Get a list of Events for a given topic that occured before (and including) the pdus in pdu_list. Return a list of max size `limit`. -- cgit 1.5.1 From 6fea478d2e7737c2462b074b935d4427ced5f3d4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 12 Nov 2014 11:22:51 +0000 Subject: Fix bugs with invites/joins across federatiom. Both in terms of auth and not trying to fetch missing PDUs for invites, joins etc. --- synapse/api/auth.py | 19 ++++++++++++++++--- synapse/federation/replication.py | 7 +------ synapse/handlers/federation.py | 12 +++--------- synapse/handlers/room.py | 10 ++++++++-- synapse/storage/__init__.py | 14 +++++++++++--- synapse/storage/state.py | 9 ++++++--- tests/handlers/test_room.py | 22 +++++++++------------- 7 files changed, 54 insertions(+), 39 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 6c2d3db26e..87f19a96d6 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -36,6 +36,7 @@ class Auth(object): def __init__(self, hs): self.hs = hs self.store = hs.get_datastore() + self.state = hs.get_state_handler() def check(self, event, raises=False): """ Checks if this event is correctly authed. @@ -90,7 +91,7 @@ class Auth(object): ) logger.info("Denying! %s", event) if raises: - raise e + raise return False @@ -109,9 +110,21 @@ class Auth(object): @defer.inlineCallbacks def check_host_in_room(self, room_id, host): - joined_hosts = yield self.store.get_joined_hosts_for_room(room_id) + curr_state = yield self.state.get_current_state(room_id) + + for event in curr_state: + if event.type == RoomMemberEvent.TYPE: + try: + if self.hs.parse_userid(event.state_key).domain != host: + continue + except: + logger.warn("state_key not user_id: %s", event.state_key) + continue + + if event.content["membership"] == Membership.JOIN: + defer.returnValue(True) - defer.returnValue(host in joined_hosts) + defer.returnValue(False) def check_event_sender_in_room(self, event): key = (RoomMemberEvent.TYPE, event.user_id, ) diff --git a/synapse/federation/replication.py b/synapse/federation/replication.py index 5c625ddabf..beec17e386 100644 --- a/synapse/federation/replication.py +++ b/synapse/federation/replication.py @@ -267,8 +267,6 @@ class ReplicationLayer(object): transaction = Transaction(**transaction_data) pdus = [Pdu(outlier=True, **p) for p in transaction.pdus] - for pdu in pdus: - yield self._handle_new_pdu(destination, pdu) defer.returnValue(pdus) @@ -452,15 +450,12 @@ class ReplicationLayer(object): ) logger.debug("Got content: %s", content) + state = [Pdu(outlier=True, **p) for p in content.get("state", [])] - for pdu in state: - yield self._handle_new_pdu(destination, pdu) auth_chain = [ Pdu(outlier=True, **p) for p in content.get("auth_chain", []) ] - for pdu in auth_chain: - yield self._handle_new_pdu(destination, pdu) defer.returnValue(state) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index d8d5730b65..99655c8bb0 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -229,12 +229,6 @@ class FederationHandler(BaseHandler): 
@log_function @defer.inlineCallbacks def do_invite_join(self, target_host, room_id, joinee, content, snapshot): - hosts = yield self.store.get_joined_hosts_for_room(room_id) - if self.hs.hostname in hosts: - # We are already in the room. - logger.debug("We're already in the room apparently") - defer.returnValue(False) - pdu = yield self.replication_layer.make_join( target_host, room_id, @@ -268,7 +262,7 @@ class FederationHandler(BaseHandler): logger.debug("do_invite_join state: %s", state) - is_new_state = yield self.state_handler.annotate_event_with_state( + yield self.state_handler.annotate_event_with_state( event, old_state=state ) @@ -296,13 +290,13 @@ class FederationHandler(BaseHandler): yield self.store.persist_event( e, backfilled=False, - is_new_state=False + is_new_state=True ) yield self.store.persist_event( event, backfilled=False, - is_new_state=is_new_state + is_new_state=True ) finally: room_queue = self.room_queues[room_id] diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 3642fcfc6d..825957f721 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -24,6 +24,7 @@ from synapse.api.events.room import ( RoomTopicEvent, RoomNameEvent, RoomJoinRulesEvent, ) from synapse.util import stringutils +from synapse.util.async import run_on_reactor from ._base import BaseHandler import logging @@ -432,9 +433,12 @@ class RoomMemberHandler(BaseHandler): # that we are allowed to join when we decide whether or not we # need to do the invite/join dance. - hosts = yield self.store.get_joined_hosts_for_room(room_id) + is_host_in_room = yield self.auth.check_host_in_room( + event.room_id, + self.hs.hostname + ) - if self.hs.hostname in hosts: + if is_host_in_room: should_do_dance = False elif room_host: should_do_dance = True @@ -517,6 +521,8 @@ class RoomMemberHandler(BaseHandler): @defer.inlineCallbacks def _do_local_membership_update(self, event, membership, snapshot, do_auth): + yield run_on_reactor() + # If we're inviting someone, then we should also send it to that # HS. 
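
# A minimal dict-based sketch (illustrative, with simplified user-id
# handling) of the membership test the new Auth.check_host_in_room above
# performs: a server is "in" a room if any m.room.member event in the
# current state is a join whose state_key (a user id) has that server as
# its domain part.

def host_in_room(current_state, host):
    for event in current_state:
        if event.get("type") != "m.room.member":
            continue
        user_id = event.get("state_key", "")
        # User ids look like "@localpart:domain"; compare the domain part.
        if ":" not in user_id or user_id.split(":", 1)[1] != host:
            continue
        if event.get("content", {}).get("membership") == "join":
            return True
    return False
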
target_user_id = event.state_key diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 4034437f6b..72290eb5a0 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -186,6 +186,7 @@ class DataStore(RoomMemberStore, RoomStore, "events", vals, or_replace=(not outlier), + or_ignore=bool(outlier), ) except: logger.warn( @@ -217,7 +218,12 @@ class DataStore(RoomMemberStore, RoomStore, if hasattr(event, "replaces_state"): vals["prev_state"] = event.replaces_state - self._simple_insert_txn(txn, "state_events", vals) + self._simple_insert_txn( + txn, + "state_events", + vals, + or_replace=True, + ) self._simple_insert_txn( txn, @@ -227,7 +233,8 @@ class DataStore(RoomMemberStore, RoomStore, "room_id": event.room_id, "type": event.type, "state_key": event.state_key, - } + }, + or_replace=True, ) for e_id, h in event.prev_state: @@ -252,7 +259,8 @@ class DataStore(RoomMemberStore, RoomStore, "room_id": event.room_id, "type": event.type, "state_key": event.state_key, - } + }, + or_replace=True, ) for prev_state_id, _ in event.prev_state: diff --git a/synapse/storage/state.py b/synapse/storage/state.py index 68975969f5..2f3a70b4e5 100644 --- a/synapse/storage/state.py +++ b/synapse/storage/state.py @@ -70,7 +70,8 @@ class StateStore(SQLBaseStore): values={ "room_id": event.room_id, "event_id": event.event_id, - } + }, + or_ignore=True, ) for state in event.state_events.values(): @@ -83,7 +84,8 @@ class StateStore(SQLBaseStore): "type": state.type, "state_key": state.state_key, "event_id": state.event_id, - } + }, + or_ignore=True, ) self._simple_insert_txn( @@ -92,5 +94,6 @@ class StateStore(SQLBaseStore): values={ "state_group": state_group, "event_id": event.event_id, - } + }, + or_replace=True, ) diff --git a/tests/handlers/test_room.py b/tests/handlers/test_room.py index ee264e5ee2..cbe591ab90 100644 --- a/tests/handlers/test_room.py +++ b/tests/handlers/test_room.py @@ -44,7 +44,6 @@ class RoomMemberHandlerTestCase(unittest.TestCase): ]), datastore=NonCallableMock(spec_set=[ "persist_event", - "get_joined_hosts_for_room", "get_room_member", "get_room", "store_room", @@ -58,9 +57,14 @@ class RoomMemberHandlerTestCase(unittest.TestCase): "profile_handler", "federation_handler", ]), - auth=NonCallableMock(spec_set=["check", "add_auth_events"]), + auth=NonCallableMock(spec_set=[ + "check", + "add_auth_events", + "check_host_in_room", + ]), state_handler=NonCallableMock(spec_set=[ "annotate_event_with_state", + "get_current_state", ]), config=self.mock_config, ) @@ -76,6 +80,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): self.notifier = hs.get_notifier() self.state_handler = hs.get_state_handler() self.distributor = hs.get_distributor() + self.auth = hs.get_auth() self.hs = hs self.handlers.federation_handler = self.federation @@ -108,11 +113,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): content=content, ) - joined = ["red", "green"] - - self.datastore.get_joined_hosts_for_room.return_value = ( - defer.succeed(joined) - ) + self.auth.check_host_in_room.return_value = defer.succeed(True) store_id = "store_id_fooo" self.datastore.persist_event.return_value = defer.succeed(store_id) @@ -164,12 +165,7 @@ class RoomMemberHandlerTestCase(unittest.TestCase): room_id=room_id, ) - joined = ["red", "green"] - - def get_joined(*args): - return defer.succeed(joined) - - self.datastore.get_joined_hosts_for_room.side_effect = get_joined + self.auth.check_host_in_room.return_value = defer.succeed(True) store_id = "store_id_fooo" 
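
# A rough sketch of the SQL that the or_replace / or_ignore flags threaded
# through the storage inserts above translate to, assuming SQLite conflict
# handling (the helper name and exact interface here are illustrative, not
# the real _simple_insert_txn):

def build_insert_sql(table, values, or_replace=False, or_ignore=False):
    verb = "INSERT"
    if or_replace:
        verb = "INSERT OR REPLACE"
    elif or_ignore:
        verb = "INSERT OR IGNORE"
    cols = list(values.keys())
    return "%s INTO %s (%s) VALUES(%s)" % (
        verb, table, ", ".join(cols), ", ".join("?" for _ in cols),
    )

# e.g. build_insert_sql("state_groups", {"id": 1, "room_id": "!r"},
# or_ignore=True) yields an "INSERT OR IGNORE INTO state_groups (...)
# VALUES(?, ?)" statement, so replaying the same row is a no-op.
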
self.datastore.persist_event.return_value = defer.succeed(store_id) -- cgit 1.5.1 From f04b3d5042b85fa81efff9b561ca7af8d9709756 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 12 Nov 2014 17:02:18 +0000 Subject: Store all signatures on events rather than just dropping them --- synapse/storage/__init__.py | 15 ++++++++------- synapse/storage/_base.py | 7 +++++-- synapse/storage/schema/event_signatures.sql | 6 +++--- synapse/storage/signatures.py | 24 +++++++++++++++--------- 4 files changed, 31 insertions(+), 21 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index 72290eb5a0..d8f351a675 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -279,13 +279,14 @@ class DataStore(RoomMemberStore, RoomStore, ) if hasattr(event, "signatures"): - signatures = event.signatures.get(event.origin, {}) - - for key_id, signature_base64 in signatures.items(): - signature_bytes = decode_base64(signature_base64) - self._store_event_origin_signature_txn( - txn, event.event_id, event.origin, key_id, signature_bytes, - ) + logger.debug("sigs: %s", event.signatures) + for name, sigs in event.signatures.items(): + for key_id, signature_base64 in sigs.items(): + signature_bytes = decode_base64(signature_base64) + self._store_event_signature_txn( + txn, event.event_id, name, key_id, + signature_bytes, + ) for prev_event_id, prev_hashes in event.prev_events: for alg, hash_base64 in prev_hashes.items(): diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index a1ee0318f6..670387b04a 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -470,12 +470,15 @@ class SQLBaseStore(object): select_event_sql = "SELECT * FROM events WHERE event_id = ?" for i, ev in enumerate(events): - signatures = self._get_event_origin_signatures_txn( + signatures = self._get_event_signatures_txn( txn, ev.event_id, ) ev.signatures = { - k: encode_base64(v) for k, v in signatures.items() + n: { + k: encode_base64(v) for k, v in s.items() + } + for n, s in signatures.items() } prevs = self._get_prev_events_and_state(txn, ev.event_id) diff --git a/synapse/storage/schema/event_signatures.sql b/synapse/storage/schema/event_signatures.sql index 5491c7ecec..4efa8a3e63 100644 --- a/synapse/storage/schema/event_signatures.sql +++ b/synapse/storage/schema/event_signatures.sql @@ -37,15 +37,15 @@ CREATE INDEX IF NOT EXISTS event_reference_hashes_id ON event_reference_hashes ( ); -CREATE TABLE IF NOT EXISTS event_origin_signatures ( +CREATE TABLE IF NOT EXISTS event_signatures ( event_id TEXT, - origin TEXT, + signature_name TEXT, key_id TEXT, signature BLOB, CONSTRAINT uniqueness UNIQUE (event_id, key_id) ); -CREATE INDEX IF NOT EXISTS event_origin_signatures_id ON event_origin_signatures ( +CREATE INDEX IF NOT EXISTS event_signatures_id ON event_signatures ( event_id ); diff --git a/synapse/storage/signatures.py b/synapse/storage/signatures.py index 84a49088a2..d90e08fff1 100644 --- a/synapse/storage/signatures.py +++ b/synapse/storage/signatures.py @@ -103,24 +103,30 @@ class SignatureStore(SQLBaseStore): or_ignore=True, ) - - def _get_event_origin_signatures_txn(self, txn, event_id): + def _get_event_signatures_txn(self, txn, event_id): """Get all the signatures for a given PDU. Args: txn (cursor): event_id (str): Id for the Event. Returns: - A dict of key_id -> signature_bytes. 
+ A dict of sig name -> dict(key_id -> signature_bytes) """ query = ( - "SELECT key_id, signature" - " FROM event_origin_signatures" + "SELECT signature_name, key_id, signature" + " FROM event_signatures" " WHERE event_id = ? " ) txn.execute(query, (event_id, )) - return dict(txn.fetchall()) + rows = txn.fetchall() + + res = {} + + for name, key, sig in rows: + res.setdefault(name, {})[key] = sig + + return res - def _store_event_origin_signature_txn(self, txn, event_id, origin, key_id, + def _store_event_signature_txn(self, txn, event_id, signature_name, key_id, signature_bytes): """Store a signature from the origin server for a PDU. Args: @@ -132,10 +138,10 @@ class SignatureStore(SQLBaseStore): """ self._simple_insert_txn( txn, - "event_origin_signatures", + "event_signatures", { "event_id": event_id, - "origin": origin, + "signature_name": signature_name, "key_id": key_id, "signature": buffer(signature_bytes), }, -- cgit 1.5.1 From 8c2b5ea7c44e3915068cd9ec18e5c22d0a3acfcc Mon Sep 17 00:00:00 2001 From: Mark Haines Date: Fri, 14 Nov 2014 19:10:52 +0000 Subject: Fix PDU and event signatures --- synapse/crypto/event_signing.py | 11 ++++++++++- synapse/handlers/federation.py | 5 +++-- synapse/storage/__init__.py | 4 ++-- synapse/storage/feedback.py | 2 +- 4 files changed, 16 insertions(+), 6 deletions(-) (limited to 'synapse/storage/__init__.py') diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index c7e6bec8f5..79274fd552 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -16,6 +16,7 @@ from synapse.api.events.utils import prune_event +from synapse.federation.units import Pdu from syutil.jsonutil import encode_canonical_json from syutil.base64util import encode_base64, decode_base64 from syutil.crypto.jsonsign import sign_json @@ -58,6 +59,8 @@ def _compute_content_hash(event, hash_algorithm): event_json.pop("unsigned", None) event_json.pop("signatures", None) event_json.pop("hashes", None) + event_json.pop("outlier", None) + event_json.pop("destinations", None) event_json_bytes = encode_canonical_json(event_json) return hash_algorithm(event_json_bytes) @@ -75,7 +78,13 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): def compute_event_signature(event, signature_name, signing_key): tmp_event = prune_event(event) - redact_json = tmp_event.get_full_dict() + tmp_event.origin = event.origin + tmp_event.origin_server_ts = event.origin_server_ts + d = tmp_event.get_full_dict() + kwargs = dict(event.unrecognized_keys) + kwargs.update({k: v for k, v in d.items()}) + tmp_pdu = Pdu(**kwargs) + redact_json = tmp_pdu.get_dict() redact_json.pop("signatures", None) redact_json.pop("age_ts", None) redact_json.pop("unsigned", None) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index fce935b444..fc00128c56 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -139,9 +139,10 @@ class FederationHandler(BaseHandler): affected=event.event_id, ) - if not check_event_content_hash(pdu): + if not check_event_content_hash(event): logger.warn( - "Event content has been tampered, redacting %s", event.event_id + "Event content has been tampered, redacting %s, %s", + event.event_id, encode_canonical_json(event.get_full_dict()) ) event = redacted_event diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py index d8f351a675..c36d938d96 100644 --- a/synapse/storage/__init__.py +++ b/synapse/storage/__init__.py @@ -132,8 +132,8 @@ class 
DataStore(RoomMemberStore, RoomStore,
         if not events_dict:
             defer.returnValue(None)

-        event = self._parse_event_from_row(events_dict)
-        defer.returnValue(event)
+        event = yield self._parse_events([events_dict])
+        defer.returnValue(event[0])

     @log_function
     def _persist_event_txn(self, txn, event, backfilled, stream_ordering=None,
diff --git a/synapse/storage/feedback.py b/synapse/storage/feedback.py
index 8a18617188..21511577c5 100644
--- a/synapse/storage/feedback.py
+++ b/synapse/storage/feedback.py
@@ -41,7 +41,7 @@ class FeedbackStore(SQLBaseStore):

         defer.returnValue(
             [
-                self._parse_event_from_row(r)
+                (yield self._parse_events(r))
                 for r in rows
             ]
         )
-- cgit 1.5.1


From 5b46ce579ba9a198546ff9e19f743eb91a126422 Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Wed, 19 Nov 2014 18:00:57 +0000
Subject: Bump version, changelog and upgrade.rst

---
 CHANGES.rst                   | 24 ++++++++++++++++++++++++
 UPGRADE.rst                   | 30 ++++++++++++++++++++++++++++++
 VERSION                       |  2 +-
 database-prepare-for-0.5.0.sh | 21 +++++++++++++++++++++
 synapse/__init__.py           |  2 +-
 synapse/storage/__init__.py   |  2 +-
 6 files changed, 78 insertions(+), 3 deletions(-)
 create mode 100755 database-prepare-for-0.5.0.sh
(limited to 'synapse/storage/__init__.py')

diff --git a/CHANGES.rst b/CHANGES.rst
index 78c178bafd..5a284c3853 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,3 +1,27 @@
+Changes in synapse 0.5.0 (2014-11-19)
+=====================================
+This release includes changes to the federation protocol and client-server API
+that are not backwards compatible.
+
+This release also changes the internal database schemas and so requires servers
+to drop their current history. See UPGRADE.rst for details.
+
+Homeserver:
+ * Add authentication and authorization to the federation protocol. Events are
+   now signed by their originating homeservers.
+ * Implement the new authorization model for rooms.
+ * Split out web client into a separate repository: matrix-angular-sdk.
+ * Change the structure of PDUs.
+ * Fix bug where user could not join rooms via an alias containing 4-byte
+   UTF-8 characters.
+ * Merge concept of PDUs and Events internally.
+ * Improve logging by adding request ids to log lines.
+ * Implement a very basic room initial sync API.
+ * Implement the new invite/join federation APIs.
+
+Webclient:
+ * The webclient has been moved to a separate repository.
+
 Changes in synapse 0.4.2 (2014-10-31)
 =====================================
diff --git a/UPGRADE.rst b/UPGRADE.rst
index 99ce1a2d3d..2229470c3f 100644
--- a/UPGRADE.rst
+++ b/UPGRADE.rst
@@ -1,3 +1,33 @@
+Upgrading to v0.5.0
+===================
+
+This release completely changes the database schema and so requires upgrading
+it before starting the new version of the homeserver.
+
+The script "database-prepare-for-0.5.0.sh" should be used to upgrade the
+database. This will save all user information, such as logins and profiles,
+but will otherwise purge the database. This includes messages, which rooms
+the home server was a member of and room alias mappings.
+
+If you would like to keep your history, please take a copy of your database
+file and ask for help in #matrix:matrix.org. The upgrade process is,
+unfortunately, non-trivial and requires human intervention to resolve any
+resulting conflicts.
+
+Before running the command, the homeserver should first be completely
+shut down.
+To run it, simply specify the location of the database, e.g.:
+
+  ./database-prepare-for-0.5.0.sh "homeserver.db"
+
+Once this has successfully completed it will be safe to restart the
+homeserver. You may notice that the homeserver takes a few seconds longer to
+restart than usual as it reinitializes the database.
+
+On startup of the new version, users can rejoin remote rooms either by using
+room aliases or by being reinvited. Alternatively, if any other homeserver
+sends a message to a room that the homeserver was previously in, the local HS
+will automatically rejoin the room.
+
 Upgrading to v0.4.0
 ===================
diff --git a/VERSION b/VERSION
index 2b7c5ae018..8f0916f768 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-0.4.2
+0.5.0
diff --git a/database-prepare-for-0.5.0.sh b/database-prepare-for-0.5.0.sh
new file mode 100755
index 0000000000..e824cb583e
--- /dev/null
+++ b/database-prepare-for-0.5.0.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+# This will prepare a synapse database for running with v0.5.0 of synapse.
+# It will store all the user information, but will *delete* all messages and
+# room data.
+
+set -e
+
+cp "$1" "$1.bak"
+
+DUMP=$(sqlite3 "$1" << 'EOF'
+.dump users
+.dump access_tokens
+.dump presence
+.dump profiles
+EOF
+)
+
+rm "$1"
+
+sqlite3 "$1" <<< "$DUMP"
diff --git a/synapse/__init__.py b/synapse/__init__.py
index 23ae5f003f..14564e735e 100644
--- a/synapse/__init__.py
+++ b/synapse/__init__.py
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a synapse home server.
 """

-__version__ = "0.4.2"
+__version__ = "0.5.0"
diff --git a/synapse/storage/__init__.py b/synapse/storage/__init__.py
index c36d938d96..330d3b793f 100644
--- a/synapse/storage/__init__.py
+++ b/synapse/storage/__init__.py
@@ -67,7 +67,7 @@ SCHEMAS = [

 # Remember to update this number every time an incompatible change is made to
 # database schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 6
+SCHEMA_VERSION = 7
-- cgit 1.5.1
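
For reference, the SCHEMA_VERSION bump in the final hunk feeds the version
guard in prepare_database shown in an earlier commit. A condensed, standalone
sketch of that check (talking to sqlite3 directly; illustrative only):

    import sqlite3

    SCHEMA_VERSION = 7

    def check_schema_version(db_path):
        # SQLite stores the schema version in the user_version pragma.
        conn = sqlite3.connect(db_path)
        user_version = conn.execute("PRAGMA user_version").fetchone()[0]
        if user_version > SCHEMA_VERSION:
            raise ValueError(
                "Cannot use this database as it is too "
                "new for the server to understand"
            )
        return user_version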