From 1c2dcf762a8fe28390e9a98a01577aaadca7f1c0 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Mar 2015 13:10:31 +0000 Subject: Partially implement txn store methods with tests. --- tests/storage/test_appservice.py | 171 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 169 insertions(+), 2 deletions(-) (limited to 'tests/storage') diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index ca5b92ec85..30c0b43d96 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -15,9 +15,11 @@ from tests import unittest from twisted.internet import defer -from synapse.appservice import ApplicationService +from synapse.appservice import ApplicationService, ApplicationServiceState from synapse.server import HomeServer -from synapse.storage.appservice import ApplicationServiceStore +from synapse.storage.appservice import ( + ApplicationServiceStore, ApplicationServiceTransactionStore +) from mock import Mock from tests.utils import SQLiteMemoryDbPool, MockClock @@ -114,3 +116,168 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): def test_retrieval_of_all_services(self): services = yield self.store.get_app_services() self.assertEquals(len(services), 3) + + +class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): + + @defer.inlineCallbacks + def setUp(self): + self.db_pool = SQLiteMemoryDbPool() + yield self.db_pool.prepare() + hs = HomeServer( + "test", db_pool=self.db_pool, clock=MockClock(), config=Mock() + ) + self.as_list = [ + { + "token": "token1", + "url": "https://matrix-as.org", + "id": 3 + }, + { + "token": "alpha_tok", + "url": "https://alpha.com", + "id": 5 + }, + { + "token": "beta_tok", + "url": "https://beta.com", + "id": 6 + }, + { + "token": "delta_tok", + "url": "https://delta.com", + "id": 7 + }, + ] + for s in self.as_list: + yield self._add_service(s["id"], s["url"], s["token"]) + self.store = TestTransactionStore(hs) + + def _add_service(self, as_id, url, token): + return self.db_pool.runQuery( + "INSERT INTO application_services(id, url, token) VALUES(?,?,?)", + (as_id, url, token) + ) + + def _set_state(self, id, state, txn=None): + return self.db_pool.runQuery( + "INSERT INTO application_services_state(as_id, state, last_txn) " + "VALUES(?,?,?)", + (id, state, txn) + ) + + @defer.inlineCallbacks + def test_get_appservice_state_none(self): + service = Mock(id=999) + state = yield self.store.get_appservice_state(service) + self.assertEquals(None, state) + + @defer.inlineCallbacks + def test_get_appservice_state_up(self): + yield self._set_state( + self.as_list[0]["id"], ApplicationServiceState.UP + ) + service = Mock(id=self.as_list[0]["id"]) + state = yield self.store.get_appservice_state(service) + self.assertEquals(ApplicationServiceState.UP, state) + + @defer.inlineCallbacks + def test_get_appservice_state_down(self): + yield self._set_state( + self.as_list[0]["id"], ApplicationServiceState.UP + ) + yield self._set_state( + self.as_list[1]["id"], ApplicationServiceState.DOWN + ) + yield self._set_state( + self.as_list[2]["id"], ApplicationServiceState.DOWN + ) + service = Mock(id=self.as_list[1]["id"]) + state = yield self.store.get_appservice_state(service) + self.assertEquals(ApplicationServiceState.DOWN, state) + + @defer.inlineCallbacks + def test_get_appservices_by_state_none(self): + services = yield self.store.get_appservices_by_state( + ApplicationServiceState.DOWN + ) + self.assertEquals(0, len(services)) + + @defer.inlineCallbacks + def 
test_set_appservices_state_down(self): + service = Mock(id=self.as_list[1]["id"]) + yield self.store.set_appservice_state( + service, + ApplicationServiceState.DOWN + ) + rows = yield self.db_pool.runQuery( + "SELECT as_id FROM application_services_state WHERE state=?", + (ApplicationServiceState.DOWN,) + ) + self.assertEquals(service.id, rows[0][0]) + + @defer.inlineCallbacks + def test_set_appservices_state_multiple_up(self): + service = Mock(id=self.as_list[1]["id"]) + yield self.store.set_appservice_state( + service, + ApplicationServiceState.UP + ) + yield self.store.set_appservice_state( + service, + ApplicationServiceState.DOWN + ) + yield self.store.set_appservice_state( + service, + ApplicationServiceState.UP + ) + rows = yield self.db_pool.runQuery( + "SELECT as_id FROM application_services_state WHERE state=?", + (ApplicationServiceState.UP,) + ) + self.assertEquals(service.id, rows[0][0]) + + @defer.inlineCallbacks + def test_get_appservices_by_state_single(self): + yield self._set_state( + self.as_list[0]["id"], ApplicationServiceState.DOWN + ) + yield self._set_state( + self.as_list[1]["id"], ApplicationServiceState.UP + ) + + services = yield self.store.get_appservices_by_state( + ApplicationServiceState.DOWN + ) + self.assertEquals(1, len(services)) + self.assertEquals(self.as_list[0]["id"], services[0].id) + + @defer.inlineCallbacks + def test_get_appservices_by_state_multiple(self): + yield self._set_state( + self.as_list[0]["id"], ApplicationServiceState.DOWN + ) + yield self._set_state( + self.as_list[1]["id"], ApplicationServiceState.UP + ) + yield self._set_state( + self.as_list[2]["id"], ApplicationServiceState.DOWN + ) + yield self._set_state( + self.as_list[3]["id"], ApplicationServiceState.UP + ) + + services = yield self.store.get_appservices_by_state( + ApplicationServiceState.DOWN + ) + self.assertEquals(2, len(services)) + self.assertEquals(self.as_list[2]["id"], services[0].id) + self.assertEquals(self.as_list[0]["id"], services[1].id) + + +# required for ApplicationServiceTransactionStoreTestCase tests +class TestTransactionStore(ApplicationServiceTransactionStore, + ApplicationServiceStore): + + def __init__(self, hs): + super(TestTransactionStore, self).__init__(hs) -- cgit 1.4.1 From 1ead1caa18bdbf708446f1faa3d6f3dd13e63c29 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Mar 2015 13:54:20 +0000 Subject: Implement create_appservice_txn with tests. --- synapse/storage/appservice.py | 46 ++++++++++++++++++++++++--- tests/storage/test_appservice.py | 67 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 109 insertions(+), 4 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 37078f9ef0..1360a00eae 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -17,9 +17,10 @@ from simplejson import JSONDecodeError import simplejson as json from twisted.internet import defer +from syutil.jsonutil import encode_canonical_json from synapse.api.constants import Membership from synapse.api.errors import StoreError -from synapse.appservice import ApplicationService, ApplicationServiceState +from synapse.appservice import ApplicationService, AppServiceTransaction from synapse.storage.roommember import RoomsForUser from ._base import SQLBaseStore @@ -417,9 +418,46 @@ class ApplicationServiceTransactionStore(SQLBaseStore): Returns: AppServiceTransaction: A new transaction. 
""" - # TODO: work out txn id (highest txn id for this service += 1) - # TODO: Within same db transaction, Insert new txn into txn table - pass + return self.runInteraction( + "create_appservice_txn", + self._create_appservice_txn, + service, events + ) + + def _create_appservice_txn(self, txn, service, events): + # work out new txn id (highest txn id for this service += 1) + # The highest id may be the last one sent (in which case it is last_txn) + # or it may be the highest in the txns list (which are waiting to be/are + # being sent) + result = txn.execute( + "SELECT last_txn FROM application_services_state WHERE as_id=?", + (service.id,) + ) + last_txn_id = result.fetchone() + if last_txn_id is None: # no row exists + last_txn_id = 0 + else: + last_txn_id = int(last_txn_id[0]) # select 'last_txn' col + + result = txn.execute( + "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?", + (service.id,) + ) + highest_txn_id = result.fetchone()[0] + if highest_txn_id is None: + highest_txn_id = 0 + + new_txn_id = max(highest_txn_id, last_txn_id) + 1 + + # Insert new txn into txn table + txn.execute( + "INSERT INTO application_services_txns(as_id, txn_id, content) " + "VALUES(?,?,?)", + (service.id, new_txn_id, encode_canonical_json(events)) + ) + return AppServiceTransaction( + service=service, id=new_txn_id, events=events + ) def complete_appservice_txn(self, txn_id, service): """Completes an application service transaction. diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 30c0b43d96..7a8cdb5593 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -21,6 +21,7 @@ from synapse.storage.appservice import ( ApplicationServiceStore, ApplicationServiceTransactionStore ) +import json from mock import Mock from tests.utils import SQLiteMemoryDbPool, MockClock @@ -166,6 +167,20 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): (id, state, txn) ) + def _insert_txn(self, as_id, txn_id, content): + return self.db_pool.runQuery( + "INSERT INTO application_services_txns(as_id, txn_id, content) " + "VALUES(?,?,?)", + (as_id, txn_id, json.dumps(content)) + ) + + def _set_last_txn(self, as_id, txn_id): + return self.db_pool.runQuery( + "INSERT INTO application_services_state(as_id, last_txn, state) " + "VALUES(?,?,?)", + (as_id, txn_id, ApplicationServiceState.UP) + ) + @defer.inlineCallbacks def test_get_appservice_state_none(self): service = Mock(id=999) @@ -237,6 +252,58 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): ) self.assertEquals(service.id, rows[0][0]) + @defer.inlineCallbacks + def test_create_appservice_txn_first(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"type": "nothing"}, {"type": "here"}] + txn = yield self.store.create_appservice_txn(service, events) + self.assertEquals(txn.id, 1) + self.assertEquals(txn.events, events) + self.assertEquals(txn.service, service) + + @defer.inlineCallbacks + def test_create_appservice_txn_older_last_txn(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"type": "nothing"}, {"type": "here"}] + yield self._set_last_txn(service.id, 9643) # AS is falling behind + yield self._insert_txn(service.id, 9644, events) + yield self._insert_txn(service.id, 9645, events) + txn = yield self.store.create_appservice_txn(service, events) + self.assertEquals(txn.id, 9646) + self.assertEquals(txn.events, events) + self.assertEquals(txn.service, service) + + @defer.inlineCallbacks + def 
test_create_appservice_txn_up_to_date_last_txn(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"type": "nothing"}, {"type": "here"}] + yield self._set_last_txn(service.id, 9643) + txn = yield self.store.create_appservice_txn(service, events) + self.assertEquals(txn.id, 9644) + self.assertEquals(txn.events, events) + self.assertEquals(txn.service, service) + + @defer.inlineCallbacks + def test_create_appservice_txn_up_fuzzing(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"type": "nothing"}, {"type": "here"}] + yield self._set_last_txn(service.id, 9643) + + # dump in rows with higher IDs to make sure the queries aren't wrong. + yield self._set_last_txn(self.as_list[1]["id"], 119643) + yield self._set_last_txn(self.as_list[2]["id"], 9) + yield self._set_last_txn(self.as_list[3]["id"], 9643) + yield self._insert_txn(self.as_list[1]["id"], 119644, events) + yield self._insert_txn(self.as_list[1]["id"], 119645, events) + yield self._insert_txn(self.as_list[1]["id"], 119646, events) + yield self._insert_txn(self.as_list[2]["id"], 10, events) + yield self._insert_txn(self.as_list[3]["id"], 9643, events) + + txn = yield self.store.create_appservice_txn(service, events) + self.assertEquals(txn.id, 9644) + self.assertEquals(txn.events, events) + self.assertEquals(txn.service, service) + @defer.inlineCallbacks def test_get_appservices_by_state_single(self): yield self._set_state( -- cgit 1.4.1 From 0a60bbf4fac4262da3fee702ca46d2f019597ef1 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Mar 2015 15:53:03 +0000 Subject: Finish appservice txn storage impl and tests. --- synapse/storage/appservice.py | 85 ++++++++++++++++++---- .../storage/schema/delta/15/appservice_txns.sql | 2 +- tests/storage/test_appservice.py | 68 +++++++++++++++++ 3 files changed, 139 insertions(+), 16 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 1360a00eae..d89b0cc8c9 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -429,15 +429,7 @@ class ApplicationServiceTransactionStore(SQLBaseStore): # The highest id may be the last one sent (in which case it is last_txn) # or it may be the highest in the txns list (which are waiting to be/are # being sent) - result = txn.execute( - "SELECT last_txn FROM application_services_state WHERE as_id=?", - (service.id,) - ) - last_txn_id = result.fetchone() - if last_txn_id is None: # no row exists - last_txn_id = 0 - else: - last_txn_id = int(last_txn_id[0]) # select 'last_txn' col + last_txn_id = self._get_last_txn(txn, service.id) result = txn.execute( "SELECT MAX(txn_id) FROM application_services_txns WHERE as_id=?", @@ -467,12 +459,43 @@ class ApplicationServiceTransactionStore(SQLBaseStore): service(ApplicationService): The application service which was sent this transaction. Returns: - A Deferred which resolves to True if this transaction was completed + A Deferred which resolves if this transaction was stored successfully. """ - # TODO: Set current txn_id for AS to 'txn_id' - # TODO: Delete txn contents - pass + return self.runInteraction( + "complete_appservice_txn", + self._complete_appservice_txn, + txn_id, service + ) + + def _complete_appservice_txn(self, txn, txn_id, service): + txn_id = int(txn_id) + + # Debugging query: Make sure the txn being completed is EXACTLY +1 from + # what was there before. If it isn't, we've got problems (e.g. 
the AS + # has probably missed some events), so whine loudly but still continue, + # since it shouldn't fail completion of the transaction. + last_txn_id = self._get_last_txn(txn, service.id) + if (last_txn_id + 1) != txn_id: + logger.error( + "appservice: Completing a transaction which has an ID > 1 from " + "the last ID sent to this AS. We've either dropped events or " + "sent it to the AS out of order. FIX ME. last_txn=%s " + "completing_txn=%s service_id=%s", last_txn_id, txn_id, + service.id + ) + + # Set current txn_id for AS to 'txn_id' + self._simple_upsert_txn( + txn, "application_services_state", dict(as_id=service.id), + dict(last_txn=txn_id) + ) + + # Delete txn contents + self._simple_delete_txn( + txn, "application_services_txns", + dict(txn_id=txn_id, as_id=service.id) + ) def get_oldest_unsent_txn(self, service): """Get the oldest transaction which has not been sent for this @@ -484,6 +507,38 @@ class ApplicationServiceTransactionStore(SQLBaseStore): A Deferred which resolves to an AppServiceTransaction or None. """ - # TODO: Monotonically increasing txn ids, so just select the smallest + return self.runInteraction( + "get_oldest_unsent_appservice_txn", + self._get_oldest_unsent_txn, + service + ) + + def _get_oldest_unsent_txn(self, txn, service): + # Monotonically increasing txn ids, so just select the smallest # one in the txns table (we delete them when they are sent) - pass + result = txn.execute( + "SELECT *,MIN(txn_id) FROM application_services_txns WHERE as_id=?", + (service.id,) + ) + entry = self.cursor_to_dict(result)[0] + + if not entry or entry["txn_id"] is None: + # the min(txn_id) part will force a row, so entry may not be None + return None + + return AppServiceTransaction( + service=service, id=entry["txn_id"], events=json.loads( + entry["content"] + ) + ) + + def _get_last_txn(self, txn, service_id): + result = txn.execute( + "SELECT last_txn FROM application_services_state WHERE as_id=?", + (service_id,) + ) + last_txn_id = result.fetchone() + if last_txn_id is None: # no row exists + return 0 + else: + return int(last_txn_id[0]) # select 'last_txn' col diff --git a/synapse/storage/schema/delta/15/appservice_txns.sql b/synapse/storage/schema/delta/15/appservice_txns.sql index 11f0c799aa..ff15aa019e 100644 --- a/synapse/storage/schema/delta/15/appservice_txns.sql +++ b/synapse/storage/schema/delta/15/appservice_txns.sql @@ -15,7 +15,7 @@ CREATE TABLE IF NOT EXISTS application_services_state( as_id INTEGER PRIMARY KEY, - state TEXT NOT NULL, + state TEXT, last_txn TEXT, FOREIGN KEY(as_id) REFERENCES application_services(id) ); diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 7a8cdb5593..d1809c7f3b 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -304,6 +304,74 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): self.assertEquals(txn.events, events) self.assertEquals(txn.service, service) + @defer.inlineCallbacks + def test_complete_appservice_txn_first_txn(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"foo": "bar"}] + txn_id = 1 + + yield self._insert_txn(service.id, txn_id, events) + yield self.store.complete_appservice_txn(txn_id=txn_id, service=service) + + res = yield self.db_pool.runQuery( + "SELECT last_txn FROM application_services_state WHERE as_id=?", + (service.id,) + ) + self.assertEquals(1, len(res)) + self.assertEquals(str(txn_id), res[0][0]) + + res = yield self.db_pool.runQuery( + "SELECT * FROM application_services_txns WHERE 
txn_id=?", + (txn_id,) + ) + self.assertEquals(0, len(res)) + + @defer.inlineCallbacks + def test_complete_appservice_txn_existing_in_state_table(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"foo": "bar"}] + txn_id = 5 + yield self._set_last_txn(service.id, 4) + yield self._insert_txn(service.id, txn_id, events) + yield self.store.complete_appservice_txn(txn_id=txn_id, service=service) + + res = yield self.db_pool.runQuery( + "SELECT last_txn, state FROM application_services_state WHERE " + "as_id=?", + (service.id,) + ) + self.assertEquals(1, len(res)) + self.assertEquals(str(txn_id), res[0][0]) + self.assertEquals(ApplicationServiceState.UP, res[0][1]) + + res = yield self.db_pool.runQuery( + "SELECT * FROM application_services_txns WHERE txn_id=?", + (txn_id,) + ) + self.assertEquals(0, len(res)) + + @defer.inlineCallbacks + def test_get_oldest_unsent_txn_none(self): + service = Mock(id=self.as_list[0]["id"]) + + txn = yield self.store.get_oldest_unsent_txn(service) + self.assertEquals(None, txn) + + @defer.inlineCallbacks + def test_get_oldest_unsent_txn(self): + service = Mock(id=self.as_list[0]["id"]) + events = [{"type": "nothing"}, {"type": "here"}] + + yield self._insert_txn(self.as_list[1]["id"], 9, {"badger": "mushroom"}) + yield self._insert_txn(service.id, 10, events) + yield self._insert_txn(service.id, 11, [{"foo":"bar"}]) + yield self._insert_txn(service.id, 12, [{"argh":"bargh"}]) + + txn = yield self.store.get_oldest_unsent_txn(service) + self.assertEquals(service, txn.service) + self.assertEquals(10, txn.id) + self.assertEquals(events, txn.events) + @defer.inlineCallbacks def test_get_appservices_by_state_single(self): yield self._set_state( -- cgit 1.4.1 From b98cd03193476dea5f8b47e79d4122bb18449ae2 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Mon, 9 Mar 2015 17:25:20 +0000 Subject: Use event IDs instead of dumping event content in the txns table. 
--- synapse/storage/appservice.py | 14 +++++----- .../storage/schema/delta/15/appservice_txns.sql | 2 +- tests/storage/test_appservice.py | 30 ++++++++++++---------- 3 files changed, 26 insertions(+), 20 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index d89b0cc8c9..c3c0a0bd43 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -442,10 +442,11 @@ class ApplicationServiceTransactionStore(SQLBaseStore): new_txn_id = max(highest_txn_id, last_txn_id) + 1 # Insert new txn into txn table + event_ids = [e.event_id for e in events] txn.execute( - "INSERT INTO application_services_txns(as_id, txn_id, content) " + "INSERT INTO application_services_txns(as_id, txn_id, event_ids) " "VALUES(?,?,?)", - (service.id, new_txn_id, encode_canonical_json(events)) + (service.id, new_txn_id, json.dumps(event_ids)) ) return AppServiceTransaction( service=service, id=new_txn_id, events=events @@ -491,7 +492,7 @@ class ApplicationServiceTransactionStore(SQLBaseStore): dict(last_txn=txn_id) ) - # Delete txn contents + # Delete txn self._simple_delete_txn( txn, "application_services_txns", dict(txn_id=txn_id, as_id=service.id) @@ -526,10 +527,11 @@ class ApplicationServiceTransactionStore(SQLBaseStore): # the min(txn_id) part will force a row, so entry may not be None return None + event_ids = json.loads(entry["event_ids"]) + events = self._get_events_txn(event_ids) + return AppServiceTransaction( - service=service, id=entry["txn_id"], events=json.loads( - entry["content"] - ) + service=service, id=entry["txn_id"], events=events ) def _get_last_txn(self, txn, service_id): diff --git a/synapse/storage/schema/delta/15/appservice_txns.sql b/synapse/storage/schema/delta/15/appservice_txns.sql index ff15aa019e..13bbb2de2e 100644 --- a/synapse/storage/schema/delta/15/appservice_txns.sql +++ b/synapse/storage/schema/delta/15/appservice_txns.sql @@ -23,7 +23,7 @@ CREATE TABLE IF NOT EXISTS application_services_state( CREATE TABLE IF NOT EXISTS application_services_txns( as_id INTEGER NOT NULL, txn_id INTEGER NOT NULL, - content TEXT NOT NULL, + event_ids TEXT NOT NULL, UNIQUE(as_id, txn_id) ON CONFLICT ROLLBACK ); diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index d1809c7f3b..e79599f7fb 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -167,11 +167,11 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): (id, state, txn) ) - def _insert_txn(self, as_id, txn_id, content): + def _insert_txn(self, as_id, txn_id, events): return self.db_pool.runQuery( - "INSERT INTO application_services_txns(as_id, txn_id, content) " + "INSERT INTO application_services_txns(as_id, txn_id, event_ids) " "VALUES(?,?,?)", - (as_id, txn_id, json.dumps(content)) + (as_id, txn_id, json.dumps([e.event_id for e in events])) ) def _set_last_txn(self, as_id, txn_id): @@ -255,7 +255,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_create_appservice_txn_first(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"type": "nothing"}, {"type": "here"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] txn = yield self.store.create_appservice_txn(service, events) self.assertEquals(txn.id, 1) self.assertEquals(txn.events, events) @@ -264,7 +264,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_create_appservice_txn_older_last_txn(self): service = 
Mock(id=self.as_list[0]["id"]) - events = [{"type": "nothing"}, {"type": "here"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] yield self._set_last_txn(service.id, 9643) # AS is falling behind yield self._insert_txn(service.id, 9644, events) yield self._insert_txn(service.id, 9645, events) @@ -276,7 +276,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_create_appservice_txn_up_to_date_last_txn(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"type": "nothing"}, {"type": "here"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] yield self._set_last_txn(service.id, 9643) txn = yield self.store.create_appservice_txn(service, events) self.assertEquals(txn.id, 9644) @@ -286,7 +286,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_create_appservice_txn_up_fuzzing(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"type": "nothing"}, {"type": "here"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] yield self._set_last_txn(service.id, 9643) # dump in rows with higher IDs to make sure the queries aren't wrong. @@ -307,7 +307,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_complete_appservice_txn_first_txn(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"foo": "bar"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] txn_id = 1 yield self._insert_txn(service.id, txn_id, events) @@ -329,7 +329,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_complete_appservice_txn_existing_in_state_table(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"foo": "bar"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] txn_id = 5 yield self._set_last_txn(service.id, 4) yield self._insert_txn(service.id, txn_id, events) @@ -360,12 +360,16 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def test_get_oldest_unsent_txn(self): service = Mock(id=self.as_list[0]["id"]) - events = [{"type": "nothing"}, {"type": "here"}] + events = [Mock(event_id="e1"), Mock(event_id="e2")] + other_events = [Mock(event_id="e5"), Mock(event_id="e6")] - yield self._insert_txn(self.as_list[1]["id"], 9, {"badger": "mushroom"}) + # we aren't testing store._base stuff here, so mock this out + self.store._get_events_txn = Mock(return_value=events) + + yield self._insert_txn(self.as_list[1]["id"], 9, other_events) yield self._insert_txn(service.id, 10, events) - yield self._insert_txn(service.id, 11, [{"foo":"bar"}]) - yield self._insert_txn(service.id, 12, [{"argh":"bargh"}]) + yield self._insert_txn(service.id, 11, other_events) + yield self._insert_txn(service.id, 12, other_events) txn = yield self.store.get_oldest_unsent_txn(service) self.assertEquals(service, txn.service) -- cgit 1.4.1 From 0f86312c4cb262ad1b69207dd46712707dee75bb Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Fri, 20 Mar 2015 18:13:49 +0000 Subject: Pull out the cache logic from the @cached wrapper into its own class we can reuse --- synapse/storage/_base.py | 89 +++++++++++++++++++++++++++------------------ tests/storage/test__base.py | 34 ++++++++++++++++- 2 files changed, 87 insertions(+), 36 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 2552a74f85..27ea65a0f6 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -53,6 +53,47 @@ 
cache_counter = metrics.register_cache( ) +class Cache(object): + + def __init__(self, name, max_entries=1000, keylen=1): + self.cache = OrderedDict() + + self.max_entries = max_entries + self.name = name + self.keylen = keylen + + caches_by_name[name] = self.cache + + def get(self, *keyargs): + if len(keyargs) != self.keylen: + raise ValueError("Expected a key to have %d items", self.keylen) + + if keyargs in self.cache: + cache_counter.inc_hits(self.name) + return self.cache[keyargs] + + cache_counter.inc_misses(self.name) + raise KeyError() + + def prefill(self, *args): # because I can't *keyargs, value + keyargs = args[:-1] + value = args[-1] + + if len(keyargs) != self.keylen: + raise ValueError("Expected a key to have %d items", self.keylen) + + while len(self.cache) > self.max_entries: + self.cache.popitem(last=False) + + self.cache[keyargs] = value + + def invalidate(self, *keyargs): + if len(keyargs) != self.keylen: + raise ValueError("Expected a key to have %d items", self.keylen) + + self.cache.pop(keyargs, None) + + # TODO(paul): # * consider other eviction strategies - LRU? def cached(max_entries=1000, num_args=1): @@ -70,48 +111,26 @@ def cached(max_entries=1000, num_args=1): calling the calculation function. """ def wrap(orig): - cache = OrderedDict() - name = orig.__name__ - - caches_by_name[name] = cache - - def prefill(*args): # because I can't *keyargs, value - keyargs = args[:-1] - value = args[-1] - - if len(keyargs) != num_args: - raise ValueError("Expected a call to have %d arguments", num_args) - - while len(cache) > max_entries: - cache.popitem(last=False) - - cache[keyargs] = value + cache = Cache( + name=orig.__name__, + max_entries=max_entries, + keylen=num_args, + ) @functools.wraps(orig) @defer.inlineCallbacks def wrapped(self, *keyargs): - if len(keyargs) != num_args: - raise ValueError("Expected a call to have %d arguments", num_args) - - if keyargs in cache: - cache_counter.inc_hits(name) - defer.returnValue(cache[keyargs]) - - cache_counter.inc_misses(name) - ret = yield orig(self, *keyargs) - - prefill(*keyargs + (ret,)) - - defer.returnValue(ret) + try: + defer.returnValue(cache.get(*keyargs)) + except KeyError: + ret = yield orig(self, *keyargs) - def invalidate(*keyargs): - if len(keyargs) != num_args: - raise ValueError("Expected a call to have %d arguments", num_args) + cache.prefill(*keyargs + (ret,)) - cache.pop(keyargs, None) + defer.returnValue(ret) - wrapped.invalidate = invalidate - wrapped.prefill = prefill + wrapped.invalidate = cache.invalidate + wrapped.prefill = cache.prefill return wrapped return wrap diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 55d22f665a..783abc2b00 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -17,7 +17,39 @@ from tests import unittest from twisted.internet import defer -from synapse.storage._base import cached +from synapse.storage._base import Cache, cached + + +class CacheTestCase(unittest.TestCase): + + def setUp(self): + self.cache = Cache("test") + + def test_empty(self): + failed = False + try: + self.cache.get("foo") + except KeyError: + failed = True + + self.assertTrue(failed) + + def test_hit(self): + self.cache.prefill("foo", 123) + + self.assertEquals(self.cache.get("foo"), 123) + + def test_invalidate(self): + self.cache.prefill("foo", 123) + self.cache.invalidate("foo") + + failed = False + try: + self.cache.get("foo") + except KeyError: + failed = True + + self.assertTrue(failed) class CacheDecoratorTestCase(unittest.TestCase): -- cgit 
1.4.1 From 7ab9f91a605d501cadee1c212eca2ef0467adc50 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 25 Mar 2015 18:50:43 +0000 Subject: Unit-test that Cache() key eviction is ordered --- tests/storage/test__base.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) (limited to 'tests/storage') diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index 783abc2b00..b6853ba2d4 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -51,6 +51,24 @@ class CacheTestCase(unittest.TestCase): self.assertTrue(failed) + def test_eviction(self): + cache = Cache("test", max_entries=2) + + cache.prefill(1, "one") + cache.prefill(2, "two") + cache.prefill(3, "three") # 1 will be evicted + + failed = False + try: + cache.get(1) + except KeyError: + failed = True + + self.assertTrue(failed) + + cache.get(2) + cache.get(3) + class CacheDecoratorTestCase(unittest.TestCase): -- cgit 1.4.1 From 9ba6487b3fe985c4ec84b02d9804aea7e2df6c40 Mon Sep 17 00:00:00 2001 From: "Paul \"LeoNerd\" Evans" Date: Wed, 25 Mar 2015 19:05:34 +0000 Subject: Allow a choice of LRU behaviour for Cache() by using LruCache() or OrderedDict() --- synapse/storage/_base.py | 20 ++++++++++++-------- tests/storage/test__base.py | 22 ++++++++++++++++++++++ 2 files changed, 34 insertions(+), 8 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/_base.py b/synapse/storage/_base.py index 27ea65a0f6..6fa63f052e 100644 --- a/synapse/storage/_base.py +++ b/synapse/storage/_base.py @@ -55,10 +55,14 @@ cache_counter = metrics.register_cache( class Cache(object): - def __init__(self, name, max_entries=1000, keylen=1): - self.cache = OrderedDict() + def __init__(self, name, max_entries=1000, keylen=1, lru=False): + if lru: + self.cache = LruCache(max_size=max_entries) + self.max_entries = None + else: + self.cache = OrderedDict() + self.max_entries = max_entries - self.max_entries = max_entries self.name = name self.keylen = keylen @@ -82,8 +86,9 @@ class Cache(object): if len(keyargs) != self.keylen: raise ValueError("Expected a key to have %d items", self.keylen) - while len(self.cache) > self.max_entries: - self.cache.popitem(last=False) + if self.max_entries is not None: + while len(self.cache) >= self.max_entries: + self.cache.popitem(last=False) self.cache[keyargs] = value @@ -94,9 +99,7 @@ class Cache(object): self.cache.pop(keyargs, None) -# TODO(paul): -# * consider other eviction strategies - LRU? -def cached(max_entries=1000, num_args=1): +def cached(max_entries=1000, num_args=1, lru=False): """ A method decorator that applies a memoizing cache around the function. 
The function is presumed to take zero or more arguments, which are used in @@ -115,6 +118,7 @@ def cached(max_entries=1000, num_args=1): name=orig.__name__, max_entries=max_entries, keylen=num_args, + lru=lru, ) @functools.wraps(orig) diff --git a/tests/storage/test__base.py b/tests/storage/test__base.py index b6853ba2d4..96caf8c4c1 100644 --- a/tests/storage/test__base.py +++ b/tests/storage/test__base.py @@ -69,6 +69,28 @@ class CacheTestCase(unittest.TestCase): cache.get(2) cache.get(3) + def test_eviction_lru(self): + cache = Cache("test", max_entries=2, lru=True) + + cache.prefill(1, "one") + cache.prefill(2, "two") + + # Now access 1 again, thus causing 2 to be least-recently used + cache.get(1) + + cache.prefill(3, "three") + + failed = False + try: + cache.get(2) + except KeyError: + failed = True + + self.assertTrue(failed) + + cache.get(1) + cache.get(3) + class CacheDecoratorTestCase(unittest.TestCase): -- cgit 1.4.1 From d33ae65efc14a18a8a690d39d6e9c81aaafa1062 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 31 Mar 2015 11:00:00 +0100 Subject: Remove more reg/unreg methods. Read config not database for cache. --- synapse/handlers/appservice.py | 37 ------- synapse/storage/appservice.py | 219 +++++++++++---------------------------- tests/storage/test_appservice.py | 39 ------- 3 files changed, 59 insertions(+), 236 deletions(-) (limited to 'tests/storage') diff --git a/synapse/handlers/appservice.py b/synapse/handlers/appservice.py index 58b5b60bb7..59cf15b037 100644 --- a/synapse/handlers/appservice.py +++ b/synapse/handlers/appservice.py @@ -16,10 +16,8 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.api.errors import Codes, StoreError, SynapseError from synapse.appservice import ApplicationService from synapse.types import UserID -import synapse.util.stringutils as stringutils import logging @@ -49,38 +47,6 @@ class ApplicationServicesHandler(object): self.scheduler = appservice_scheduler self.started_scheduler = False - @defer.inlineCallbacks - def register(self, app_service): - logger.info("Register -> %s", app_service) - # check the token is recognised - try: - stored_service = yield self.store.get_app_service_by_token( - app_service.token - ) - if not stored_service: - raise StoreError(404, "Application service not found") - app_service.id = stored_service.id - except StoreError: - raise SynapseError( - 403, "Unrecognised application services token. " - "Consult the home server admin.", - errcode=Codes.FORBIDDEN - ) - app_service.hs_token = self._generate_hs_token() - - # create a sender for this application service which is used when - # creating rooms, etc.. - account = yield self.hs.get_handlers().registration_handler.register() - app_service.sender = account[0] - - yield self.store.update_app_service(app_service) - defer.returnValue(app_service) - - @defer.inlineCallbacks - def unregister(self, token): - logger.info("Unregister as_token=%s", token) - yield self.store.unregister_app_service(token) - @defer.inlineCallbacks def notify_interested_services(self, event): """Notifies (pushes) all application services interested in this event. 
@@ -223,6 +189,3 @@ class ApplicationServicesHandler(object): exists = yield self.query_user_exists(user_id) defer.returnValue(exists) defer.returnValue(True) - - def _generate_hs_token(self): - return stringutils.random_string(24) diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index 93304a745f..fe9372a7c6 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -13,12 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +import yaml from simplejson import JSONDecodeError import simplejson as json from twisted.internet import defer from synapse.api.constants import Membership -from synapse.api.errors import StoreError from synapse.appservice import ApplicationService, AppServiceTransaction from synapse.storage.roommember import RoomsForUser from ._base import SQLBaseStore @@ -27,141 +27,18 @@ from ._base import SQLBaseStore logger = logging.getLogger(__name__) -def log_failure(failure): - logger.error("Failed to detect application services: %s", failure.value) - logger.error(failure.getTraceback()) - - class ApplicationServiceStore(SQLBaseStore): def __init__(self, hs): super(ApplicationServiceStore, self).__init__(hs) self.services_cache = [] - self.cache_defer = self._populate_appservice_cache() - self.cache_defer.addErrback(log_failure) - - @defer.inlineCallbacks - def unregister_app_service(self, token): - """Unregisters this service. - - This removes all AS specific regex and the base URL. The token is the - only thing preserved for future registration attempts. - """ - yield self.cache_defer # make sure the cache is ready - yield self.runInteraction( - "unregister_app_service", - self._unregister_app_service_txn, - token, - ) - # update cache TODO: Should this be in the txn? - for service in self.services_cache: - if service.token == token: - service.url = None - service.namespaces = None - service.hs_token = None - - def _unregister_app_service_txn(self, txn, token): - # kill the url to prevent pushes - txn.execute( - "UPDATE application_services SET url=NULL WHERE token=?", - (token,) - ) - - # cleanup regex - as_id = self._get_as_id_txn(txn, token) - if not as_id: - logger.warning( - "unregister_app_service_txn: Failed to find as_id for token=", - token - ) - return False - - txn.execute( - "DELETE FROM application_services_regex WHERE as_id=?", - (as_id,) - ) - return True - - @defer.inlineCallbacks - def update_app_service(self, service): - """Update an application service, clobbering what was previously there. - - Args: - service(ApplicationService): The updated service. - """ - yield self.cache_defer # make sure the cache is ready - - # NB: There is no "insert" since we provide no public-facing API to - # allocate new ASes. It relies on the server admin inserting the AS - # token into the database manually. - - if not service.token or not service.url: - raise StoreError(400, "Token and url must be specified.") - - if not service.hs_token: - raise StoreError(500, "No HS token") - - as_id = yield self.runInteraction( - "update_app_service", - self._update_app_service_txn, - service + self._populate_appservice_cache( + hs.config.app_service_config_files ) - service.id = as_id - # update cache TODO: Should this be in the txn? 
- for (index, cache_service) in enumerate(self.services_cache): - if service.token == cache_service.token: - self.services_cache[index] = service - logger.info("Updated: %s", service) - return - # new entry - self.services_cache.append(service) - logger.info("Updated(new): %s", service) - - def _update_app_service_txn(self, txn, service): - as_id = self._get_as_id_txn(txn, service.token) - if not as_id: - logger.warning( - "update_app_service_txn: Failed to find as_id for token=", - service.token - ) - return - - txn.execute( - "UPDATE application_services SET url=?, hs_token=?, sender=? " - "WHERE id=?", - (service.url, service.hs_token, service.sender, as_id,) - ) - # cleanup regex - txn.execute( - "DELETE FROM application_services_regex WHERE as_id=?", - (as_id,) - ) - for (ns_int, ns_str) in enumerate(ApplicationService.NS_LIST): - if ns_str in service.namespaces: - for regex_obj in service.namespaces[ns_str]: - txn.execute( - "INSERT INTO application_services_regex(" - "as_id, namespace, regex) values(?,?,?)", - (as_id, ns_int, json.dumps(regex_obj)) - ) - return as_id - - def _get_as_id_txn(self, txn, token): - cursor = txn.execute( - "SELECT id FROM application_services WHERE token=?", - (token,) - ) - res = cursor.fetchone() - if res: - return res[0] - - @defer.inlineCallbacks def get_app_services(self): - yield self.cache_defer # make sure the cache is ready - defer.returnValue(self.services_cache) + defer.succeed(self.services_cache) - @defer.inlineCallbacks def get_app_service_by_user_id(self, user_id): """Retrieve an application service from their user ID. @@ -175,37 +52,24 @@ class ApplicationServiceStore(SQLBaseStore): Returns: synapse.appservice.ApplicationService or None. """ - - yield self.cache_defer # make sure the cache is ready - for service in self.services_cache: if service.sender == user_id: - defer.returnValue(service) + defer.succeed(service) return - defer.returnValue(None) + defer.succeed(None) - @defer.inlineCallbacks - def get_app_service_by_token(self, token, from_cache=True): + def get_app_service_by_token(self, token): """Get the application service with the given appservice token. Args: token (str): The application service token. - from_cache (bool): True to get this service from the cache, False to - check the database. - Raises: - StoreError if there was a problem retrieving this service. + Returns: + synapse.appservice.ApplicationService or None. """ - yield self.cache_defer # make sure the cache is ready - - if from_cache: - for service in self.services_cache: - if service.token == token: - defer.returnValue(service) - return - defer.returnValue(None) - - # TODO: The from_cache=False impl - # TODO: This should be JOINed with the application_services_regex table. + for service in self.services_cache: + if service.token == token: + return defer.succeed(service) + defer.succeed(None) def get_app_service_rooms(self, service): """Get a list of RoomsForUser for this application service. 
@@ -336,18 +200,53 @@ class ApplicationServiceStore(SQLBaseStore): )) return service_list - @defer.inlineCallbacks - def _populate_appservice_cache(self): - """Populates the ApplicationServiceCache from the database.""" - sql = ("SELECT r.*, a.* FROM application_services AS a LEFT JOIN " - "application_services_regex AS r ON a.id = r.as_id") - - results = yield self._execute_and_decode("appservice_cache", sql) - services = self._parse_services_dict(results) + def _load_appservice(self, as_info): + required_string_fields = ["url", "as_token", "hs_token", "sender"] + for field in required_string_fields: + if not isinstance(as_info.get(field), basestring): + raise KeyError("Required string field: '%s'", field) + + # namespace checks + if not isinstance(as_info.get("namespaces"), dict): + raise KeyError("Requires 'namespaces' object.") + for ns in ApplicationService.NS_LIST: + # specific namespaces are optional + if ns in as_info["namespaces"]: + # expect a list of dicts with exclusive and regex keys + for regex_obj in as_info["namespaces"][ns]: + if not isinstance(regex_obj, dict): + raise ValueError( + "Expected namespace entry in %s to be an object," + " but got %s", ns, regex_obj + ) + if not isinstance(regex_obj.get("regex"), basestring): + raise ValueError( + "Missing/bad type 'regex' key in %s", regex_obj + ) + if not isinstance(regex_obj.get("exclusive"), bool): + raise ValueError( + "Missing/bad type 'exclusive' key in %s", regex_obj + ) + return ApplicationService( + token=as_info["as_token"], + url=as_info["url"], + namespaces=as_info["namespaces"], + hs_token=as_info["hs_token"], + sender=as_info["sender"] + ) - for service in services: - logger.info("Found application service: %s", service) - self.services_cache.append(service) + def _populate_appservice_cache(self, config_files): + """Populates a cache of Application Services from the config files.""" + for config_file in config_files: + try: + with open(config_file, 'r') as f: + as_info = yaml.load(f) + appservice = self._load_appservice(as_info) + logger.info("Loaded application service: %s", appservice) + self.services_cache.append(appservice) + except Exception as e: + logger.error("Failed to load appservice from '%s'", config_file) + logger.exception(e) class ApplicationServiceTransactionStore(SQLBaseStore): diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index e79599f7fb..82bfea15a6 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -49,45 +49,6 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): # must be done after inserts self.store = ApplicationServiceStore(hs) - @defer.inlineCallbacks - def test_update_and_retrieval_of_service(self): - url = "https://matrix.org/appservices/foobar" - hs_token = "hstok" - user_regex = [ - {"regex": "@foobar_.*:matrix.org", "exclusive": True} - ] - alias_regex = [ - {"regex": "#foobar_.*:matrix.org", "exclusive": False} - ] - room_regex = [ - - ] - service = ApplicationService( - url=url, hs_token=hs_token, token=self.as_token, namespaces={ - ApplicationService.NS_USERS: user_regex, - ApplicationService.NS_ALIASES: alias_regex, - ApplicationService.NS_ROOMS: room_regex - }) - yield self.store.update_app_service(service) - - stored_service = yield self.store.get_app_service_by_token( - self.as_token - ) - self.assertEquals(stored_service.token, self.as_token) - self.assertEquals(stored_service.url, url) - self.assertEquals( - stored_service.namespaces[ApplicationService.NS_ALIASES], - alias_regex - ) - 
self.assertEquals( - stored_service.namespaces[ApplicationService.NS_ROOMS], - room_regex - ) - self.assertEquals( - stored_service.namespaces[ApplicationService.NS_USERS], - user_regex - ) - @defer.inlineCallbacks def test_retrieve_unknown_service_token(self): service = yield self.store.get_app_service_by_token("invalid_token") -- cgit 1.4.1 From b59aa745560608c8503421bd9542c99fc1c571b5 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 31 Mar 2015 11:35:45 +0100 Subject: Fix tests and missing returns on deferreds. --- synapse/appservice/__init__.py | 2 +- synapse/storage/appservice.py | 18 +++++++++++------- tests/storage/test_appservice.py | 41 ++++++++++++++++++++++++++++------------ 3 files changed, 41 insertions(+), 20 deletions(-) (limited to 'tests/storage') diff --git a/synapse/appservice/__init__.py b/synapse/appservice/__init__.py index c60db16b74..a8108c1efb 100644 --- a/synapse/appservice/__init__.py +++ b/synapse/appservice/__init__.py @@ -95,7 +95,7 @@ class ApplicationService(object): # rooms: [ {regex: "[A-z]+.*", exclusive: true}, ...], # } if not namespaces: - return None + namespaces = {} for ns in ApplicationService.NS_LIST: if ns not in namespaces: diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index fe9372a7c6..a520a859d3 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -37,7 +37,7 @@ class ApplicationServiceStore(SQLBaseStore): ) def get_app_services(self): - defer.succeed(self.services_cache) + return defer.succeed(self.services_cache) def get_app_service_by_user_id(self, user_id): """Retrieve an application service from their user ID. @@ -54,9 +54,8 @@ class ApplicationServiceStore(SQLBaseStore): """ for service in self.services_cache: if service.sender == user_id: - defer.succeed(service) - return - defer.succeed(None) + return defer.succeed(service) + return defer.succeed(None) def get_app_service_by_token(self, token): """Get the application service with the given appservice token. @@ -69,7 +68,7 @@ class ApplicationServiceStore(SQLBaseStore): for service in self.services_cache: if service.token == token: return defer.succeed(service) - defer.succeed(None) + return defer.succeed(None) def get_app_service_rooms(self, service): """Get a list of RoomsForUser for this application service. 
@@ -237,11 +236,16 @@ class ApplicationServiceStore(SQLBaseStore): def _populate_appservice_cache(self, config_files): """Populates a cache of Application Services from the config files.""" + if not isinstance(config_files, list): + logger.warning( + "Expected %s to be a list of AS config files.", config_files + ) + return + for config_file in config_files: try: with open(config_file, 'r') as f: - as_info = yaml.load(f) - appservice = self._load_appservice(as_info) + appservice = self._load_appservice(yaml.load(f)) logger.info("Loaded application service: %s", appservice) self.services_cache.append(appservice) except Exception as e: diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 82bfea15a6..b856438fd2 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -22,6 +22,8 @@ from synapse.storage.appservice import ( ) import json +import os +import yaml from mock import Mock from tests.utils import SQLiteMemoryDbPool, MockClock @@ -30,25 +32,40 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): + self.as_yaml_files = [] db_pool = SQLiteMemoryDbPool() yield db_pool.prepare() hs = HomeServer( - "test", db_pool=db_pool, clock=MockClock(), config=Mock() + "test", db_pool=db_pool, clock=MockClock(), + config=Mock( + app_service_config_files=self.as_yaml_files + ) ) + self.as_token = "token1" - db_pool.runQuery( - "INSERT INTO application_services(token) VALUES(?)", - (self.as_token,) - ) - db_pool.runQuery( - "INSERT INTO application_services(token) VALUES(?)", ("token2",) - ) - db_pool.runQuery( - "INSERT INTO application_services(token) VALUES(?)", ("token3",) - ) + self.as_url = "some_url" + self._add_appservice(self.as_token, self.as_url, "some_hs_token", "bob") + self._add_appservice("token2", "some_url", "some_hs_token", "bob") + self._add_appservice("token3", "some_url", "some_hs_token", "bob") # must be done after inserts self.store = ApplicationServiceStore(hs) + def tearDown(self): + # TODO: suboptimal that we need to create files for tests! + for f in self.as_yaml_files: + try: + os.remove(f) + except: + pass + + def _add_appservice(self, as_token, url, hs_token, sender): + as_yaml = dict(url=url, as_token=as_token, hs_token=hs_token, + sender=sender, namespaces={}) + # use the token as the filename + with open(as_token, 'w') as outfile: + outfile.write(yaml.dump(as_yaml)) + self.as_yaml_files.append(as_token) + @defer.inlineCallbacks def test_retrieve_unknown_service_token(self): service = yield self.store.get_app_service_by_token("invalid_token") @@ -60,7 +77,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): self.as_token ) self.assertEquals(stored_service.token, self.as_token) - self.assertEquals(stored_service.url, None) + self.assertEquals(stored_service.url, self.as_url) self.assertEquals( stored_service.namespaces[ApplicationService.NS_ALIASES], [] -- cgit 1.4.1 From c217504949a90712f41a0422215f923b4d114a17 Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 31 Mar 2015 12:07:56 +0100 Subject: Edit SQL schema to use string IDs not ints. Use token as ID. Update tests. 
--- synapse/storage/appservice.py | 23 ++++++++----- .../storage/schema/delta/15/appservice_txns.sql | 7 ++-- tests/storage/test_appservice.py | 38 +++++++++++++--------- 3 files changed, 40 insertions(+), 28 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index a520a859d3..a8780eca1e 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -231,7 +231,8 @@ class ApplicationServiceStore(SQLBaseStore): url=as_info["url"], namespaces=as_info["namespaces"], hs_token=as_info["hs_token"], - sender=as_info["sender"] + sender=as_info["sender"], + id=as_info["as_token"] # the token is the only unique thing here ) def _populate_appservice_cache(self, config_files): @@ -268,16 +269,20 @@ class ApplicationServiceTransactionStore(SQLBaseStore): A Deferred which resolves to a list of ApplicationServices, which may be empty. """ - sql = ( - "SELECT r.*, a.* FROM application_services_state AS s LEFT JOIN" - " application_services AS a ON a.id=s.as_id LEFT JOIN" - " application_services_regex AS r ON r.as_id=a.id WHERE state = ?" - ) - results = yield self._execute_and_decode( - "get_appservices_by_state", sql, state + results = yield self._simple_select_list( + "application_services_state", + dict(state=state), + ["as_id"] ) # NB: This assumes this class is linked with ApplicationServiceStore - defer.returnValue(self._parse_services_dict(results)) + as_list = yield self.get_app_services() + services = [] + + for res in results: + for service in as_list: + if service.id == res["as_id"]: + services.append(service) + defer.returnValue(services) @defer.inlineCallbacks def get_appservice_state(self, service): diff --git a/synapse/storage/schema/delta/15/appservice_txns.sql b/synapse/storage/schema/delta/15/appservice_txns.sql index 13bbb2de2e..2b27e2a429 100644 --- a/synapse/storage/schema/delta/15/appservice_txns.sql +++ b/synapse/storage/schema/delta/15/appservice_txns.sql @@ -14,14 +14,13 @@ */ CREATE TABLE IF NOT EXISTS application_services_state( - as_id INTEGER PRIMARY KEY, + as_id TEXT PRIMARY KEY, state TEXT, - last_txn TEXT, - FOREIGN KEY(as_id) REFERENCES application_services(id) + last_txn TEXT ); CREATE TABLE IF NOT EXISTS application_services_txns( - as_id INTEGER NOT NULL, + as_id TEXT NOT NULL, txn_id INTEGER NOT NULL, event_ids TEXT NOT NULL, UNIQUE(as_id, txn_id) ON CONFLICT ROLLBACK diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index b856438fd2..58551e40b9 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -101,42 +101,48 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): @defer.inlineCallbacks def setUp(self): + self.as_yaml_files = [] self.db_pool = SQLiteMemoryDbPool() yield self.db_pool.prepare() - hs = HomeServer( - "test", db_pool=self.db_pool, clock=MockClock(), config=Mock() - ) self.as_list = [ { "token": "token1", "url": "https://matrix-as.org", - "id": 3 + "id": "token1" }, { "token": "alpha_tok", "url": "https://alpha.com", - "id": 5 + "id": "alpha_tok" }, { "token": "beta_tok", "url": "https://beta.com", - "id": 6 + "id": "beta_tok" }, { "token": "delta_tok", "url": "https://delta.com", - "id": 7 + "id": "delta_tok" }, ] for s in self.as_list: - yield self._add_service(s["id"], s["url"], s["token"]) - self.store = TestTransactionStore(hs) + yield self._add_service(s["url"], s["token"]) - def _add_service(self, as_id, url, token): - return self.db_pool.runQuery( - "INSERT INTO 
application_services(id, url, token) VALUES(?,?,?)", - (as_id, url, token) + hs = HomeServer( + "test", db_pool=self.db_pool, clock=MockClock(), config=Mock( + app_service_config_files=self.as_yaml_files + ) ) + self.store = TestTransactionStore(hs) + + def _add_service(self, url, as_token): + as_yaml = dict(url=url, as_token=as_token, hs_token="something", + sender="a_sender", namespaces={}) + # use the token as the filename + with open(as_token, 'w') as outfile: + outfile.write(yaml.dump(as_yaml)) + self.as_yaml_files.append(as_token) def _set_state(self, id, state, txn=None): return self.db_pool.runQuery( @@ -388,8 +394,10 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): ApplicationServiceState.DOWN ) self.assertEquals(2, len(services)) - self.assertEquals(self.as_list[2]["id"], services[0].id) - self.assertEquals(self.as_list[0]["id"], services[1].id) + self.assertEquals( + set([self.as_list[2]["id"], self.as_list[0]["id"]]), + set([services[0].id, services[1].id]) + ) # required for ApplicationServiceTransactionStoreTestCase tests -- cgit 1.4.1 From cf1fa59f4b72dbf5c9d735eaf051f1456721d91f Mon Sep 17 00:00:00 2001 From: Kegan Dougal Date: Tue, 31 Mar 2015 13:48:03 +0100 Subject: Use a sender localpart instead of a user ID. Form the user ID at runtime instead, This gives less room for error in AS config files since they cannot specify the domain of another HS. --- synapse/storage/appservice.py | 11 +++++++++-- tests/storage/test_appservice.py | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) (limited to 'tests/storage') diff --git a/synapse/storage/appservice.py b/synapse/storage/appservice.py index a8780eca1e..557e377ca5 100644 --- a/synapse/storage/appservice.py +++ b/synapse/storage/appservice.py @@ -21,6 +21,7 @@ from twisted.internet import defer from synapse.api.constants import Membership from synapse.appservice import ApplicationService, AppServiceTransaction from synapse.storage.roommember import RoomsForUser +from synapse.types import UserID from ._base import SQLBaseStore @@ -31,6 +32,7 @@ class ApplicationServiceStore(SQLBaseStore): def __init__(self, hs): super(ApplicationServiceStore, self).__init__(hs) + self.hostname = hs.hostname self.services_cache = [] self._populate_appservice_cache( hs.config.app_service_config_files @@ -200,11 +202,16 @@ class ApplicationServiceStore(SQLBaseStore): return service_list def _load_appservice(self, as_info): - required_string_fields = ["url", "as_token", "hs_token", "sender"] + required_string_fields = [ + "url", "as_token", "hs_token", "sender_localpart" + ] for field in required_string_fields: if not isinstance(as_info.get(field), basestring): raise KeyError("Required string field: '%s'", field) + user = UserID(as_info["sender_localpart"], self.hostname) + user_id = user.to_string() + # namespace checks if not isinstance(as_info.get("namespaces"), dict): raise KeyError("Requires 'namespaces' object.") @@ -231,7 +238,7 @@ class ApplicationServiceStore(SQLBaseStore): url=as_info["url"], namespaces=as_info["namespaces"], hs_token=as_info["hs_token"], - sender=as_info["sender"], + sender=user_id, id=as_info["as_token"] # the token is the only unique thing here ) diff --git a/tests/storage/test_appservice.py b/tests/storage/test_appservice.py index 58551e40b9..675959c56c 100644 --- a/tests/storage/test_appservice.py +++ b/tests/storage/test_appservice.py @@ -60,7 +60,7 @@ class ApplicationServiceStoreTestCase(unittest.TestCase): def _add_appservice(self, as_token, url, hs_token, sender): as_yaml = 
dict(url=url, as_token=as_token, hs_token=hs_token, - sender=sender, namespaces={}) + sender_localpart=sender, namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) @@ -138,7 +138,7 @@ class ApplicationServiceTransactionStoreTestCase(unittest.TestCase): def _add_service(self, url, as_token): as_yaml = dict(url=url, as_token=as_token, hs_token="something", - sender="a_sender", namespaces={}) + sender_localpart="a_sender", namespaces={}) # use the token as the filename with open(as_token, 'w') as outfile: outfile.write(yaml.dump(as_yaml)) -- cgit 1.4.1
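
Usage note on the last three commits, which replace database-backed appservice registration with YAML config files read at startup: the store now expects one file per application service, listed in the homeserver's app_service_config_files, containing the required string fields url, as_token, hs_token and sender_localpart plus a namespaces object whose entries each carry a regex and an exclusive flag (see _load_appservice above). The snippet below is a minimal, illustrative sketch of writing such a file in the same style as the test helper _add_appservice; the field names come from _load_appservice and the tests, while the concrete URL, tokens, regex and filename are invented for the example and are not part of the patches.

    # Sketch only: writes a config file of the shape _load_appservice() accepts.
    # All concrete values below are hypothetical.
    import yaml

    as_config = {
        "url": "https://my-bridge.example.com",   # hypothetical AS base URL
        "as_token": "some_as_token",              # token the AS presents to the HS
        "hs_token": "some_hs_token",              # token the HS presents to the AS
        "sender_localpart": "my_bridge",          # localpart only; HS adds its own domain
        "namespaces": {                           # each entry needs 'regex' and 'exclusive'
            "users": [{"regex": "@bridge_.*", "exclusive": True}],
            "aliases": [],
            "rooms": [],
        },
    }

    with open("my_bridge.yaml", "w") as outfile:
        outfile.write(yaml.dump(as_config))

As the final commit's message notes, only the sender localpart is configured and the full user ID is formed at runtime from the homeserver's own domain, which leaves less room for error in AS config files since they cannot name another homeserver's domain.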