From 44a498418c62a835aae9bff8550f844888b3ab84 Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Thu, 11 Jan 2018 22:40:51 +0000
Subject: Optimise LoggingContext creation and copying

It turns out that the only thing we use the __dict__ of LoggingContext for
is `request`, and given we create lots of LoggingContexts and then copy them
every time we do a db transaction or log line, using the __dict__ seems a
bit redundant. Let's try to optimise things by making the request attribute
explicit.
---
 tests/crypto/test_keyring.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

(limited to 'tests/crypto/test_keyring.py')

diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index 570312da84..c899fecf5d 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -68,7 +68,7 @@ class KeyringTestCase(unittest.TestCase):

     def check_context(self, _, expected):
         self.assertEquals(
-            getattr(LoggingContext.current_context(), "test_key", None),
+            getattr(LoggingContext.current_context(), "request", None),
             expected
         )

@@ -82,7 +82,7 @@ class KeyringTestCase(unittest.TestCase):
         lookup_2_deferred = defer.Deferred()

         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             wait_1_deferred = kr.wait_for_previous_lookups(
                 ["server1"],
@@ -96,7 +96,7 @@ class KeyringTestCase(unittest.TestCase):
             wait_1_deferred.addBoth(self.check_context, "one")

         with LoggingContext("two") as context_two:
-            context_two.test_key = "two"
+            context_two.request = "two"

             # set off another wait. It should block because the first lookup
             # hasn't yet completed.
@@ -137,7 +137,7 @@ class KeyringTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
             self.assertEquals(
-                LoggingContext.current_context().test_key, "11",
+                LoggingContext.current_context().request, "11",
             )
             with logcontext.PreserveLoggingContext():
                 yield persp_deferred
@@ -145,7 +145,7 @@ class KeyringTestCase(unittest.TestCase):
         self.http_client.post_json.side_effect = get_perspectives

         with LoggingContext("11") as context_11:
-            context_11.test_key = "11"
+            context_11.request = "11"

             # start off a first set of lookups
             res_deferreds = kr.verify_json_objects_for_server(
@@ -173,7 +173,7 @@ class KeyringTestCase(unittest.TestCase):
             self.assertIs(LoggingContext.current_context(), context_11)

             context_12 = LoggingContext("12")
-            context_12.test_key = "12"
+            context_12.request = "12"
             with logcontext.PreserveLoggingContext(context_12):
                 # a second request for a server with outstanding requests
                 # should block rather than start a second call
@@ -211,7 +211,7 @@ class KeyringTestCase(unittest.TestCase):
         sentinel_context = LoggingContext.current_context()

         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"

             defer = kr.verify_json_for_server("server9", {})
             try:
--
cgit 1.4.1


From d7eacc4f87ca8353f612171fa7e070c14e3b35ee Mon Sep 17 00:00:00 2001
From: Richard van der Hoff
Date: Thu, 25 Jan 2018 23:14:24 +0000
Subject: Create dbpool as normal in tests

... instead of creating our own special SQLiteMemoryDbPool, whose purpose was
a bit of a mystery.

For some reason this makes one of the tests run slightly slower, so bump the
sleep(). Sorry.
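The interesting part of the patch below is that the standard database config now carries a `cp_openfun` hook, so every new sqlite connection gets the engine's custom functions registered (the job the removed SQLiteMemoryDbPool used to do). As a rough, self-contained sketch of that mechanism using Twisted's adbapi pool directly -- the `rank` implementation here is invented purely for illustration; Synapse's real hook is `db_engine.on_new_connection`:

# Sketch only: shows how cp_openfun runs once per freshly-opened connection,
# which is what `cp_openfun=db_engine.on_new_connection` in the diff relies on.
from twisted.enterprise import adbapi
from twisted.internet import defer, reactor


def on_new_connection(conn):
    # register the custom sqlite functions that application queries expect;
    # the body of "rank" here is made up for the example
    conn.create_function("rank", 1, lambda x: x)


pool = adbapi.ConnectionPool(
    "sqlite3", ":memory:",
    cp_min=1, cp_max=1,            # single connection, as in the test config
    cp_openfun=on_new_connection,  # called for each new connection
)


@defer.inlineCallbacks
def demo():
    rows = yield pool.runQuery("SELECT rank(42)")
    print(rows)                    # [(42,)]
    reactor.stop()


reactor.callWhenRunning(demo)
reactor.run()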
---
 tests/crypto/test_keyring.py |  4 +--
 tests/utils.py               | 60 +++++++++++++-------------------------------
 2 files changed, 19 insertions(+), 45 deletions(-)

(limited to 'tests/crypto/test_keyring.py')

diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index c899fecf5d..d4ec02ffc2 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -167,7 +167,7 @@ class KeyringTestCase(unittest.TestCase):

             # wait a tick for it to send the request to the perspectives server
             # (it first tries the datastore)
-            yield async.sleep(0.005)
+            yield async.sleep(1)  # XXX find out why this takes so long!

             self.http_client.post_json.assert_called_once()
             self.assertIs(LoggingContext.current_context(), context_11)
@@ -183,7 +183,7 @@ class KeyringTestCase(unittest.TestCase):
                 res_deferreds_2 = kr.verify_json_objects_for_server(
                     [("server10", json1)],
                 )
-                yield async.sleep(0.005)
+                yield async.sleep(1)
                 self.http_client.post_json.assert_not_called()
                 res_deferreds_2[0].addBoth(self.check_context, None)

diff --git a/tests/utils.py b/tests/utils.py
index ab5e2341c9..50de4199be 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -19,7 +19,6 @@ import urllib
 import urlparse

 from mock import Mock, patch
-from twisted.enterprise.adbapi import ConnectionPool
 from twisted.internet import defer, reactor

 from synapse.api.errors import CodeMessageException, cs_error
@@ -60,30 +59,37 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         config.update_user_directory = False

     config.use_frozen_dicts = True
-    config.database_config = {"name": "sqlite3"}
     config.ldap_enabled = False

     if "clock" not in kargs:
         kargs["clock"] = MockClock()

+    config.database_config = {
+        "name": "sqlite3",
+        "args": {
+            "database": ":memory:",
+            "cp_min": 1,
+            "cp_max": 1,
+        },
+    }
     db_engine = create_engine(config.database_config)
+
+    # we need to configure the connection pool to run the on_new_connection
+    # function, so that we can test code that uses custom sqlite functions
+    # (like rank).
+    config.database_config["args"]["cp_openfun"] = db_engine.on_new_connection
+
     if datastore is None:
-        # we need to configure the connection pool to run the on_new_connection
-        # function, so that we can test code that uses custom sqlite functions
-        # (like rank).
-        db_pool = SQLiteMemoryDbPool(
-            cp_openfun=db_engine.on_new_connection,
-        )
-        yield db_pool.prepare()
         hs = HomeServer(
-            name, db_pool=db_pool, config=config,
+            name, config=config,
+            db_config=config.database_config,
             version_string="Synapse/tests",
             database_engine=db_engine,
-            get_db_conn=db_pool.get_db_conn,
             room_list_handler=object(),
             tls_server_context_factory=Mock(),
             **kargs
         )
+        yield prepare_database(hs.get_db_conn(), db_engine, config)
         hs.setup()
     else:
         hs = HomeServer(
@@ -308,38 +314,6 @@ class MockClock(object):
         return d


-class SQLiteMemoryDbPool(ConnectionPool, object):
-    def __init__(self, **kwargs):
-        connkw = {
-            "cp_min": 1,
-            "cp_max": 1,
-        }
-        connkw.update(kwargs)
-
-        super(SQLiteMemoryDbPool, self).__init__(
-            "sqlite3", ":memory:", **connkw
-        )
-
-        self.config = Mock()
-        self.config.password_providers = []
-        self.config.database_config = {"name": "sqlite3"}
-
-    def prepare(self):
-        engine = self.create_engine()
-        return self.runWithConnection(
-            lambda conn: prepare_database(conn, engine, self.config)
-        )
-
-    def get_db_conn(self):
-        conn = self.connect()
-        engine = self.create_engine()
-        prepare_database(conn, engine, self.config)
-        return conn
-
-    def create_engine(self):
-        return create_engine(self.config.database_config)
-
-
 def _format_call(args, kwargs):
     return ", ".join(
         ["%r" % (a) for a in args] +
--
cgit 1.4.1
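Coming back to the first patch in this log: the reason an explicit `request` attribute is cheaper than stashing ad-hoc keys (like the old `test_key`) in `__dict__` can be illustrated with a `__slots__`-style class. This is only a sketch of the idea, not Synapse's actual LoggingContext:

# Illustrative only -- not Synapse's real LoggingContext. The point is that
# declaring `request` as a real attribute (here via __slots__) avoids paying
# for a per-instance __dict__ every time a context is created or copied.
class LoggingContext(object):
    __slots__ = ["name", "request", "previous_context"]

    def __init__(self, name=None):
        self.name = name
        self.request = None           # the one field that actually gets copied around
        self.previous_context = None

    def copy_to(self, other):
        # copy the interesting attribute onto another context / log record
        other.request = self.request

With a fixed attribute set like this, arbitrary attributes can no longer be bolted onto a context, which is presumably why the tests above switch from the ad-hoc `test_key` attribute to the now-explicit `request` field.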