author    Brendan Abolivier <babolivier@matrix.org>  2020-06-10 11:42:30 +0100
committer Brendan Abolivier <babolivier@matrix.org>  2020-06-10 11:42:30 +0100
commit    ec0a7b9034806d6b2ba086bae58f5c6b0fd14672 (patch)
tree      f2af547b1342795e10548f8fb7a9cfc93e03df37 /tests/util
parent    changelog (diff)
parent    1.15.0rc1 (diff)
download  synapse-ec0a7b9034806d6b2ba086bae58f5c6b0fd14672.tar.xz
Merge branch 'develop' into babolivier/mark_unread
Diffstat (limited to 'tests/util')
-rw-r--r--  tests/util/caches/test_descriptors.py   | 30
-rw-r--r--  tests/util/test_async_utils.py          | 15
-rw-r--r--  tests/util/test_expiring_cache.py       |  2
-rw-r--r--  tests/util/test_itertools.py            | 47
-rw-r--r--  tests/util/test_linearizer.py           | 38
-rw-r--r--  tests/util/test_logcontext.py           | 42
-rw-r--r--  tests/util/test_lrucache.py             |  6
-rw-r--r--  tests/util/test_snapshot_cache.py       | 63
-rw-r--r--  tests/util/test_stream_change_cache.py  | 92
-rw-r--r--  tests/util/test_stringutils.py          | 51
10 files changed, 263 insertions, 123 deletions
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py
index 5713870f48..4d2b9e0d64 100644
--- a/tests/util/caches/test_descriptors.py
+++ b/tests/util/caches/test_descriptors.py
@@ -22,8 +22,10 @@ from twisted.internet import defer, reactor
 
 from synapse.api.errors import SynapseError
 from synapse.logging.context import (
+    SENTINEL_CONTEXT,
     LoggingContext,
     PreserveLoggingContext,
+    current_context,
     make_deferred_yieldable,
 )
 from synapse.util.caches import descriptors
@@ -194,7 +196,7 @@ class DescriptorTestCase(unittest.TestCase):
             with LoggingContext() as c1:
                 c1.name = "c1"
                 r = yield obj.fn(1)
-                self.assertEqual(LoggingContext.current_context(), c1)
+                self.assertEqual(current_context(), c1)
             return r
 
         def check_result(r):
@@ -204,12 +206,12 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
+        self.assertEqual(current_context(), SENTINEL_CONTEXT)
         d1.addCallback(check_result)
 
         # and another
         d2 = do_lookup()
-        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
+        self.assertEqual(current_context(), SENTINEL_CONTEXT)
         d2.addCallback(check_result)
 
         # let the lookup complete
@@ -239,14 +241,14 @@ class DescriptorTestCase(unittest.TestCase):
                 try:
                     d = obj.fn(1)
                     self.assertEqual(
-                        LoggingContext.current_context(), LoggingContext.sentinel
+                        current_context(), SENTINEL_CONTEXT,
                     )
                     yield d
                     self.fail("No exception thrown")
                 except SynapseError:
                     pass
 
-                self.assertEqual(LoggingContext.current_context(), c1)
+                self.assertEqual(current_context(), c1)
 
             # the cache should now be empty
             self.assertEqual(len(obj.fn.cache.cache), 0)
@@ -255,7 +257,7 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
+        self.assertEqual(current_context(), SENTINEL_CONTEXT)
 
         return d1
 
@@ -310,14 +312,14 @@ class DescriptorTestCase(unittest.TestCase):
 
         obj.mock.return_value = ["spam", "eggs"]
         r = obj.fn(1, 2)
-        self.assertEqual(r, ["spam", "eggs"])
+        self.assertEqual(r.result, ["spam", "eggs"])
         obj.mock.assert_called_once_with(1, 2)
         obj.mock.reset_mock()
 
         # a call with different params should call the mock again
         obj.mock.return_value = ["chips"]
         r = obj.fn(1, 3)
-        self.assertEqual(r, ["chips"])
+        self.assertEqual(r.result, ["chips"])
         obj.mock.assert_called_once_with(1, 3)
         obj.mock.reset_mock()
 
@@ -325,9 +327,9 @@ class DescriptorTestCase(unittest.TestCase):
         self.assertEqual(len(obj.fn.cache.cache), 3)
 
         r = obj.fn(1, 2)
-        self.assertEqual(r, ["spam", "eggs"])
+        self.assertEqual(r.result, ["spam", "eggs"])
         r = obj.fn(1, 3)
-        self.assertEqual(r, ["chips"])
+        self.assertEqual(r.result, ["chips"])
         obj.mock.assert_not_called()
 
     def test_cache_iterable_with_sync_exception(self):
@@ -366,10 +368,10 @@ class CachedListDescriptorTestCase(unittest.TestCase):
 
             @descriptors.cachedList("fn", "args1", inlineCallbacks=True)
             def list_fn(self, args1, arg2):
-                assert LoggingContext.current_context().request == "c1"
+                assert current_context().request == "c1"
                 # we want this to behave like an asynchronous function
                 yield run_on_reactor()
-                assert LoggingContext.current_context().request == "c1"
+                assert current_context().request == "c1"
                 return self.mock(args1, arg2)
 
         with LoggingContext() as c1:
@@ -377,9 +379,9 @@ class CachedListDescriptorTestCase(unittest.TestCase):
             obj = Cls()
             obj.mock.return_value = {10: "fish", 20: "chips"}
             d1 = obj.list_fn([10, 20], 2)
-            self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
+            self.assertEqual(current_context(), SENTINEL_CONTEXT)
             r = yield d1
-            self.assertEqual(LoggingContext.current_context(), c1)
+            self.assertEqual(current_context(), c1)
             obj.mock.assert_called_once_with([10, 20], 2)
             self.assertEqual(r, {10: "fish", 20: "chips"})
             obj.mock.reset_mock()
diff --git a/tests/util/test_async_utils.py b/tests/util/test_async_utils.py
index f60918069a..17fd86d02d 100644
--- a/tests/util/test_async_utils.py
+++ b/tests/util/test_async_utils.py
@@ -16,7 +16,12 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError, Deferred
 from twisted.internet.task import Clock
 
-from synapse.logging.context import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import (
+    SENTINEL_CONTEXT,
+    LoggingContext,
+    PreserveLoggingContext,
+    current_context,
+)
 from synapse.util.async_helpers import timeout_deferred
 
 from tests.unittest import TestCase
@@ -79,10 +84,10 @@ class TimeoutDeferredTest(TestCase):
             # the errbacks should be run in the test logcontext
             def errback(res, deferred_name):
                 self.assertIs(
-                    LoggingContext.current_context(),
+                    current_context(),
                     context_one,
                     "errback %s run in unexpected logcontext %s"
-                    % (deferred_name, LoggingContext.current_context()),
+                    % (deferred_name, current_context()),
                 )
                 return res
 
@@ -90,7 +95,7 @@ class TimeoutDeferredTest(TestCase):
             original_deferred.addErrback(errback, "orig")
             timing_out_d = timeout_deferred(original_deferred, 1.0, self.clock)
             self.assertNoResult(timing_out_d)
-            self.assertIs(LoggingContext.current_context(), LoggingContext.sentinel)
+            self.assertIs(current_context(), SENTINEL_CONTEXT)
             timing_out_d.addErrback(errback, "timingout")
 
             self.clock.pump((1.0,))
@@ -99,4 +104,4 @@ class TimeoutDeferredTest(TestCase):
                 blocking_was_cancelled[0], "non-completing deferred was not cancelled"
             )
             self.failureResultOf(timing_out_d, defer.TimeoutError)
-            self.assertIs(LoggingContext.current_context(), context_one)
+            self.assertIs(current_context(), context_one)
diff --git a/tests/util/test_expiring_cache.py b/tests/util/test_expiring_cache.py
index 50bc7702d2..49ffeebd0e 100644
--- a/tests/util/test_expiring_cache.py
+++ b/tests/util/test_expiring_cache.py
@@ -21,7 +21,7 @@ from tests.utils import MockClock
 from .. import unittest
 
 
-class ExpiringCacheTestCase(unittest.TestCase):
+class ExpiringCacheTestCase(unittest.HomeserverTestCase):
     def test_get_set(self):
         clock = MockClock()
         cache = ExpiringCache("test", clock, max_len=1)
diff --git a/tests/util/test_itertools.py b/tests/util/test_itertools.py
new file mode 100644
index 0000000000..0ab0a91483
--- /dev/null
+++ b/tests/util/test_itertools.py
@@ -0,0 +1,47 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from synapse.util.iterutils import chunk_seq
+
+from tests.unittest import TestCase
+
+
+class ChunkSeqTests(TestCase):
+    def test_short_seq(self):
+        parts = chunk_seq("123", 8)
+
+        self.assertEqual(
+            list(parts), ["123"],
+        )
+
+    def test_long_seq(self):
+        parts = chunk_seq("abcdefghijklmnop", 8)
+
+        self.assertEqual(
+            list(parts), ["abcdefgh", "ijklmnop"],
+        )
+
+    def test_uneven_parts(self):
+        parts = chunk_seq("abcdefghijklmnop", 5)
+
+        self.assertEqual(
+            list(parts), ["abcde", "fghij", "klmno", "p"],
+        )
+
+    def test_empty_input(self):
+        parts = chunk_seq([], 5)
+
+        self.assertEqual(
+            list(parts), [],
+        )
diff --git a/tests/util/test_linearizer.py b/tests/util/test_linearizer.py
index 0ec8ef90ce..ca3858b184 100644
--- a/tests/util/test_linearizer.py
+++ b/tests/util/test_linearizer.py
@@ -19,7 +19,7 @@ from six.moves import range
 from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
-from synapse.logging.context import LoggingContext
+from synapse.logging.context import LoggingContext, current_context
 from synapse.util import Clock
 from synapse.util.async_helpers import Linearizer
 
@@ -45,6 +45,38 @@ class LinearizerTestCase(unittest.TestCase):
         with (yield d2):
             pass
 
+    @defer.inlineCallbacks
+    def test_linearizer_is_queued(self):
+        linearizer = Linearizer()
+
+        key = object()
+
+        d1 = linearizer.queue(key)
+        cm1 = yield d1
+
+        # Since d1 gets called immediately, "is_queued" should return false.
+        self.assertFalse(linearizer.is_queued(key))
+
+        d2 = linearizer.queue(key)
+        self.assertFalse(d2.called)
+
+        # Now d2 is queued up behind successful completion of cm1
+        self.assertTrue(linearizer.is_queued(key))
+
+        with cm1:
+            self.assertFalse(d2.called)
+
+            # cm1 still not done, so d2 still queued.
+            self.assertTrue(linearizer.is_queued(key))
+
+        # And now d2 is called and nothing is in the queue again
+        self.assertFalse(linearizer.is_queued(key))
+
+        with (yield d2):
+            self.assertFalse(linearizer.is_queued(key))
+
+        self.assertFalse(linearizer.is_queued(key))
+
     def test_lots_of_queued_things(self):
         # we have one slow thing, and lots of fast things queued up behind it.
         # it should *not* explode the stack.
@@ -54,11 +86,11 @@ class LinearizerTestCase(unittest.TestCase):
         def func(i, sleep=False):
             with LoggingContext("func(%s)" % i) as lc:
                 with (yield linearizer.queue("")):
-                    self.assertEqual(LoggingContext.current_context(), lc)
+                    self.assertEqual(current_context(), lc)
                     if sleep:
                         yield Clock(reactor).sleep(0)
 
-                self.assertEqual(LoggingContext.current_context(), lc)
+                self.assertEqual(current_context(), lc)
 
         func(0, sleep=True)
         for i in range(1, 100):
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 8b8455c8b7..95301c013c 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -2,8 +2,10 @@ import twisted.python.failure
 from twisted.internet import defer, reactor
 
 from synapse.logging.context import (
+    SENTINEL_CONTEXT,
     LoggingContext,
     PreserveLoggingContext,
+    current_context,
     make_deferred_yieldable,
     nested_logging_context,
     run_in_background,
@@ -15,7 +17,7 @@ from .. import unittest
 
 class LoggingContextTestCase(unittest.TestCase):
     def _check_test_key(self, value):
-        self.assertEquals(LoggingContext.current_context().request, value)
+        self.assertEquals(current_context().request, value)
 
     def test_with_context(self):
         with LoggingContext() as context_one:
@@ -41,7 +43,7 @@ class LoggingContextTestCase(unittest.TestCase):
             self._check_test_key("one")
 
     def _test_run_in_background(self, function):
-        sentinel_context = LoggingContext.current_context()
+        sentinel_context = current_context()
 
         callback_completed = [False]
 
@@ -71,7 +73,7 @@ class LoggingContextTestCase(unittest.TestCase):
             # make sure that the context was reset before it got thrown back
             # into the reactor
             try:
-                self.assertIs(LoggingContext.current_context(), sentinel_context)
+                self.assertIs(current_context(), sentinel_context)
                 d2.callback(None)
             except BaseException:
                 d2.errback(twisted.python.failure.Failure())
@@ -108,7 +110,7 @@ class LoggingContextTestCase(unittest.TestCase):
         async def testfunc():
             self._check_test_key("one")
             d = Clock(reactor).sleep(0)
-            self.assertIs(LoggingContext.current_context(), LoggingContext.sentinel)
+            self.assertIs(current_context(), SENTINEL_CONTEXT)
             await d
             self._check_test_key("one")
 
@@ -129,14 +131,14 @@ class LoggingContextTestCase(unittest.TestCase):
             reactor.callLater(0, d.callback, None)
             return d
 
-        sentinel_context = LoggingContext.current_context()
+        sentinel_context = current_context()
 
         with LoggingContext() as context_one:
             context_one.request = "one"
 
             d1 = make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
-            self.assertIs(LoggingContext.current_context(), sentinel_context)
+            self.assertIs(current_context(), sentinel_context)
 
             yield d1
 
@@ -145,14 +147,14 @@ class LoggingContextTestCase(unittest.TestCase):
 
     @defer.inlineCallbacks
     def test_make_deferred_yieldable_with_chained_deferreds(self):
-        sentinel_context = LoggingContext.current_context()
+        sentinel_context = current_context()
 
         with LoggingContext() as context_one:
             context_one.request = "one"
 
             d1 = make_deferred_yieldable(_chained_deferred_function())
             # make sure that the context was reset by make_deferred_yieldable
-            self.assertIs(LoggingContext.current_context(), sentinel_context)
+            self.assertIs(current_context(), sentinel_context)
 
             yield d1
 
@@ -179,6 +181,30 @@ class LoggingContextTestCase(unittest.TestCase):
             nested_context = nested_logging_context(suffix="bar")
             self.assertEqual(nested_context.request, "foo-bar")
 
+    @defer.inlineCallbacks
+    def test_make_deferred_yieldable_with_await(self):
+        # an async function which returns an incomplete coroutine, but doesn't
+        # follow the synapse rules.
+
+        async def blocking_function():
+            d = defer.Deferred()
+            reactor.callLater(0, d.callback, None)
+            await d
+
+        sentinel_context = current_context()
+
+        with LoggingContext() as context_one:
+            context_one.request = "one"
+
+            d1 = make_deferred_yieldable(blocking_function())
+            # make sure that the context was reset by make_deferred_yieldable
+            self.assertIs(current_context(), sentinel_context)
+
+            yield d1
+
+            # now it should be restored
+            self._check_test_key("one")
+
 
 # a function which returns a deferred which has been "called", but
 # which had a function which returned another incomplete deferred on
diff --git a/tests/util/test_lrucache.py b/tests/util/test_lrucache.py
index 786947375d..0adb2174af 100644
--- a/tests/util/test_lrucache.py
+++ b/tests/util/test_lrucache.py
@@ -22,7 +22,7 @@ from synapse.util.caches.treecache import TreeCache
 from .. import unittest
 
 
-class LruCacheTestCase(unittest.TestCase):
+class LruCacheTestCase(unittest.HomeserverTestCase):
     def test_get_set(self):
         cache = LruCache(1)
         cache["key"] = "value"
@@ -84,7 +84,7 @@ class LruCacheTestCase(unittest.TestCase):
         self.assertEquals(len(cache), 0)
 
 
-class LruCacheCallbacksTestCase(unittest.TestCase):
+class LruCacheCallbacksTestCase(unittest.HomeserverTestCase):
     def test_get(self):
         m = Mock()
         cache = LruCache(1)
@@ -233,7 +233,7 @@ class LruCacheCallbacksTestCase(unittest.TestCase):
         self.assertEquals(m3.call_count, 1)
 
 
-class LruCacheSizedTestCase(unittest.TestCase):
+class LruCacheSizedTestCase(unittest.HomeserverTestCase):
     def test_evict(self):
         cache = LruCache(5, size_callback=len)
         cache["key1"] = [0]
diff --git a/tests/util/test_snapshot_cache.py b/tests/util/test_snapshot_cache.py
deleted file mode 100644
index 1a44f72425..0000000000
--- a/tests/util/test_snapshot_cache.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2015, 2016 OpenMarket Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from twisted.internet.defer import Deferred
-
-from synapse.util.caches.snapshot_cache import SnapshotCache
-
-from .. import unittest
-
-
-class SnapshotCacheTestCase(unittest.TestCase):
-    def setUp(self):
-        self.cache = SnapshotCache()
-        self.cache.DURATION_MS = 1
-
-    def test_get_set(self):
-        # Check that getting a missing key returns None
-        self.assertEquals(self.cache.get(0, "key"), None)
-
-        # Check that setting a key with a deferred returns
-        # a deferred that resolves when the initial deferred does
-        d = Deferred()
-        set_result = self.cache.set(0, "key", d)
-        self.assertIsNotNone(set_result)
-        self.assertFalse(set_result.called)
-
-        # Check that getting the key before the deferred has resolved
-        # returns a deferred that resolves when the initial deferred does.
-        get_result_at_10 = self.cache.get(10, "key")
-        self.assertIsNotNone(get_result_at_10)
-        self.assertFalse(get_result_at_10.called)
-
-        # Check that the returned deferreds resolve when the initial deferred
-        # does.
-        d.callback("v")
-        self.assertTrue(set_result.called)
-        self.assertTrue(get_result_at_10.called)
-
-        # Check that getting the key after the deferred has resolved
-        # before the cache expires returns a resolved deferred.
-        get_result_at_11 = self.cache.get(11, "key")
-        self.assertIsNotNone(get_result_at_11)
-        if isinstance(get_result_at_11, Deferred):
-            # The cache may return the actual result rather than a deferred
-            self.assertTrue(get_result_at_11.called)
-
-        # Check that getting the key after the deferred has resolved
-        # after the cache expires returns None
-        get_result_at_12 = self.cache.get(12, "key")
-        self.assertIsNone(get_result_at_12)
diff --git a/tests/util/test_stream_change_cache.py b/tests/util/test_stream_change_cache.py
index f2be63706b..13b753e367 100644
--- a/tests/util/test_stream_change_cache.py
+++ b/tests/util/test_stream_change_cache.py
@@ -1,11 +1,9 @@
-from mock import patch
-
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 
 from tests import unittest
 
 
-class StreamChangeCacheTests(unittest.TestCase):
+class StreamChangeCacheTests(unittest.HomeserverTestCase):
     """
     Tests for StreamChangeCache.
     """
@@ -28,26 +26,33 @@ class StreamChangeCacheTests(unittest.TestCase):
         cache.entity_has_changed("user@foo.com", 6)
         cache.entity_has_changed("bar@baz.net", 7)
 
+        # also test multiple things changing on the same stream ID
+        cache.entity_has_changed("user2@foo.com", 8)
+        cache.entity_has_changed("bar2@baz.net", 8)
+
         # If it's been changed after that stream position, return True
         self.assertTrue(cache.has_entity_changed("user@foo.com", 4))
         self.assertTrue(cache.has_entity_changed("bar@baz.net", 4))
+        self.assertTrue(cache.has_entity_changed("bar2@baz.net", 4))
+        self.assertTrue(cache.has_entity_changed("user2@foo.com", 4))
 
         # If it's been changed at that stream position, return False
         self.assertFalse(cache.has_entity_changed("user@foo.com", 6))
+        self.assertFalse(cache.has_entity_changed("user2@foo.com", 8))
 
         # If there's no changes after that stream position, return False
         self.assertFalse(cache.has_entity_changed("user@foo.com", 7))
+        self.assertFalse(cache.has_entity_changed("user2@foo.com", 9))
 
         # If the entity does not exist, return False.
-        self.assertFalse(cache.has_entity_changed("not@here.website", 7))
+        self.assertFalse(cache.has_entity_changed("not@here.website", 9))
 
         # If we request before the stream cache's earliest known position,
         # return True, whether it's a known entity or not.
         self.assertTrue(cache.has_entity_changed("user@foo.com", 0))
         self.assertTrue(cache.has_entity_changed("not@here.website", 0))
 
-    @patch("synapse.util.caches.CACHE_SIZE_FACTOR", 1.0)
-    def test_has_entity_changed_pops_off_start(self):
+    def test_entity_has_changed_pops_off_start(self):
         """
         StreamChangeCache.entity_has_changed will respect the max size and
         purge the oldest items upon reaching that max size.
@@ -64,11 +69,20 @@ class StreamChangeCacheTests(unittest.TestCase):
         # The oldest item has been popped off
         self.assertTrue("user@foo.com" not in cache._entity_to_key)
 
+        self.assertEqual(
+            cache.get_all_entities_changed(2), ["bar@baz.net", "user@elsewhere.org"],
+        )
+        self.assertIsNone(cache.get_all_entities_changed(1))
+
         # If we update an existing entity, it keeps the two existing entities
         cache.entity_has_changed("bar@baz.net", 5)
         self.assertEqual(
-            set(["bar@baz.net", "user@elsewhere.org"]), set(cache._entity_to_key)
+            {"bar@baz.net", "user@elsewhere.org"}, set(cache._entity_to_key)
         )
+        self.assertEqual(
+            cache.get_all_entities_changed(2), ["user@elsewhere.org", "bar@baz.net"],
+        )
+        self.assertIsNone(cache.get_all_entities_changed(1))
 
     def test_get_all_entities_changed(self):
         """
@@ -80,18 +94,52 @@ class StreamChangeCacheTests(unittest.TestCase):
 
         cache.entity_has_changed("user@foo.com", 2)
         cache.entity_has_changed("bar@baz.net", 3)
+        cache.entity_has_changed("anotheruser@foo.com", 3)
         cache.entity_has_changed("user@elsewhere.org", 4)
 
-        self.assertEqual(
-            cache.get_all_entities_changed(1),
-            ["user@foo.com", "bar@baz.net", "user@elsewhere.org"],
-        )
-        self.assertEqual(
-            cache.get_all_entities_changed(2), ["bar@baz.net", "user@elsewhere.org"]
-        )
+        r = cache.get_all_entities_changed(1)
+
+        # either of these are valid
+        ok1 = [
+            "user@foo.com",
+            "bar@baz.net",
+            "anotheruser@foo.com",
+            "user@elsewhere.org",
+        ]
+        ok2 = [
+            "user@foo.com",
+            "anotheruser@foo.com",
+            "bar@baz.net",
+            "user@elsewhere.org",
+        ]
+        self.assertTrue(r == ok1 or r == ok2)
+
+        r = cache.get_all_entities_changed(2)
+        self.assertTrue(r == ok1[1:] or r == ok2[1:])
+
         self.assertEqual(cache.get_all_entities_changed(3), ["user@elsewhere.org"])
         self.assertEqual(cache.get_all_entities_changed(0), None)
 
+        # ... later, things get more updates
+        cache.entity_has_changed("user@foo.com", 5)
+        cache.entity_has_changed("bar@baz.net", 5)
+        cache.entity_has_changed("anotheruser@foo.com", 6)
+
+        ok1 = [
+            "user@elsewhere.org",
+            "user@foo.com",
+            "bar@baz.net",
+            "anotheruser@foo.com",
+        ]
+        ok2 = [
+            "user@elsewhere.org",
+            "bar@baz.net",
+            "user@foo.com",
+            "anotheruser@foo.com",
+        ]
+        r = cache.get_all_entities_changed(3)
+        self.assertTrue(r == ok1 or r == ok2)
+
     def test_has_any_entity_changed(self):
         """
         StreamChangeCache.has_any_entity_changed will return True if any
@@ -137,7 +185,7 @@ class StreamChangeCacheTests(unittest.TestCase):
             cache.get_entities_changed(
                 ["user@foo.com", "bar@baz.net", "user@elsewhere.org"], stream_pos=2
             ),
-            set(["bar@baz.net", "user@elsewhere.org"]),
+            {"bar@baz.net", "user@elsewhere.org"},
         )
 
         # Query all the entries mid-way through the stream, but include one
@@ -153,7 +201,7 @@ class StreamChangeCacheTests(unittest.TestCase):
                 ],
                 stream_pos=2,
             ),
-            set(["bar@baz.net", "user@elsewhere.org"]),
+            {"bar@baz.net", "user@elsewhere.org"},
         )
 
         # Query all the entries, but before the first known point. We will get
@@ -168,21 +216,13 @@ class StreamChangeCacheTests(unittest.TestCase):
                 ],
                 stream_pos=0,
             ),
-            set(
-                [
-                    "user@foo.com",
-                    "bar@baz.net",
-                    "user@elsewhere.org",
-                    "not@here.website",
-                ]
-            ),
+            {"user@foo.com", "bar@baz.net", "user@elsewhere.org", "not@here.website"},
         )
 
         # Query a subset of the entries mid-way through the stream. We should
         # only get back the subset.
         self.assertEqual(
-            cache.get_entities_changed(["bar@baz.net"], stream_pos=2),
-            set(["bar@baz.net"]),
+            cache.get_entities_changed(["bar@baz.net"], stream_pos=2), {"bar@baz.net"},
         )
 
     def test_max_pos(self):
diff --git a/tests/util/test_stringutils.py b/tests/util/test_stringutils.py
new file mode 100644
index 0000000000..4f4da29a98
--- /dev/null
+++ b/tests/util/test_stringutils.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from synapse.api.errors import SynapseError
+from synapse.util.stringutils import assert_valid_client_secret
+
+from .. import unittest
+
+
+class StringUtilsTestCase(unittest.TestCase):
+    def test_client_secret_regex(self):
+        """Ensure that client_secret does not contain illegal characters"""
+        good = [
+            "abcde12345",
+            "ABCabc123",
+            "_--something==_",
+            "...--==-18913",
+            "8Dj2odd-e9asd.cd==_--ddas-secret-",
+            # We temporarily allow : characters: https://github.com/matrix-org/synapse/issues/6766
+            # To be removed in a future release
+            "SECRET:1234567890",
+        ]
+
+        bad = [
+            "--+-/secret",
+            "\\dx--dsa288",
+            "",
+            "AAS//",
+            "asdj**",
+            ">X><Z<!!-)))",
+            "a@b.com",
+        ]
+
+        for client_secret in good:
+            assert_valid_client_secret(client_secret)
+
+        for client_secret in bad:
+            with self.assertRaises(SynapseError):
+                assert_valid_client_secret(client_secret)