diff --git a/tests/crypto/test_keyring.py b/tests/crypto/test_keyring.py
index 30fcc4c1bf..946482b7e7 100644
--- a/tests/crypto/test_keyring.py
+++ b/tests/crypto/test_keyring.py
@@ -16,6 +16,7 @@ import time
from mock import Mock
+import attr
import canonicaljson
import signedjson.key
import signedjson.sign
@@ -68,6 +69,11 @@ class MockPerspectiveServer:
signedjson.sign.sign_json(res, self.server_name, self.key)
+@attr.s(slots=True)
+class FakeRequest:
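+    """Stand-in request object carrying only the ``id`` the tests read."""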
+ id = attr.ib()
+
+
@logcontext_clean
class KeyringTestCase(unittest.HomeserverTestCase):
def check_context(self, val, expected):
@@ -89,7 +95,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
first_lookup_deferred = Deferred()
async def first_lookup_fetch(keys_to_fetch):
- self.assertEquals(current_context().request, "context_11")
+ self.assertEquals(current_context().request.id, "context_11")
self.assertEqual(keys_to_fetch, {"server10": {get_key_id(key1): 0}})
await make_deferred_yieldable(first_lookup_deferred)
@@ -102,9 +108,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
mock_fetcher.get_keys.side_effect = first_lookup_fetch
async def first_lookup():
- with LoggingContext("context_11") as context_11:
- context_11.request = "context_11"
-
+ with LoggingContext("context_11", request=FakeRequest("context_11")):
res_deferreds = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test10"), ("server11", {}, 0, "test11")]
)
@@ -130,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
# should block rather than start a second call
async def second_lookup_fetch(keys_to_fetch):
- self.assertEquals(current_context().request, "context_12")
+ self.assertEquals(current_context().request.id, "context_12")
return {
"server10": {
get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)
@@ -142,9 +146,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
second_lookup_state = [0]
async def second_lookup():
- with LoggingContext("context_12") as context_12:
- context_12.request = "context_12"
-
+ with LoggingContext("context_12", request=FakeRequest("context_12")):
res_deferreds_2 = kr.verify_json_objects_for_server(
[("server10", json1, 0, "test")]
)
@@ -589,10 +591,7 @@ def get_key_id(key):
@defer.inlineCallbacks
def run_in_context(f, *args, **kwargs):
- with LoggingContext("testctx") as ctx:
- # we set the "request" prop to make it easier to follow what's going on in the
- # logs.
- ctx.request = "testctx"
+ with LoggingContext("testctx"):
rv = yield f(*args, **kwargs)
return rv
diff --git a/tests/logging/test_terse_json.py b/tests/logging/test_terse_json.py
index 48a74e2eee..bfe0d11c93 100644
--- a/tests/logging/test_terse_json.py
+++ b/tests/logging/test_terse_json.py
@@ -12,15 +12,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
import json
import logging
-from io import StringIO
+from io import BytesIO, StringIO
+
+from mock import Mock, patch
+
+from twisted.web.server import Request
+from synapse.http.site import SynapseRequest
from synapse.logging._terse_json import JsonFormatter, TerseJsonFormatter
from synapse.logging.context import LoggingContext, LoggingContextFilter
from tests.logging import LoggerCleanupMixin
+from tests.server import FakeChannel
from tests.unittest import TestCase
@@ -120,7 +125,7 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
handler.addFilter(LoggingContextFilter())
logger = self.get_logger(handler)
- with LoggingContext(request="test"):
+ with LoggingContext("name"):
logger.info("Hello there, %s!", "wally")
log = self.get_log_line()
@@ -134,4 +139,61 @@ class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
]
self.assertCountEqual(log.keys(), expected_log_keys)
self.assertEqual(log["log"], "Hello there, wally!")
- self.assertEqual(log["request"], "test")
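+        # Without a request, "request" falls back to the context's string form, "name@...".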
+ self.assertTrue(log["request"].startswith("name@"))
+
+ def test_with_request_context(self):
+ """
+        Information from the logging context request should be added to the JSON log output.
+ """
+ handler = logging.StreamHandler(self.output)
+ handler.setFormatter(JsonFormatter())
+ handler.addFilter(LoggingContextFilter())
+ logger = self.get_logger(handler)
+
+ # A full request isn't needed here.
+ site = Mock(spec=["site_tag", "server_version_string", "getResourceFor"])
+ site.site_tag = "test-site"
+ site.server_version_string = "Server v1"
+ request = SynapseRequest(FakeChannel(site, None))
+ # Call requestReceived to finish instantiating the object.
+ request.content = BytesIO()
+ # Partially skip some of the internal processing of SynapseRequest.
+ request._started_processing = Mock()
+ request.request_metrics = Mock(spec=["name"])
+ with patch.object(Request, "render"):
+ request.requestReceived(b"POST", b"/_matrix/client/versions", b"1.1")
+
+ # Also set the requester to ensure the processing works.
+ request.requester = "@foo:test"
+
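+        # Log under the request's logcontext so the filter picks up the request details.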
+ with LoggingContext(parent_context=request.logcontext):
+ logger.info("Hello there, %s!", "wally")
+
+ log = self.get_log_line()
+
+ # The terse logger includes additional request information, if possible.
+ expected_log_keys = [
+ "log",
+ "level",
+ "namespace",
+ "request",
+ "ip_address",
+ "site_tag",
+ "requester",
+ "authenticated_entity",
+ "method",
+ "url",
+ "protocol",
+ "user_agent",
+ ]
+ self.assertCountEqual(log.keys(), expected_log_keys)
+ self.assertEqual(log["log"], "Hello there, wally!")
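+        # The request ID takes the form <method>-<request number>.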
+ self.assertTrue(log["request"].startswith("POST-"))
+ self.assertEqual(log["ip_address"], "127.0.0.1")
+ self.assertEqual(log["site_tag"], "test-site")
+ self.assertEqual(log["requester"], "@foo:test")
+ self.assertEqual(log["authenticated_entity"], "@foo:test")
+ self.assertEqual(log["method"], "POST")
+ self.assertEqual(log["url"], "/_matrix/client/versions")
+ self.assertEqual(log["protocol"], "1.1")
+ self.assertEqual(log["user_agent"], "")
diff --git a/tests/unittest.py b/tests/unittest.py
index 58a4daa1ec..57b6a395c7 100644
--- a/tests/unittest.py
+++ b/tests/unittest.py
@@ -471,7 +471,7 @@ class HomeserverTestCase(TestCase):
kwargs["config"] = config_obj
async def run_bg_updates():
- with LoggingContext("run_bg_updates", request="run_bg_updates-1"):
+ with LoggingContext("run_bg_updates"):
while not await stor.db_pool.updates.has_completed_background_updates():
await stor.db_pool.updates.do_next_background_update(1)
diff --git a/tests/util/caches/test_descriptors.py b/tests/util/caches/test_descriptors.py
index afb11b9caf..e434e21aee 100644
--- a/tests/util/caches/test_descriptors.py
+++ b/tests/util/caches/test_descriptors.py
@@ -661,14 +661,13 @@ class CachedListDescriptorTestCase(unittest.TestCase):
@descriptors.cachedList("fn", "args1")
async def list_fn(self, args1, arg2):
- assert current_context().request == "c1"
+ assert current_context().name == "c1"
# we want this to behave like an asynchronous function
await run_on_reactor()
- assert current_context().request == "c1"
+ assert current_context().name == "c1"
return self.mock(args1, arg2)
- with LoggingContext() as c1:
- c1.request = "c1"
+ with LoggingContext("c1") as c1:
obj = Cls()
obj.mock.return_value = {10: "fish", 20: "chips"}
d1 = obj.list_fn([10, 20], 2)
diff --git a/tests/util/test_logcontext.py b/tests/util/test_logcontext.py
index 58ee918f65..5d9c4665aa 100644
--- a/tests/util/test_logcontext.py
+++ b/tests/util/test_logcontext.py
@@ -17,11 +17,10 @@ from .. import unittest
class LoggingContextTestCase(unittest.TestCase):
def _check_test_key(self, value):
- self.assertEquals(current_context().request, value)
+ self.assertEquals(current_context().name, value)
def test_with_context(self):
- with LoggingContext() as context_one:
- context_one.request = "test"
+ with LoggingContext("test"):
self._check_test_key("test")
@defer.inlineCallbacks
@@ -30,15 +29,13 @@ class LoggingContextTestCase(unittest.TestCase):
@defer.inlineCallbacks
def competing_callback():
- with LoggingContext() as competing_context:
- competing_context.request = "competing"
+ with LoggingContext("competing"):
yield clock.sleep(0)
self._check_test_key("competing")
reactor.callLater(0, competing_callback)
- with LoggingContext() as context_one:
- context_one.request = "one"
+ with LoggingContext("one"):
yield clock.sleep(0)
self._check_test_key("one")
@@ -47,9 +44,7 @@ class LoggingContextTestCase(unittest.TestCase):
callback_completed = [False]
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
# fire off function, but don't wait on it.
d2 = run_in_background(function)
@@ -133,9 +128,7 @@ class LoggingContextTestCase(unittest.TestCase):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(blocking_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
@@ -149,9 +142,7 @@ class LoggingContextTestCase(unittest.TestCase):
def test_make_deferred_yieldable_with_chained_deferreds(self):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(_chained_deferred_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
@@ -166,9 +157,7 @@ class LoggingContextTestCase(unittest.TestCase):
"""Check that make_deferred_yieldable does the right thing when its
argument isn't actually a deferred"""
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable("bum")
self._check_test_key("one")
@@ -177,9 +166,9 @@ class LoggingContextTestCase(unittest.TestCase):
self._check_test_key("one")
def test_nested_logging_context(self):
- with LoggingContext(request="foo"):
+ with LoggingContext("foo"):
nested_context = nested_logging_context(suffix="bar")
- self.assertEqual(nested_context.request, "foo-bar")
+ self.assertEqual(nested_context.name, "foo-bar")
@defer.inlineCallbacks
def test_make_deferred_yieldable_with_await(self):
@@ -193,9 +182,7 @@ class LoggingContextTestCase(unittest.TestCase):
sentinel_context = current_context()
- with LoggingContext() as context_one:
- context_one.request = "one"
-
+ with LoggingContext("one"):
d1 = make_deferred_yieldable(blocking_function())
# make sure that the context was reset by make_deferred_yieldable
self.assertIs(current_context(), sentinel_context)
|