summary refs log tree commit diff
path: root/synmark
diff options
context:
space:
mode:
author: Patrick Cloke <clokep@users.noreply.github.com> 2020-10-29 07:27:37 -0400
committer: GitHub <noreply@github.com> 2020-10-29 07:27:37 -0400
commit: 00b24aa545091395f9a92d531836f6bf7b4460e0 (patch)
tree: 10d7333f2d1d9aaa0a6888c9ce3afb7d6feebf58 /synmark
parent: Don't require hiredis to run unit tests (#8680) (diff)
download: synapse-00b24aa545091395f9a92d531836f6bf7b4460e0.tar.xz
Support generating structured logs in addition to standard logs. (#8607)
This modifies the configuration of structured logging to be usable from
the standard Python logging configuration.

This also separates the formatting of logs from the transport allowing
JSON logs to files or standard logs to sockets.
Diffstat (limited to 'synmark')
-rw-r--r--synmark/__init__.py39
-rw-r--r--synmark/__main__.py6
-rw-r--r--synmark/suites/logging.py60
3 files changed, 39 insertions, 66 deletions
diff --git a/synmark/__init__.py b/synmark/__init__.py
index 09bc7e7927..3d4ec3e184 100644
--- a/synmark/__init__.py
+++ b/synmark/__init__.py
@@ -21,45 +21,6 @@ except ImportError:
     from twisted.internet.pollreactor import PollReactor as Reactor
 from twisted.internet.main import installReactor
 
-from synapse.config.homeserver import HomeServerConfig
-from synapse.util import Clock
-
-from tests.utils import default_config, setup_test_homeserver
-
-
-async def make_homeserver(reactor, config=None):
-    """
-    Make a Homeserver suitable for running benchmarks against.
-
-    Args:
-        reactor: A Twisted reactor to run under.
-        config: A HomeServerConfig to use, or None.
-    """
-    cleanup_tasks = []
-    clock = Clock(reactor)
-
-    if not config:
-        config = default_config("test")
-
-    config_obj = HomeServerConfig()
-    config_obj.parse_config_dict(config, "", "")
-
-    hs = setup_test_homeserver(
-        cleanup_tasks.append, config=config_obj, reactor=reactor, clock=clock
-    )
-    stor = hs.get_datastore()
-
-    # Run the database background updates.
-    if hasattr(stor.db_pool.updates, "do_next_background_update"):
-        while not await stor.db_pool.updates.has_completed_background_updates():
-            await stor.db_pool.updates.do_next_background_update(1)
-
-    def cleanup():
-        for i in cleanup_tasks:
-            i()
-
-    return hs, clock.sleep, cleanup
-
 
 def make_reactor():
     """
diff --git a/synmark/__main__.py b/synmark/__main__.py
index 17df9ddeb7..de13c1a909 100644
--- a/synmark/__main__.py
+++ b/synmark/__main__.py
@@ -12,20 +12,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import sys
 from argparse import REMAINDER
 from contextlib import redirect_stderr
 from io import StringIO
 
 import pyperf
-from synmark import make_reactor
-from synmark.suites import SUITES
 
 from twisted.internet.defer import Deferred, ensureDeferred
 from twisted.logger import globalLogBeginner, textFileLogObserver
 from twisted.python.failure import Failure
 
+from synmark import make_reactor
+from synmark.suites import SUITES
+
 from tests.utils import setupdb
 
 
diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py
index d8e4c7d58f..c9d9cf761e 100644
--- a/synmark/suites/logging.py
+++ b/synmark/suites/logging.py
@@ -13,20 +13,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import warnings
 from io import StringIO
 
 from mock import Mock
 
 from pyperf import perf_counter
-from synmark import make_homeserver
 
 from twisted.internet.defer import Deferred
 from twisted.internet.protocol import ServerFactory
-from twisted.logger import LogBeginner, Logger, LogPublisher
+from twisted.logger import LogBeginner, LogPublisher
 from twisted.protocols.basic import LineOnlyReceiver
 
-from synapse.logging._structured import setup_structured_logging
+from synapse.config.logger import _setup_stdlib_logging
+from synapse.logging import RemoteHandler
+from synapse.util import Clock
 
 
 class LineCounter(LineOnlyReceiver):
@@ -62,7 +64,15 @@ async def main(reactor, loops):
     logger_factory.on_done = Deferred()
     port = reactor.listenTCP(0, logger_factory, interface="127.0.0.1")
 
-    hs, wait, cleanup = await make_homeserver(reactor)
+    # A fake homeserver config.
+    class Config:
+        server_name = "synmark-" + str(loops)
+        no_redirect_stdio = True
+
+    hs_config = Config()
+
+    # To be able to sleep.
+    clock = Clock(reactor)
 
     errors = StringIO()
     publisher = LogPublisher()
@@ -72,47 +82,49 @@ async def main(reactor, loops):
     )
 
     log_config = {
-        "loggers": {"synapse": {"level": "DEBUG"}},
-        "drains": {
+        "version": 1,
+        "loggers": {"synapse": {"level": "DEBUG", "handlers": ["tersejson"]}},
+        "formatters": {"tersejson": {"class": "synapse.logging.TerseJsonFormatter"}},
+        "handlers": {
             "tersejson": {
-                "type": "network_json_terse",
+                "class": "synapse.logging.RemoteHandler",
                 "host": "127.0.0.1",
                 "port": port.getHost().port,
                 "maximum_buffer": 100,
+                "_reactor": reactor,
             }
         },
     }
 
-    logger = Logger(namespace="synapse.logging.test_terse_json", observer=publisher)
-    logging_system = setup_structured_logging(
-        hs, hs.config, log_config, logBeginner=beginner, redirect_stdlib_logging=False
+    logger = logging.getLogger("synapse.logging.test_terse_json")
+    _setup_stdlib_logging(
+        hs_config, log_config, logBeginner=beginner,
     )
 
     # Wait for it to connect...
-    await logging_system._observers[0]._service.whenConnected()
+    for handler in logging.getLogger("synapse").handlers:
+        if isinstance(handler, RemoteHandler):
+            break
+    else:
+        raise RuntimeError("Improperly configured: no RemoteHandler found.")
+
+    await handler._service.whenConnected()
 
     start = perf_counter()
 
     # Send a bunch of useful messages
     for i in range(0, loops):
-        logger.info("test message %s" % (i,))
-
-        if (
-            len(logging_system._observers[0]._buffer)
-            == logging_system._observers[0].maximum_buffer
-        ):
-            while (
-                len(logging_system._observers[0]._buffer)
-                > logging_system._observers[0].maximum_buffer / 2
-            ):
-                await wait(0.01)
+        logger.info("test message %s", i)
+
+        if len(handler._buffer) == handler.maximum_buffer:
+            while len(handler._buffer) > handler.maximum_buffer / 2:
+                await clock.sleep(0.01)
 
     await logger_factory.on_done
 
     end = perf_counter() - start
 
-    logging_system.stop()
+    handler.close()
     port.stopListening()
-    cleanup()
 
     return end