Support generating structured logs in addition to standard logs. (#8607)

This modifies the configuration of structured logging so that it can be set up
via the standard Python logging configuration.

This also separates the formatting of logs from the transport, allowing JSON
logs to be written to files or standard logs to be sent to sockets.
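As a rough illustration of what that separation enables, below is a minimal sketch of a standard Python dictConfig that sends JSON to a file and plain text to a socket. The synapse.logging.TerseJsonFormatter and synapse.logging.RemoteHandler class paths and the host/port/maximum_buffer options are taken from the diff below; the handler names, file name, port number, and format string are illustrative values for this example, and the snippet assumes Synapse is installed and a Twisted reactor will be running for the remote handler.

import logging
import logging.config

log_config = {
    "version": 1,
    "formatters": {
        # JSON rendering, independent of where the log lines go.
        "json": {"class": "synapse.logging.TerseJsonFormatter"},
        # Ordinary text rendering.
        "plain": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"},
    },
    "handlers": {
        # JSON logs written to a file (file name is illustrative).
        "json_file": {
            "class": "logging.FileHandler",
            "formatter": "json",
            "filename": "homeserver.json.log",
        },
        # Standard logs sent to a TCP socket (host/port are illustrative).
        "plain_remote": {
            "class": "synapse.logging.RemoteHandler",
            "formatter": "plain",
            "host": "127.0.0.1",
            "port": 9020,
            "maximum_buffer": 100,
        },
    },
    "loggers": {
        "synapse": {"level": "INFO", "handlers": ["json_file", "plain_remote"]}
    },
}

logging.config.dictConfig(log_config)
logging.getLogger("synapse").info("structured logging configured")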
Patrick Cloke authored 2020-10-29 07:27:37 -04:00 (committed by GitHub)
commit 00b24aa545, parent 9a7e0d2ea6
19 changed files with 715 additions and 1020 deletions

synmark/__init__.py

@@ -21,45 +21,6 @@ except ImportError:
     from twisted.internet.pollreactor import PollReactor as Reactor
 from twisted.internet.main import installReactor
-from synapse.config.homeserver import HomeServerConfig
-from synapse.util import Clock
-from tests.utils import default_config, setup_test_homeserver
-async def make_homeserver(reactor, config=None):
-    """
-    Make a Homeserver suitable for running benchmarks against.
-    Args:
-        reactor: A Twisted reactor to run under.
-        config: A HomeServerConfig to use, or None.
-    """
-    cleanup_tasks = []
-    clock = Clock(reactor)
-    if not config:
-        config = default_config("test")
-    config_obj = HomeServerConfig()
-    config_obj.parse_config_dict(config, "", "")
-    hs = setup_test_homeserver(
-        cleanup_tasks.append, config=config_obj, reactor=reactor, clock=clock
-    )
-    stor = hs.get_datastore()
-    # Run the database background updates.
-    if hasattr(stor.db_pool.updates, "do_next_background_update"):
-        while not await stor.db_pool.updates.has_completed_background_updates():
-            await stor.db_pool.updates.do_next_background_update(1)
-    def cleanup():
-        for i in cleanup_tasks:
-            i()
-    return hs, clock.sleep, cleanup
 def make_reactor():
     """

synmark/__main__.py

@@ -12,20 +12,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import sys
 from argparse import REMAINDER
 from contextlib import redirect_stderr
 from io import StringIO
 import pyperf
-from synmark import make_reactor
-from synmark.suites import SUITES
 from twisted.internet.defer import Deferred, ensureDeferred
 from twisted.logger import globalLogBeginner, textFileLogObserver
 from twisted.python.failure import Failure
+from synmark import make_reactor
+from synmark.suites import SUITES
 from tests.utils import setupdb

synmark/suites/logging.py

@@ -13,20 +13,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
 import warnings
 from io import StringIO
 from mock import Mock
 from pyperf import perf_counter
-from synmark import make_homeserver
 from twisted.internet.defer import Deferred
 from twisted.internet.protocol import ServerFactory
-from twisted.logger import LogBeginner, Logger, LogPublisher
+from twisted.logger import LogBeginner, LogPublisher
 from twisted.protocols.basic import LineOnlyReceiver
-from synapse.logging._structured import setup_structured_logging
+from synapse.config.logger import _setup_stdlib_logging
+from synapse.logging import RemoteHandler
+from synapse.util import Clock
 class LineCounter(LineOnlyReceiver):
@@ -62,7 +64,15 @@ async def main(reactor, loops):
     logger_factory.on_done = Deferred()
     port = reactor.listenTCP(0, logger_factory, interface="127.0.0.1")
-    hs, wait, cleanup = await make_homeserver(reactor)
+    # A fake homeserver config.
+    class Config:
+        server_name = "synmark-" + str(loops)
+        no_redirect_stdio = True
+    hs_config = Config()
+    # To be able to sleep.
+    clock = Clock(reactor)
     errors = StringIO()
     publisher = LogPublisher()
@@ -72,47 +82,49 @@ async def main(reactor, loops):
     )
     log_config = {
-        "loggers": {"synapse": {"level": "DEBUG"}},
-        "drains": {
+        "version": 1,
+        "loggers": {"synapse": {"level": "DEBUG", "handlers": ["tersejson"]}},
+        "formatters": {"tersejson": {"class": "synapse.logging.TerseJsonFormatter"}},
+        "handlers": {
             "tersejson": {
-                "type": "network_json_terse",
+                "class": "synapse.logging.RemoteHandler",
                 "host": "127.0.0.1",
                 "port": port.getHost().port,
                 "maximum_buffer": 100,
+                "_reactor": reactor,
             }
         },
     }
-    logger = Logger(namespace="synapse.logging.test_terse_json", observer=publisher)
-    logging_system = setup_structured_logging(
-        hs, hs.config, log_config, logBeginner=beginner, redirect_stdlib_logging=False
+    logger = logging.getLogger("synapse.logging.test_terse_json")
+    _setup_stdlib_logging(
+        hs_config, log_config, logBeginner=beginner,
     )
     # Wait for it to connect...
-    await logging_system._observers[0]._service.whenConnected()
+    for handler in logging.getLogger("synapse").handlers:
+        if isinstance(handler, RemoteHandler):
+            break
+    else:
+        raise RuntimeError("Improperly configured: no RemoteHandler found.")
+    await handler._service.whenConnected()
     start = perf_counter()
     # Send a bunch of useful messages
     for i in range(0, loops):
-        logger.info("test message %s" % (i,))
+        logger.info("test message %s", i)
-        if (
-            len(logging_system._observers[0]._buffer)
-            == logging_system._observers[0].maximum_buffer
-        ):
-            while (
-                len(logging_system._observers[0]._buffer)
-                > logging_system._observers[0].maximum_buffer / 2
-            ):
-                await wait(0.01)
+        if len(handler._buffer) == handler.maximum_buffer:
+            while len(handler._buffer) > handler.maximum_buffer / 2:
+                await clock.sleep(0.01)
     await logger_factory.on_done
     end = perf_counter() - start
-    logging_system.stop()
+    handler.close()
     port.stopListening()
-    cleanup()
     return end
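For anyone migrating an existing structured-logging config, the log_config change in the hunk above is the general pattern: the old drains section, which fused transport and format into a single drain type, becomes ordinary logging handlers plus formatters, and loggers opt in via handlers. A side-by-side sketch using only keys that appear in the diff (the variable names and the literal port are placeholders for this comparison):

# Old structured-logging style: one "drain" entry decides both the transport
# and the JSON rendering via its "type".
old_style = {
    "loggers": {"synapse": {"level": "DEBUG"}},
    "drains": {
        "tersejson": {
            "type": "network_json_terse",
            "host": "127.0.0.1",
            "port": 9020,  # placeholder; the benchmark uses port.getHost().port
            "maximum_buffer": 100,
        }
    },
}

# New stdlib style: the handler carries the transport, the formatter carries
# the JSON rendering, and the logger lists the handlers it uses.
new_style = {
    "version": 1,
    "loggers": {"synapse": {"level": "DEBUG", "handlers": ["tersejson"]}},
    "formatters": {"tersejson": {"class": "synapse.logging.TerseJsonFormatter"}},
    "handlers": {
        "tersejson": {
            "class": "synapse.logging.RemoteHandler",
            "host": "127.0.0.1",
            "port": 9020,  # placeholder; the benchmark uses port.getHost().port
            "maximum_buffer": 100,
        }
    },
}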