From 81731c6e75fe904a5b44873efa361a229743d99f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Filip=20=C5=A0t=C4=9Bdronsk=C3=BD?=
Date: Mon, 2 Dec 2019 12:12:55 +0000
Subject: [PATCH 1/4] Fix: Pillow error when uploading RGBA image (#3325)
(#6241)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Filip Štědronský
---
changelog.d/6241.bugfix | 1 +
synapse/rest/media/v1/thumbnailer.py | 5 ++++-
2 files changed, 5 insertions(+), 1 deletion(-)
create mode 100644 changelog.d/6241.bugfix
diff --git a/changelog.d/6241.bugfix b/changelog.d/6241.bugfix
new file mode 100644
index 0000000000..25109ca4a6
--- /dev/null
+++ b/changelog.d/6241.bugfix
@@ -0,0 +1 @@
+Fix error from the Pillow library when uploading RGBA images.
diff --git a/synapse/rest/media/v1/thumbnailer.py b/synapse/rest/media/v1/thumbnailer.py
index 8cf415e29d..c234ea7421 100644
--- a/synapse/rest/media/v1/thumbnailer.py
+++ b/synapse/rest/media/v1/thumbnailer.py
@@ -129,5 +129,8 @@ class Thumbnailer(object):
def _encode_image(self, output_image, output_type):
output_bytes_io = BytesIO()
- output_image.save(output_bytes_io, self.FORMATS[output_type], quality=80)
+ fmt = self.FORMATS[output_type]
+ if fmt == "JPEG":
+ output_image = output_image.convert("RGB")
+ output_image.save(output_bytes_io, fmt, quality=80)
return output_bytes_io
From 0ad75fd98ef1943ebea98c6d9f2dc5770c643b0a Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Mon, 2 Dec 2019 15:09:57 +0000
Subject: [PATCH 2/4] Use python3 packages for Ubuntu (#6443)
---
INSTALL.md | 4 ++--
changelog.d/6443.doc | 1 +
2 files changed, 3 insertions(+), 2 deletions(-)
create mode 100644 changelog.d/6443.doc
diff --git a/INSTALL.md b/INSTALL.md
index 9b7360f0ef..9da2e3c734 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -109,8 +109,8 @@ Installing prerequisites on Ubuntu or Debian:
```
sudo apt-get install build-essential python3-dev libffi-dev \
- python-pip python-setuptools sqlite3 \
- libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
+ python3-pip python3-setuptools sqlite3 \
+ libssl-dev python3-virtualenv libjpeg-dev libxslt1-dev
```
#### ArchLinux
diff --git a/changelog.d/6443.doc b/changelog.d/6443.doc
new file mode 100644
index 0000000000..67c59f92ee
--- /dev/null
+++ b/changelog.d/6443.doc
@@ -0,0 +1 @@
+Switch Ubuntu package install recommendation to use python3 packages in INSTALL.md.
\ No newline at end of file
From 72078e4be56d42421e8748e0e45d0fe1204853dd Mon Sep 17 00:00:00 2001
From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com>
Date: Mon, 2 Dec 2019 15:11:32 +0000
Subject: [PATCH 3/4] Transfer power level state events on room upgrade (#6237)
---
changelog.d/6237.bugfix | 1 +
synapse/handlers/room.py | 36 +++++++++++++++++++++++++++++++-----
2 files changed, 32 insertions(+), 5 deletions(-)
create mode 100644 changelog.d/6237.bugfix
diff --git a/changelog.d/6237.bugfix b/changelog.d/6237.bugfix
new file mode 100644
index 0000000000..9285600b00
--- /dev/null
+++ b/changelog.d/6237.bugfix
@@ -0,0 +1 @@
+Transfer non-standard power levels on room upgrade.
\ No newline at end of file
diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py
index e92b2eafd5..35a759f2fe 100644
--- a/synapse/handlers/room.py
+++ b/synapse/handlers/room.py
@@ -198,21 +198,21 @@ class RoomCreationHandler(BaseHandler):
# finally, shut down the PLs in the old room, and update them in the new
# room.
yield self._update_upgraded_room_pls(
- requester, old_room_id, new_room_id, old_room_state
+ requester, old_room_id, new_room_id, old_room_state,
)
return new_room_id
@defer.inlineCallbacks
def _update_upgraded_room_pls(
- self, requester, old_room_id, new_room_id, old_room_state
+ self, requester, old_room_id, new_room_id, old_room_state,
):
"""Send updated power levels in both rooms after an upgrade
Args:
requester (synapse.types.Requester): the user requesting the upgrade
- old_room_id (unicode): the id of the room to be replaced
- new_room_id (unicode): the id of the replacement room
+ old_room_id (str): the id of the room to be replaced
+ new_room_id (str): the id of the replacement room
old_room_state (dict[tuple[str, str], str]): the state map for the old room
Returns:
@@ -298,7 +298,7 @@ class RoomCreationHandler(BaseHandler):
tombstone_event_id (unicode|str): the ID of the tombstone event in the old
room.
Returns:
- Deferred[None]
+ Deferred
"""
user_id = requester.user.to_string()
@@ -333,6 +333,7 @@ class RoomCreationHandler(BaseHandler):
(EventTypes.Encryption, ""),
(EventTypes.ServerACL, ""),
(EventTypes.RelatedGroups, ""),
+ (EventTypes.PowerLevels, ""),
)
old_room_state_ids = yield self.store.get_filtered_current_state_ids(
@@ -346,6 +347,31 @@ class RoomCreationHandler(BaseHandler):
if old_event:
initial_state[k] = old_event.content
+ # Resolve the minimum power level required to send any state event
+ # We will give the upgrading user this power level temporarily (if necessary) such that
+ # they are able to copy all of the state events over, then revert them back to their
+ # original power level afterwards in _update_upgraded_room_pls
+
+ # Copy over user power levels now as this will not be possible with >100PL users once
+ # the room has been created
+
+ power_levels = initial_state[(EventTypes.PowerLevels, "")]
+
+ # Calculate the minimum power level needed to clone the room
+ event_power_levels = power_levels.get("events", {})
+ state_default = power_levels.get("state_default", 0)
+ ban = power_levels.get("ban")
+ needed_power_level = max(state_default, ban, max(event_power_levels.values()))
+
+ # Raise the requester's power level in the new room if necessary
+ current_power_level = power_levels["users"][requester.user.to_string()]
+ if current_power_level < needed_power_level:
+ # Assign this power level to the requester
+ power_levels["users"][requester.user.to_string()] = needed_power_level
+
+ # Set the power levels to the modified state
+ initial_state[(EventTypes.PowerLevels, "")] = power_levels
+
yield self._send_events_for_new_room(
requester,
new_room_id,
From fdec84aa427e2e3b806eb15f462d652f8554cc8d Mon Sep 17 00:00:00 2001
From: Amber Brown
Date: Tue, 3 Dec 2019 20:21:25 +1100
Subject: [PATCH 4/4] Add benchmarks for structured logging performance (#6266)
---
changelog.d/6266.misc | 1 +
synapse/logging/_terse_json.py | 1 +
synmark/__init__.py | 72 ++++++++++++++++++++
synmark/__main__.py | 90 +++++++++++++++++++++++++
synmark/suites/__init__.py | 3 +
synmark/suites/logging.py | 118 +++++++++++++++++++++++++++++++++
tox.ini | 9 +++
7 files changed, 294 insertions(+)
create mode 100644 changelog.d/6266.misc
create mode 100644 synmark/__init__.py
create mode 100644 synmark/__main__.py
create mode 100644 synmark/suites/__init__.py
create mode 100644 synmark/suites/logging.py
diff --git a/changelog.d/6266.misc b/changelog.d/6266.misc
new file mode 100644
index 0000000000..634e421a79
--- /dev/null
+++ b/changelog.d/6266.misc
@@ -0,0 +1 @@
+Add benchmarks for structured logging and improve output performance.
diff --git a/synapse/logging/_terse_json.py b/synapse/logging/_terse_json.py
index 05fc64f409..03934956f4 100644
--- a/synapse/logging/_terse_json.py
+++ b/synapse/logging/_terse_json.py
@@ -256,6 +256,7 @@ class TerseJSONToTCPLogObserver(object):
# transport is the same, just trigger a resumeProducing.
if self._producer and r.transport is self._producer.transport:
self._producer.resumeProducing()
+ self._connection_waiter = None
return
# If the producer is still producing, stop it.
diff --git a/synmark/__init__.py b/synmark/__init__.py
new file mode 100644
index 0000000000..570eb818d9
--- /dev/null
+++ b/synmark/__init__.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from twisted.internet import epollreactor
+from twisted.internet.main import installReactor
+
+from synapse.config.homeserver import HomeServerConfig
+from synapse.util import Clock
+
+from tests.utils import default_config, setup_test_homeserver
+
+
+async def make_homeserver(reactor, config=None):
+ """
+ Make a Homeserver suitable for running benchmarks against.
+
+ Args:
+ reactor: A Twisted reactor to run under.
+ config: A HomeServerConfig to use, or None.
+ """
+ cleanup_tasks = []
+ clock = Clock(reactor)
+
+ if not config:
+ config = default_config("test")
+
+ config_obj = HomeServerConfig()
+ config_obj.parse_config_dict(config, "", "")
+
+ hs = await setup_test_homeserver(
+ cleanup_tasks.append, config=config_obj, reactor=reactor, clock=clock
+ )
+ stor = hs.get_datastore()
+
+ # Run the database background updates.
+ if hasattr(stor, "do_next_background_update"):
+ while not await stor.has_completed_background_updates():
+ await stor.do_next_background_update(1)
+
+ def cleanup():
+ for i in cleanup_tasks:
+ i()
+
+ return hs, clock.sleep, cleanup
+
+
+def make_reactor():
+ """
+ Instantiate and install a Twisted reactor suitable for testing (i.e. not the
+ default global one).
+ """
+ reactor = epollreactor.EPollReactor()
+
+ if "twisted.internet.reactor" in sys.modules:
+ del sys.modules["twisted.internet.reactor"]
+ installReactor(reactor)
+
+ return reactor
diff --git a/synmark/__main__.py b/synmark/__main__.py
new file mode 100644
index 0000000000..ac59befbd4
--- /dev/null
+++ b/synmark/__main__.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+from contextlib import redirect_stderr
+from io import StringIO
+
+import pyperf
+from synmark import make_reactor
+from synmark.suites import SUITES
+
+from twisted.internet.defer import ensureDeferred
+from twisted.logger import globalLogBeginner, textFileLogObserver
+from twisted.python.failure import Failure
+
+from tests.utils import setupdb
+
+
+def make_test(main):
+ """
+ Take a benchmark function and wrap it in a reactor start and stop.
+ """
+
+ def _main(loops):
+
+ reactor = make_reactor()
+
+ file_out = StringIO()
+ with redirect_stderr(file_out):
+
+ d = ensureDeferred(main(reactor, loops))
+
+ def on_done(_):
+ if isinstance(_, Failure):
+ _.printTraceback()
+ print(file_out.getvalue())
+ reactor.stop()
+ return _
+
+ d.addBoth(on_done)
+ reactor.run()
+
+ return d.result
+
+ return _main
+
+
+if __name__ == "__main__":
+
+ def add_cmdline_args(cmd, args):
+ if args.log:
+ cmd.extend(["--log"])
+
+ runner = pyperf.Runner(
+ processes=3, min_time=2, show_name=True, add_cmdline_args=add_cmdline_args
+ )
+ runner.argparser.add_argument("--log", action="store_true")
+ runner.parse_args()
+
+ orig_loops = runner.args.loops
+ runner.args.inherit_environ = ["SYNAPSE_POSTGRES"]
+
+ if runner.args.worker:
+ if runner.args.log:
+ globalLogBeginner.beginLoggingTo(
+ [textFileLogObserver(sys.__stdout__)], redirectStandardIO=False
+ )
+ setupdb()
+
+ for suite, loops in SUITES:
+ if loops:
+ runner.args.loops = loops
+ else:
+ runner.args.loops = orig_loops
+ loops = "auto"
+ runner.bench_time_func(
+ suite.__name__ + "_" + str(loops), make_test(suite.main),
+ )
diff --git a/synmark/suites/__init__.py b/synmark/suites/__init__.py
new file mode 100644
index 0000000000..cfa3b0ba38
--- /dev/null
+++ b/synmark/suites/__init__.py
@@ -0,0 +1,3 @@
+from . import logging
+
+SUITES = [(logging, 1000), (logging, 10000), (logging, None)]
diff --git a/synmark/suites/logging.py b/synmark/suites/logging.py
new file mode 100644
index 0000000000..d8e4c7d58f
--- /dev/null
+++ b/synmark/suites/logging.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+# Copyright 2019 The Matrix.org Foundation C.I.C.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import warnings
+from io import StringIO
+
+from mock import Mock
+
+from pyperf import perf_counter
+from synmark import make_homeserver
+
+from twisted.internet.defer import Deferred
+from twisted.internet.protocol import ServerFactory
+from twisted.logger import LogBeginner, Logger, LogPublisher
+from twisted.protocols.basic import LineOnlyReceiver
+
+from synapse.logging._structured import setup_structured_logging
+
+
+class LineCounter(LineOnlyReceiver):
+
+ delimiter = b"\n"
+
+ def __init__(self, *args, **kwargs):
+ self.count = 0
+ super().__init__(*args, **kwargs)
+
+ def lineReceived(self, line):
+ self.count += 1
+
+ if self.count >= self.factory.wait_for and self.factory.on_done:
+ on_done = self.factory.on_done
+ self.factory.on_done = None
+ on_done.callback(True)
+
+
+async def main(reactor, loops):
+ """
+ Benchmark how long it takes to send `loops` messages.
+ """
+ servers = []
+
+ def protocol():
+ p = LineCounter()
+ servers.append(p)
+ return p
+
+ logger_factory = ServerFactory.forProtocol(protocol)
+ logger_factory.wait_for = loops
+ logger_factory.on_done = Deferred()
+ port = reactor.listenTCP(0, logger_factory, interface="127.0.0.1")
+
+ hs, wait, cleanup = await make_homeserver(reactor)
+
+ errors = StringIO()
+ publisher = LogPublisher()
+ mock_sys = Mock()
+ beginner = LogBeginner(
+ publisher, errors, mock_sys, warnings, initialBufferSize=loops
+ )
+
+ log_config = {
+ "loggers": {"synapse": {"level": "DEBUG"}},
+ "drains": {
+ "tersejson": {
+ "type": "network_json_terse",
+ "host": "127.0.0.1",
+ "port": port.getHost().port,
+ "maximum_buffer": 100,
+ }
+ },
+ }
+
+ logger = Logger(namespace="synapse.logging.test_terse_json", observer=publisher)
+ logging_system = setup_structured_logging(
+ hs, hs.config, log_config, logBeginner=beginner, redirect_stdlib_logging=False
+ )
+
+ # Wait for it to connect...
+ await logging_system._observers[0]._service.whenConnected()
+
+ start = perf_counter()
+
+ # Send a bunch of useful messages
+ for i in range(0, loops):
+ logger.info("test message %s" % (i,))
+
+ if (
+ len(logging_system._observers[0]._buffer)
+ == logging_system._observers[0].maximum_buffer
+ ):
+ while (
+ len(logging_system._observers[0]._buffer)
+ > logging_system._observers[0].maximum_buffer / 2
+ ):
+ await wait(0.01)
+
+ await logger_factory.on_done
+
+ end = perf_counter() - start
+
+ logging_system.stop()
+ port.stopListening()
+ cleanup()
+
+ return end
diff --git a/tox.ini b/tox.ini
index 62b350ea6a..903a245fb0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -102,6 +102,15 @@ commands =
{envbindir}/coverage run "{envbindir}/trial" {env:TRIAL_FLAGS:} {posargs:tests} {env:TOXSUFFIX:}
+[testenv:benchmark]
+deps =
+ {[base]deps}
+ pyperf
+setenv =
+ SYNAPSE_POSTGRES = 1
+commands =
+ python -m synmark {posargs:}
+
[testenv:packaging]
skip_install=True
deps =