
Compare commits

80 Commits

Author SHA1 Message Date
Erik Johnston
89c0cd4acc Merge branch 'release-v0.9.0' of github.com:matrix-org/synapse 2015-05-07 19:07:00 +01:00
Erik Johnston
6101ce427a Slight rewording 2015-05-07 18:58:28 +01:00
Erik Johnston
5fe26a9b5c Reword docs/application_services.rst 2015-05-07 18:54:53 +01:00
Erik Johnston
35698484a5 Add some information on registering AS's 2015-05-07 18:51:09 +01:00
Erik Johnston
63562f6d5a Bump date 2015-05-07 18:20:13 +01:00
Erik Johnston
a151693a3b Bump syweb version 2015-05-07 18:01:46 +01:00
Erik Johnston
ac29318b84 Add link to registration spec 2015-05-07 17:58:50 +01:00
Erik Johnston
4605953b0f Add JIRA issue id 2015-05-07 16:53:18 +01:00
Erik Johnston
3188e94ac4 Explain the change in AS /register api 2015-05-07 16:12:02 +01:00
Erik Johnston
771fc05d30 Change log: Link to application services spec. 2015-05-06 13:59:32 +01:00
Erik Johnston
938939fd89 Move CAPTCHA_SETUP to docs/ 2015-05-06 13:48:06 +01:00
Erik Johnston
028a570e17 Linkify docs/postgres.sql 2015-05-06 13:42:40 +01:00
Erik Johnston
0e4393652f Update change log to be more detailed 2015-05-06 13:31:59 +01:00
Erik Johnston
f10fd8a470 Merge branch 'develop' of github.com:matrix-org/synapse into release-v0.9.0 2015-05-06 12:54:36 +01:00
Mark Haines
3c11c9c122 Merge pull request #140 from matrix-org/erikj/scripts_refactor
Seperate scripts/ into scripts/ and scripts-dev/
2015-05-06 12:54:07 +01:00
Erik Johnston
673375fe2d Acutally add scripts-dev/ 2015-05-06 11:46:02 +01:00
Erik Johnston
3c92231094 Re-add scripts/register_new_matrix_user 2015-05-06 11:45:18 +01:00
Erik Johnston
119e5d7702 Seperate scripts/ into scripts/ and scripts-dev/, where scripts/* are automatically added to the package 2015-05-06 11:41:19 +01:00
Erik Johnston
271ee604f8 Update change log 2015-05-06 11:29:54 +01:00
Erik Johnston
04c01882fc Bump version 2015-05-06 09:59:13 +01:00
Mark Haines
f4664a6cbd Merge pull request #138 from matrix-org/erikj/SYN-371
SYN-371 - Failed to persist event
2015-05-05 18:53:15 +01:00
Mark Haines
ecb26beda5 Merge pull request #137 from matrix-org/erikj/executemany
executemany support
2015-05-05 18:30:35 +01:00
Erik Johnston
0c4ac271ca Merge branch 'erikj/executemany' of github.com:matrix-org/synapse into erikj/SYN-371 2015-05-05 18:21:19 +01:00
Erik Johnston
0cf7e480b4 And use buffer(...) there as well 2015-05-05 18:20:01 +01:00
Erik Johnston
ed2584050f Merge branch 'develop' of github.com:matrix-org/synapse into erikj/executemany 2015-05-05 18:15:20 +01:00
Erik Johnston
977338a7af Use buffer(...) when inserting into bytea column 2015-05-05 18:12:53 +01:00
Mark Haines
31049c4d72 Merge pull request #139 from matrix-org/bugs/SYN-369
Fix race with cache invalidation. SYN-369
2015-05-05 17:46:13 +01:00
Mark Haines
deb0237166 Add some doc-string 2015-05-05 17:45:11 +01:00
Mark Haines
e45b05647e Fix the --help option for synapse 2015-05-05 17:39:59 +01:00
Erik Johnston
3d5a955e08 Missed events are not outliers 2015-05-05 17:36:57 +01:00
Mark Haines
d18f37e026 Collect the invalidate callbacks on the transaction object rather than passing around a separate list 2015-05-05 17:32:21 +01:00
Erik Johnston
9951542393 Add a comment about the zip(*[zip(sorted(...),...)]) 2015-05-05 17:06:55 +01:00
Mark Haines
041b6cba61 SYN-369: Add comments to the sequence number logic in the cache 2015-05-05 16:32:44 +01:00
Mark Haines
63075118a5 Add debug flag in synapse/storage/_base.py for debugging the cache logic by comparing what is in the cache with what was in the database on every access 2015-05-05 16:24:04 +01:00
Erik Johnston
531d7955fd Don't insert without deduplication. In this case we never actually use this table, so simply remove the insert entirely 2015-05-05 16:12:28 +01:00
Mark Haines
bfa4a7f8b0 Invalidate the room_member cache if the current state events updates 2015-05-05 15:43:49 +01:00
Mark Haines
d0fece8d3c Missing return for when the event was already persisted 2015-05-05 15:39:09 +01:00
Erik Johnston
bdcd7693c8 Fix indentation 2015-05-05 15:14:48 +01:00
Erik Johnston
43c2e8deae Add support for using executemany 2015-05-05 15:13:25 +01:00
Erik Johnston
1692dc019d Don't call 'encode_parameter' no-op 2015-05-05 15:00:30 +01:00
Mark Haines
a9aea68fd5 Invalidate the caches from the correct thread 2015-05-05 14:57:08 +01:00
Mark Haines
261d809a47 Sequence the modifications to the cache so that selects don't race with inserts 2015-05-05 14:13:50 +01:00
Erik Johnston
d9cc5de9e5 Correctly name transaction 2015-05-05 10:24:10 +01:00
Erik Johnston
b8940cd902 Remove some unused indexes 2015-05-01 16:14:25 +01:00
Erik Johnston
1942382246 Don't log enqueue_ 2015-05-01 16:14:25 +01:00
David Baker
eb9bd2d949 user_id now in user_threepids 2015-05-01 15:04:37 +01:00
Erik Johnston
2d386d7038 That wasn't a deferred 2015-05-01 14:41:25 +01:00
Erik Johnston
4ac2823b3c Remove inlineCallbacks from non-generator 2015-05-01 14:41:25 +01:00
Erik Johnston
22c7c5eb8f Typo 2015-05-01 14:41:25 +01:00
Erik Johnston
42c12c04f6 Remove some run_on_reactors 2015-05-01 14:41:25 +01:00
Erik Johnston
adb5b76ff5 Don't log all auth events every time we call auth.check 2015-05-01 14:41:25 +01:00
Mark Haines
3bcdf3664c Use the daemonize key from the config if it exists 2015-05-01 14:34:55 +01:00
David Baker
9eeb03c0dd Don't use self.execute: it's designed for fetching stuff 2015-05-01 14:21:25 +01:00
Mark Haines
32937f3ea0 database config is not kept in separate config file anymore 2015-05-01 14:06:54 +01:00
Mark Haines
7b50769eb9 Merge pull request #136 from matrix-org/markjh/config_cleanup
Config restructuring.
2015-05-01 14:04:39 +01:00
David Baker
7693f24792 No id field on user 2015-05-01 13:55:42 +01:00
Mark Haines
46a65c282f Allow generate-config to run against an existing config file to generate default keys 2015-05-01 13:54:38 +01:00
David Baker
92b20713d7 More missed get_user_by_id API changes 2015-05-01 13:45:54 +01:00
Erik Johnston
da4ed08739 One too many lens 2015-05-01 13:29:38 +01:00
Erik Johnston
9060dc6b59 Change public room list to use defer.gatherResults 2015-05-01 13:28:36 +01:00
David Baker
1fae1b3166 This api now no longer returns an array 2015-05-01 13:26:41 +01:00
Erik Johnston
80b4119279 Don't wait for storage of access_token 2015-05-01 13:14:05 +01:00
Erik Johnston
4011cf1c42 Cache latest_event_ids_in_room 2015-05-01 13:06:26 +01:00
Mark Haines
50c87b8eed Allow "manhole" to be ommited from the config 2015-04-30 18:11:47 +01:00
Mark Haines
345995fcde Remove the ~, comment the lines instead 2015-04-30 18:10:19 +01:00
Mark Haines
62cebee8ee Update key.py 2015-04-30 17:54:01 +01:00
Mark Haines
95cbfee8ae Update metrics.py 2015-04-30 17:52:20 +01:00
Mark Haines
4ad8350607 Update README.rst 2015-04-30 17:48:29 +01:00
Mark Haines
6ea9cf58be missing import 2015-04-30 17:21:21 +01:00
Mark Haines
c95480963e read the pid_file from the config file in synctl 2015-04-30 17:12:15 +01:00
Mark Haines
069296dbb0 Can't specify bind-port on the cmdline anymore 2015-04-30 17:08:07 +01:00
Mark Haines
2d4d2bbae4 Merge branch 'develop' into markjh/config_cleanup
Conflicts:
	synapse/config/captcha.py
2015-04-30 16:54:55 +01:00
Mark Haines
2f1348f339 Write a default log_config when generating config 2015-04-30 16:52:57 +01:00
Mark Haines
74aaacf82a Don't break when sizes or durations are given as integers 2015-04-30 16:04:02 +01:00
Mark Haines
c28f1d16f0 Add a random string to the auto generated key id 2015-04-30 15:13:14 +01:00
Mark Haines
265f30bd3f Allow --enable-registration to be passed on the commandline 2015-04-30 15:04:06 +01:00
Mark Haines
c9e62927f2 Use disable_registration keys if they are present 2015-04-30 14:34:09 +01:00
Mark Haines
1aa11cf7ce Allow multiple config files, set up a default config before applying the config files 2015-04-30 13:48:15 +01:00
Mark Haines
6b69ddd17a remove duplicate parse_size method 2015-04-30 04:26:29 +01:00
Mark Haines
d624e2a638 Manually generate the default config yaml, remove most of the commandline arguments for synapse anticipating that people will use the yaml instead. Simpify implementing config options by not requiring the classes to hit the super class 2015-04-30 04:24:44 +01:00
63 changed files with 922 additions and 681 deletions

View File

@@ -1,9 +1,52 @@
Changes in synapse vX
=====================
Changes in synapse v0.9.0 (2015-05-07)
======================================

* Changed config option from ``disable_registration`` to
  ``enable_registration``. Old option will be ignored.

General:

* Add support for using a PostgreSQL database instead of SQLite. See
  `docs/postgres.rst`_ for details.
* Add password change and reset APIs. See `Registration`_ in the spec.
* Fix memory leak due to not releasing stale notifiers - SYN-339.
* Fix race in caches that occasionally caused some presence updates to be
  dropped - SYN-369.
* Check server name has not changed on restart.

Federation:

* Add key distribution mechanisms for fetching public keys of unavailable
  remote home servers. See `Retrieving Server Keys`_ in the spec.

Configuration:

* Add support for multiple config files.
* Add support for dictionaries in config files.
* Remove support for specifying config options on the command line, except
  for:

  * ``--daemonize`` - Daemonize the home server.
  * ``--manhole`` - Turn on the twisted telnet manhole service on the given
    port.
  * ``--database-path`` - The path to a sqlite database to use.
  * ``--verbose`` - The verbosity level.
  * ``--log-file`` - File to log to.
  * ``--log-config`` - Python logging config file.
  * ``--enable-registration`` - Enable registration for new users.

Application services:

* Reliably retry sending of events from Synapse to application services, as per
  `Application Services`_ spec.
* Application services can no longer register via the ``/register`` API,
  instead their configuration should be saved to a file and listed in the
  synapse ``app_service_config_files`` config option. The AS configuration file
  has the same format as the old ``/register`` request.
  See `docs/application_services.rst`_ for more information.

.. _`docs/postgres.rst`: docs/postgres.rst
.. _`docs/application_services.rst`: docs/application_services.rst
.. _`Registration`: https://github.com/matrix-org/matrix-doc/blob/master/specification/10_client_server_api.rst#registration
.. _`Retrieving Server Keys`: https://github.com/matrix-org/matrix-doc/blob/6f2698/specification/30_server_server_api.rst#retrieving-server-keys
.. _`Application Services`: https://github.com/matrix-org/matrix-doc/blob/0c6bd9/specification/25_application_service_api.rst#home-server---application-service-api

Changes in synapse v0.8.1 (2015-03-18)
======================================
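
One note on the ``disable_registration`` → ``enable_registration`` rename:
despite the "old option will be ignored" wording in the superseded section
above, the ``RegistrationConfig`` diff further down (commit ``c9e62927f2``,
"Use disable_registration keys if they are present") still honours a legacy
key. A minimal sketch of that precedence, assuming the parsed config is a
plain dict::

    from distutils.util import strtobool

    def registration_enabled(config):
        # New-style key first (sketch only: the real read_config requires
        # the key to be present rather than defaulting it)...
        enabled = bool(strtobool(str(config.get("enable_registration", False))))
        # ...but a legacy disable_registration key, if present, wins.
        if "disable_registration" in config:
            enabled = not bool(strtobool(str(config["disable_registration"])))
        return enabled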

View File

@@ -318,7 +318,7 @@ ArchLinux
If running `$ synctl start` fails with 'returned non-zero exit status 1',
you will need to explicitly call Python2.7 - either running as::

    $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid
    $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml

...or by editing synctl with the correct python executable.

@@ -409,7 +409,6 @@ SRV record, as that is the name other machines will expect it to have::

    $ python -m synapse.app.homeserver \
        --server-name YOURDOMAIN \
        --bind-port 8448 \
        --config-path homeserver.yaml \
        --generate-config

    $ python -m synapse.app.homeserver --config-path homeserver.yaml

View File

@@ -16,30 +16,30 @@ if [ $# -eq 1 ]; then
fi
fi
export PYTHONPATH=$(readlink -f $(pwd))
echo $PYTHONPATH
for port in 8080 8081 8082; do
echo "Starting server on port $port... "
https_port=$((port + 400))
mkdir -p demo/$port
pushd demo/$port
#rm $DIR/etc/$port.config
python -m synapse.app.homeserver \
--generate-config \
--config-path "demo/etc/$port.config" \
-p "$https_port" \
--unsecure-port "$port" \
-H "localhost:$https_port" \
-f "$DIR/$port.log" \
-d "$DIR/$port.db" \
-D --pid-file "$DIR/$port.pid" \
--manhole $((port + 1000)) \
--tls-dh-params-path "demo/demo.tls.dh" \
--media-store-path "demo/media_store.$port" \
$PARAMS $SYNAPSE_PARAMS \
--enable-registration
--config-path "$DIR/etc/$port.config" \
python -m synapse.app.homeserver \
--config-path "demo/etc/$port.config" \
--config-path "$DIR/etc/$port.config" \
-D \
-vv \
popd
done
cd "$CWD"

View File

@@ -0,0 +1,36 @@
Registering an Application Service
==================================

The registration of new application services depends on the homeserver used.
In synapse, you need to create a new configuration file for your AS and add it
to the list specified under the ``app_service_config_files`` config
option in your synapse config.

For example:

.. code-block:: yaml

  app_service_config_files:
  - /home/matrix/.synapse/<your-AS>.yaml

The format of the AS configuration file is as follows:

.. code-block:: yaml

  url: <base url of AS>
  as_token: <token AS will add to requests to HS>
  hs_token: <token HS will add to requests to AS>
  sender_localpart: <localpart of AS user>
  namespaces:
    users:  # List of users we're interested in
      - exclusive: <bool>
        regex: <regex>
      - ...
    aliases: []  # List of aliases we're interested in
    rooms: []  # List of room ids we're interested in

See the spec_ for further details on how application services work.

.. _spec: https://github.com/matrix-org/matrix-doc/blob/master/specification/25_application_service_api.rst#application-service-api
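
A homeserver consuming ``app_service_config_files`` would read each listed
YAML file and sanity-check it before use; a hypothetical loader sketch (the
required-field list mirrors the format above, the function name is
illustrative)::

    import yaml

    REQUIRED_FIELDS = ("url", "as_token", "hs_token", "sender_localpart",
                       "namespaces")

    def load_as_config(path):
        # Parse one AS registration file of the format documented above and
        # check that the required top-level keys are present.
        with open(path) as f:
            as_config = yaml.safe_load(f)
        missing = [field for field in REQUIRED_FIELDS if field not in as_config]
        if missing:
            raise ValueError("%s is missing AS fields: %r" % (path, missing))
        return as_config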

View File

@@ -34,19 +34,15 @@ Synapse config
When you are ready to start using PostgreSQL, add the following line to your
config file::

    database_config: <db_config_file>

Where ``<db_config_file>`` is the file name that points to a yaml file of the
following form::

    name: psycopg2
    args:
        user: <user>
        password: <pass>
        database: <db>
        host: <host>
        cp_min: 5
        cp_max: 10

database:
    name: psycopg2
    args:
        user: <user>
        password: <pass>
        database: <db>
        host: <host>
        cp_min: 5
        cp_max: 10

All key, values in ``args`` are passed to the ``psycopg2.connect(..)``
function, except keys beginning with ``cp_``, which are consumed by the twisted
connection pool.
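
The last paragraph describes a simple routing rule; a rough illustration in
Python (not synapse's actual code) of splitting ``args`` between the two
consumers::

    def split_database_args(database_config):
        # cp_* keys (cp_min, cp_max, ...) configure twisted's adbapi
        # ConnectionPool; everything else is forwarded to psycopg2.connect().
        args = database_config["args"]
        pool_args = dict((k, v) for k, v in args.items() if k.startswith("cp_"))
        db_args = dict((k, v) for k, v in args.items() if not k.startswith("cp_"))
        return db_args, pool_args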

scripts/port_from_sqlite_to_postgres.py Normal file → Executable file
View File

@@ -1,3 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2015 OpenMarket Ltd
#

scripts/upgrade_db_to_v0.6.0.py Normal file → Executable file
View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
from synapse.storage import SCHEMA_VERSION, read_schema
from synapse.storage._base import SQLBaseStore
from synapse.storage.signatures import SignatureStore

View File

@@ -14,6 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
from setuptools import setup, find_packages
@@ -55,5 +56,5 @@ setup(
include_package_data=True,
zip_safe=False,
long_description=long_description,
scripts=["synctl", "register_new_matrix_user"],
scripts=["synctl"] + glob.glob("scripts/*"),
)
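
The effect of the ``glob.glob`` change is that anything placed under
``scripts/`` is installed as an executable automatically, which is why
``register_new_matrix_user`` was re-added there in commit ``3c92231094``.
What ``setup()`` now receives can be previewed with::

    import glob
    print(["synctl"] + glob.glob("scripts/*"))
    # e.g. ['synctl', 'scripts/register_new_matrix_user',
    #       'scripts/port_from_sqlite_to_postgres.py', ...]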

View File

@@ -16,4 +16,4 @@
""" This is a reference implementation of a Matrix home server.
"""
__version__ = "0.8.1-r4"
__version__ = "0.9.0"

View File

@@ -20,7 +20,6 @@ from twisted.internet import defer
from synapse.api.constants import EventTypes, Membership, JoinRules
from synapse.api.errors import AuthError, Codes, SynapseError
from synapse.util.logutils import log_function
from synapse.util.async import run_on_reactor
from synapse.types import UserID, ClientInfo
import logging
@@ -65,7 +64,10 @@ class Auth(object):
if event.type == EventTypes.Aliases:
return True
logger.debug("Auth events: %s", auth_events)
logger.debug(
"Auth events: %s",
[a.event_id for a in auth_events.values()]
)
if event.type == EventTypes.Member:
allowed = self.is_membership_change_allowed(
@@ -360,7 +362,7 @@ class Auth(object):
default=[""]
)[0]
if user and access_token and ip_addr:
yield self.store.insert_client_ip(
self.store.insert_client_ip(
user=user,
access_token=access_token,
device_id=user_info["device_id"],
@@ -424,8 +426,6 @@ class Auth(object):
@defer.inlineCallbacks
def add_auth_events(self, builder, context):
yield run_on_reactor()
auth_ids = self.compute_auth_events(builder, context.current_state)
auth_events_entries = yield self.store.add_event_hashes(
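
Two changes land in this hunk: ``auth.check`` now logs only the auth event
IDs instead of whole events, and the client-IP insert is no longer
``yield``-ed, so request handling does not block on that write. A minimal
sketch of the fire-and-forget pattern, with an errback added so a failed
write is at least logged (the diff itself simply drops the ``yield``)::

    import logging

    logger = logging.getLogger(__name__)

    def record_client_ip(store, **kwargs):
        # Start the write without waiting on its Deferred.
        d = store.insert_client_ip(**kwargs)
        d.addErrback(lambda failure: logger.warn("insert_client_ip: %s", failure))
        return d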

View File

@@ -409,7 +409,6 @@ def setup(config_options):
config.server_name,
domain_with_port=domain_with_port,
upload_dir=os.path.abspath("uploads"),
db_name=config.database_path,
db_config=config.database_config,
tls_context_factory=tls_context_factory,
config=config,
@@ -422,9 +421,7 @@ def setup(config_options):
redirect_root_to_web_client=True,
)
db_name = hs.get_db_name()
logger.info("Preparing database: %s...", db_name)
logger.info("Preparing database: %r...", config.database_config)
try:
db_conn = database_engine.module.connect(
@@ -446,7 +443,7 @@ def setup(config_options):
)
sys.exit(1)
logger.info("Database prepared in %s.", db_name)
logger.info("Database prepared in %r.", config.database_config)
if config.manhole:
f = twisted.manhole.telnet.ShellFactory()

View File

@@ -18,15 +18,18 @@ import sys
import os
import subprocess
import signal
import yaml
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
CONFIGFILE = "homeserver.yaml"
PIDFILE = "homeserver.pid"
GREEN = "\x1b[1;32m"
NORMAL = "\x1b[m"
CONFIG = yaml.load(open(CONFIGFILE))
PIDFILE = CONFIG["pid_file"]
def start():
if not os.path.exists(CONFIGFILE):
@@ -40,7 +43,7 @@ def start():
sys.exit(1)
print "Starting ...",
args = SYNAPSE
args.extend(["--daemonize", "-c", CONFIGFILE, "--pid-file", PIDFILE])
args.extend(["--daemonize", "-c", CONFIGFILE])
subprocess.check_call(args)
print GREEN + "started" + NORMAL
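
synctl therefore no longer passes ``--pid-file``: the pid file is defined
once in ``homeserver.yaml`` and both synctl and the server read it from
there. The lookup reduces to::

    import yaml

    with open("homeserver.yaml") as f:
        pidfile = yaml.load(f)["pid_file"]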

View File

@@ -14,9 +14,10 @@
# limitations under the License.
import argparse
import sys
import os
import yaml
import sys
from textwrap import dedent
class ConfigError(Exception):
@@ -24,18 +25,35 @@ class ConfigError(Exception):
class Config(object):
def __init__(self, args):
pass
@staticmethod
def parse_size(string):
def parse_size(value):
if isinstance(value, int) or isinstance(value, long):
return value
sizes = {"K": 1024, "M": 1024 * 1024}
size = 1
suffix = string[-1]
suffix = value[-1]
if suffix in sizes:
string = string[:-1]
value = value[:-1]
size = sizes[suffix]
return int(string) * size
return int(value) * size
@staticmethod
def parse_duration(value):
if isinstance(value, int) or isinstance(value, long):
return value
second = 1000
hour = 60 * 60 * second
day = 24 * hour
week = 7 * day
year = 365 * day
sizes = {"s": second, "h": hour, "d": day, "w": week, "y": year}
size = 1
suffix = value[-1]
if suffix in sizes:
value = value[:-1]
size = sizes[suffix]
return int(value) * size
@staticmethod
def abspath(file_path):
@@ -77,17 +95,6 @@ class Config(object):
with open(file_path) as file_stream:
return file_stream.read()
@classmethod
def read_yaml_file(cls, file_path, config_name):
cls.check_file(file_path, config_name)
with open(file_path) as file_stream:
try:
return yaml.load(file_stream)
except:
raise ConfigError(
"Error parsing yaml in file %r" % (file_path,)
)
@staticmethod
def default_path(name):
return os.path.abspath(os.path.join(os.path.curdir, name))
@@ -97,84 +104,130 @@ class Config(object):
with open(file_path) as file_stream:
return yaml.load(file_stream)
@classmethod
def add_arguments(cls, parser):
pass
def invoke_all(self, name, *args, **kargs):
results = []
for cls in type(self).mro():
if name in cls.__dict__:
results.append(getattr(cls, name)(self, *args, **kargs))
return results
@classmethod
def generate_config(cls, args, config_dir_path):
pass
def generate_config(self, config_dir_path, server_name):
default_config = "# vim:ft=yaml\n"
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
"default_config", config_dir_path, server_name
))
config = yaml.load(default_config)
return default_config, config
@classmethod
def load_config(cls, description, argv, generate_section=None):
obj = cls()
config_parser = argparse.ArgumentParser(add_help=False)
config_parser.add_argument(
"-c", "--config-path",
action="append",
metavar="CONFIG_FILE",
help="Specify config file"
)
config_parser.add_argument(
"--generate-config",
action="store_true",
help="Generate config file"
help="Generate a config file for the server name"
)
config_parser.add_argument(
"-H", "--server-name",
help="The server name to generate a config file for"
)
config_args, remaining_args = config_parser.parse_known_args(argv)
if config_args.generate_config:
if not config_args.config_path:
config_parser.error(
"Must specify where to generate the config file"
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -h SERVER_NAME"
" -c CONFIG-FILE\""
)
config_dir_path = os.path.dirname(config_args.config_path)
if os.path.exists(config_args.config_path):
defaults = cls.read_config_file(config_args.config_path)
else:
defaults = {}
else:
if config_args.config_path:
defaults = cls.read_config_file(config_args.config_path)
else:
defaults = {}
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
cls.add_arguments(parser)
parser.set_defaults(**defaults)
args = parser.parse_args(remaining_args)
if config_args.generate_config:
config_dir_path = os.path.dirname(config_args.config_path)
config_dir_path = os.path.dirname(config_args.config_path[0])
config_dir_path = os.path.abspath(config_dir_path)
server_name = config_args.server_name
if not server_name:
print "Most specify a server_name to a generate config for."
sys.exit(1)
(config_path,) = config_args.config_path
if not os.path.exists(config_dir_path):
os.makedirs(config_dir_path)
cls.generate_config(args, config_dir_path)
config = {}
for key, value in vars(args).items():
if (key not in set(["config_path", "generate_config"])
and value is not None):
config[key] = value
with open(config_args.config_path, "w") as config_file:
# TODO(mark/paul) We might want to output emacs-style mode
# markers as well as vim-style mode markers into the file,
# to further hint to people this is a YAML file.
config_file.write("# vim:ft=yaml\n")
yaml.dump(config, config_file, default_flow_style=False)
print (
"A config file has been generated in %s for server name"
" '%s' with corresponding SSL keys and self-signed"
" certificates. Please review this file and customise it to"
" your needs."
) % (
config_args.config_path, config['server_name']
)
if os.path.exists(config_path):
print "Config file %r already exists" % (config_path,)
yaml_config = cls.read_config_file(config_path)
yaml_name = yaml_config["server_name"]
if server_name != yaml_name:
print (
"Config file %r has a different server_name: "
" %r != %r" % (config_path, server_name, yaml_name)
)
sys.exit(1)
config_bytes, config = obj.generate_config(
config_dir_path, server_name
)
config.update(yaml_config)
print "Generating any missing keys for %r" % (server_name,)
obj.invoke_all("generate_files", config)
sys.exit(0)
with open(config_path, "wb") as config_file:
config_bytes, config = obj.generate_config(
config_dir_path, server_name
)
obj.invoke_all("generate_files", config)
config_file.write(config_bytes)
print (
"A config file has been generated in %s for server name"
" '%s' with corresponding SSL keys and self-signed"
" certificates. Please review this file and customise it to"
" your needs."
) % (config_path, server_name)
print (
"If this server name is incorrect, you will need to regenerate"
" the SSL certificates"
)
sys.exit(0)
return cls(args)
parser = argparse.ArgumentParser(
parents=[config_parser],
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
obj.invoke_all("add_arguments", parser)
args = parser.parse_args(remaining_args)
if not config_args.config_path:
config_parser.error(
"Must supply a config file.\nA config file can be automatically"
" generated using \"--generate-config -h SERVER_NAME"
" -c CONFIG-FILE\""
)
config_dir_path = os.path.dirname(config_args.config_path[0])
config_dir_path = os.path.abspath(config_dir_path)
specified_config = {}
for config_path in config_args.config_path:
yaml_config = cls.read_config_file(config_path)
specified_config.update(yaml_config)
server_name = specified_config["server_name"]
_, config = obj.generate_config(config_dir_path, server_name)
config.pop("log_config")
config.update(specified_config)
obj.invoke_all("read_config", config)
obj.invoke_all("read_arguments", args)
return obj
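
The two new helpers let size and duration options accept either raw integers
or suffixed strings, which is what allows the generated YAML later in this
diff to say things like ``event_cache_size: "10K"``, ``key_refresh_interval:
"1d"`` and ``turn_user_lifetime: "1h"``. Worked examples (sizes in bytes,
durations in milliseconds)::

    Config.parse_size("10K")      # 10 * 1024           -> 10240
    Config.parse_size(2048)       # ints pass through   -> 2048
    Config.parse_duration("1h")   # 60 * 60 * 1000      -> 3600000
    Config.parse_duration("1d")   # 24 * 60 * 60 * 1000 -> 86400000
    Config.parse_duration(500)    # ints pass through   -> 500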

View File

@@ -17,15 +17,11 @@ from ._base import Config
class AppServiceConfig(Config):
def __init__(self, args):
super(AppServiceConfig, self).__init__(args)
self.app_service_config_files = args.app_service_config_files
def read_config(self, config):
self.app_service_config_files = config.get("app_service_config_files", [])
@classmethod
def add_arguments(cls, parser):
super(AppServiceConfig, cls).add_arguments(parser)
group = parser.add_argument_group("appservice")
group.add_argument(
"--app-service-config-files", type=str, nargs='+',
help="A list of application service config files to use."
)
def default_config(cls, config_dir_path, server_name):
return """\
# A list of application service config file to use
app_service_config_files: []
"""

View File

@@ -17,42 +17,35 @@ from ._base import Config
class CaptchaConfig(Config):
def __init__(self, args):
super(CaptchaConfig, self).__init__(args)
self.recaptcha_private_key = args.recaptcha_private_key
self.recaptcha_public_key = args.recaptcha_public_key
self.enable_registration_captcha = args.enable_registration_captcha
def read_config(self, config):
self.recaptcha_private_key = config["recaptcha_private_key"]
self.recaptcha_public_key = config["recaptcha_public_key"]
self.enable_registration_captcha = config["enable_registration_captcha"]
# XXX: This is used for more than just captcha
self.captcha_ip_origin_is_x_forwarded = (
args.captcha_ip_origin_is_x_forwarded
config["captcha_ip_origin_is_x_forwarded"]
)
self.captcha_bypass_secret = args.captcha_bypass_secret
self.captcha_bypass_secret = config.get("captcha_bypass_secret")
@classmethod
def add_arguments(cls, parser):
super(CaptchaConfig, cls).add_arguments(parser)
group = parser.add_argument_group("recaptcha")
group.add_argument(
"--recaptcha-public-key", type=str, default="YOUR_PUBLIC_KEY",
help="This Home Server's ReCAPTCHA public key."
)
group.add_argument(
"--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY",
help="This Home Server's ReCAPTCHA private key."
)
group.add_argument(
"--enable-registration-captcha", type=bool, default=False,
help="Enables ReCaptcha checks when registering, preventing signup"
+ " unless a captcha is answered. Requires a valid ReCaptcha "
+ "public/private key."
)
group.add_argument(
"--captcha_ip_origin_is_x_forwarded", type=bool, default=False,
help="When checking captchas, use the X-Forwarded-For (XFF) header"
+ " as the client IP and not the actual client IP."
)
group.add_argument(
"--captcha_bypass_secret", type=str,
help="A secret key used to bypass the captcha test entirely."
)
def default_config(self, config_dir_path, server_name):
return """\
## Captcha ##
# This Home Server's ReCAPTCHA public key.
recaptcha_private_key: "YOUR_PUBLIC_KEY"
# This Home Server's ReCAPTCHA private key.
recaptcha_public_key: "YOUR_PRIVATE_KEY"
# Enables ReCaptcha checks when registering, preventing signup
# unless a captcha is answered. Requires a valid ReCaptcha
# public/private key.
enable_registration_captcha: False
# When checking captchas, use the X-Forwarded-For (XFF) header
# as the client IP and not the actual client IP.
captcha_ip_origin_is_x_forwarded: False
# A secret key used to bypass the captcha test entirely.
#captcha_bypass_secret: "YOUR_SECRET_HERE"
"""

View File

@@ -14,28 +14,21 @@
# limitations under the License.
from ._base import Config
import os
import yaml
class DatabaseConfig(Config):
def __init__(self, args):
super(DatabaseConfig, self).__init__(args)
if args.database_path == ":memory:":
self.database_path = ":memory:"
else:
self.database_path = self.abspath(args.database_path)
self.event_cache_size = self.parse_size(args.event_cache_size)
if args.database_config:
with open(args.database_config) as f:
self.database_config = yaml.safe_load(f)
else:
def read_config(self, config):
self.event_cache_size = self.parse_size(
config.get("event_cache_size", "10K")
)
self.database_config = config.get("database")
if self.database_config is None:
self.database_config = {
"name": "sqlite3",
"args": {
"database": self.database_path,
},
"args": {},
}
name = self.database_config.get("name", None)
@@ -50,24 +43,37 @@ class DatabaseConfig(Config):
else:
raise RuntimeError("Unsupported database type '%s'" % (name,))
@classmethod
def add_arguments(cls, parser):
super(DatabaseConfig, cls).add_arguments(parser)
self.set_databasepath(config.get("database_path"))
def default_config(self, config, config_dir_path):
database_path = self.abspath("homeserver.db")
return """\
# Database configuration
database:
# The database engine name
name: "sqlite3"
# Arguments to pass to the engine
args:
# Path to the database
database: "%(database_path)s"
# Number of events to cache in memory.
event_cache_size: "10K"
""" % locals()
def read_arguments(self, args):
self.set_databasepath(args.database_path)
def set_databasepath(self, database_path):
if database_path != ":memory:":
database_path = self.abspath(database_path)
if self.database_config.get("name", None) == "sqlite3":
if database_path is not None:
self.database_config["args"]["database"] = database_path
def add_arguments(self, parser):
db_group = parser.add_argument_group("database")
db_group.add_argument(
"-d", "--database-path", default="homeserver.db",
metavar="SQLITE_DATABASE_PATH", help="The database name."
"-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
help="The path to a sqlite database to use."
)
db_group.add_argument(
"--event-cache-size", default="100K",
help="Number of events to cache in memory."
)
db_group.add_argument(
"--database-config", default=None,
help="Location of the database configuration file."
)
@classmethod
def generate_config(cls, args, config_dir_path):
super(DatabaseConfig, cls).generate_config(args, config_dir_path)
args.database_path = os.path.abspath(args.database_path)

View File

@@ -36,4 +36,6 @@ class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
if __name__ == '__main__':
import sys
HomeServerConfig.load_config("Generate config", sys.argv[1:], "HomeServer")
sys.stdout.write(
HomeServerConfig().generate_config(sys.argv[1], sys.argv[2])[0]
)

View File

@@ -20,48 +20,58 @@ from syutil.crypto.signing_key import (
is_signing_algorithm_supported, decode_verify_key_bytes
)
from syutil.base64util import decode_base64
from synapse.util.stringutils import random_string
class KeyConfig(Config):
def __init__(self, args):
super(KeyConfig, self).__init__(args)
self.signing_key = self.read_signing_key(args.signing_key_path)
def read_config(self, config):
self.signing_key = self.read_signing_key(config["signing_key_path"])
self.old_signing_keys = self.read_old_signing_keys(
args.old_signing_key_path
config["old_signing_keys"]
)
self.key_refresh_interval = self.parse_duration(
config["key_refresh_interval"]
)
self.key_refresh_interval = args.key_refresh_interval
self.perspectives = self.read_perspectives(
args.perspectives_config_path
config["perspectives"]
)
@classmethod
def add_arguments(cls, parser):
super(KeyConfig, cls).add_arguments(parser)
key_group = parser.add_argument_group("keys")
key_group.add_argument("--signing-key-path",
help="The signing key to sign messages with")
key_group.add_argument("--old-signing-key-path",
help="The keys that the server used to sign"
" sign messages with but won't use"
" to sign new messages. E.g. it has"
" lost its private key")
key_group.add_argument("--key-refresh-interval",
default=24 * 60 * 60 * 1000, # 1 Day
help="How long a key response is valid for."
" Used to set the exipiry in /key/v2/."
" Controls how frequently servers will"
" query what keys are still valid")
key_group.add_argument("--perspectives-config-path",
help="The trusted servers to download signing"
" keys from")
def default_config(self, config_dir_path, server_name):
base_key_name = os.path.join(config_dir_path, server_name)
return """\
## Signing Keys ##
def read_perspectives(self, perspectives_config_path):
config = self.read_yaml_file(
perspectives_config_path, "perspectives_config_path"
)
# Path to the signing key to sign messages with
signing_key_path: "%(base_key_name)s.signing.key"
# The keys that the server used to sign messages with but won't use
# to sign new messages. E.g. it has lost its private key
old_signing_keys: {}
# "ed25519:auto":
# # Base64 encoded public key
# key: "The public part of your old signing key."
# # Millisecond POSIX timestamp when the key expired.
# expired_ts: 123456789123
# How long key response published by this server is valid for.
# Used to set the valid_until_ts in /key/v2 APIs.
# Determines how quickly servers will query to check which keys
# are still valid.
key_refresh_interval: "1d" # 1 Day.
# The trusted servers to download signing keys from.
perspectives:
servers:
"matrix.org":
verify_keys:
"ed25519:auto":
key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
""" % locals()
def read_perspectives(self, perspectives_config):
servers = {}
for server_name, server_config in config["servers"].items():
for server_name, server_config in perspectives_config["servers"].items():
for key_id, key_data in server_config["verify_keys"].items():
if is_signing_algorithm_supported(key_id):
key_base64 = key_data["key"]
@@ -82,66 +92,42 @@ class KeyConfig(Config):
" Try running again with --generate-config"
)
def read_old_signing_keys(self, old_signing_key_path):
old_signing_keys = self.read_file(
old_signing_key_path, "old_signing_key"
)
try:
return syutil.crypto.signing_key.read_old_signing_keys(
old_signing_keys.splitlines(True)
)
except Exception:
raise ConfigError(
"Error reading old signing keys."
)
def read_old_signing_keys(self, old_signing_keys):
keys = {}
for key_id, key_data in old_signing_keys.items():
if is_signing_algorithm_supported(key_id):
key_base64 = key_data["key"]
key_bytes = decode_base64(key_base64)
verify_key = decode_verify_key_bytes(key_id, key_bytes)
verify_key.expired_ts = key_data["expired_ts"]
keys[key_id] = verify_key
else:
raise ConfigError(
"Unsupported signing algorithm for old key: %r" % (key_id,)
)
return keys
@classmethod
def generate_config(cls, args, config_dir_path):
super(KeyConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
args.pid_file = os.path.abspath(args.pid_file)
if not args.signing_key_path:
args.signing_key_path = base_key_name + ".signing.key"
if not os.path.exists(args.signing_key_path):
with open(args.signing_key_path, "w") as signing_key_file:
def generate_files(self, config):
signing_key_path = config["signing_key_path"]
if not os.path.exists(signing_key_path):
with open(signing_key_path, "w") as signing_key_file:
key_id = "a_" + random_string(4)
syutil.crypto.signing_key.write_signing_keys(
signing_key_file,
(syutil.crypto.signing_key.generate_signing_key("auto"),),
(syutil.crypto.signing_key.generate_signing_key(key_id),),
)
else:
signing_keys = cls.read_file(args.signing_key_path, "signing_key")
signing_keys = self.read_file(signing_key_path, "signing_key")
if len(signing_keys.split("\n")[0].split()) == 1:
# handle keys in the old format.
key_id = "a_" + random_string(4)
key = syutil.crypto.signing_key.decode_signing_key_base64(
syutil.crypto.signing_key.NACL_ED25519,
"auto",
key_id,
signing_keys.split("\n")[0]
)
with open(args.signing_key_path, "w") as signing_key_file:
with open(signing_key_path, "w") as signing_key_file:
syutil.crypto.signing_key.write_signing_keys(
signing_key_file,
(key,),
)
if not args.old_signing_key_path:
args.old_signing_key_path = base_key_name + ".old.signing.keys"
if not os.path.exists(args.old_signing_key_path):
with open(args.old_signing_key_path, "w"):
pass
if not args.perspectives_config_path:
args.perspectives_config_path = base_key_name + ".perspectives"
if not os.path.exists(args.perspectives_config_path):
with open(args.perspectives_config_path, "w") as perspectives_file:
perspectives_file.write(
'servers:\n'
' matrix.org:\n'
' verify_keys:\n'
' "ed25519:auto":\n'
' key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"\n'
)
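
Auto-generated signing keys now carry a randomised key id rather than the
fixed ``auto`` (commit ``c28f1d16f0``), so a server that regenerates its key
is unlikely to publish a different key under a previously used id. The id is
built as::

    from synapse.util.stringutils import random_string

    key_id = "a_" + random_string(4)   # e.g. "a_XkTw"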

View File

@@ -19,25 +19,88 @@ from twisted.python.log import PythonLoggingObserver
import logging
import logging.config
import yaml
from string import Template
import os
DEFAULT_LOG_CONFIG = Template("""
version: 1
formatters:
precise:
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s\
- %(message)s'
filters:
context:
(): synapse.util.logcontext.LoggingContextFilter
request: ""
handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: ${log_file}
maxBytes: 104857600
backupCount: 10
filters: [context]
level: INFO
console:
class: logging.StreamHandler
formatter: precise
loggers:
synapse:
level: INFO
synapse.storage.SQL:
level: INFO
root:
level: INFO
handlers: [file, console]
""")
class LoggingConfig(Config):
def __init__(self, args):
super(LoggingConfig, self).__init__(args)
self.verbosity = int(args.verbose) if args.verbose else None
self.log_config = self.abspath(args.log_config)
self.log_file = self.abspath(args.log_file)
@classmethod
def read_config(self, config):
self.verbosity = config.get("verbose", 0)
self.log_config = self.abspath(config.get("log_config"))
self.log_file = self.abspath(config.get("log_file"))
def default_config(self, config_dir_path, server_name):
log_file = self.abspath("homeserver.log")
log_config = self.abspath(
os.path.join(config_dir_path, server_name + ".log.config")
)
return """
# Logging verbosity level.
verbose: 0
# File to write logging to
log_file: "%(log_file)s"
# A yaml python logging config file
log_config: "%(log_config)s"
""" % locals()
def read_arguments(self, args):
if args.verbose is not None:
self.verbosity = args.verbose
if args.log_config is not None:
self.log_config = args.log_config
if args.log_file is not None:
self.log_file = args.log_file
def add_arguments(cls, parser):
super(LoggingConfig, cls).add_arguments(parser)
logging_group = parser.add_argument_group("logging")
logging_group.add_argument(
'-v', '--verbose', dest="verbose", action='count',
help="The verbosity level."
)
logging_group.add_argument(
'-f', '--log-file', dest="log_file", default="homeserver.log",
'-f', '--log-file', dest="log_file",
help="File to log to."
)
logging_group.add_argument(
@@ -45,6 +108,14 @@ class LoggingConfig(Config):
help="Python logging config file"
)
def generate_files(self, config):
log_config = config.get("log_config")
if log_config and not os.path.exists(log_config):
with open(log_config, "wb") as log_config_file:
log_config_file.write(
DEFAULT_LOG_CONFIG.substitute(log_file=config["log_file"])
)
def setup_logging(self):
log_format = (
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"

View File

@@ -17,20 +17,17 @@ from ._base import Config
class MetricsConfig(Config):
def __init__(self, args):
super(MetricsConfig, self).__init__(args)
self.enable_metrics = args.enable_metrics
self.metrics_port = args.metrics_port
def read_config(self, config):
self.enable_metrics = config["enable_metrics"]
self.metrics_port = config.get("metrics_port")
@classmethod
def add_arguments(cls, parser):
super(MetricsConfig, cls).add_arguments(parser)
metrics_group = parser.add_argument_group("metrics")
metrics_group.add_argument(
'--enable-metrics', dest="enable_metrics", action="store_true",
help="Enable collection and rendering of performance metrics"
)
metrics_group.add_argument(
'--metrics-port', metavar="PORT", type=int,
help="Separate port to accept metrics requests on (on localhost)"
)
def default_config(self, config_dir_path, server_name):
return """\
## Metrics ###
# Enable collection and rendering of performance metrics
enable_metrics: False
# Separate port to accept metrics requests on (on localhost)
# metrics_port: 8081
"""

View File

@@ -17,56 +17,42 @@ from ._base import Config
class RatelimitConfig(Config):
def __init__(self, args):
super(RatelimitConfig, self).__init__(args)
self.rc_messages_per_second = args.rc_messages_per_second
self.rc_message_burst_count = args.rc_message_burst_count
def read_config(self, config):
self.rc_messages_per_second = config["rc_messages_per_second"]
self.rc_message_burst_count = config["rc_message_burst_count"]
self.federation_rc_window_size = args.federation_rc_window_size
self.federation_rc_sleep_limit = args.federation_rc_sleep_limit
self.federation_rc_sleep_delay = args.federation_rc_sleep_delay
self.federation_rc_reject_limit = args.federation_rc_reject_limit
self.federation_rc_concurrent = args.federation_rc_concurrent
self.federation_rc_window_size = config["federation_rc_window_size"]
self.federation_rc_sleep_limit = config["federation_rc_sleep_limit"]
self.federation_rc_sleep_delay = config["federation_rc_sleep_delay"]
self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
self.federation_rc_concurrent = config["federation_rc_concurrent"]
@classmethod
def add_arguments(cls, parser):
super(RatelimitConfig, cls).add_arguments(parser)
rc_group = parser.add_argument_group("ratelimiting")
rc_group.add_argument(
"--rc-messages-per-second", type=float, default=0.2,
help="number of messages a client can send per second"
)
rc_group.add_argument(
"--rc-message-burst-count", type=float, default=10,
help="number of message a client can send before being throttled"
)
def default_config(self, config_dir_path, server_name):
return """\
## Ratelimiting ##
rc_group.add_argument(
"--federation-rc-window-size", type=int, default=10000,
help="The federation window size in milliseconds",
)
# Number of messages a client can send per second
rc_messages_per_second: 0.2
rc_group.add_argument(
"--federation-rc-sleep-limit", type=int, default=10,
help="The number of federation requests from a single server"
" in a window before the server will delay processing the"
" request.",
)
# Number of message a client can send before being throttled
rc_message_burst_count: 10.0
rc_group.add_argument(
"--federation-rc-sleep-delay", type=int, default=500,
help="The duration in milliseconds to delay processing events from"
" remote servers by if they go over the sleep limit.",
)
# The federation window size in milliseconds
federation_rc_window_size: 1000
rc_group.add_argument(
"--federation-rc-reject-limit", type=int, default=50,
help="The maximum number of concurrent federation requests allowed"
" from a single server",
)
# The number of federation requests from a single server in a window
# before the server will delay processing the request.
federation_rc_sleep_limit: 10
rc_group.add_argument(
"--federation-rc-concurrent", type=int, default=3,
help="The number of federation requests to concurrently process"
" from a single server",
)
# The duration in milliseconds to delay processing events from
# remote servers by if they go over the sleep limit.
federation_rc_sleep_delay: 500
# The maximum number of concurrent federation requests allowed
# from a single server
federation_rc_reject_limit: 50
# The number of federation requests to concurrently process from a
# single server
federation_rc_concurrent: 3
"""

View File

@@ -17,45 +17,44 @@ from ._base import Config
from synapse.util.stringutils import random_string_with_symbols
import distutils.util
from distutils.util import strtobool
class RegistrationConfig(Config):
def __init__(self, args):
super(RegistrationConfig, self).__init__(args)
# `args.enable_registration` may either be a bool or a string depending
# on if the option was given a value (e.g. --enable-registration=true
# would set `args.enable_registration` to "true" not True.)
def read_config(self, config):
self.disable_registration = not bool(
distutils.util.strtobool(str(args.enable_registration))
strtobool(str(config["enable_registration"]))
)
self.registration_shared_secret = args.registration_shared_secret
if "disable_registration" in config:
self.disable_registration = bool(
strtobool(str(config["disable_registration"]))
)
@classmethod
def add_arguments(cls, parser):
super(RegistrationConfig, cls).add_arguments(parser)
self.registration_shared_secret = config.get("registration_shared_secret")
def default_config(self, config_dir, server_name):
registration_shared_secret = random_string_with_symbols(50)
return """\
## Registration ##
# Enable registration for new users.
enable_registration: True
# If set, allows registration by anyone who also has the shared
# secret, even if registration is otherwise disabled.
registration_shared_secret: "%(registration_shared_secret)s"
""" % locals()
def add_arguments(self, parser):
reg_group = parser.add_argument_group("registration")
reg_group.add_argument(
"--enable-registration",
const=True,
default=False,
nargs='?',
help="Enable registration for new users.",
)
reg_group.add_argument(
"--registration-shared-secret", type=str,
help="If set, allows registration by anyone who also has the shared"
" secret, even if registration is otherwise disabled.",
"--enable-registration", action="store_true", default=None,
help="Enable registration for new users."
)
@classmethod
def generate_config(cls, args, config_dir_path):
super(RegistrationConfig, cls).generate_config(args, config_dir_path)
if args.enable_registration is None:
args.enable_registration = False
if args.registration_shared_secret is None:
args.registration_shared_secret = random_string_with_symbols(50)
def read_arguments(self, args):
if args.enable_registration is not None:
self.disable_registration = not bool(
strtobool(str(args.enable_registration))
)

View File

@@ -17,32 +17,20 @@ from ._base import Config
class ContentRepositoryConfig(Config):
def __init__(self, args):
super(ContentRepositoryConfig, self).__init__(args)
self.max_upload_size = self.parse_size(args.max_upload_size)
self.max_image_pixels = self.parse_size(args.max_image_pixels)
self.media_store_path = self.ensure_directory(args.media_store_path)
def read_config(self, config):
self.max_upload_size = self.parse_size(config["max_upload_size"])
self.max_image_pixels = self.parse_size(config["max_image_pixels"])
self.media_store_path = self.ensure_directory(config["media_store_path"])
def parse_size(self, string):
sizes = {"K": 1024, "M": 1024 * 1024}
size = 1
suffix = string[-1]
if suffix in sizes:
string = string[:-1]
size = sizes[suffix]
return int(string) * size
def default_config(self, config_dir_path, server_name):
media_store = self.default_path("media_store")
return """
# Directory where uploaded images and attachments are stored.
media_store_path: "%(media_store)s"
@classmethod
def add_arguments(cls, parser):
super(ContentRepositoryConfig, cls).add_arguments(parser)
db_group = parser.add_argument_group("content_repository")
db_group.add_argument(
"--max-upload-size", default="10M"
)
db_group.add_argument(
"--media-store-path", default=cls.default_path("media_store")
)
db_group.add_argument(
"--max-image-pixels", default="32M",
help="Maximum number of pixels that will be thumbnailed"
)
# The largest allowed upload size in bytes
max_upload_size: "10M"
# Maximum number of pixels that will be thumbnailed
max_image_pixels: "32M"
""" % locals()

View File

@@ -17,64 +17,88 @@ from ._base import Config
class ServerConfig(Config):
def __init__(self, args):
super(ServerConfig, self).__init__(args)
self.server_name = args.server_name
self.bind_port = args.bind_port
self.bind_host = args.bind_host
self.unsecure_port = args.unsecure_port
self.daemonize = args.daemonize
self.pid_file = self.abspath(args.pid_file)
self.web_client = args.web_client
self.manhole = args.manhole
self.soft_file_limit = args.soft_file_limit
if not args.content_addr:
host = args.server_name
def read_config(self, config):
self.server_name = config["server_name"]
self.bind_port = config["bind_port"]
self.bind_host = config["bind_host"]
self.unsecure_port = config["unsecure_port"]
self.manhole = config.get("manhole")
self.pid_file = self.abspath(config.get("pid_file"))
self.web_client = config["web_client"]
self.soft_file_limit = config["soft_file_limit"]
self.daemonize = config.get("daemonize")
# Attempt to guess the content_addr for the v0 content repostitory
content_addr = config.get("content_addr")
if not content_addr:
host = self.server_name
if ':' not in host:
host = "%s:%d" % (host, args.unsecure_port)
host = "%s:%d" % (host, self.unsecure_port)
else:
host = host.split(':')[0]
host = "%s:%d" % (host, args.unsecure_port)
args.content_addr = "http://%s" % (host,)
host = "%s:%d" % (host, self.unsecure_port)
content_addr = "http://%s" % (host,)
self.content_addr = args.content_addr
self.content_addr = content_addr
@classmethod
def add_arguments(cls, parser):
super(ServerConfig, cls).add_arguments(parser)
def default_config(self, config_dir_path, server_name):
if ":" in server_name:
bind_port = int(server_name.split(":")[1])
unsecure_port = bind_port - 400
else:
bind_port = 8448
unsecure_port = 8008
pid_file = self.abspath("homeserver.pid")
return """\
## Server ##
# The domain name of the server, with optional explicit port.
# This is used by remote servers to connect to this server,
# e.g. matrix.org, localhost:8080, etc.
server_name: "%(server_name)s"
# The port to listen for HTTPS requests on.
# For when matrix traffic is sent directly to synapse.
bind_port: %(bind_port)s
# The port to listen for HTTP requests on.
# For when matrix traffic passes through loadbalancer that unwraps TLS.
unsecure_port: %(unsecure_port)s
# Local interface to listen on.
# The empty string will cause synapse to listen on all interfaces.
bind_host: ""
# When running as a daemon, the file to store the pid in
pid_file: %(pid_file)s
# Whether to serve a web client from the HTTP/HTTPS root resource.
web_client: True
# Set the soft limit on the number of file descriptors synapse can use
# Zero is used to indicate synapse should set the soft limit to the
# hard limit.
soft_file_limit: 0
# Turn on the twisted telnet manhole service on localhost on the given
# port.
#manhole: 9000
""" % locals()
def read_arguments(self, args):
if args.manhole is not None:
self.manhole = args.manhole
if args.daemonize is not None:
self.daemonize = args.daemonize
def add_arguments(self, parser):
server_group = parser.add_argument_group("server")
server_group.add_argument(
"-H", "--server-name", default="localhost",
help="The domain name of the server, with optional explicit port. "
"This is used by remote servers to connect to this server, "
"e.g. matrix.org, localhost:8080, etc."
)
server_group.add_argument("-p", "--bind-port", metavar="PORT",
type=int, help="https port to listen on",
default=8448)
server_group.add_argument("--unsecure-port", metavar="PORT",
type=int, help="http port to listen on",
default=8008)
server_group.add_argument("--bind-host", default="",
help="Local interface to listen on")
server_group.add_argument("-D", "--daemonize", action='store_true',
default=None,
help="Daemonize the home server")
server_group.add_argument('--pid-file', default="homeserver.pid",
help="When running as a daemon, the file to"
" store the pid in")
server_group.add_argument('--web_client', default=True, type=bool,
help="Whether or not to serve a web client")
server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
type=int,
help="Turn on the twisted telnet manhole"
" service on the given port.")
server_group.add_argument("--content-addr", default=None,
help="The host and scheme to use for the "
"content repository")
server_group.add_argument("--soft-file-limit", type=int, default=0,
help="Set the soft limit on the number of "
"file descriptors synapse can use. "
"Zero is used to indicate synapse "
"should set the soft limit to the hard"
"limit.")

View File

@@ -23,37 +23,44 @@ GENERATE_DH_PARAMS = False
class TlsConfig(Config):
def __init__(self, args):
super(TlsConfig, self).__init__(args)
def read_config(self, config):
self.tls_certificate = self.read_tls_certificate(
args.tls_certificate_path
config.get("tls_certificate_path")
)
self.no_tls = args.no_tls
self.no_tls = config.get("no_tls", False)
if self.no_tls:
self.tls_private_key = None
else:
self.tls_private_key = self.read_tls_private_key(
args.tls_private_key_path
config.get("tls_private_key_path")
)
self.tls_dh_params_path = self.check_file(
args.tls_dh_params_path, "tls_dh_params"
config.get("tls_dh_params_path"), "tls_dh_params"
)
@classmethod
def add_arguments(cls, parser):
super(TlsConfig, cls).add_arguments(parser)
tls_group = parser.add_argument_group("tls")
tls_group.add_argument("--tls-certificate-path",
help="PEM encoded X509 certificate for TLS")
tls_group.add_argument("--tls-private-key-path",
help="PEM encoded private key for TLS")
tls_group.add_argument("--tls-dh-params-path",
help="PEM dh parameters for ephemeral keys")
tls_group.add_argument("--no-tls", action='store_true',
help="Don't bind to the https port.")
def default_config(self, config_dir_path, server_name):
base_key_name = os.path.join(config_dir_path, server_name)
tls_certificate_path = base_key_name + ".tls.crt"
tls_private_key_path = base_key_name + ".tls.key"
tls_dh_params_path = base_key_name + ".tls.dh"
return """\
# PEM encoded X509 certificate for TLS
tls_certificate_path: "%(tls_certificate_path)s"
# PEM encoded private key for TLS
tls_private_key_path: "%(tls_private_key_path)s"
# PEM dh parameters for ephemeral keys
tls_dh_params_path: "%(tls_dh_params_path)s"
# Don't bind to the https port
no_tls: False
""" % locals()
def read_tls_certificate(self, cert_path):
cert_pem = self.read_file(cert_path, "tls_certificate")
@@ -63,22 +70,13 @@ class TlsConfig(Config):
private_key_pem = self.read_file(private_key_path, "tls_private_key")
return crypto.load_privatekey(crypto.FILETYPE_PEM, private_key_pem)
@classmethod
def generate_config(cls, args, config_dir_path):
super(TlsConfig, cls).generate_config(args, config_dir_path)
base_key_name = os.path.join(config_dir_path, args.server_name)
def generate_files(self, config):
tls_certificate_path = config["tls_certificate_path"]
tls_private_key_path = config["tls_private_key_path"]
tls_dh_params_path = config["tls_dh_params_path"]
if args.tls_certificate_path is None:
args.tls_certificate_path = base_key_name + ".tls.crt"
if args.tls_private_key_path is None:
args.tls_private_key_path = base_key_name + ".tls.key"
if args.tls_dh_params_path is None:
args.tls_dh_params_path = base_key_name + ".tls.dh"
if not os.path.exists(args.tls_private_key_path):
with open(args.tls_private_key_path, "w") as private_key_file:
if not os.path.exists(tls_private_key_path):
with open(tls_private_key_path, "w") as private_key_file:
tls_private_key = crypto.PKey()
tls_private_key.generate_key(crypto.TYPE_RSA, 2048)
private_key_pem = crypto.dump_privatekey(
@@ -86,17 +84,17 @@ class TlsConfig(Config):
)
private_key_file.write(private_key_pem)
else:
with open(args.tls_private_key_path) as private_key_file:
with open(tls_private_key_path) as private_key_file:
private_key_pem = private_key_file.read()
tls_private_key = crypto.load_privatekey(
crypto.FILETYPE_PEM, private_key_pem
)
if not os.path.exists(args.tls_certificate_path):
with open(args.tls_certificate_path, "w") as certifcate_file:
if not os.path.exists(tls_certificate_path):
with open(tls_certificate_path, "w") as certifcate_file:
cert = crypto.X509()
subject = cert.get_subject()
subject.CN = args.server_name
subject.CN = config["server_name"]
cert.set_serial_number(1000)
cert.gmtime_adj_notBefore(0)
@@ -110,16 +108,16 @@ class TlsConfig(Config):
certifcate_file.write(cert_pem)
if not os.path.exists(args.tls_dh_params_path):
if not os.path.exists(tls_dh_params_path):
if GENERATE_DH_PARAMS:
subprocess.check_call([
"openssl", "dhparam",
"-outform", "PEM",
"-out", args.tls_dh_params_path,
"-out", tls_dh_params_path,
"2048"
])
else:
with open(args.tls_dh_params_path, "w") as dh_params_file:
with open(tls_dh_params_path, "w") as dh_params_file:
dh_params_file.write(
"2048-bit DH parameters taken from rfc3526\n"
"-----BEGIN DH PARAMETERS-----\n"

View File

@@ -17,28 +17,21 @@ from ._base import Config
class VoipConfig(Config):
def __init__(self, args):
super(VoipConfig, self).__init__(args)
self.turn_uris = args.turn_uris
self.turn_shared_secret = args.turn_shared_secret
self.turn_user_lifetime = args.turn_user_lifetime
def read_config(self, config):
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config["turn_shared_secret"]
self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])
@classmethod
def add_arguments(cls, parser):
super(VoipConfig, cls).add_arguments(parser)
group = parser.add_argument_group("voip")
group.add_argument(
"--turn-uris", type=str, default=None, action='append',
help="The public URIs of the TURN server to give to clients"
)
group.add_argument(
"--turn-shared-secret", type=str, default=None,
help=(
"The shared secret used to compute passwords for the TURN"
" server"
)
)
group.add_argument(
"--turn-user-lifetime", type=int, default=(1000 * 60 * 60),
help="How long generated TURN credentials last, in ms"
)
def default_config(self, config_dir_path, server_name):
return """\
## Turn ##
# The public URIs of the TURN server to give to clients
turn_uris: []
# The shared secret used to compute passwords for the TURN server
turn_shared_secret: "YOUR_SHARED_SECRET"
# How long generated TURN credentials last
turn_user_lifetime: "1h"
"""

View File

@@ -491,7 +491,7 @@ class FederationClient(FederationBase):
]
signed_events = yield self._check_sigs_and_hash_and_fetch(
destination, events, outlier=True
destination, events, outlier=False
)
have_gotten_all_from_destination = True

View File

@@ -23,8 +23,6 @@ from twisted.internet import defer
from synapse.util.logutils import log_function
from syutil.jsonutil import encode_canonical_json
import logging
@@ -71,7 +69,7 @@ class TransactionActions(object):
transaction.transaction_id,
transaction.origin,
code,
encode_canonical_json(response)
response,
)
@defer.inlineCallbacks
@@ -101,5 +99,5 @@ class TransactionActions(object):
transaction.transaction_id,
transaction.destination,
response_code,
encode_canonical_json(response_dict)
response_dict,
)

View File

@@ -104,7 +104,6 @@ class TransactionQueue(object):
return not destination.startswith("localhost")
@defer.inlineCallbacks
@log_function
def enqueue_pdu(self, pdu, destinations, order):
# We loop through all destinations to see whether we already have
# a transaction in progress. If we do, stick it in the pending_pdus

View File

@@ -150,8 +150,6 @@ class BaseHandler(object):
notify_d.addErrback(log_failure)
fed_d = federation_handler.handle_new_event(
federation_handler.handle_new_event(
event, destinations=destinations,
)
fed_d.addErrback(log_failure)

View File

@@ -180,7 +180,7 @@ class ApplicationServicesHandler(object):
return
user_info = yield self.store.get_user_by_id(user_id)
if len(user_info) > 0:
if not user_info:
defer.returnValue(False)
return

View File

@@ -159,7 +159,7 @@ class AuthHandler(BaseHandler):
logger.warn("Attempted to login as %s but they do not exist", user)
raise LoginError(401, "", errcode=Codes.UNAUTHORIZED)
stored_hash = user_info[0]["password_hash"]
stored_hash = user_info["password_hash"]
if bcrypt.checkpw(password, stored_hash):
defer.returnValue(user)
else:
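The fix above reflects get_user_by_id now returning a single row dict rather than a list of rows; the bcrypt check itself is unchanged. For reference, a minimal round trip with the bcrypt library, matching the checkpw call in the handler (the password literal is obviously a placeholder):

    import bcrypt

    # At registration time: salt and hash the password.
    stored_hash = bcrypt.hashpw(b"s3kr1t", bcrypt.gensalt())

    # At login time: compare the supplied password against the stored
    # hash, as AuthHandler does above.
    assert bcrypt.checkpw(b"s3kr1t", stored_hash)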

View File

@@ -73,8 +73,6 @@ class FederationHandler(BaseHandler):
# When joining a room we need to queue any events for that room up
self.room_queues = {}
@log_function
@defer.inlineCallbacks
def handle_new_event(self, event, destinations):
""" Takes in an event from the client to server side, that has already
been authed and handled by the state module, and sends it to any
@@ -89,9 +87,7 @@ class FederationHandler(BaseHandler):
processing.
"""
yield run_on_reactor()
self.replication_layer.send_pdu(event, destinations)
return self.replication_layer.send_pdu(event, destinations)
@log_function
@defer.inlineCallbacks

View File

@@ -250,47 +250,31 @@ class MessageHandler(BaseHandler):
is joined on, may return a "messages" key with messages, depending
on the specified PaginationConfig.
"""
start_time = self.clock.time_msec()
def delta():
return self.clock.time_msec() - start_time
logger.info("initial_sync: start")
room_list = yield self.store.get_rooms_for_user_where_membership_is(
user_id=user_id,
membership_list=[Membership.INVITE, Membership.JOIN]
)
logger.info("initial_sync: got_rooms %d", delta())
user = UserID.from_string(user_id)
rooms_ret = []
now_token = yield self.hs.get_event_sources().get_current_token()
logger.info("initial_sync: now_token %d", delta())
presence_stream = self.hs.get_event_sources().sources["presence"]
pagination_config = PaginationConfig(from_token=now_token)
presence, _ = yield presence_stream.get_pagination_rows(
user, pagination_config.get_source_config("presence"), None
)
logger.info("initial_sync: presence_done %d", delta())
public_room_ids = yield self.store.get_public_room_ids()
logger.info("initial_sync: public_rooms %d", delta())
limit = pagin_config.limit
if limit is None:
limit = 10
@defer.inlineCallbacks
def handle_room(event):
logger.info("initial_sync: start: %s %d", event.room_id, delta())
d = {
"room_id": event.room_id,
"membership": event.membership,
@@ -341,15 +325,11 @@ class MessageHandler(BaseHandler):
except:
logger.exception("Failed to get snapshot")
logger.info("initial_sync: end: %s %d", event.room_id, delta())
yield defer.gatherResults(
[handle_room(e) for e in room_list],
consumeErrors=True
)
logger.info("initial_sync: done", delta())
ret = {
"rooms": rooms_ret,
"presence": presence,

View File

@@ -529,11 +529,19 @@ class RoomListHandler(BaseHandler):
@defer.inlineCallbacks
def get_public_room_list(self):
chunk = yield self.store.get_rooms(is_public=True)
for room in chunk:
joined_users = yield self.store.get_users_in_room(
room_id=room["room_id"],
)
room["num_joined_members"] = len(joined_users)
results = yield defer.gatherResults(
[
self.store.get_users_in_room(
room_id=room["room_id"],
)
for room in chunk
],
consumeErrors=True,
)
for i, room in enumerate(chunk):
room["num_joined_members"] = len(results[i])
# FIXME (erikj): START is no longer a valid value
defer.returnValue({"start": "START", "end": "END", "chunk": chunk})

View File

@@ -17,7 +17,6 @@ from twisted.internet import defer
from synapse.util.logutils import log_function
from synapse.util.logcontext import PreserveLoggingContext
from synapse.util.async import run_on_reactor
from synapse.types import StreamToken
import synapse.metrics
@@ -162,8 +161,6 @@ class Notifier(object):
listening to the room, and any listeners for the users in the
`extra_users` param.
"""
yield run_on_reactor()
# poke any interested application service.
self.hs.get_handlers().appservice_handler.notify_interested_services(
event
@@ -240,8 +237,6 @@ class Notifier(object):
Will wake up all listeners for the given users and rooms.
"""
yield run_on_reactor()
# TODO(paul): This is horrible, having to manually list every event
# source here individually
presence_source = self.event_sources.sources["presence"]

View File

@@ -33,7 +33,7 @@ REQUIREMENTS = {
}
CONDITIONAL_REQUIREMENTS = {
"web_client": {
"matrix_angular_sdk>=0.6.5": ["syweb>=0.6.5"],
"matrix_angular_sdk>=0.6.6": ["syweb>=0.6.6"],
}
}
@@ -62,8 +62,8 @@ DEPENDENCY_LINKS = [
),
github_link(
project="matrix-org/matrix-angular-sdk",
version="v0.6.5",
egg="matrix_angular_sdk-0.6.5",
version="v0.6.6",
egg="matrix_angular_sdk-0.6.6",
),
]

View File

@@ -65,12 +65,12 @@ class PasswordRestServlet(RestServlet):
if 'medium' not in threepid or 'address' not in threepid:
raise SynapseError(500, "Malformed threepid")
# if using email, we must know about the email they're authing with!
threepid_user = yield self.hs.get_datastore().get_user_by_threepid(
threepid_user_id = yield self.hs.get_datastore().get_user_id_by_threepid(
threepid['medium'], threepid['address']
)
if not threepid_user:
if not threepid_user_id:
raise SynapseError(404, "Email address not found", Codes.NOT_FOUND)
user_id = threepid_user
user_id = threepid_user_id
else:
logger.error("Auth succeeded but no known type!", result.keys())
raise SynapseError(500, "", Codes.UNKNOWN)

View File

@@ -59,7 +59,6 @@ class BaseHomeServer(object):
'config',
'clock',
'http_client',
'db_name',
'db_pool',
'persistence_service',
'replication_layer',

View File

@@ -31,7 +31,9 @@ import functools
import simplejson as json
import sys
import time
import threading
DEBUG_CACHES = False
logger = logging.getLogger(__name__)
@@ -68,9 +70,20 @@ class Cache(object):
self.name = name
self.keylen = keylen
self.sequence = 0
self.thread = None
caches_by_name[name] = self.cache
def check_thread(self):
expected_thread = self.thread
if expected_thread is None:
self.thread = threading.current_thread()
else:
if expected_thread is not threading.current_thread():
raise ValueError(
"Cache objects can only be accessed from the main thread"
)
def get(self, *keyargs):
if len(keyargs) != self.keylen:
raise ValueError("Expected a key to have %d items", self.keylen)
@@ -82,6 +95,13 @@ class Cache(object):
cache_counter.inc_misses(self.name)
raise KeyError()
def update(self, sequence, *args):
self.check_thread()
if self.sequence == sequence:
# Only update the cache if the caches sequence number matches the
# number that the cache had before the SELECT was started (SYN-369)
self.prefill(*args)
def prefill(self, *args): # because I can't *keyargs, value
keyargs = args[:-1]
value = args[-1]
@@ -96,9 +116,12 @@ class Cache(object):
self.cache[keyargs] = value
def invalidate(self, *keyargs):
self.check_thread()
if len(keyargs) != self.keylen:
raise ValueError("Expected a key to have %d items", self.keylen)
# Increment the sequence number so that any SELECT statements that
# raced with the INSERT don't update the cache (SYN-369)
self.sequence += 1
self.cache.pop(keyargs, None)
@@ -128,11 +151,26 @@ def cached(max_entries=1000, num_args=1, lru=False):
@defer.inlineCallbacks
def wrapped(self, *keyargs):
try:
defer.returnValue(cache.get(*keyargs))
cached_result = cache.get(*keyargs)
if DEBUG_CACHES:
actual_result = yield orig(self, *keyargs)
if actual_result != cached_result:
logger.error(
"Stale cache entry %s%r: cached: %r, actual %r",
orig.__name__, keyargs,
cached_result, actual_result,
)
raise ValueError("Stale cache entry")
defer.returnValue(cached_result)
except KeyError:
# Get the sequence number of the cache before reading from the
# database so that we can tell if the cache is invalidated
# while the SELECT is executing (SYN-369)
sequence = cache.sequence
ret = yield orig(self, *keyargs)
cache.prefill(*keyargs + (ret,))
cache.update(sequence, *keyargs + (ret,))
defer.returnValue(ret)
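The SYN-369 fix hinges on snapshotting cache.sequence before the database read and refusing to populate the cache afterwards if an invalidation bumped the sequence in the meantime. The same scheme in miniature, without the decorator machinery:

    class SequencedCache(object):
        # Miniature version of the sequence-number race guard above.
        def __init__(self):
            self.cache = {}
            self.sequence = 0

        def invalidate(self, key):
            # Any SELECT in flight becomes a no-op update (SYN-369).
            self.sequence += 1
            self.cache.pop(key, None)

        def update(self, sequence, key, value):
            if sequence == self.sequence:
                # Nothing was invalidated while the read was running.
                self.cache[key] = value

    def cached_get(cache, key, load_from_db):
        try:
            return cache.cache[key]
        except KeyError:
            sequence = cache.sequence   # snapshot before the read
            value = load_from_db(key)
            cache.update(sequence, key, value)
            return value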
@@ -147,12 +185,20 @@ class LoggingTransaction(object):
"""An object that almost-transparently proxies for the 'txn' object
passed to the constructor. Adds logging and metrics to the .execute()
method."""
__slots__ = ["txn", "name", "database_engine"]
__slots__ = ["txn", "name", "database_engine", "after_callbacks"]
def __init__(self, txn, name, database_engine):
def __init__(self, txn, name, database_engine, after_callbacks):
object.__setattr__(self, "txn", txn)
object.__setattr__(self, "name", name)
object.__setattr__(self, "database_engine", database_engine)
object.__setattr__(self, "after_callbacks", after_callbacks)
def call_after(self, callback, *args):
"""Call the given callback on the main twisted thread after the
transaction has finished. Used to invalidate the caches on the
correct thread.
"""
self.after_callbacks.append((callback, args))
def __getattr__(self, name):
return getattr(self.txn, name)
@@ -160,22 +206,23 @@ class LoggingTransaction(object):
def __setattr__(self, name, value):
setattr(self.txn, name, value)
def execute(self, sql, *args, **kwargs):
def execute(self, sql, *args):
self._do_execute(self.txn.execute, sql, *args)
def executemany(self, sql, *args):
self._do_execute(self.txn.executemany, sql, *args)
def _do_execute(self, func, sql, *args):
# TODO(paul): Maybe use 'info' and 'debug' for values?
sql_logger.debug("[SQL] {%s} %s", self.name, sql)
sql = self.database_engine.convert_param_style(sql)
if args and args[0]:
args = list(args)
args[0] = [
self.database_engine.encode_parameter(a) for a in args[0]
]
if args:
try:
sql_logger.debug(
"[SQL values] {%s} " + ", ".join(("<%r>",) * len(args[0])),
self.name,
*args[0]
"[SQL values] {%s} %r",
self.name, args[0]
)
except:
# Don't let logging failures stop SQL from working
@@ -184,8 +231,8 @@ class LoggingTransaction(object):
start = time.time() * 1000
try:
return self.txn.execute(
sql, *args, **kwargs
return func(
sql, *args
)
except Exception as e:
logger.debug("[SQL FAIL] {%s} %s", self.name, e)
@@ -298,6 +345,8 @@ class SQLBaseStore(object):
start_time = time.time() * 1000
after_callbacks = []
def inner_func(conn, *args, **kwargs):
with LoggingContext("runInteraction") as context:
if self.database_engine.is_connection_closed(conn):
@@ -322,10 +371,10 @@ class SQLBaseStore(object):
while True:
try:
txn = conn.cursor()
return func(
LoggingTransaction(txn, name, self.database_engine),
*args, **kwargs
txn = LoggingTransaction(
txn, name, self.database_engine, after_callbacks
)
return func(txn, *args, **kwargs)
except self.database_engine.module.OperationalError as e:
# This can happen if the database disappears mid
# transaction.
@@ -374,6 +423,8 @@ class SQLBaseStore(object):
result = yield self._db_pool.runWithConnection(
inner_func, *args, **kwargs
)
for after_callback, after_args in after_callbacks:
after_callback(*after_args)
defer.returnValue(result)
def cursor_to_dict(self, cursor):
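call_after exists because cache invalidation must not run on the database thread, and must not run at all if the transaction is retried or rolled back; callbacks queue up on the transaction, and runInteraction drains them on the main thread only after the interaction succeeds. A synchronous sketch of that flow (the real code defers across threads):

    class FakeTransaction(object):
        def __init__(self):
            self.after_callbacks = []

        def call_after(self, callback, *args):
            # Queued from inside the db thread...
            self.after_callbacks.append((callback, args))

    def run_interaction(func):
        txn = FakeTransaction()
        result = func(txn)  # on the database thread in the real code
        # ...and only drained here, on the main thread, once the
        # interaction has completed without raising.
        for callback, args in txn.after_callbacks:
            callback(*args)
        return result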
@@ -438,18 +489,49 @@ class SQLBaseStore(object):
@log_function
def _simple_insert_txn(self, txn, table, values):
keys, vals = zip(*values.items())
sql = "INSERT INTO %s (%s) VALUES(%s)" % (
table,
", ".join(k for k in values),
", ".join("?" for k in values)
", ".join(k for k in keys),
", ".join("?" for _ in keys)
)
logger.debug(
"[SQL] %s Args=%s",
sql, values.values(),
)
txn.execute(sql, vals)
def _simple_insert_many_txn(self, txn, table, values):
if not values:
return
# This is a *slight* abomination to get a list of tuples of key names
# and a list of tuples of value names.
#
# i.e. [{"a": 1, "b": 2}, {"c": 3, "d": 4}]
# => [("a", "b",), ("c", "d",)] and [(1, 2,), (3, 4,)]
#
# The sort is to ensure that we don't rely on dictionary iteration
# order.
keys, vals = zip(*[
zip(
*(sorted(i.items(), key=lambda kv: kv[0]))
)
for i in values
if i
])
for k in keys:
if k != keys[0]:
raise RuntimeError(
"All items must have the same keys"
)
sql = "INSERT INTO %s (%s) VALUES(%s)" % (
table,
", ".join(k for k in keys[0]),
", ".join("?" for _ in keys[0])
)
txn.execute(sql, values.values())
txn.executemany(sql, vals)
def _simple_upsert(self, table, keyvalues, values,
insertion_values={}, desc="_simple_upsert", lock=True):
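The comment above promises that the nested zip turns a list of row dicts into one tuple of key-tuples and one tuple of value-tuples, sorted so dictionary iteration order cannot leak into the SQL. A worked example of exactly that transformation:

    rows = [{"a": 1, "b": 2}, {"a": 3, "b": 4}]

    keys, vals = zip(*[
        zip(*sorted(row.items(), key=lambda kv: kv[0]))
        for row in rows
        if row
    ])
    # keys == (("a", "b"), ("a", "b"))  -- one key-tuple per row
    # vals == ((1, 2), (3, 4))          -- matching value-tuples

    sql = "INSERT INTO %s (%s) VALUES(%s)" % (
        "tablename",
        ", ".join(keys[0]),
        ", ".join("?" for _ in keys[0]),
    )
    # cursor.executemany(sql, vals) then inserts every row in one call.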

View File

@@ -36,9 +36,6 @@ class PostgresEngine(object):
def convert_param_style(self, sql):
return sql.replace("?", "%s")
def encode_parameter(self, param):
return param
def on_new_connection(self, db_conn):
db_conn.set_isolation_level(
self.module.extensions.ISOLATION_LEVEL_REPEATABLE_READ

View File

@@ -26,9 +26,6 @@ class Sqlite3Engine(object):
def convert_param_style(self, sql):
return sql
def encode_parameter(self, param):
return param
def on_new_connection(self, db_conn):
self.prepare_database(db_conn)

View File

@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import SQLBaseStore
from ._base import SQLBaseStore, cached
from syutil.base64util import encode_base64
import logging
@@ -96,6 +96,7 @@ class EventFederationStore(SQLBaseStore):
room_id,
)
@cached()
def get_latest_event_ids_in_room(self, room_id):
return self._simple_select_onecol(
table="event_forward_extremities",
@@ -103,7 +104,7 @@ class EventFederationStore(SQLBaseStore):
"room_id": room_id,
},
retcol="event_id",
desc="get_latest_events_in_room",
desc="get_latest_event_ids_in_room",
)
def _get_latest_events_in_room(self, txn, room_id):
@@ -261,18 +262,19 @@ class EventFederationStore(SQLBaseStore):
For the given event, update the event edges table and forward and
backward extremities tables.
"""
for e_id, _ in prev_events:
# TODO (erikj): This could be done as a bulk insert
self._simple_insert_txn(
txn,
table="event_edges",
values={
self._simple_insert_many_txn(
txn,
table="event_edges",
values=[
{
"event_id": event_id,
"prev_event_id": e_id,
"room_id": room_id,
"is_state": False,
},
)
}
for e_id, _ in prev_events
],
)
# Update the extremities table if this is not an outlier.
if not outlier:
@@ -306,16 +308,17 @@ class EventFederationStore(SQLBaseStore):
# Insert all the prev_events as a backwards thing, they'll get
# deleted in a second if they're incorrect anyway.
for e_id, _ in prev_events:
# TODO (erikj): This could be done as a bulk insert
self._simple_insert_txn(
txn,
table="event_backward_extremities",
values={
self._simple_insert_many_txn(
txn,
table="event_backward_extremities",
values=[
{
"event_id": e_id,
"room_id": room_id,
},
)
}
for e_id, _ in prev_events
],
)
# Also delete from the backwards extremities table all ones that
# reference events that we have already seen
@@ -329,6 +332,10 @@ class EventFederationStore(SQLBaseStore):
)
txn.execute(query)
txn.call_after(
self.get_latest_event_ids_in_room.invalidate, room_id
)
def get_backfill_events(self, room_id, event_list, limit):
"""Get a list of Events for a given topic that occurred before (and
including) the events in event_list. Return a list of max size `limit`

View File

@@ -93,7 +93,7 @@ class EventsStore(SQLBaseStore):
current_state=None):
# Remove the any existing cache entries for the event_id
self._invalidate_get_event_cache(event.event_id)
txn.call_after(self._invalidate_get_event_cache, event.event_id)
if stream_ordering is None:
with self._stream_id_gen.get_next_txn(txn) as stream_ordering:
@@ -114,6 +114,13 @@ class EventsStore(SQLBaseStore):
)
for s in current_state:
if s.type == EventTypes.Member:
txn.call_after(
self.get_rooms_for_user.invalidate, s.state_key
)
txn.call_after(
self.get_joined_hosts_for_room.invalidate, s.room_id
)
self._simple_insert_txn(
txn,
"current_state_events",
@@ -122,31 +129,9 @@ class EventsStore(SQLBaseStore):
"room_id": s.room_id,
"type": s.type,
"state_key": s.state_key,
},
}
)
if event.is_state() and is_new_state:
if not backfilled and not context.rejected:
self._simple_insert_txn(
txn,
table="state_forward_extremities",
values={
"event_id": event.event_id,
"room_id": event.room_id,
"type": event.type,
"state_key": event.state_key,
},
)
for prev_state_id, _ in event.prev_state:
self._simple_delete_txn(
txn,
table="state_forward_extremities",
keyvalues={
"event_id": prev_state_id,
}
)
outlier = event.internal_metadata.is_outlier()
if not outlier:
@@ -281,7 +266,9 @@ class EventsStore(SQLBaseStore):
)
if context.rejected:
self._store_rejections_txn(txn, event.event_id, context.rejected)
self._store_rejections_txn(
txn, event.event_id, context.rejected
)
for hash_alg, hash_base64 in event.hashes.items():
hash_bytes = decode_base64(hash_base64)
@@ -293,19 +280,22 @@ class EventsStore(SQLBaseStore):
for alg, hash_base64 in prev_hashes.items():
hash_bytes = decode_base64(hash_base64)
self._store_prev_event_hash_txn(
txn, event.event_id, prev_event_id, alg, hash_bytes
txn, event.event_id, prev_event_id, alg,
hash_bytes
)
for auth_id, _ in event.auth_events:
self._simple_insert_txn(
txn,
table="event_auth",
values={
self._simple_insert_many_txn(
txn,
table="event_auth",
values=[
{
"event_id": event.event_id,
"room_id": event.room_id,
"auth_id": auth_id,
},
)
}
for auth_id, _ in event.auth_events
],
)
(ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
self._store_event_reference_hash_txn(
@@ -330,17 +320,19 @@ class EventsStore(SQLBaseStore):
vals,
)
for e_id, h in event.prev_state:
self._simple_insert_txn(
txn,
table="event_edges",
values={
self._simple_insert_many_txn(
txn,
table="event_edges",
values=[
{
"event_id": event.event_id,
"prev_event_id": e_id,
"room_id": event.room_id,
"is_state": True,
},
)
}
for e_id, h in event.prev_state
],
)
if is_new_state and not context.rejected:
self._simple_upsert_txn(
@@ -356,9 +348,11 @@ class EventsStore(SQLBaseStore):
}
)
return
def _store_redaction(self, txn, event):
# invalidate the cache for the redacted event
self._invalidate_get_event_cache(event.redacts)
txn.call_after(self._invalidate_get_event_cache, event.redacts)
txn.execute(
"INSERT INTO redactions (event_id, redacts) VALUES (?,?)",
(event.event_id, event.redacts)

View File

@@ -112,14 +112,15 @@ class RegistrationStore(SQLBaseStore):
@defer.inlineCallbacks
def user_delete_access_tokens_apart_from(self, user_id, token_id):
rows = yield self.get_user_by_id(user_id)
if len(rows) == 0:
raise Exception("No such user!")
yield self.runInteraction(
"user_delete_access_tokens_apart_from",
self._user_delete_access_tokens_apart_from, user_id, token_id
)
yield self._execute(
"delete_access_tokens_apart_from", None,
def _user_delete_access_tokens_apart_from(self, txn, user_id, token_id):
txn.execute(
"DELETE FROM access_tokens WHERE user_id = ? AND id != ?",
rows[0]['id'], token_id
(user_id, token_id)
)
@defer.inlineCallbacks
@@ -201,15 +202,15 @@ class RegistrationStore(SQLBaseStore):
defer.returnValue(ret)
@defer.inlineCallbacks
def get_user_by_threepid(self, medium, address):
def get_user_id_by_threepid(self, medium, address):
ret = yield self._simple_select_one(
"user_threepids",
{
"medium": medium,
"address": address
},
['user'], True, 'get_user_by_threepid'
['user_id'], True, 'get_user_id_by_threepid'
)
if ret:
defer.returnValue(ret['user'])
defer.returnValue(ret['user_id'])
defer.returnValue(None)

View File

@@ -64,8 +64,8 @@ class RoomMemberStore(SQLBaseStore):
}
)
self.get_rooms_for_user.invalidate(target_user_id)
self.get_joined_hosts_for_room.invalidate(event.room_id)
txn.call_after(self.get_rooms_for_user.invalidate, target_user_id)
txn.call_after(self.get_joined_hosts_for_room.invalidate, event.room_id)
def get_room_member(self, user_id, room_id):
"""Retrieve the current state of a room member.

View File

@@ -0,0 +1,18 @@
/* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP INDEX IF EXISTS sent_transaction_dest;
DROP INDEX IF EXISTS sent_transaction_sent;
DROP INDEX IF EXISTS user_ips_user;

View File

@@ -104,18 +104,20 @@ class StateStore(SQLBaseStore):
},
)
for state in state_events.values():
self._simple_insert_txn(
txn,
table="state_groups_state",
values={
self._simple_insert_many_txn(
txn,
table="state_groups_state",
values=[
{
"state_group": state_group,
"room_id": state.room_id,
"type": state.type,
"state_key": state.state_key,
"event_id": state.event_id,
},
)
}
for state in state_events.values()
],
)
self._simple_insert_txn(
txn,

View File

@@ -17,6 +17,7 @@ from ._base import SQLBaseStore, cached
from collections import namedtuple
from syutil.jsonutil import encode_canonical_json
import logging
logger = logging.getLogger(__name__)
@@ -82,7 +83,7 @@ class TransactionStore(SQLBaseStore):
"transaction_id": transaction_id,
"origin": origin,
"response_code": code,
"response_json": response_dict,
"response_json": buffer(encode_canonical_json(response_dict)),
},
or_ignore=True,
desc="set_received_txn_response",
@@ -161,7 +162,8 @@ class TransactionStore(SQLBaseStore):
return self.runInteraction(
"delivered_txn",
self._delivered_txn,
transaction_id, destination, code, response_dict
transaction_id, destination, code,
buffer(encode_canonical_json(response_dict)),
)
def _delivered_txn(self, txn, transaction_id, destination,
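Moving encode_canonical_json down into the storage layer pairs with wrapping the result in buffer(...), so psycopg2 binds the JSON as a bytea value rather than a text string (buffer is the Python 2 type). A sketch of the pattern, with a hypothetical cursor and table name for illustration:

    from syutil.jsonutil import encode_canonical_json

    response_dict = {"status": "ok"}

    # buffer(...) makes psycopg2 treat the bytes as binary data for a
    # bytea column instead of re-encoding them as text.
    response_json = buffer(encode_canonical_json(response_dict))

    # Hypothetical cursor; in the real code the "?" placeholder is
    # rewritten to psycopg2's "%s" by convert_param_style.
    cursor.execute(
        "INSERT INTO received_transactions (response_json) VALUES (?)",
        (response_json,),
    )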

View File

@@ -67,7 +67,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
self.mock_txn.execute.assert_called_with(
"INSERT INTO tablename (columname) VALUES(?)",
["Value"]
("Value",)
)
@defer.inlineCallbacks
@@ -82,7 +82,7 @@ class SQLBaseStoreTestCase(unittest.TestCase):
self.mock_txn.execute.assert_called_with(
"INSERT INTO tablename (colA, colB, colC) VALUES(?, ?, ?)",
[1, 2, 3]
(1, 2, 3,)
)
@defer.inlineCallbacks